\rules except wikilink
<$button tooltip="View the top level of the Author Index">
{{$:/core/images/up-arrow}} Author Index
<$action-navigate $to="Author Index"/>
</$button>
@@.cpredtext
!!! Press to Save {{$:/core/ui/Buttons/save-wiki}}
@@
!!! Problem Tiddlers
Tiddlers whose titles contain subscript/superscript markup: <$count filter="[regexp[,,]] [regexp[\^\^]]" />, missing tiddlers: <$count filter="[all[missing]sort[title]]" /> <$button><$action-navigate $to="$:/causal/ProblemTiddlers"/>View</$button>
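The two counts above use the filters directly; for reference, a minimal listing version of the same checks (the filters are copied from the counts above, the layout is only a sketch — the actual [[$:/causal/ProblemTiddlers]] report may be built differently):

```
<$list filter="[regexp[,,]] [regexp[\^\^]] [all[missing]] +[sort[title]]">
<$link/><br/>
</$list>
```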
!!! Sidebar Tabs
|Add |Remove |Sidebar tab |h
| <$fieldmangler tiddler="$:/core/ui/SideBar/More"><$button><$action-sendmessage $message="tm-add-tag" $param="$:/tags/SideBar" />Add</$button></$fieldmangler> | <$fieldmangler tiddler="$:/core/ui/SideBar/More"><$button><$action-sendmessage $message="tm-remove-tag" $param="$:/tags/SideBar" />Remove</$button></$fieldmangler> |[[$:/core/ui/SideBar/More]] |
| <$fieldmangler tiddler="$:/core/ui/SideBar/Tools"><$button><$action-sendmessage $message="tm-add-tag" $param="$:/tags/SideBar" />Add</$button></$fieldmangler> | <$fieldmangler tiddler="$:/core/ui/SideBar/Tools"><$button><$action-sendmessage $message="tm-remove-tag" $param="$:/tags/SideBar" />Remove</$button></$fieldmangler>|[[$:/core/ui/SideBar/Tools]] |
| <$fieldmangler tiddler="$:/core/ui/SideBar/Recent"><$button><$action-sendmessage $message="tm-add-tag" $param="$:/tags/SideBar" />Add</$button></$fieldmangler> | <$fieldmangler tiddler="$:/core/ui/SideBar/Recent"><$button><$action-sendmessage $message="tm-remove-tag" $param="$:/tags/SideBar" />Remove</$button></$fieldmangler>|[[$:/core/ui/SideBar/Recent]] |
| <$fieldmangler tiddler="$:/core/ui/SideBar/History"><$button><$action-sendmessage $message="tm-add-tag" $param="$:/tags/SideBar" />Add</$button></$fieldmangler> | <$fieldmangler tiddler="$:/core/ui/SideBar/History"><$button><$action-sendmessage $message="tm-remove-tag" $param="$:/tags/SideBar" />Remove</$button></$fieldmangler>|[[$:/core/ui/SideBar/History]] |
| <$fieldmangler tiddler="$:/plugins/wimmoermans/history/HistoryTab"><$button><$action-sendmessage $message="tm-add-tag" $param="$:/tags/SideBar" />Add</$button></$fieldmangler> | <$fieldmangler tiddler="$:/plugins/wimmoermans/history/HistoryTab"><$button><$action-sendmessage $message="tm-remove-tag" $param="$:/tags/SideBar" />Remove</$button></$fieldmangler>|[[$:/plugins/wimmoermans/history/HistoryTab]] |
| <$fieldmangler tiddler="$:/causal/Causal Productions History View"><$button><$action-sendmessage $message="tm-add-tag" $param="$:/tags/SideBar" />Add</$button></$fieldmangler> | <$fieldmangler tiddler="$:/causal/Causal Productions History View"><$button><$action-sendmessage $message="tm-remove-tag" $param="$:/tags/SideBar" />Remove</$button></$fieldmangler>|[[$:/causal/Causal Productions History View]] |
!!! Sidebar Buttons
|Add |Remove |Visibility config tiddler |h
| <$button><$action-setfield $tiddler="$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/home" text="show"/>Add</$button> | <$button><$action-setfield $tiddler="$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/home" text="hide"/>Remove</$button> |[[$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/home]] |
| <$button><$action-setfield $tiddler="$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/new-tiddler" text="show"/>Add</$button> | <$button><$action-setfield $tiddler="$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/new-tiddler" text="hide"/>Remove</$button> |[[$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/new-tiddler]] |
| <$button><$action-setfield $tiddler="$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/control-panel" text="show"/>Add</$button> | <$button><$action-setfield $tiddler="$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/control-panel" text="hide"/>Remove</$button> |[[$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/control-panel]] |
| <$button><$action-setfield $tiddler="$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/save-wiki" text="show"/>Add</$button> | <$button><$action-setfield $tiddler="$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/save-wiki" text="hide"/>Remove</$button> |[[$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/save-wiki]] |
!!! Toolbar Buttons
|Add |Remove |Visibility config tiddler |h
| <$button><$action-setfield $tiddler="$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/more-tiddler-actions" text="show"/>Add</$button> | <$button><$action-setfield $tiddler="$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/more-tiddler-actions" text="hide"/>Remove</$button> |[[$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/more-tiddler-actions]] |
| <$button><$action-setfield $tiddler="$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/edit" text="show"/>Add</$button> | <$button><$action-setfield $tiddler="$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/edit" text="hide"/>Remove</$button> |[[$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/edit]] |
!!! Causal PDF/MEDIA Display Configuration
|Hide |Show |Config tiddler |h
| <$button><$action-setfield $tiddler="$:/causal/config/hidePDFandMEDIA" text="hide"/>Hide</$button> | <$button><$action-setfield $tiddler="$:/causal/config/hidePDFandMEDIA" text="show"/>Show</$button> |[[$:/causal/config/hidePDFandMEDIA]] |
<hr>
!!! Style Sheets
<<list-links "[tag[$:/tags/Stylesheet]]">>
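Anything tagged `$:/tags/Stylesheet` is listed here and applied to the wiki; this is also where a class such as `.cpredtext` (used near the top of this tiddler) would be defined. A sketch of such a stylesheet tiddler in `.tid` form — the title and the rule body are illustrative and may differ from the real definition:

```
title: $:/causal/config/ExampleStylesheet
tags: $:/tags/Stylesheet
type: text/css

.cpredtext { color: red; }
```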
{{{[history[]]}}}
<$button>Clear History<$action-setfield $tiddler="$:/HistoryList" text=""/></$button>
<$button tooltip="View the session which holds this paper">
{{$:/core/images/up-arrow}} This Session
<$action-navigate $to={{!!current_session}}/>
</$button>
bzndAWE5D+MGuln92T0eEnwgg/24mWUHYImimn7nQhiuY7/2GJeRhLiquL1LNt2hnClr4apFXY4RitOftlVCNaDFAQTgjP8tXysi4Kfz9MNb5Qldq/fGXJR65pETnUwRDZc8IQEZbrzXEtnyEgZ1W4ZLUe4EhhLlAbo9zGsq6VJ3JeISU8oMcxtdC8pFbTk+OYPe2yGaLTn8JX6H7uiRLrPC7wcsUI53Il0IhZ4tXXYfuh8JQIIn0BILKGopoc4QGVuom1nBd9TxMsT2bo/5rB/xdJlNZIkB6CXbNAPWk1SUdGXgwNC/FuaEL7kBsIk7hFIwc+IY+AXNwDbvE3rUh9F1FozzFo9XOymiYwivpkwbVWG2OrGB9EcEN71qmZWkZX9ijpHKNiKO2y0HlRf7eDgoZTHjFadOtzysI6V1FRP8nAkBiutOP+vxZctsCcuKXQ18eNH6EefzzyYfkckyvBGSfMWH5/0UTyflCgu1sEFLxlCabdGcRaWeujXCbIlD5zIW5vimPCxBVytoiO4peXJZ2Vhf492cgmAwvObk3yJlFBRYVgN14ANwSFFk3I9L8Hjjy0pW8jDSCUxeq82cjxGyKFC0F/wpeTO/o5XGDr/BUlyoPUn7oLTUS3ZRB3AEEYB2MygYDUqiWl28jmoERze+ftFCQAjcrxGVbpUvya60IEYJI4rDscJLITdZ9W8A3HW5ZBsYGzM7M73Kq7zKq/zQys0EjOTf0fubQyVZy/l8qtyFcgGoSTsXWbh7I0wqec8y2wL9ZpsVTance09TTP9ZAxsbleX3yYKcj1WzlS5kNqK28rqLfFz74QBwPu738sVp/MwIvzYWMEbVBKVZqCF3FAqvlcwx5MD+8M9f/lm0eFY++slnNF/p96OiOHbDo5Wsn2c5P3lS7FgHnAftG+0qbxlgdhjegdiG42zkMp+xfzLpm9u5tEkSp8hYNsYbLEYvGi+RzP3fpOJwHkIOTRkz2VJc+EO7oYAOxQTCPXgKeUSCWB5tuzgcee9HhOdh7IiCsutsTIKV55nxCAcUdQhiv6Azh91wtpgf9J5qWzmdgxLDgSsGL4vw6RQMgl9mpq+A5bWkA1omOFXug7svGIMsAoCHL+Xkyrc+ef8FYL//8u/faP+7yu9/+w+fvP+C0XkokpO8lQJOrpbwhU4XGGnq5tlkL+JpIBxRTya9CFYnyrzloSQ6Bi1jhY1HhGcEYAFYg7LGJIdmaX2aLcC2oBL6vuVwWTNC29eyvB+XWQuPzP3HFW7DL4yr80C+uSbeEl14fKlMalEs8U4esGpHre3mXRQ0yRWUx1SMQzTbTMln86aXgSYUobZ0e+2MbV76hTwmEhlTlJV3VzMfmQ9tCPeUL3OG6r9nDuVoNR4+efZoILPUGhSgVJfkMqhWYMoDM86/mTzCxBg59RGBM1zCch3HkYvao5NUjr7WX+X6AuQfHu8UI0W/KK+pp69zUL41ds1ICeEScpAf6EyWp6/VW4d2ra5O2gWK2eAbBVIVo5pUSCUzjdmLt3oMQAH+k1h7R56/lyyttWT9lQnm5lohX3qEraNvzWJmwIUVCQDqpLscZI+RRpkJY1PoLelKUJF31yVBrRdBd7ln2drmqs8JoCdLrHpwM3wuCL5xM0hWGVc/k5EOsLiIl/+jFRtikcaO8gzG7LgwgQ13CUwcZrfPtrkqGziryJoZsFwyo2LTAjC4VEuzcqOwlFGFD6MfEDuUEoPp7ArlQi3krGPVflt1ca6MGutkyAJuuFT1YeXntsoGa0BsqNG+wLS6HpdHUXYLlH2oZsgO66+SXR37O5C6KeCyhTjtkspBrM5Xd3Yy6MOBN4b4Kg8PTyHTKqtAz02V2RUKDCV9xNhwf1hBWmyhX7eRPugICaKa7lA7lJC2kbckNUxJhvTkZSKaNaYb0n7hk7kdgOhFTWxmRPV6SLZaZ8+uY6xq8nEyJC5TGRGMU90GrUlYlQfaJMMCaZNXZ9STizVitWyfEGh7ebSHi8uLGaAgEUa55xvFmiTKBQ+r5MrRVoUpgYzN1/a19Sqv8iqv8sMqt1Sq0iS1cw/tXD9oxVIlTxxxQi6poTITy6IStvkMK8OGfj7VilH/2PWhslDqEWx2VloAOo/AIIyp5tv2pBSfxqBsp6iG6JXXNlxUe8N5L6MmKjyEQ9SthKk8J9G6x2PLvgHiYbqHohv+8L0uBgHwo598hgxtW7Z6NNBuqUGGwSACtMfZTslxDJrR/Mf0LGokfTBfFC9jfG7Pls3Mb4erO22Aztcz+UOZhCKvYsqKBsDqHktWGubFSCbPhO2YplnAsay9+rMiEAf10v7VCfS6uyNRBHbSPDxOnrfN8K6keoTd3t3C4zxP3Dsd/rd/+tNxO6A4RMF8qy47Ul/LYjLT/QZmwrsrlLdixojatcVkMPcHpAkA5vKPosG0HuU+9IwH725NWgnaURV9A1FErd+PPv0cEX/4pw/G1jO48pP3v8RCuBOeIIuW81NZ/tQrekyJXIuefmZcKi9Gr+FHROB2HLwPObQIJF6eUQ0wHLbqEvphBnPsmaGsiGdmsZYtO8+7HcvCmEs3/TCRWp4P/1kdPpMfUgyxk+34JDy9m6rTFrchCAAryHK6E/TINu+9OeJqx5PYtuFSEPpjRd6aDnEwWSrBfAAelIoZCRv3+3EcEXGe0WeeTjerxHZW6fefGPqX8BhrZyzQfn7de45K9DReylwEATvWAo/rpjQdubGi58Weqsq9PPfEUkYyzIshhwCQORMplKW9L67RGy0K44oVkgvMDZpBknYsHpjAM5kw+in12hWb6TramCwxHplhQiGf+YkbqjdolgwEIAoO2WbXAKuAt4i8bc74/GWZll6RIpeI3AGTJTiyrSGxS2GaQgCejMD6dtwLtBAJWl1lq/oj/lULGqtJ8tZwevtlGE7jq7mvWdsysytLScwQ0GItmjd+kD9jgwCnFSWKtWIvC1Hpj2ldTNVamh9tLJlIKt0iRW0Zuha6pqxRw2JCpS2JFiOD4hgWRomq8VwaZqax1M5LrWVOzRiCVh131YGUVsMu1DfKxWIketdHY6YBv9FswdFhSOGShmymDqA0zsPaSsni3sticJd4uJq9cpe2N6ge2Y2VSW+YwsEz5Sn2ddw1TlI/LT2+R7U5tFEaU9MqpqYYo0nmoWFvPPrREr+uzxnDtBQRz+0c9qmbVdoiMu9RVW0IUEyx+zDw1mva1Oys72MK2lbbqbGLrLfIumGOBf23lSCOLqEHGS/t9OE6G0DZyf3a2G2vNZ6jq3rUaFfZZsGD+Inal5hRAca8lk0GayMjF/4D94TEgLrX0ipnvmVkbbN0JRfguIXYE4HZA3zG54yN4F5PtxouNULh79qsNWaikMtcLwUjVnrNxlWTPLE9bViIr/Iqr/IqP6xy+4/uwKt8cPl+WOTHf/MewJvm2Ku8yn9o+fjTzwF887sPzimZwZUf//Tz/+/79Cqv8iqv8iqv8iqv8iqv8iqv8iqv8ir/Hcpt2wAeIVPc0Hs4jDzPN3F7a9+PjQhbljdvXr7SBpVr1zE3g8I9Vu2
1AZ1szLjhjH5dFW4hErUjOLdp1bZ3P6+D6SiH3iTXdv+oCgDDpGDQec8AdHdh9eG6Lb/1pGIFYm5g7ceOOnRrG6f++cP3uq/mRz/5zPbELGZ1+ka3wYDBIN3oNhBtqDpPCicReKOO6WZD1O61tvYC53nv+1SWdghr37IjKyoaQNvUTzZtGWZRc5GDWtoFNqvQFe1HD47Gk1HVXjXmPEALIHcy+/kwXCjJ4CketcqKojZzdVVjxYZcSkSsZeeZ94wf0BYrEO/evSPTPRBpi1mehNo/xh7xsG2nbnvYbxfeRRvN3uDP089Kr12DccWVrbUyqDOHk7EGy9b9/NO2SHWG/+ApM3z86ec5xb//7YfdEf/NV/8I4Mc/+zmGaAnwqFXuPCumZYaG5G8RmVxCMoJRfEp2bobAcquL2uc6LxFykUK8FGyEOhkQBvNw6Pxg3TYZwIrsq1ZAxA1QYMGY5IdDuaFY45LAyVcRD9M8ooowwjLyz2tgs81TlLtM1NrhkLBdWTGoayUUZ2gk7xzAqXOW4ClLh3HFieWS50dQQzBWJhCrJJWNi0HAeJrr0J+XZ8qFh8MBdKSalraZhS5nz/iOjGOVDlEUB1lki9x4FrZwSazJdloiRYQixCp2tLppPL02BN51MFYCmeqVrNj3x0Tcbu+kog5EXjdu4ZFXAahfWjnNJB1QzwhWq3sErBN3VCwhdUKJQgbgLJiPFWkVSQPx1QzM637PIdaX3ddiXhcXZmSKXiJnNjVXzWrFv+SdRP2UTCWz6Obiqls23sv4w7myNundg6h3QxO0rfOdvbnukh4V3clxaf3kVxl607KJYXSReZ91u4cz/QN4J0wtALQqnME6o1dPNXWMB6ekjC30TaKuvszmArpdN8Lq5ilbb+ympgLuqKql0Ze4v0guLZWpERUcuo3g2URdW443/7wanFb/aCXsQ7iEXBUTSZ2k5DeftIVXakOS8mJnKND90aBFUSKoOyRrFB/MTNBFd/FmOHRE41l5aukAfUyhJcIYyD7s7mdZ+vpCByykW6Z01nsmpV3qwxRC+dBuhXx6uOsYQSaByStDMp0Le3FVj89HbfOh2dwaiZv2nmQM7HdEl75VKli61CyJobNIo2eSjTpnzAAAIABJREFUQskB4gOdKinR/KDnY45vG5JU+nz+8tc2oyXbIf0AkZVVXYyIkQDCpWqL8s0VqYErOByQYQKFIurMNoew4m1pEm+cFuqHHpfYRridIuso3c0u5PntwEzp8Ris3UNTnQ/csZFq0PFVXuVVXuUHVm6ptPKgIjAs5jds5lJoNjUUIN0Byzx6fWthyLxNfUFXsnAMd+y2mZXZDJ1GwzhyKYV56d80XXoICR/RS7qoevU3K+0Mk0/keWtzGW3tEg4tSM19sZ+yt+kY5PBdqnf3THrU1uAeyx//3+8VEfnp5xevphxIoRp9rCuiQeCp53SUAGut8zyJfZSvjDXMNVY8aRN52R1PK2TzV5T7Ypfz2MI0LsezeTQvKS1TcgnZtrBVVghk/1hdwQNgc1+nnxvTCJBlNlwaks/WhrPT5LWAn3XlBnieKU0PwCxs2W3dyvaFGNA9juMAwk8P4HbcQEs3dIMeAIS7rYXIDIz1OpJe9tzKlWOOi9Wo1SgLCW8Ud50LK6dbDoB7LPNpTwUnNjP3511VIj4fcBldlic614o64aKeRwQi4sc//fn3ACW//u2vAXzy/ovT3YROQ+eJom5ThPowLhItqzdK/FGGCR5biyCLhhbBg7MXmhv6bpON4umSHIflRSjtMGezNd0xQEfhBBN0AF+iDztYvX61PlI3s1JsO07i0E0WKm0l+b/EgXvTskR61cV1ICDYjZdlr4IJigt0mnqZKR88wJtn+4OEh5iEAZ73pyZ2n5N1nmdm6OO12ruUmKuhpfU4v3oV8vp7TkC9G/1MwHXntXugNmbuODRBWYPeLDF18R2+q2y+VnVWB0G7r1qN5fXtR4dtyP5M4uta8wOKWGsdvP2TV32mtFxTPjQm1by0jUVYZH2kBdU9gOS/krSEUgpsfvJOCd+mqnKEJgMFOjlF98kQDneXbMFAI0bn81SwmFP1hoSlnEMzME+Dhqd6zMbyj6n8G/e0qm0zlZi3hepgGRxS/SUrWjTn/sJuLRSWqg5rciJbN5NKcylfIC/sam6miNxT/lGAtaxoFLnaVsNXZOmiQMdrJdmg1fkIvaSc5W2/2SVzsSNMl2jv7wYiQol5mb601StAoa+J15AuIKWk3RihTl5r/bFDst0eeVXQfNtu1ruRrXgiWsh8F3aQA2iwibxxrNtsjBrMDG7AUYAZF1ozeA+/0k9IsZjqbkAjhQEbyswDed9whPu5jsusDyLY9tl8bph5FyvlQgWqNZvUH+agdU/ZP7tMZ/ApsXdq5BZ+j5173CQowKuRL6sz9UXOUVH0i3bZ/Z0Duwi5sRpl9+Vvj488Lc92N2qZNE3i+gRQlmNxd8y0Wq0jDeMO+pqTy2xeVmXZJ8/Gygesycg32mTb3yuZbFph/VBbKuDSl45farRQ61wO68nMbN3+jlKGXE+8UsDUjUfYZoX6tO7mOo4j4pCTUkS5LoGi50yzUMIUY4X091ndW4laXuVVXuVV/oLLzd0zlEkyjuL2mh1mpubOC6l7643xClLbrMC2LCA0KYZumLiVMvWjxTJ14FQ1Q/byao6glVad7B7TS9lgGjPrYBTpvQoY6STe7VRgttGoJgGK9KOj0NXyZSAIj01L3Q6Csqne6zYawRnNwMrsex7N/uinn9e+OHWUlOUqt3H0wSJga0MY+iepb2sdgC51IemWLUTnhZ6+cFZua9laVjADDGYjMZkMxDR4R9LGgdk1KJAGU90BSjxireITs2VkxbxJKVMBrgIEaFaT5oNH2zQHOkqx/Rc9FpaLxbZkQ/NdgwBQJlOjO6fbadDObaKbeYvCWlBmKFhmosxQYhk0+VbZ4bVSeStEfrZbfPVbp0gbqXri4TnNK5l7M2xCsJy8093gLvc6k/o5U8cu3WEA97MMSnC92ToWhkzwmf4K9snPvoiIr7/6MFAyc0p+/LNfNN4hT7uSDGYKL48oyNUZCgrJNZSfE1HxW1oOO1dcSBEB93OnarGcODUAU44qBMLWWkm0vGKGYOHFn9TDCa6qNdLf0Ci4TQG3TTQt9lwugQo2KbQras2LaAbEqkxwJUzpIWT+xBodtt9tC0yYTNp3ZoqxxxIsDwWGvAkCQN7cEi0st+W0+YST9LUESdUmzeUZ2vKlcB7yRmZZJt8i8rrLCIcbDtZmezcqBn/rW1x/I01y5RkAuI/VAsmB8KXu6i17OvSdKzc/UyhFsvfpTkctfBE9TZOg0+CCjFG/P7Tn4rap0cKKgUX4gkkiEPDOVnclND/saxey9lAvuqW8R6zUGu2Gsh4g4f+AexQ7Buyo6GmZCkg5bhF5xXBy2gCXwtx5ZVPt2Wi1KyCRSEnxuvhgznpLqKEYRl/NzN3ZJ71NVS7NfnoE04BaVOC6wSM6XWEKbY+FxS7FbCTZyOnWD355MjFSppDFdFE9OU9VR3
o7TF+Ky0oAJpv8Svtm7zfiv3Oc+FP8K0oszemtV0ciV30/k2UOChLC5kNUFEC29kfFY69SlVwTqGBMK1eye3nE/e+Z3LLoe8TduDBGqcXakcKw/Nu+vFalhLY27V+zJHdIPyWhqX++/KUPfizVT0ynDdda3qfSHDzpQXTZP+1YlKeP1Odu/grqzRR/2U0bhHQZJ87DJnfkmKtlLCLaT3pWYoySbg7vkWaE2+7dbDXxfysY0j9o41eNCGb7/SF61WuvSH/y7aG3hMwKo8qHpXwzWZAJAuSnPZX4uz7UEvWNZOxlDliGPU3fzMvQcRKyXp35u13+Vd3uXPsIyEYj7nOWmAkRs0xihEnKjACl3YOA53ObqEt3MshnMJliDoOp2Tmuoy6XT3Tk9oUh3dfp4jzgPXI/kxGlhzZvtLc5EQPyFuMzqGzOq0x+8ndRzM11XWsrxvgchywgvgY4RRuNDLuciwdx/lrLKAeDyVF+MEuWPmrMKN7DYcGQgX5hjq4gPxDRtWV5PnN3LSuptrRKi9Q+K+dOEOenZlJYh2eZL52I8F03oLjZmhElQBtdHZ37tSPS0AIeklV8gVKTXnTwI43OF1UKp2rBq1bKhCIyDXDtffrX+qmqpLhah0GCYt9GFK+SFZkO1a7bsm2VclgjCr+6FUih1zcaJo9Jtir7DLePpOh1igjPRbkLf5S/9uZpZH2VvzVLvoxnxqv19yy9REE3QLFFe+JCkqi5aUz/hdz88USl3uKt1gdrPF9YfAi3xbFA/44RdkJL/+1S/WcYAmeBsWxVl00Phf0vhOIYTGqZ9yWBj/kSwGUwS+1/Fp3OvstiL1CsuXJ+t3p47T9VzacCEyvUpLukhWqxdNOZg/abZJNS2djKhMWrzEOqVgFS6TFRRLyv8SCLFFHyJcgxzbNBf671gGaTWATCvDShhU46iwi5jk93K1SP9OGyQGD94jzSaVMMJWJZYxgJZ+IOmUlWwi0FkDl5jcxF04lM3qJbZ5A4bcnYtI7yCuO2qgcVm0fiu0TsLCYRi5+Jw5/YW96i8KBmk6ShK97jmZmDsqhK5l2EqQVCKC6erYc5zLKFcKY9MOFHoxWhgZToCrJF7ZGVYimqik6i/jt2U01RlTT+ZwxJZqgypC9Tt+SjghMAP1urhdPQlYoI+TwvT4bWesRgTesaZX3QA+Y032CncA5E7xJBdpRDRC1gv3qT6su44ZajGh8MQUx5xN/D6gTqhTO97j6E2yOJZaWw9bOsjPwo23qe+eCCAhE/afltKUnSX5JL3DYsomNt7D4ujQcRwWyxpaxts5WC0P9eFu7gaegdHLVF1aKalp9pT5N6PrzzdMBGypopQrFYIopjjZtGpQIJ45sezuliGKtjQpFIx/pPctJN6tw3hFoXn9gfPbiC38mF3jbVsr41YXR/eqJh09/KVHQK3EX0vZlDKflz/Hd98qflnuAYcZVvRe1Re97r9kTJT4fhHL30HHxfM5rH0+D2klH1avdpcJR6lHRwUljIaqZAeA7NcxjiMvKMjzDuI2ba+av4CY7/Iu7/Iu36oygJp4AxCBREL1hkedtmqPj/R7fQ9C4tsV8xPrJLyrBrIuAHWcIl2r5UxRPsKrYL0CkrDYcd5WeBMzxE6FMOh5izTsS3KR3/3Rz4B2qAw82jkAPQL0Dxzi6M0sFUrGTZH1ZDmeFURrnJXPsCrGOZ6AyuQ9r2CLFdfZZgFJo9HpE4YxxuM44mChuMM06juGmWMAh5mNUadZeadodHNuUoYCEfCnc2yNenS4mR0jtRLllR/PJFS/HD8sQWN8LL+7orWtsFEBltHbEm6SroDEih4WiFQxmmmTlXRoJpSYJxKu0UqQl5ypgVusWXD32EhVgysyjjEUgCmIkiTLFmf5NqLsbN0EKzuhQxE1Lor3Zlyj4T6OI1WS7bnzIloglgFN2ZcVyjFM09rVOQBxrmU8kleDj5Ekq/jLOM8PYJ7zPE8rPppRIjluGanlI2lvbqTDADChAwAeShp6ZHEbJqNNeUkrK8K51rn8mVpbEH8T5zAuP/x3nz9H8re//qXmCQjtGdTgUyDe7uID6ad3xWKWkgXGbqZkWrUbD2lmsTuhUh+Vd+Se99uX4sQXY0FuMa70wT3/NkOwDLZSeCkbxmRYn/koytJvqoVHK/Gg+a2YMnpuAM+TzLFmXgzTc41u7dm16eYYGAeG516A9SvXQZvZUT5ueu9glY6h3FEl+cgKpgwMOTen3ZY198X6sH/FO6v7OEqLUdmRCuiYnRnVIzF/YliDLkxyrUGoAb0c0kY7LO8nzdSZkCPx20B5ozrBgL3tF0BRIrVaujszRbkr+XmRYcmRllezymJQxmtf9KKTlccRm7zmJhCRbUoWDwcsBcleofgdS3Gj3TwXwjHWnC69HjXpZiQcsPo6yPELjlhvE+3lUrQ4fqLD7vLTzKxCzOJm+9V7bZ0xeURMUQy2PqQut/BXf7dxUbYRsKPVdl0xHNmTXF6vpoN3Uhy9Qh6woEMlCwvxtI6WSjZ7nU8mBHI9YkV8N0+ILBMqX0tSrfmrbLPOmBKSmMEOsyP2dw8b0dOzHlgsGtAfVVtWToZAToh8Y0XXUtRYrHg49LxWh5ziOU200rzpUsyNxWBsxE6KhKVxat4m5NbSJP3tAYGzNeshffneIrO3YyySifdPa0OjRcXQbomLfF2Kxtub/XO3XUzh7NGG+ayOK4YKjv6gco+mXNq47g1JoyC93ERaJ/SmlXVArrKi7qQQw6eM7m25p+IFFhiWmjs1vH6t/a0PvGff6OWXyoPT+4CrHdAmlwivdZQw5DPZ+uQVlOJTrerqwTvg83ye5+lzmjOpP44xRtq+PAv+Xd7lXd7lz6kM5OUVY4MQseOCXqf2WxAHDUPkIsvwVZrOJKvCmG3wmHBimg4A6pPyqrxvkbVWRkyMdSR32rlZXgueiHROAH/4h3//2fF/969+mg62cHM2qy6N6zQz4bS4Zk22te+IMFVCTu7KtYq2xc1ocCiZJiye9QU6QYdzlrutI6KO08cs9tXWMVUNvI8YeBw5E/44b9ZmkGtm3R8zRkgZbtUcuFU3NxQQztRscegal8nYClbNbaiMU/rx28wCGilKJO92edDp4xmybm0VA2XsEmtqV70E0qzuj2iQlz2uNTh57bVPL7ZmEl/D45AxHuB46RcJIeC7EHJGtNK1+tE7jUjeQRUqOxy5y0gyjpw8YMgFLi6Aw8CztVZ8iaZadjA7xlexbJYvCstVS7n7CMD0Oc8zyN+Ybo39Ij4pypfF2WRD2drgUCyejTHrXnfR4gqAWTQckc8kIF5+uOegAfjXX5CR/PpXv+iUxLUabf0F/rxQolRpsUEAxKz1mDvHJ1YO+5v3hUGae6kcOofhYnSzwl3GW460VdOu2voFjSAyiyrMu5GKNSHR40fF6Ww3GD9bALegp0U/9a1TbyPDvG6WO2zdnufJK7y6jnGM43hUoLwENGxZ/ULRUkySMKJ/arsnI4WBx+syc5bPGhN6sZU/aeSX6ldD6qZcKK2k1QlzOGIHRtiba2+ZKWF
O7nrwo/zk8r+MlVJQSONGZq/+QWvw9bkiB38IYSwOLAt9JA3pUDlq4SlAISlT78Gnh0wCylVAS/dVFbjGuWq6js3ku+miIpLL0Wna/EMmSSuSRw+RSMSXyoHSX6WUkLZNpChR66Bqsb4tNn5jldiE2EROlbDqbNacNmVgqrdcUzoO13MParZ3hPwvWaM9rVIiiRaGtmMNXsysJo26s2KK0hZYlzHMRiFF1tGu2aw+irsVY9x1QVCpyYof61Oy0Ng3xzqOZNhLlQFQKHN5ijTmvc/123SfHPQOusxyEt1qW1ZblbF0i4jL+PMOIWEwUxHmk2ljrxLq2++CYlrchglHUVK0PHNDo6W1+6fWlwVu00BeLD2FJS9kWv/UetbgrESoCLGfSy19fDmYLyleZuSWNJs2vSqfsNX/GaVERT/iSuLuTUucpbkP1+KAGY9qScfvO9vKA8pISwK9U6JihuBR38Ze/TlykRF85tnTaQTsM4L3Lu/yLu/ybS32b/77fwtgwgELeHYcj/N8jnHMOTESE8PzeO9YZ2dm58daQuUATvfjcQBwILd5Tp6dLYHTY4yPHz86cDDjEF5h5HXGiQ4ixRb1wPD8+PTYCQ4cx+GOeZ7jOHww9ebu3utWzvP8wz/8h88O/i//6mcY5j7Nxjnn8dUjvcScfiaGDUTt5mMMOOZ0MxxHOnuf7iNjtLxbI+NlPz4chCb9b77lHulCz53pgX687hCtVE9eKhL7Ww4zszlnpGurkrnEGG4yV2vH4RzCFirkS8ROE5VqxOlnMOWw45ynTx/HMc/TjlGL4wIkexxZAozs56x25pxHLPkBjFcSAcj1aARVN1xxo6MtpFgxnxfEvt4dd3NjBD+ZIYoOGAZjgxw564RZraM4z5mwILaru595cQpg+OrD4/x4xlgC+Oc0MlPzBCcEkHOe7sdxAJ55N4LvMUZkKx25hrGwuMN9OgnYo4TDRxPPLY8+DCkjQEoKz1iQGCzg+uVUrlqV6fBhDJmzD3HS5ZnJYQYOmSAYZnYcvZU7SG251CPWdaK7KCUOhBjHANub5zQzx4y3YhPWPD0U38xiquScZyytOsYIID7Gcc6JeZgdGHPiCUOuEOO5kyIEdZd3DRqU+mEYPuvESecibvAaU1DFssKoJmpZD6fgn/M7dQHTeRVVpOOQuTL846/+Fp8r3/vJzyGoluux8tdRC9drWOTLhAezhFMJdg8zYJgPmDtOx4RbCFbebAYYfIoODhs+k3xxKS0PAbNOi/DGFOqYTT+7cytMLtNNQxdTIjymkX2ol9w9JlGKjmUc4lvEYGpvj3EAqD5U4oth/NCortI4jnQrRmMupxy29VjSqgxL85dcKBE5AXs+n/LX1O6mSQbgudqNpOmlLznTBq4lHLkMsh4W0+fsIUMnnoB7fHiwfaDDpjTmxZ76qoi85kWMuh+DnvM0QxrPGbI20BlbII1YrtY3G26aeRRGdZ8s/Gx2JR16SF2yINKXPicXnjgTK7WpwmC9QmTYcIvJDS+vQilguGjWt711lB5Hak9KtQN5ekmYcYONA3N6XWlAH2d2ZsbEIg9L/XU49WuzGwF+PBnCc2E6NTxLRGnLxI8lu9bkWmWyPLsQg8tVgHm8r1mgGiBZeeKZQlpcdwCYPgP2nKfP8/zOd77j7uf5xMDAIeZlKTFDbAYbwx1zxvUL8adBU8GEbcjM6TbSicx5mo1Yr3mePgZXLsnN8zyElFrEcDzy3QDmOYt0gBgoHE08se97Wce0GzGRJ+6QLUy3vWVF5CCCGWRlnfv60jgGINtgg+Vuz3nm6MLTecBn83mOY0lCQXTDHccI3fFhuQ3X5xzDzue0AcOI3QqVk0phBuU2liSPMcxOTh9eqZHgX5MtHOGcE/QflYFE2xDeYMNU6XQcJBejA3J5eshPmMHQ/JoBcieuphzTL17Ik5/7cTxkTjSzuTMDoiNcQIhx1ZfwwOPhI/K+RHG0zWFwYpWA2RixJ+Z0n8z85kX2c87jKD1ilisb8+c5ebZg5xN9OkkQtjMNb0QP5DLBBm2RF2a4y1rlmtV2CFSohjUm3wDgPKcR0ArDL1lr8XhYMO329+1j+gbvDUPltY2rjrEEBEsVoXolD/JHr+rLXcYLZQn6p6U/iyQRFeA8G3YGbOcDXWWYvEn1n3NSJCbMGFdaSBK0DenBeeYy/UWgrTC8Afj63/9fe8ff5V3e5V2+xeWa1fmXUL4kF/nd/+l/+S/Qk3d5l3d5l1flS3dtv8u7vMu7vMu7vMu7vMu7vMu7vMu7/AsqeYibc61Fzc2Z5REkNXsUs0DDRk7ic34zJpotJ7l7VUJPmC2Lu3D5uaatuSie5y7GlGxO6euqhuUHTlV5ruf47W8+H71//0c/h7zbaxBycUEPqjZnSZPb/a0139+TrYCfZ9zWyKVGNWOuE2vsto4jp6KjE+ueEU5XZjfjr7y/h32NT3uaeZlC6xWo2QPn2fDSFBen6Caznijv5gy8TyBrjZUX3JuP6sV/rSJLfVjuF0D0G96ztyVxFAe+PylrtQwgpui7dlajlP/kFgrjTG0u+Mm1OHbT45Z+mfWuVZWw+jhHwXcWZYZ09jVJ8gLcZdVn7geSlWE9wF6fsyzTcd3ml2t98tVezyKXnWZbdcAAm86lnblcbU4SnsuaPAn/qUGRLX0CQBPz5QqZARhvTApO5d3KKyHrdZdlI7LuJmyHShLwJ2UkU6Lu5ux7gNvP+5oQE8GWv9r6QK0b47qHWi8gfLebfnjZuM28I4VRrMdd361sL4nWnLFlbcPWdgmb0kCkDrESpdYy7N3E8iiwUpKdcMDjBtK4ShmDtk6WoJbIk77H4xiPEV/HMXiyWzFiHVR9SGMtw+HvHOXWYamxvQQMGL3C8d4K3RincsgLpcl6Skkyq9fGmD67rCLpevWHXPQFfTIlbwjHS7NrZYzqLrgIRVbphUHkPm/W6U3XNAaJfWqrdVsvmLi0bsZonMZ275YOk/f+4fqFXXb5SQ6aRkTk6FoKcbStTcZAeaP119oyCwF27zXvNW5V9mrHgTGOuFRlDByPcfo8z4827BgPrkb3taE014kmStd2NCImLU5AHWpt8vlc1HnwiNXznGfchD1j0VlZNeP4dei29I8SlesqfYq6THfXVXLCKye0k7/tfN2Lrsnct6hcfiLZ4ltZUuc/Qcyyzun0vMiqRwmKnXZH38xcq2rzGKF81zm+aqbFbnX128F0AoyMq1PFiVfUQNwweCGXI68bmo5FJ5PdC+Yvn5ThRK1WRv27wdcVf+gAuueWi85TUFuQzJAnXetWj7vSq9xb7lfEsvFaFwcWE1MwXUcglojciX95rdHcnBujIQBYrlZ+jRVuimGV0saO28DLLHOt9oIqEg1eqr/XkqW5G2BAr049YEexHwC5KuK9Vm7Ve1na0mvVzKtyu7x3baiotRNRwVL9nYgoDTOBsk/3lhAsp6urIJsqiLiQOxq+y7u8y7t8q8sDNL9ethCxRdQGN0Z1DJ++2dzdICcdZhjJ1eN0GOLJ4t+0pAXwaqNfWdlwh3Ny7fn02K
QM2bgUYWVssi5IG3/9/W/+7rNj/t6Pfu4CfWp7G+LYRfRZTgXhmD2RbAMrcIdsfYhh5G5rqywEkahf/COJH895XpcQPtKSRAW4rPcumsRRt86HvaloY3vKV/yj3/KXdH0d6jgynitHWFdEuheG5BgVfbvw3G/AzdrrojJKriCNvnrVr88IxiCGQXcFUOyx3lhTtzYwUKo3tzsUGcl6b3FBw/x+14S8SxdT+EpnrD43V8JBqknF8lK51MYGOXkQpQdj4qOderUPa2tAY5vurVWgZ6iLX5SCJUy9Z3+TLxhswpDxnmwStcgqYuQ2fVN9cUF4sXfSctyWGsMQiRXOEoOIM6oTJfUlDlGL9c+3MvZC8Irntr/s7t63f8vmXEjcxad/+JP/9R9//Zld20HVBdE2+0rwtt51DzsupnyEolnRBf1tbSGqXsy5JP2ywku45wTTWGRf6buah85N7OUSEIkux0/a+PSZ3NyCUFZTOUNbpNS2xptTXUNI4ai/D0YKM5wRNtJMYMCnXiyUoQuNvafsuWq7dMuFYvuOUvqF8mZueQow2Vk2RchukIF5P+crLXDhhRf3k45SbXqplCyfi91qUGArlQ2GlhydDmCAmxvDRa3A/XnOZttja3o3s5Oh7w1CdjfYJMwf1NJvcZ1Ro8kms0j08OhNRBZppPdu80h6un/K/dkYIyLRzSirLq0dh6PuUqbzMKokj4JZZcqAPBexjaSYiP1GijLCZ26GxXD4jP2Ew1tvXSqx+n9NE219T+4buQWiO0mu5omIiETWmJiIO39KjQCz3H8a1Q1b7AWn55x9sDSLTN2udG49IPRiQkgENf4U+ejuTDkpHWckGu7OkmkSY7lkjc6P94wLF1B22OE8Kg7rzcbXzBt4nM5qBkD0kcmSDbMYa3NH4jAmHLHWAhEkYoaU2dXvXDrmDh5GtJqBIgOP2aEU04DT6DmQJxTtJFVTRjlZ+t/aGpWa2RgBTtyXZQTEISmKjYejRx494H9AhxDX0WZgQL4SwGVCqugQMVYKII4Rd1nB8164OCh/Ok1PEcYKlbm45TtgcFvMBB8pCa6D4YA0F+nFG4+Tg4oZVeFGmYss3bUDhkTa06V3vjB900B+tDnzy451BlNtsZZuKF4ROqPpsJdyZjfJ8LaVFeKVDZw+D8tzgLSTV6YZrT0SULxyLu/yLu/yLt/qMsQbpNOnh4scAYvMYfqcPGgJSBBMbMd4jx/aYh8F6m8lba5MAcXZRtGlIbdXI+EgUZr3oqrf//3n7675/o/+pmuBOEfjTBhqID0lu4eA7C2rscXDywHlziCkT01fwu6NFMtJytYAPXpzodhW4nFOIUuvFVskSvSri2N3IT6eea06gNKs14o0biyvvEDh5cfNtfvtCNawq2jhLxDR5eXCnksSfCkVbWzj1lPlS5K3jllLDavOGMcdNwMXXL8NdiUNQ24GF91J2x8v6JqhT4fXhD4ivlhU0BgepFRznGgoXfVw5r3rGqtEOVMdOqj4bMYUPiV+Sgk1rqNzMsJJEGgh8s4j+Uue5nlWVjU6FW8xUT73FSQ5DsYo8RQ55CX6lNk5vVYxv0DpN3xjMfn/Qgt5p1I5meVftfIHP/kbfLL89jd/t0DnpQZvC7q0dQtfGXrGZEVkg0WsaSH2PLOXUHq25GX0GamWFe2od1WkV4i9CFOdUvXf9IXqt3iEEjZsRFpyTB01UXj0SdZWWfAW8zZ3cbVCpxesj/rdhhALHeJEOS6WI2ktbnORReTRh37RaTyMmQJU1qDiJctMeGdMXKhvG40uXChH+glbrLbHmzXlI4Td6g+99Yz9EB+pIkHdrzNDK9pP0aQ1KwPA4ZTRYhKge8e0SIWi0asiX4EZGofFalMRehCZU8JGKr/QjorUhjxDRX4tTLkhtaE0QEj0ypMVrkDRLWTrMx7T6Anq9guLJd9WBdJRx/F4jGPEsb/H4/jud//i8Tjc/Th6yRuFmQKcJ4MvHUZbqx6joV9mBobKUoyOOTDAho0jLjwLys55nlBFkuSIe02ZXgXdytD1kFdDUIsUo0e9Et/kLOBm2EuatxBLKahRqWvpSt6BuJsoj/NPB9WEwiXG7UUh8BQiBAyI23mpaPVvqVz2dBA2CNG6syFFJamX244vTXPssNqEwa9+TqxgvtUuPQZex/QKF/IyzFEohwBm4QgKSpjBiFPO7oTP6ed20Za8bUWJa8Ku4BWHirJKk6wuc+ZlQhMRkTplRejdPQ+zzZCi/X2b3TJ14ATRXfc/U2gYezh3tcgDW7atut+jX//6CURAli/e+d4AoptouHB1Zoa9KxcT7Jch3JWl6uVqbXvx9arQ2qhXLAwT0GoZ0cuOZ2VQHr1y5+/yLu/yLt/ikhPRBKdAeMycePMVFouh5WQ22voRm9RZ3YjaPLcFSWzSfpMBxRgN1wSOoI7Rl/m3q+cQwPLJ8r0f/axDNTYXcCRnj6uPCRNl2jXGGn5eQRUBRAfk1WNz2V26puq6bN7ZOh6rvizIr1MHiWl8WnzxDgo3wzAf3RMU6G3HK0FPd8ER5/YnEvYKw43Zaua5FGQlvTL+Puc8Z0aDMs7r4H0d6MsFBDKKT5UGQbZ+asPd5JzU3lrGhafp95WvtnxaLOiEnu4Quw0GOlwoXEql0Gesm9Mx1noKBm8q9xXQCYyqupekJgkSImGYyB8cmEEKHYKkGOCoPVZ5LQCzWLJf6wYDMQrIgWlR5e+su8X17h0wRCszIoS4eriibuZy+HTuiBQ5tqskrXC0ohcAbCaijk+EdB0arvBwrdjld6q8QfLpYieUvQ58WUZSmwsdfg0919HcQNmIrs3Me6HkJ5GsyColN9LG8giFKLizWUWtyNaRlFDLxE0ktlEm9LZ38Z7qTiNz6oLHWtXKQGXfcAH8+b7V65ALXYNkDJ35aN4NUiPbNA9xrf32deZXdqNXwFcuMtVzhhuCe2prs8nE81U21suhM0GQQ/Z1XR7znk0lGX1raxvUlUgOeogWGi/Hqc/sTiD5ccPImBrJgaQYmO2Wf2SmQcxW+2iBKLJ1AAAgAElEQVQnetjyIY7K7lj5QtCKipyJn60qk75LC/zMKC0Fg2IpTDKmDFrghuzwTg0OBJxVQapHJ4X6jerUZa0ZMyOicak48kguqUufVi3TuF+5kuWbbz7O85zn+dVXH37ww3/9wx/+Nx+++vB8Pr/548eGiStsUT/p+kATsSw6b0/O7dJJGMMYhIDR8RCqYSOue4/KZta5dKBsSHvgVRh5ydMyVUzQ66WMnEcI8fCFq91eqcperjYXfpuStpK9hYDaacWJSH3rPKy9KCmhqVTwWHbZshQMGttbPKmpfk+xuh1mi1InKGW4aEbsYJOJa78yEGk9BSaWFqJ2lWD95IawC/Wpi8nhktzj8SFByLAx7HEcjw8fjnEYPd21GqAAIXWKMm0knlJLdHe1DkXDwgZ8sewwSccEuqK1rrxQDRjRvSbDC1X3xUWWvr5c0y1OtSEYc7QiBJ8qq6UupiSF1Zd1/fKC9STTi9oNOwJpCbz94ervvqBYQd7LkCp+c
Gpt/aVgLZvPBctEj1dC2KVvPCkAbfX+1M6/y7u8y7v81y28EBkoSxymbPpEHguiwCat4uDBV0xKdmJrmRWr6KBKoRIwXEnwbJ3nqjbiFu8EJX1aZYKbBiNmZp+9vub7P/55tQveKdmxgGmSrR5U56g2XkIIzmwzBO01CoRzi2+wdWPjjhuRp9RVFFJE1zr4TQKhGBQrbrdHgEPAJ6E9Gd68KSFo5iYjsyXBUU6oSFi8EKa6eke1GsaWMPG7EEvG+8mMZAViGl5Xv2wba350W2GD3nbtkhIOLvWRnXz2dcdgg5GDyZCWrBwg0X0l6z0iyEowCmoXPB3HOzKO96kPMmQWcgKmlAjWFg4WVNMYCLXWQGO8/tXzqCUHENmZEdo7+jymrCeCH94OLzouEV0Jqc9Y71aSW4CskHqNCA3fl0UlRkqvsLjTRUVvXIHe+vsLHtMKouWgpIMZH68PWdNqFgDAv/+Tn+OT5etf/zK4uYYLIn8asvZMxj4CL5og7bDIA4SqUU8HmNQt6kjQ0Dk+Q/Ox8pK7leJvS7c68CGr22Qtry7F1t7J0OVD5Yya3sWcQSSiBXE3SI44UMBz5xw3E1Ps1AhKENn2lzJd64VHPbZYykjLyK913Go/5avDciAPFCNlxCgH1S9S3g667NytSbTlxc7XkJMhGHOqGSRRkjIaZK6lLWByiEI619hY+yL+iO/SRrTf3jc9vyqpqdyLXXsCVsVKkyxj4TCFDUXwPnbMK1rMJYilAk2B+GXOySu0459Khd9RzZn42DWcjgF74Jpq7gJsiATm9Hn6PMvQwrHMv4yBsOru+P3vfv+f/p//9PGb51dffRiPIaf+OjX7aoYlY+nEKruwsGdVDwcfb5VSl9cBMMyOMbxqEc7lF7HVaih8Tx7Q141hsFyBthDQpH3UAzDmsF4CCiVC1JhpLDFqKfuSg7jN/NGUTs5NhAExw7gk7rvb2TStK9OvVs2qWWbvqm+bfbW18pa91opOzDU+cI98X2Mg4oY6iXUQVjXOQtVXdoGeZhF5B3LBmsumrsozo4BcVbHSKGbRz/mMnHtBqXOexrHd0FYSsY0aF0xHt9Ajsjw0k0oWJ1BvVm7BvXx/5JL6hldCIrHfth5fsqS0X4KYqma3t58sKgyl2jSKi6R8stIFtVQkqIhbk33AOqKb32/rZuewOs719cwNfjLWQBld8smAnKvQcvV3pSMVl5XBE+cc19MT3ndV1/oqKAe/f4IS7/Iu7/Iu39oytj3EneJzdcAS2nnjNiI/msx4I9MC8XQ1lO6lA7UtzOtrLAQ0cF60PK2lB2BwTxP+u7//zJGR3//xz8o5VBv9Q3vysULAxDQvw2HUwVGS7cMWRJmW5eX6tbxKrses3OQVRuPOxWnsIxGUQveOhuuw9le4o6JCr9gFwL5thfmuIuWrYb7MSHaAEr8ykQGs+RZiiM8UybWKSAPCPUuf7dfBrwAOWgkPHwg06NHTruhVmA1TFo7ORtqSoq4Mz6U7ngGll35Ve4WwM2bPUGMS4lfnMxHiiuNsn+7vvUL3dN2zsSYiUi3KpTdIueHYBEjW3RAk0Ujslb0zhlyMI93zSosM7M2dMyhlD4I3qDo5UKEkf2i5amGtsyBoHkp6Kmxb2MpR0vw17+LnVbTKrIWVaWNZ/MiRmwH4/k9+9oINWf7x178oCz07I7QVBe3afZ7SUEkohpzSXekbswS0KRp6LYa6s2DWYwEq8K6UpC8vr2mUG60EUbikmpZRdjvaZv6y5mraxmbSYSWYRoDMaeyMhDvGtOE2HMMdk0mJdk8dS1c6eGAcViu6xmH1BT7QKYNSd3FYxRbtkOnAANmL7OvNUJ0ZKgsipMXCWqWshlxAZ0wquC/1pyUwqcAMel/PKm+QD5efy1AHNcsEty0uXVvqklnOerBVGCtJa8WcmD0R7TCrjRI4noI8ZaRqFqRZs+i1uceCxNx7a5KioxWX0cvWYBuodXnurnKsekLrxb55+y7Nwalw5j+j5SfgVKkY6elQcg8cj+N4HDD88Y9//OMf/wjDcQxgUmaVHyTGuvYQpWeLyWw7TpbFcR+xUjL1n/dQbfbMcq+xR0ouFd/RC12jW53motUumll2K3NiUVtbKzCdPDynFduMTTNPazLG2rMuAgXvjXV8voA0F9V0kRV3x3TMOJE2nFeuFR73rRedrJxuWwHn1ShxIufc+lCmrfsIGaVyoxyrK2d1eIl8sgIRmNqEEdO2q4QUw4oWVoyT4QF3qyPtYjBLDTb6zAwprGQm7GcaoW1EZjDrfH2xOAfXNqK4KPvBBGHk9GodOuwlKruVnGeBu7K5ckg3zRaWse3KIvXdftq2UJBRjfDmaVEoW3Kg3m5M2nzVqjY9ykFLz0nE28GUgt7hna5b7Czlj38jqP2CIKOkp5u6e6ncNjY1KAnKf7Mr7VUpSCaC1K7bdzI4MQNSjzJX/dmBvMu7vMu7fKvKWNM3Db3Togow2n4taFjYxgnfazWUxIoZnGCz3vzlPGcGULwwLlzuzIOlM9QJ1xGOuzKev/+Pnzky8i//6qcC5yqy6x8SKPea+vVawxrYpZiShaFaviOHOi0RAdKBJDLTQKGwWqEC8ZZVqsvKsq102853Fpw5b4fT9deIK7u6hLH9Ry/qMV2nQ+3ufRKJSJWfKp8FCw50FD7pqD9RK3fteWyKhLts5SbC6/cLGO99DWJfsfDtd+ktRSx/ZcJF/t7tMQgtTpI3BKcVSctDHbY60wlbrzu+MGxiRn2sMCRZXHLcaY+eTJDUZP8x/jlninlGbtms2Vh6joxaK5HFG7S8oGRss7PucmM4pg02Puerqv2pdDxnqqi9YGrsnL5honxm2LSnqoL08PJ3eyUj9+Uff/0LYAlyrkWzxxoEVW8umimBzTbQa68WVJ0UlTGwqtVwtWnxKyG76cohLomszxUNjfkJSqhDJHjbhlG2KlDJlAUqXcJ/tlS0w+c8Q6pL6sawcRyVwSsyGK91dpyOM/IImU2wCXMbdUxqvOLS0MKJT5vRVQAzBzRz12vqktgCvrNy/jPR4t5ewwN3N2RqXJULQmYsXte1oqqn8g5FSBs8FOYm2fGaCH4Vr/6EP+kVEID433aT/BwdsJqZbXkfTYyEADl3soN7fNl4QSI61Za9xcsYDRZN8Oy/rUXcbetvDEDtYCeHyzDEgj6DxT0ndd53gaKNgn6e5/M8n+fH57Dxna++A+CP33wz57l2ID0nJ8ZCFrkXnoKhemL6LX+gQ8hrhDOVVcG6ljQ4oUfrIeP5lyJ+G0O2UEXW0S0O3psjnaAx1LZyBwXjs7Z7zdDc2NQ+vrq9OD+Ra9qrn+bZqlHHXZWgwaHHloWUX6tKTEhAAuSd6wKXtftoubg4WSdIif9W+clslTggYwaYA1iMQcIH5rok90NegKLS+KS+RDoG1daYcRKcrcxxM4xxRHbfeRzHZZRLqXMugzh0NthHD6p+zgS7cKpkuDQ3ZFNITRKY2RhHQBYuNRXiL/bl/2tpbblQQHsPjVzE1KO1/U/rj2kEQQaD5wNtndmqbQQgWOBW
MT/B1IbEtw81hU0P/IRZay1hR45Aerl5pqGopQK6UG5bRqHR29rdhjCcmerJ/nd5l3d5lz+XYv/Df/s/Pt0fQAUUSFcEA87cgenHcRiPEh+H+XQ/53EcAM6Zbju2zPh0P46I/irpli+aDbM5nwDm6TBznGMMiyMsoy1O3uf2zznHh0e+nkCTd9GN4XN+Phf5P//UcjtEJm4kAMm0oMX5XHOGwzdunZgJON1gxxhxMtc4hpmFezQzHIO42y0/XNxmwlrJadrI2DVeBBDT69PZS75RGUsA5znHsECWAdQ8jmaTTAAKB8QIebXywZnLOT3CPDuOaMOIfdpvkiaA1a4Vd38cB5hTBjInZGbHODDPGfliG3lmJQOHgKdzzojb49qQkBzD8HkGwc8zTjUc7tPcimAZATCRfVYaeqHqLtXyx+CIIDf548CHOT+6nUDkAuJazzHP6T4pCYBT9szgGGM43Cd4SJhtjRsxiwCylJZ51o2NY/qEYxzBHIe7jXGM0EU8Pz4fx8jlIXAbctMUCezux3HE8ehzTjM7juN5Pn16wnzuITvP8zgsIMs8p9k4jqOjnQs0V9IGNIzewpLxw8acE7BIebinYBzHOP000ip4WjWPcZznM7gfQ647ZDrMGXmQq1NsgkptmpCDhR/nfMJgIxH8GEfyIBdsNeWpkkw6+Jzww0ZYhTEGnkjRlziwIzChRrHceWPpGn6ZwU8/yQJyi7o8/Yzbfm+kNw/exde/+sX+p0v53l//PMZK0bbFGnhu+XH3WUbPeUik5120OSKHXkeuKaBKwqg9D8aZ2TgOS+ojpbjjR+PdJOgHVGFqh1obOghZKuBsjcq8jJswhoziylw6LyosEGHj7I3MmY/xWHhr3cswFSPz4xlYZpoNiFMDxmKVs6MO2CANZi4kjwXRCGdBkaJhkLk6pPdhbBtXMYxznjaGO8JojOMItyhsCm/CkMf9eZ5jHO4z2jlo5IeN0+coVnJdbTisyq9FT+5kjYSlaRg2KsFEehoFaVsj2eI5nyfA3QSxQC9xBkp85CWDmZd9ENkMnhySeLpzAx6xLWJDaOCHE63NzkviYDD4PCnl+bbFWjh3O9Z6ZeP8EVw3Ly8aSYzHMchvag7CZ2DiLBFdHFVYoRoqt8BHOeyYXrf7gdN+Y9pZACMGZtTBsV9z1k/Fz5Fac2DO05kOHoc9P37EMTKHCDw+fHg+n8OO4xgAjjFOP7/6zlf//Ic/zHMej8Omzzkfjw+1t+R5PmP5sZeSV9jMMd0kemhwxhg5Dz0DsdgYRzhss1w3ytx1zeBZDc/SGyStpp/HcRzjcIKWMcYxjudT/LNZ9w84xkymrikAW7ILvG2GFDYbReGQ4cMMsKc/1yHWz2bW2BgouUr8NdOvSNpR7DLri1XRI8W1RCYtWq7vM7PQpJHjikuBcAw7Sa1AaBQU2Hq9G+FYnAQqPRHSIE10KhXtGTLTlvYwezY9JXXQTHUj5TTT0N6kxj5+/EMAg4QiVrBB1o+tLrZWjQWbi7PugJ0OHLFeHTjnE8AYB7gGURJH/dZxHJSR6DjU+WT7q+23Mc7nx0gsZldCDd2Rez4umJKe0NNyO/II3THPxfKVqQ1aMX4R+Ir2a7BVNwEA02uw0bs8ylCu9LmkoYmwmp6RRzYhuEql5vvqisScUZ+EcN3T9OSwijSrJlZGd3qTIcei2OineT96j6IOnTkeI3VflN3aAZbWUf2bXNUnmUqQZ9oQL9+yU/ojNz0VUujxzQozFZ0Bs09nxT/+8v/Au7zLu7zLn095jON4AP7xIzLN0QjZO5Zud9IGX12f/Ovyg0a3UeY5Y+onwomZGzGNW04rNIUDS9Beyy4EWn82F9k94w+CFcK+d9/Y4WxZh6MpPgtMNvKJPAcmNuOOyWjJ7ZAN6fW6l3/JhW3VFY5XOrNhqs3VGjr0r3deEiCHokzbZmCLX16QjoF4yIXDrerJucHIKfHSlWBsjxWQKX939JGDCnFJhkKg6b17+NmpAqrbEhkvQCSfbURwWU4rZBGYKMGMUKiYyGc6mkKNz/QTJJpzd+skyWU7tDFuJPmmY1DRXJYmJA5iWOvuRGxuiKTScPOCJRl8VHtBsJLs7iilW7JMF4rlk4WqLQ4OJ3AzqlNg9Vhi0XGD7RXGkylIrfHKz4w9jKGUSyALouh+EWY3VyA5qVDGQxSrkg5kb1BV1OiFIt2VS0rRUoa3SXlquAbPd7XlD9/7yc9/++tffrZ1s178k+83ZJZMjuxY7Ahxa1L7ypkOvn7feOlQU0zMaw73/l3tQamcQ+xHKav1NyZ1r7nIq3u6jK7Sr/3QGgCswUn2SJ3cegP91vkia03ulFnLb2trCxksB6Q8KUuY/bVgX604bfVshoppXckQ/0ofOK7l1JRdlCUpwE5kD7yWDgnFI0GZcbvLWMo9tEehc+izSbIHi91wL39DdxKdt9thbqWyp/F9ujDBtyfTzq7qkEyxmIjpq+HAxdR6qR07pTrWlAzVtMqVXNfsjMuA2k7EbAGSAh1gw6d42LYu7JEvACd6En1ITxYCY5ECtAHgeX7z+PDBxvHNx28yRzbPMQbcP378BoB/OMwsztc7HjnFbGbu83xOKoEfNsBZ26SkQg00lzkcKlteUmI5WHqyhTe+KUshqrVQWmo6yjNpZ/nG1UuJQssnqru9SpPS3dZ1RTc+1V1J0QY4HdIinbWPWErnTMglx3MCrJzYDBc24a4zfy1m4qK6Ky1XJZw9WJc8a1QVmmCv7MtduXuMkJMkDpa8rDLlR01IIRY/jkdAFM9JTSsYOh7HfYU3LSREOMYx/Zyck8gJbMf0iVdrzUjaxn2fbkvVgPDOAZtwQ0xH0dKnuRdDWBjN2orOT7FixjS28XUKdllfiPFiM52LTKm4G8Xt0JZfXyAGLPhT+5Ao2kusozf9/1VIaCzUR4rP8n5mqSg/cLunnYm+WIlUAWWjB0Qaf5lVmtJ1rXHyNaR/aKOzPO1Nx72zHGnFHqZvM7apI3Tf5V3e5V3+7MpAwB7Po3ZGHso9EszWjzVFbLlBcHDTR8ellaGBYs3CoxEW8M5ls1qztqRD8o+0wm14vUq88tnzIgH85Y9+mktuTNM6/cniSTsT1Fad2RFjn/cmOFUVZBtIssTEdve4w1rSY0PPVpto+EfmpNbH6p+iu/NLHyExLxgpx575Zj2vsWqqFI6kSwOc5Vsy8+c84ikxklclQUpUVm3pQ3XYTMdXqFcJHSISNyBXvzfSrR+w18lfq/3E/RezWMvQZKw3OOIcr5x5T3bopGmmCbgyaOmAE3g3ffhVHc9MX1ZbPJccnLG/FSIEz6phqhLPp9yOxfN4bvbkKeej3b22ea9EXX9Nka0wmtqZR+nbsHHU3qFWZjK5tl/mIgBb67WShzr/K8kN388rxZJ9K21OaGiqCYVPK33jYkco6TMXZe+xcv/4J0Zf7EAfJ+dUhFXr7+pVE/e9z11r89vf/HJTkxySw+sqctpT6tRIws7ifb9eimBCtlVzb4nRWRAqXEe72r0UWaqT14PeUhPM6E8rkkgFqh+WIl1
ZU26UI5R7O0YdtCcupyUxR5PEY7hpOQmV9y5xdqCYazwmYrblnmVNDTdfStKyCZrfK+aaGVdaCivXoI6ONXWUTbRUlwNEKeRmQigiVkfl0t1U2iZ5u4RsMK7t8lyHqGAAC6tAx9DWDkXiEhJtrHIxIg80JkrBm2ILZRHRuz7bFoK+feUMXxX/deGeVmbIPZRHnC/MzbS0m1680eYBRC6yjWZBhKIb99AGW0a4bY/7BvNP5ZmZbIg2yPhFXsJjEOyk7lEChsM/Pr/xmYsK496MD985xgPjgYnznB8nTpjbsMfjCMVCX+5mZgOOM64E8bYPClRWlwbpptVvceUzBizujyoMuen/ggq8ZKQa2njVIKfuyVkfoepfGO11ZaHps9AvplKjq/M8u9FizIoT5OfiA8cw2rMbzNxWoxdrApOWevtV9S6tmZK+mwxUNd25B2bm/vo4JmkCkQU3qNw2itrpc22jdpmXaaVFSNMxxKSTBP028jwdgpU558m84ehEtvisPOHBF8sSZC0mbb3MPB8XV3ocdICAH5UEXN4g2ihSNIZs8Ld7VxiI8K0tE4/+oDm+KwW9qbr0lS14+mpVuyheOTV9brF3t61b+RSO8mp7bSV2+Ymu4xo7WZGrYVFmh1foxL7f943oLmu88D1x9y6YFzmtSLXTfj1SK/fj6EZoqylUm1SJv7znKoR99G4y+u2Z4t7iQASu8IyWOa+w+V3e5V3e5dtdxvl8zvOUy3BzG9ByJW7u7/AOAzw9h9cZ62LEc72Sl5VO5CFOp1N7HkkHQzuS/LyigEzqrPHnixhEyr/60U8rjOnwqaG5od1ZAuPMDG1123KgSQ4pHMCZuyCtny004p1PJI4g/bym59LHqwvEPjiiN+NCAXnOzOX8+w6+ynkaICfMmeHFMccdA/b4AKYGot8kT9A/V7jksS7tmnPMedkznJvrzdgdwYVsLQNsD2ovIbISYy73tHhxUw9vVrxOwXMe1B+r+3hJQK4C6HDfZgbfzSqQdzdYjeFyNtKRUScXwB1cjWCSEUtKhPqhPV8SmAo6p/PUooaYRF0EjK1gZHwB4XUsi7zrKw2UgyKjFgjNzpb2fkfwkcEDvHzO2HOSYW5EHpLKsGqxuZ1LPxh8NEIu2pJayI5JEgPJ2zrVvrqWjzviyP5JWOkF5/Lt1bTcWpkFwZfSUaSiCwsavnY5P+lYSSxHl89mJP/pV39boy4qcaN662kJOUyAdUHZbaQZFFkFER2HFBeWQKBHyKVxqISiLZVS4skbmkClvC919o8Z1pZ9uSm38THHZnns6Uhjv0hGiVUrMmkksQ0teBvx6DO3Hyo5+UT03K5fadx8IWn3p3J3AORs00pwXYfQBDCRCJeTL+oxNMtMU2blzLZQGiofdJyAsKOcXL2h9qnN1DLAGnv9SZxNPDTQd1FL8Zuf0LFcR4JxiQzy4u1+puSSUCBslrgdjg++HJaLbUTdhRRvs8PkrFuUd3ESJ6FGfQ2nIqxS31YsGGKeV7obYD7h3CS+upZq2mjwOhORkpC3QNfRZWltzznPMY6QrMeHD3Oe//zHP7jP43Gc5/n48OHx4cOHD189Pnz11VffOR6P53P+8ZuPkXidYuofx+MYx9jhymLyWpj2r9G/DAxOoLjNiZnn9Ul6bcm0rj+o4UqgUpndmbyX1+sUgebCpZPLaJz2P1Es52Urr5R2QfhDjc4aCGbYTlx1FQ60nzd5Ry8kJ0Ix4lcwXdEGvwR5VSEaq8YPQaE5Z1xJVyc3oQbCN2f5MP3Yl5rXkjQdZTaQxjFSrCsN6q3izmL0+H3OM5TRYMdxHHHR0vE4jg+KsygFdEj8g68qM32OcYzjoFn2c57ufhwH3QK2WgFDdZy+QR3URoYyPCNS7KsegOn3UMlw4+vB3J5o/P9l712XZTty9LAPWLUPyW6yNRzLE/phP4TDT+EIP4ZD03azb/JT6Iem58IOzej5HDHjmJF6utm8nV0L8A/gA5BZVefQipBE2pU8PGfvqrXygkQCH5BIZD62AobJmLRlSmclnfvbZU6H7l3lKpfvUIG9BFLX+TL5U7kt6nXOMJ/MappSN+qbddQjpeBWrkiFM2TqMtVYK1kbro+9JoQzJaUResAi68RJA4S1yfzDqxtJk+R2cz/vBLeO97RQyhSO90pXEzvN1udinuVZnuVZfiDlh5ry9uu/+5t3P/Cjn/7iv05PnuVZnuVZ/guV93okv/j8/We6n+VZnuVZnuVZnuVZnuVZnuVZnuVZvldF7TzP8xxbPgyOxwih4hZVnULJTyPzcp7o7h1BbjbV1lTt1ayp+OqhaDPj3DpUAoBAtm1d+Pt9kT/+8/RFRsOMNIlkxaMiNrLt5I3t597E4/7xFh7hjJTJAAPPHV/tm7W9aJdBYkvzvek1ImV6T3fsFQuzKk1ieCdBuTOyuTPYhZPYG+/sAM/l5TsdxkbmEEaBMTCh4mCXjci1CGqLLyubwTI15IpNWMd9Z09w3+n3x9893lOsieb+p2T2LW5333+nIjMq5vdeM7l3ns2QPGvfvHZolxMn9Xpt+kZMqNU+cp7IMB6zYjFuQY+Ip9muiHQ2v7FZ3bGzHezhDoD38zryTHbGc8Fjhz/iKeYlIRXhmJkH8oIjbpk7z8xn3C6fX+TPIETOy21ISn4LyTxuDAAISg5+qi3+GlTHM3pt8QvgFtvb0petu8D33Gt76YlP8df7280pM8TuXXX15TYjQAs//j9++c7X4t6bES+EIuHe94iDkHsJzXKRdhTeiBMrXpr78ORaXxiI3/RwGdGSDywhL1Pw+m137xUKkRGFgY4C2Z+rwayjRPe544NGDEQQaQys+zjDNRgJkxdOjQC8RZSutJZR2x7UNoMgvP6WdV76FUlmj2icGSZWMT6sbBnsGhn0kOoz1GNGAfvgz64wg8rnYVJSllE+S3PVV9+nvr5w5DyMVeg89HyHnDUvaPEpyIBCMlsrotEHKZ5tpvKtgZouzFqWksw40hOS9EnqUoMhkcS91/xCeR8kL3nlFeRJwWLUo7X8+1fjrdwVEch4X7cIFARKKeRJXbe4QTvYSw99eXO5Xq/n9Xz77audHn/efvP222++vb6eKnq5XK72ep6v5qdI6olDFe522pwm0hcalxg9WvDBVuJ1fgSFOhiNNo6hSv0VxJZZj9TMzqaYhmXV3/UDJUB9ItQKI/a4NXUEmjrE3c4keKOrZqQHw/Wh9rDEsLvH0YQZWenumuFWyrheUPlbacGK1St5S1SDJltyp8u4kbfF/YwH5k+Fftnyu5RaLi+i2NTUEdVXk+aD0mswZ80nEXtmMwkAACAASURBVAS5J0hdV39BBKpytOQZoWEbwZcltaKIZfbr1ZQHi9CvQcuQRIMGN8yG8XKIznHWSpiFI24+34uPP0ROAgTEKvLe2AkRRxmJbzYdSZ0xx1OMN1VlDRWFkbaRzuc2RVYEL13wqNyOWcYf9ox8/J7ifufP6OWdN0RcxHmwA9HKg6db6CyKZGloGdd70OODL3vJcO5k+fO4hqkLnuVZnuVZfkjlEprK6rLSsK
4ADLncIIlgAYDAzU3z5t64FM6B0KkukqnjI99PXrZMaJ72HiV7ZOsYKjWMK6a7DwjCC7W//g/v80X+619AiPIkT5Y4tcet/JY+TCxsr0zWtAzMADdRhSSAy+OopxO/wt3oOhWJCxN7ROyQiGbO40W/b3pW7ihpJKxcPQ6ZV5nYaR2deGX+Wb8rBBE6j31pQ5HTRGpAoOJWQAZxrUPcoDL7jfWXrJ/5lKzusjEyFvpCdRD23EUwtC77fgJgARtSKeYHkCqbtCBEc1JZKqPDxF1eaG/JxCXpTB2E7EqUPUwAGx49d4BH0ce7dF45O5bcN/x6jQWd1yAE0JiH+jCHTEOq0X4clRe4WRxSdSN5FA1ABwhN06avwwGBN9FaoGcVO2dHyvTRPN/dY1g8g+uc5h3EXue/HKauIW9UFZ6rn2C3PGkinY58XiXAT7xbjgt4YgQF1cIu8sxUGkTcrIi7i9AffB7iskRNzfWKHheufoQZh2tNQtq892abeIzUEcCRm0TkanoikIuas/twPEtHbwVMexm9WcXhfZ32QsJeEeNzlzhZXz6E+ndIziIFuFbqKGx0J/Z3UmsNG3248IqJQmLeh+v1ho8UARQGbFLa5ZMUhSPVHx+a7wiKPbFWmW3qYvnyU3dvaR8CpRLDTZ91VxpPkwk5kqQJnYNF4DGj0vlF6ku0OC4RsxAqL21HVydJ+bF9yHPiEzQsUndo/KX+ZgReEl63X/SIx2i6gn6vf4j7LoIeY1VWk9IXKYyqJNkzKM8H+QqF0HA2RhdFkTs+1JMIhyB7FzrT4dBxiQfnbhnOxqYuIQ3687lwyW2D+rChK0odAIjbnB3h13L6uHtMHukaP/30v//RRz/6x3/6Bwg+/PGHhx4ALi/H27ffmJlLwD8/cGS+MsF5XqNxP83t1AvP33CNj/M4JF2DrXjCHUinUgi15EjhlSpxExorofMOD0pLqsQ6BBK9oMlyCXkStQ4ZtNRHQjMjxRBoJZakL12645IIctO5wJYHYcpFnzPOvM21ny81u8QgImJFUlBSDVr3MqwtyXaa13ODuSjSU4KUwI37xgYuQqMqGS925S3gE+mneva+1SaH0GoiP7UkMRcaG4yMPrXaYOZM07lcZdO9Ht2NgaKmCFA9zM7TLH3gqiniUgCNtodem57KJu4iqHZpw4lnRcV/Kn7aeAlzZmRhVzLwoN6kL+rtnKeVA70eK6bYEcACCtwdYcE1vxdV5zOrPtr3WbZVIBS8MfE+JXZpznsvbt316suqY0cdN1PAZ+aGYXokb6ABde78YCzCUpKDyfs9p1ndjQoV5ZakchsWLY9YLevAH7lY41KrB8bjszzLszzL97pcVNVV3CqVjjs9GC2sa08mNFxIWiM2CXPQruauYgE4nJuo6b4p2CXi6asMt4cLM1ULNZ5z71pR9+pmT755ry/yz3+RuIY3JUSXlYpgdVl4GTwOkxWDdnFqgKEHbpWltLIgCiAGpnGXO5Z0IfUmM2+Uc1VQrctoymsWprVRroWsZFFP/Kg3QzGq9kqC0sRt1LKmTyo4LyPONSyr4TUZ4H3URZuaZkW4aOvC4+CFxCRObZr+MFZWrRCkCZHvtHx9wGcHMD2l3GLMSmoa6auccJO4n+9lH4vwUs+tgK29JiDXDRJqRgac7IVHRApcZSG2b2MF6ADkBNRqEs80RA4I1BQi5gY3ZwhuLlaDu8uhoDM82oorb5I9Y5CMk4l3806cdMFYPieieqDMCb4okcnM/PSTG/+ajvs0Jaw4QobhU5Vw9tzdk14qRu/9mOmxhpE9l2XlDtmF4W2YSBL5WVpBGDiUMNGDc+dkLm6qZGxnVC0I9aObZcC1g3o0dAuye1DDVsFcvY/LH3/7V5/87Je5pklhYQ673b/vqNgan6Oub8veGa4k/rCuc7Toju/bR1xWUy34aa8AqHV8c/uvLwZFk6JbX9RCPGnLE2PNihzcwnBnSttSDnTZQcJV4nCHyvR5DmHUwTehpspZx6Urw6Cqxb4Mj7c67FaFyCLfTSavZhNodQAZnNHbLl4LO2Vd+DW9tuUmqy8WEWsapAs1xX7u1uV02goYvdYehfCNchhSrsoyrXthleU1al/ZlqpKbDr9l44tQqLBime+XK/NsJwEGpPitbG2CPIwsbWuh57fFrm6w+DeD2DStLaWWKHBbd52MV9vQqDFBB/V9OEUv6avdHHp1nLNKmqbt9Sxh0OwRHJwcKYAFgCnncdxXM/z29dvv/ryi9frt3/84xeicrx5uVwOAC8ffPwnn3764YcfffnVH99+++3b12/tetTVWG+/fQvA7YTImzcvZzNYKOUp927JkH2McOPqcLGAHpIOw0WaeT1TOGURsXPOUhwF4GsmGhxV4M2Lg4ucZNzSK9yFy7q1HZpUNoXPpj+d9SOXxTr/Ocs9lz4gmFhFGccQMixaVNTsXKQCyus0SnqOklybXig1JvDY5xkKIN8TMET5nl6q5bNQvTSN81cBnPGS9cIKnNhyU8iBVDBes+FurnpMTTZkw1LRSoOa8gLJXht6MbOqh5n1PjfGSznC4hov6Tubu+2Htzs2RlP7Y0c/3xc85j+qByHjQqGpeTGbp7LyXYUAwCHqwx8rd9YTp3nT/2h0VF/4eDHtCxF0/d2flQ71ubAPBTyLDOwIGgnUCsoOtHSZtd9xy8n+o2OaMGyytrlvtAqtGBRgTY9wKAhVYT9jJeU4jlnD2h+/6dMcQQuw7StO0AJ/wLvE7jH/szzLszzL97xcQq5aAQ2IKC8FBQbMK5znJepp1OVztTs6Da080JA+C9dD3TxUWqRYNzO4wl0DJwviCGoeXuDe5DBdvksh2lqUanUki2eQS1osemEbN/JctT0vFqglIiDMxBKh0boVQlLGkYk6xm7/o1H4qpuSrNPy6u8dadql+V+utpsdOrpivMe/W6Btzwww9ABpdlXpHJUKAwiUSzYaAU9x2U7TVmaFOfRuznlZef0KTptEsCGVsc/ZvDGr146TgE5jzwGPpPlN6qUCr4T/a1UQxnjWxAitSGLHpESGYrYXfEF3XmBuTFGNriwjwnSBJcf6ccTtDkECDYeYqQvktKvT4IxBEezmGjaiZgFGqAqXF2kDQPuK7lit5sjrrQ694aFc4AKHnaaqeohIJIaPEFG4uR4SXqPEkPQ4HKpGDzVinyLHsKLvtUlurqfw2dzW4utqbyu1JBKBrsTJwPJo3zZ4vxfF49KWNblgZ6bae79T1Y14khKY9cwnn/36i89/c5cSUb747V998tmvGl2noZfX0I6e1AZ9gukUXncGOA2VHiDIkz3+sQBKHiwDe0DBMj8LeNOQWSkj669o0VFlmbR9zeYD3iSlu2CfJhns4dWztoaDWGXzpz+yvQcAQuB5+gvTA92ihBWGvTFkQ7A7OTC+sP3FMVKPpU/ZIXKjW0pz+PLecEeuGvHRWiMh+tmw0gEJlzcXXgSJOrYZKzFTtZcXp+bmTneSUDIPLIRCKYP5QXFzqJUmK89481upI8FUkr7VyjeDgM2bpIjSeVF9ncq46Tc29UiWldIy8
mjcK1M+59DpDqka2JQDKOdrvw+yLnA51MK7UThsSE+9HH6eIvLHL7/Qr1VUPvjwg+urfPnVNwC+/MNXH/34o08//fSbb7/+6qsvz/M8DvngzYcfffDRm5cPv/rqSwCv37x9fX3bngtvItavU8dPPzzAXgGxj+zce6htRaMej6BkoeujZDhlGkmC7EAozfvwxt3h6vvE9E/OzoacQx/3TmmnarEVX+GwdFhu7TXkWgTH2FWIwTQ6AIcFdUWiFZuuI3ObDOHkhHJsEFOEeOkWSx+UqzfIXLJoY7OVd+8r5wUYxXOS7a7yRuai0GLumjIeOVnWUkYwGKDlcFM9Uhapmp/x6OxRyYG72t3M3E0lwypPO138OFSPA36fXyThyST50tj6cEotc9NAZTXL6TJ2ZfTvqI08dygtj5T1nmJsla2FcRKKGB/PPiDl0nyrO0of7/i5tb/eneuVE2T9xbkhenfFpajcZF45ahG7oTIe122IrUzrVvR8eFcjN4U4q57Ptu8Mb+ei7DYRmk+1FhaGEGw87kssw3Ut3Hti6fKuGhfpLnKep6gUzZ/lWZ7lWX5ARf7n//F/lUPsNbMadayIh2JQAOd5ddjlcoiKneYOVVVJo2qRxgFrIsrE3d0PPRiTknmFMj7F3K4GQA8V4LyaHhkZGC6n9oVIngT/6n2hkR//9JchmU+3Q3ofEkQDY++ue5wuUaxbVasdbAJ3V4jE7cBEtHba5aKhJ9KLqhoK05osXlURFihvDZbxSZb04JhHbSJyXk8Ax0sZfuJ5qbeoirm1KeEOepFCTyZ5eQxEVTF24wvlTUWXCEYnIAreSOt71hMWzfFycTPLzndn3FzV0acaSFYhjAtQ5Rk+GES62tnVjF1CVVXQjh+UjRr9ei7cIGWYQLVOYTSvAnCcvAM5bIZwLBgijLOQN2FaTOGBOI8MyVs/E64GA/swaoW40NIe6LDQNpCL/MOrISKqSp9p0sLOM07EHTqvn84Zz2euVwjMPJ4xc9XD3URE5fAyf4QzGj0MZC1h6VksB2n/RkY4nuc11n6QFLGRHIfts1gMLi66FOA0c/JnGJNm/ubNG7fz9foK5MHY4/KSCyp+DXMoKHlE7HY7qYM1RRWxyc9wTsg4L9+G2uBG98vlkovdDcDRzmWlO/oO5lbogJ3rWqmf5gthLJFnNj8pQxYXpM16lRxHA5KrwhV22hfvPLIN4JPPfg3W5lsDA8G6BYO3p6YHJuONXmy7QT0lpIxO9569dORLztAO94cFlZHpkVhgRLHxKTLi5rEdbfnpZmaxHFRG1LGlqqobSrMfl8vF3N1OBOPFm3qoSgrnsIy5fkGvOqQ7GIzhnr44B1Qkbmil7aKezjsSKdOoReDc4cEt1FM5NkG4hGp2UFzBKaDk95ASIW+ng2HOqZmdZ4Ytu6X9dhyHmaloWHROq9g8/l5nazgmYkQ1BdOIBWqA6frh5Du4jaGqEGlUUFM8VkIw1nlekyq5TINnVYDTT+qjmCsKNPfRo8VKtlDQqb8yMa6oxN5nJqmIfUNVQW5fuZwMTAr+S3vP4KpSTBW9C1l82mutmBtzlLHU21au+yGHgwkh4VbwACLHOIFAL58g5p3T41Nb0o8bsnBMpfRF7aWWh6joFZKTkbxq0AMA7MRx0Z/8i4//+fe/gx9uBg/HzSUkM4DzPIOYZq55gXdjyzSYlz71jxogkz+rquWOikMvQax6OH0u5seRk+MJ5cYqOGDmzDUYHysAt8w5gNTddp4GcTPTVG/gfk30USE4r1dVEVmSJzs1Zv69uJ4BP2oVpD7odU1X4YJoHOD91RXETWgxt77SPSsV09hxlZz6EH2uehRXhAMaaN15uVzO61VVQz8Gyjop9IIlBJKCVbU4bd3ZCBC1OEmGW8U9Yg6YJaZ9sUWq/M1zCvNFmdCxi1ksUsJxpyxK7BrIJ5bteV4FedKCJE3xxUnz8zRpGQKV44jz2kvAKTue/KkkgzfhwbkdROCXmSBeEj0iTpkL5Dj0ep6HatgC5xlR1QFfD/OTt26j1mdAob0QkI2PdtJt9sLU2e770w1FltpaQjA2xUuKU0vLyMPC78gxkkI4IF+O9DTTFbWkoZDr5Zgctw6Jtd5861jhBClwSopYJM3yYJZAxCO3rdXoKDUlTlo0lsivBMCJK8YGlRfm1E4d0FEtQMBj6WldFMb4tYjBEXlm7ag7G0KuOc7Ug8Dvf/Nv75HpWZ7lWZ7le1ouZkbvhpQSK8M83S6SsniaTKWGyl+V2MjbApDh+ULgp0p+Q42VyFyXbVN3R+deFHwHX+SP/vXPwZMLN8ilSxvkFPcJJNZ3yitbZsM0BNvckaoxv6ct5N4BB4JdSd7ZK4znhDUrA9n6FGrYwkC5acvdAX6wOLsID0KrN0ZMlC2hMu9upcl4psc8DrvROyMimdPKq5HRLF0bkYWrjsL0NBHThHskoWX6AW+BRbq1RzTFsKI2q68POL2jTB0vCUjE2zRgrEUzZ82pu0/ovTxQfBKmTmDQ2Zi0a9LTwFj6emPBLn31m8GD9sgG79FhzW07eE0Lgc5Y+SiuR6+CituRETPB6JBp+BOEqi4DKH9EOOLLMlSBKV0+5F0PS4yb8Y36etRBTt+INMTHDe1Yh3OgziOz8aPWavl/WfzOT9GbZghaKXsioQ1Z04rwYa+yhvhe8PHPfvnH3/7VO/rzxee/+eSzX0t5vx6El4Y/KEkQwhYM4Vt9SzWeB8SZrHjzjOc59oflpqllTY1ntvNTyZOdvCmlWjt+R6xZBlp5iaY0Os0NGdSe/kEAbnbSBs40Dc60jZ4ib3HFduaLErFCyc0IGK/zm+3pST5vU3ClnCfTKtMzh9fDzGPbI/MolDROp4NOk7gpZi4imlk20SYQ9mwEy4urCJbxd41x6zNK2pdkT08BCdHShp/btBVXCeXhWsCoIFown71uEbeWWkOrZTc6C/Y4eUhGvJF7ntcYcYYBV6yXJDu6EGCs7FtJ3qpgFdct3WjNVzySSzi7V58V4MCxTME2X3Rl7SRZSiqIqX0cFV1J8hTYaI4Nl5+JXC4XIK7OuRpzGYjoIep+Tu2JG2dHd3yyQEGZTWA2K1UUpIjEbpET83Q6lfhXMz9rKas+sTP4ruHKUIz1vww6CEFU8uKiPHOxyzzwOaX4It5y3bYT0YUdR2VcyFoyydHQ5jUljG2X0RqZuuBKkqTS9cVTCklH7Wlm5c1XVUgkrHAJ7RDkCo7ySaEszRXV3KJRvRlN0PMB0rCn3ruffFgWN1M6yJBXdeWScWIdEbOrY2QWSlqRc8sZXFNJ7THmaXHQd9vjN0e6sbqyfghb6QXAydqf2Am6fLyrRKaRLR4dryfOTnenr4LuXu2TIdE80409sFdG/4Yv8r2FyyHXa+NJskR7+ebyQxkkHGUBjrve6q25uyruEbnzn9BjFBb5RjkiveRhNjR6UabT+HbosAkVbqH+9kDV7jefSavXMmLfa/Q8y7M8y7N8D8tl+NWyDE9PnoSIPVFEAqqhWD19OMMA2mDl9gONk9qeDDUeO5mFEWBm0j439+/k
i8yNcSnlMUyQqSrLddP20vJcOyILWeaOFg0P7+OIkuCpnB1AGeDcUxWiyEGEAD+T8j5VWxkSiaHKyJ6OuD4sPF0fraQTLDPcsmcVAxRhdnKxnW5BiJkdR5o/sTsoEafimN7AsrcFEM2LTQasGOrSm+a8qQXu0ENrxoSk8ixpZYiILUTF9ID5TYDC3bKGMGy2XFSjSWfh842R2ozFcKAGpziNFAGgUpcZLNVXw14ZIOcjHeyzzV5+783Cxv7r6rdRERlG7CMwulgDw1RoD2Efiw6uzNnveJ1ihTQOKlvgdAO5wN3stDMsBD3UI/18BeS4I8IpOWjJreCZTUkIvQY0HSuQNPAJ4iSbdx4DjJPl5RnZZobraGBlWqwNFleWGbDfO6nTkI4jZer2RslUt6TZdEi23QsR+eSzX33x+btjJB1hp7dN6tPc5hPeiRbQvXzkMd0AvffcF4nHmbrSB8WOj0y0pbWB58dzgtw24e3My9yWL0gQZI/jkkbfATayCyVtRue16IuwB4P5cVxuOyVULnNzp/vP+9ri++Gy9AxeniMVQV7EAEenq58Op4hAB4/Upu2tXPfTjJF8hRE31YznwnKo6nGImTlcGLCGyDO5zCuXU+0DTOcrCSH9YzVFEksrWVTzLQ5DhJv4xlAcdr6WSkzW2wVy3ys5vHwfubxLlbaTN90ZpaOzUbPsZnbWWv8y2Iet5IoXETVYDy3HbrFyRicNPjjktrCG2w0o+skUTB065kYWcuP2t5tCCi1wiOIblCc3leVx5NZzYWhr+sdOmSkGRETtjLMsItw7tYw6FhUdnv4hFt5Xhr4bFKAcLmlVniX42R7TVFIZ9OkOnoEIzZT5KJfIskGh2GJtkvjsjc/Vz8C4nezbEAkZhoytYXrnNvJSXDlt7Xvl2yj+xDiO333fNpBi4jfqSy3ULH/y6aeAfP31l+f1Co+TNGaQy8tLjHEuYNLjdgbZv94FWjoTwnmGMGS+mC01kC9Mv1VVHIlcvMoPCJfrMYKO2iNBHnkuEUcn5QDRU3/5bGobEWWZlYd2LGfHMBDyk6qaH4pPpOY1IfvwS4yt0p7mwyDd4jhrCT/0Tm0TNaK453n0dWu/tGU1tHPxA2flyiTJGU3gPrgGd3Halo5ipWSScu3lwbTxbdYiTejZRquv6bskLbcYgN2leSMhy2wUpN3XOi8a6tRHKES1pODqup2W46SQLHyO7nb8UokBEktkD+OncMbfc24+y7M8y7P8EMpFa9dzYFDKQEGdbkhZyXtrbMR7FEio7axyWmyqKmM4Upf2SQOHHo0mKnQjDD9/oO2qfPS/fSYH77ymsgr/zzGVELDp31bTsmjtQYHSqbKgaYemghzQPnFaJhWqHXJfXHUxwDujmJq4Pll+SgcMrQwqOhlzFiZMGKGD2lSf7MEA1LQft+hCzn9tc3Nu8l1zN/eDzJO2YBySSaxW0wdfyLdrYQ9sMdPEyDhfNjSsuSsfdCl2A7C8fXulxQJQd6oPKFMwgDilEN0SwxRW1/S6AgKJjEUCzekJnCIiN9Ypl1vCGfpHtuSVwStedklBMdBoySbYG3fXI7L9J9SLaCmta3npxttWeS6NnAgSeGx5i7tACL6DxxgxkZ0to0CiJ2M1ax57qXAbM1SAHgebR0NjNYX7IHidnZgUJBRPTvMaBQfmZPLGd4J5SLAXats4+7KcWRAfl560Kajmq9Iz1kbOaoMlFvU06afF27Tl2N/Toy8+/8uf/PzfSA2gXER7txk901Jxs3Buy75yq/PVuVpqvuzU3Cu+szzmRCRjlfOwv/bxP3o+M7VFylzzeSqwez+o575spaF0FlzqFB5EtLopqEuW5iBW/qG/sqRdumlKhhjPMtfkUh50x+DQQ5FX73D57kPw8s/eZYthsUWLGt74OqxrcM34+8HGHHez3F5v69l6RzgR/drUnM0a61xiE/0t94aIksGVNfmeHggBT1j41kAJhRQEPhxzvJmrdFyxWvkF6g33dKQoDsOJFSZNhJAyFt6hjTeLpKd5I6vUST6BiAoqC8XUzjLGVdSa7eyDZ+XAkoGvuzwcKqTHPpsBLRbzOKWUQABjADJQ96wEkyn94/FBRpN5021npOKaFPGj/+7sG/+quQ7s0UOuhZKVuHMJIpVzI4cBDQQQERuX3U/xJE3I6qnPr8YXPfUOZH7t0k8TDXJhL2ilRDZvW6wvffW0TzU+xl0f8neZdJbhAHV3f319BXBer6eZiqrKcbmUD1lcYj91HItoupDcwxPMDchFG8rGgBk5fl+V+Z1feo1R3rb5sFbiGCfTy8HmAAx+DMpl/LsPJi+XEE2QYOvK4Du6P6eFrqCUnI5tGhY2YXBp6zcrCcWvfbJJdmsq6fUINPVfJGDJN0uxrH0RRaT0YQOltOLnzi3+EGYkPKzNaN+n6FER6onquCP3xlIwhNQbK6UeG40v7fSuUX075PcY+PKDsDej4kGNpCE3c/IzBwA7rzSX+5JGdti7ZiHcvUE4Nfh3kYv4eX5N8nWPqfCqracz8lme5Vl+qEXd3TqpYkjd1lD1J1Sk5IEviXw9FbDWCJ6a3304+Upf1U7rgJmh3iwOoplFKh6gwd57QyMnQqsOpIao+xEX3Z4GY2CAdI2u8NdXOJXGjOdxS90Q04JL5y9sxLfn+cBukKAVEEnvrCdrG+iZLsdss/H5tnldUHv0rMhCkN1uP/q4Nly4lxvNl2RmR6W8KjdeB+ZrhKcRKHxcZJxRWkfgdaIOfWwpJ3J6B+b0TuW9kFEmsXPGGC0nRMrN/c0PDoLRSQQBBKf76X66nW782S3irXZqyXz7Dk2nrdXLLD/0sdD2akQsRsJRRGKehs/TmnAMbi0gLcFAaRIRcXK8sj3OBc0euSNu97Y4CQsVUT1UDz00DD5GXrk4gkA9pMa2Ql4mLuN0aP47gPtKA6+sAtJLRiJj2u4du8/i8nCG9uKTijL+1JcjxsXnG5sRlZ30WogtkThH1fef/PzX7+7VH/7mL7I7tHx0+6MqEfOerccZ+tPsTKpOoUZj8mbwvvzZaLfI+QfEqzU1/s2l69Po2+RHaREv+89FjF/kEvDwkyQJwn7IoiJheXeufas1lqnlcvp8TC37tw2D8zX0CkdE72gS2O2MM9eD27teCq0gdjwVMjHXjruZKUNffKwbyTij2CeyZWmI0iuU60dEQj5QfI3V1iPfZCWnNN07JUCzx/kJBfKkTVFG4YdABZo8KXqTLaQsxSRaJ+nyyQa9wSYxgCEPR12tgtn94teGJdICrERatZ/5akKSlXIjS7UqB2rYBEAAReijM8pj5scCa+kjVW/RwhJ7WfVto8xStkU5pVMqYOHo6hv2gt8IhW/XuogjsaZdLeHU5YOrRqNTeQ3a5PLZ/AjjVy3ZT+Z0mMPNTKZIKG6O3+oq8wIkItRyXOHeZ8zjkLKIZMKXAg4Fk3LxDTW9aaBlCnwk3LkpmxYoTSutUvrbHlPINh8igKhyZ4QcoqcsuqN6zM68fIfy1s1P5mWtiqM2i6znK2c1gnsXISgYBkc4Vs/aTcmhjkFJSZn
hQ0cyVS4KinhRSWEZF904LDKDd8k1FaqjiG7ojAAAIABJREFUxVY7shPxuaCciItapGsqnXS28DYWaoT3baN/X2nDD6r2icrXdTSABpVOKVGnEiydM7AVl17Lg6ocFFdLN+7Pyp3Ppq4Y3dmfKkEzhABXbNo7voBtDDR7p1FSY1HKd5h8eyvFQNuCXVs9IchEwEITESCMlITInhlUTrMz4WZJckkdd7cPY0nlmhf26L0991oPISpIuim9n+VZnuVZflhFzbwSh+8+OITp0NF2JacBZnBvdwRfmThcEvN2Gcb5sFyWM0lhJoZo/uPfvitRGiJlpNTJiiHOS7VGuFFVvxqTQxFj+3w+SA2ZAaROigzA5mmVURPVQIUQfKXsDlSClpNgAUCmodCIs/RngKieoEVFxQ0uwyNCtDIqWxyRczrL5GBvZFiJIsjz+1F/APRKsoa1w+zV6GPB7vxrQII655swYI4Kql7nUBbMLpZxkUXLMUGzE8ssVOUJ1Yx9gDf4b1OWv9ZSGPCNxg3t2HgqwO+NB6MsUG+enfwAr5tbaCV4eQKHnUsgTHDubSu6+0iHNE0+fj3owCGMDxZHX9ldYOXLB/lhvGROlsgQxxxim3n5cMmQ8B+JiyD8RJKOopiJCbPS9LmZ0JXCc7HNZ5rUIj0HGJ6gSY8i9TuK3JTo9bJZs/y09cv3Hu6D6FAKGYbUxz/75Tv7hX/+63/nXMa++ArQlB8zLgJRFdUh1hbqQXYij5+qgWV4LeznyOVGvO4lMwwvFtYjGy/bym+tpJNoSH7aJT3R7R3jIWppSz+85xFFaHbaGTtkaei24G8Js5vsaz+nO7oSr5bq65wkueQkN0UAz0ColCZxxjr61iK5ZVt4A0c3vdYjyDm8ooqTMKu4y+V3rKMpC+d0j5UzxTYWci9rpG2oXpYCAePppFvcV+GwzYfWa2Obs9JPOD9ZFgInL5qTqbXr3bhpYbgvpf/Uzk8atko5XBtn002QHEMpNIZezUXAbGmi9vouBFSpWVkE8L0pTMqMThcFJ0BzbFRODUzVMjsZXG9GinrPxSBPsXSpxK1fDFdseSDpYewKVg7rdGxTA9BBm9vZQ+SCHtuJO/LwQtZQYGgIq4RV9PRuen46wfoA9aIAetl48rQsgnEKwoI5C2lQanurGShEt2Gb+6XhlkVyyHx+9JUZKFSPy3FcLsdxqKqqhpPFzKjaW/qxco52zfzOJnoA8YkVFVZ+6rnfqMBQZSIbcDX0PzXdIPLLiIbMxBH3Z+pxHMdxAdfmwIfoCmqHo3tmU/8O0jWwKjJHFzjTXLnk4DlV9GyOUXsiQfTeFi2j4rcJILr2Igx38kVKHzlFdYZ50HHWy7PGwJntsd2u2PeVmyV8XybFV+HTJlMRXYzl6PRAltooSFFjG0d6vmtnS51s7FYypxf4qiiblpYbftG+aG63b5WRRVPUDu1/rzsDbbcB+2B1t9mdskdjE2V89h2J8SzP8izP8n0pdy8y+b6UL//uO/gin+VZnuVZ/v9XPvnsV/+tu/Asz/Isz/Isz/Isz/Isz/Isz/Isz/KfUy7CmBtwnxDI/SCHz4OzkRQvNh31UH81COb10Tf7iww34E6cuJs4LPdwpFra3ooTyu/b7/r4p7+wujLC4SPL+N2LQ5xBIttmNMNPLHbHbpuNzejcqeMe77jEOnftRGNr0xhrVZt9wu3Tedf2toPVW6fu0MzPjIriZz/RWfxz+22mHFtqlA5D6y+j0wqN5J/cwtsHnCleZgSGIyKnakxKekWHrLb4ZHQYcVH1yNGXYaZRR3FIzo8w9HSJyCBPxlZgR0x45VLs/Uxf3uHw74di5SVFvdsvMruZ9BlsXd8WicH3HZtvX1B3tN7EP8g6/xlBkLTxmqd+Zg8NMzA2IXN9C9AHQ2Y4bM0FlqneGD0jUZphogomW1y6H5v0tXnfCdmTK/lPhjC5m0tu22cXVOQUhpi5iyjyXhnWoII4T2rZo5n7dPAXpLNEboTivHPnXCrYgLFVsUIHo93Ed2Scg69J2DF/nsFjFbwDwM6+fncErmavPHIvMdyjKnKmVUOmSquOM+bUi+nfHxHw+8//4ic/+zecpW5MxE8zQdy+IgBEQ7wDwJVH3WPxvnun3YtQUlPDJRLTGzzajFcUmhJprGWuN+914Ox/RV3U7I3pzsuwGN8h8WLK5uTV5pPQF1pfspVere7IDMnr6qV0zF4x+OtWyCTnic/amf2tA9weTqJEVF5GPEb4i6rIGbc/hfDRXvJGyksLAKk8KwAiX2KqDMkmzOzQAzPScszhHMzoGZVpS9zUeDEx7t0Hr0vGZK+xZzznh5pBxgju0IV/G298alWM9bvSAjF6hsatgxMZc+f9IZkrwngMkaNFqkKyoQhlMUboXKldiTuRfUxKsUMNhcqPVGJ0Zy0bXWOHqq1H/CMU1C2WV/ot705yr8hpSPKiY4afVWzp5fISQblmZwml4zhE9DSjoOY0VduxQomIWkCMiSwAJCES6q7kEVWGSLFIVMkUoGB/IbAU+55JU+PdMz+0arfxr6GBSQtPUYElo3uIF+/6QLzQ41zkBtgQ10QxAPneuVCSKZNOFEf5aOWKKW5KdpUxSwAYfcpAORJssI0D7tfr2+O4nOcZZDUzqBQHrhxReVaXu9YKGfVzsiV/rUZ7iTB8O/NlpO5c2blSWE5C5rTEukTGqPVqMZibirplNuGgdy3S2U0fNY6rZXI8W9CkcBylIUvqOxObDgk2iVdpswdPrOqb/NIi7E5/b0pFbSeZtfTjNh/1tGAC1QqN3J6VPBJ3L4Wk35l0WjvbYr4nnKRp0oyd0NuDjoV7WnJ5958voOZvwWCjmSHU78nK2X9m9EzuTiDQrzAyVY+DENk7oaoAvMpuJVNOI+pGmxvYx7a2cQT/4TH+yhVUaDsW7OjxszzLszzLD6lopv/fS7pYvNSzSKgocwec11AwEp2QKEpISgapLx6C0ufxOw0BqSNm4J8v/8Nfv7vrTnNPhvWSp/PuSeQBA9ynAvaq6vbZ+c74hpTZrCZpJJJ6ocfZNLgLVZvYw7rBoKhQB8bvIpkEbLj/UHA6IXxh9u5jfc9/88H6NonDHvXI016izZ+3GjlhJZZjgqnV7bQC15h98zgY0l0jLgwyeEOJIN+AHYIGB35n7tba3llkY5VlShzuTGNayS4L3w+K1tuZqEiD8pme71CyxeM+3BRPa6dBLCZoHO3T9yJlVDsm8Wb3Frt77UPbLbPBtEVjhRYgRCavmVzMJ4MtIzUhz1KFpTjGEBOacN9rFF1DcjYSMd5M0dbPHkHTLyF5HKppy2p6LADSjS6o+uP957FbZLYsXJMsKDmT3ipHHf/vc1Kj4+W8yTXVGJdPlYMns4bJd0ki+du/mGTqXveI2XqL/ltrBvN01215QCMfwmaw4eNa6inv3yYBye1tcwnKCBwHXcETXlvaNp+VAmkFyZCmFFSLeJU8xj4eIM34TwujdSBBO+aLNPM6+p3HvWThuPGSIM7DCqCioaeRR8Uiv7JD8mh52nJm3UNOgHm1lldI0U
UZ61DCAsQdndcswVXaHN4m2VLagNvMwsf3VgxRRh4rlpf1ga5w/5jj6Y7X3/1JL+dms17fKbCaDMULKcQy/0aeihd4OTuzw3Ol3iPnHEINswcV/3jNmlefkYqXEonF4vT9w1LcWOqz5fmdp9/VYWmh6EVdamGzUwSqerlcjstxXA5ROe18vX7bg7EQaeUW6CS5Q7NOBJnPifcA3A2+DFmA0q9Fw9J0c7zECzbFNYrrEvW0+5/D29johjLoRaGzwgZmMp+l2PJ+r7VPKXMAlXihJUzhkaaW1OzcW149Jpcesvuysv3y5uW4XI7joqqxS2jneZ5nUWLHP+/gkZl1ibSfSK6XaIu9xnaTBfJDlfWjxGMxXZVXltoyhRrrSz41uLuZnzJIXlyTOzRYOj/7O95aJpIbiCUG4NQ7t4upgPVOte3nyfn7aihSDYa00mlzcCJ5U+JCW+Gx3qxadpa508H3Qeiq3wdPTlzz4PFCXYvu81wXKHbN8SgTNE2DocR4qqLihbWZ28JQjAnXSnG2gp9vcMmgEQkx2qjjdj5riT7oyPL4vVV2l2coKJgVxF3s3UlYn+VZnuVZvt/lIgIYYB73HuZ+6ml6iDsk4jDiAmm7pp5w2PV6eVEzdzOqIXEL/4XaeYqInSbI619CvKvI6afAoQrYCcChoq5u5heN1HLq7n983zHtj3/6i6udermkCairHSSiaaF67KcZ4dwhWpaUZN4nuYxLTjU2Pdvh4iI4Xi5mZn6GcjBNQunlIifVoYuIR96xyPbdkZtxoULmjDFY3AKrnWkGIqIu4mYqamZm+bqbR06YyLatNAZDm5q52TUSl4gLICrqYu4miFRhcDfwzj3VwwFzo9Z1IypHoSjXsq3E5UjI5eZtRLmlKlQoBAaBeIbSBencACh3/2ILMEI8So+P9IhAGZHAcVR2SND+YaBWhfY4AF6OqqoqZmyILi6lw8UlUzEeKuIwMzhUxNXiwohoSSXvZzc7RQ/SxR1pyQd4Er1kICzdoWj3UbxuImIS0yHudnam1H4qPCjuLq7kNDIEoCpmJ4CYItW4AQLujoP3b5jHOIO3YxUcquZ+HAdJ6xA9LoefGZ1Rd2YX2nbnSEe0h8Pdzlxb9C8HDY7LJeo6r9e4JcPcD1UzM5fwUwuvtzbHeRoABiDHlElQ4PJywbhNy93cVc3NTjc/Dq3rC/MBcxhEDre4/rgdDwO30QANvs4kp3aau58T9tedSEFHYj+nJZO1Cp2nWbdP5qRzgZF48Z4exJYFmmkkQyrFYdZjMcWIRL2B7G+MF49YdSUsD0Fsn/zsl1/89p3SUrstUXWHuR3HYXSfxVcU5iRg/ddWQAesh5xPXG4Wc+Tw6SuYFlzY1E01AUo4DNHAq4SlZ6io7FGDJK1E4hKCaOjQg+Hegrzx0qvyZOsM1EzYLwK3dg331hiCTpLjDdlzWnz6cnl5fX07iCtlVZ3nKwDVQxTmZxjyXFyo6x/yYqc0gI1iQBgCieQld6jyZl2Hw9i/abGmNcoF4u6Rx410DELZ9fV6HAdEzE+cKDFzhlZK7SC9GjSE3klaMJRPgMxBWS+wMwzmPfNeCESOQ/e8iiK2KdJElqltGAVdI5/ym/K/THGnmLzoJT8Jyci4eXKhjBlNhwPdkgjRoaoYkmcyHWkKHPCzjHhBhtSWXRq1IoP6uMJdzmmMTp4+I946Wyzdm0PN6nrEob+ggVtin0tQ+MLnNR/ryhEZ944ZOywQ4DxNVT1kksPMjiPv5zOzcLwXxc3dT8ORtevL4cAfvvxajxeHu5+X44PISqeScM9OV4WGiBDA/TguAMzOuOEdpUzKG8e5Sje9pC7jACJ+3mrB5RhVAJEIWYocknDA8jaqcewmRwqHn1HDebVYIkEyOr78gFjf/JRMH787cJ6nCm9hy2pr2XRz9JO5wzVmK/CRaGpbgbuNADvtrRP3MXOwvp8vpmN0rXhVEKBVx/70cPaJ6ouIm8H9NMdxuQCAhF52fTGIvBzH6/Xty+Wih7xeX/0MxJghCwaDWSxLOzME2pvJoxty6HGeZufp7nWJFu8nPNyZdRQo5UIdQ33blPOX442X+K9WAARQF0kZGBHIcgmeeXt+azBVOSNk3GLKYWfgGdX0cAFURqfFlYM5mMCER8CU3sVJVmoX6hEj8gDEIclF1NyE01ReqxRQLiIHYTCCewuCFBnSUec1ZMc2s4jJabE4HZMAIpXgFNNDmUjRGRwTGgqia+FXKho0MfO4C7xEjUiSq27Gi19VB4qR0v5SXQdZk2LZMyhzOmYpth0t65x/lYt3KsZuqFZTWnsDjbjJGfe4puoJQSJ6II+lBCrtTgetLVH0jMFmj3RHbUGdeGXoSgZzs09pUjnW+RijxCLZNXf6ISoekdyxUlcKP8uzPMuz/IDKBWkGALTl4A4t1J2y9t7RaaEKbchPN1aLTtE+iuE0C1ICj1D3sXt029BePv7pLxJ2Pnw4jbSA297PSdkcBaT6W2+kMa67kzKUKl5NqDrsNHVNaL15RQo/iqtrWFFBGBsn9hrPle210repM+7gK4/eJNmEKgtMnFas0D/XbhJOw3A23FKTnayXwkDIE00SJ9J4+hlJpIJKEIzzWjXAG/ww2q4BFpgKyzbPwKaXioBjhok4jYPZ2/xhYHkfbDCambgQZcQupGggOV4k1YVXOY7aJc0p9mm4rgZNlikFAgC5I72Q6dAx87RrAv46ETrhW9RcPfY64E8qFrXvwxaB1BGSHvPdHvoydSkB4jdhC+VOjFHfWbDzowplKVERARqydDdBHFahNNlreXZpapVX4730YtzMwoMylk8IyP7Y3Rua37zVqLRemHFCdWZ477/n4lpnFg682yP5h7/5TWeZJNulh4feurkQ67JplEUkLh6S3On0p4UMyRwOQx8UdYQyERSqEm+uU473knxIgDAjYxNB6fi2EAvl8KeaoSQ47zbheRBfsDJPkURyi6eNojJZKaumVfYC0DcJMBASCCVIUyqkYG5jOA49Yu8q3fqxSReHwmx0SZpOxVo0Zkpb8dbsQbaQEjzN8JDuuceDMV/zMPFwbK3qWpb55u/cOdoF+qOy6q/58e2k9SfmVnWvbjgpo7E0UC821uJTw74TdOgRZAdTiaQcc4FCB3OP0czApPzX66Rk2rtTg095uwx1q7wGVTR7vHQeDyq5NMJjhRo8BqtKKJP0VxHP/UFH+BrsdHjEDIse4Y+JTQ9axsGH7kwmQnf16PB0Zge/vQ/6iWKlcs1wvh/NTAR309SsLZhmrynB1eJvkHqEYqaezzQRyAPBS++inWSaFOvSWKDQkldSm/EiF6x4n4y5me+pRHKvetbJ7Rd3XN++fRWR4+VQyOv1myDLy5uXl5eX4yJfXb814DhU5eJ+KtSoB8xdYguX4eTznDKLBy0DpaSzuzfPKllFr4tGJOXv22ZLxOzKhZHgiqBakqtiQcHNXfwEcPJGQ09qJ5lSqFIkltZ3p/s79k+5sLTw3+gYuFXCQdf3U6f5cBZzYfHAri9vJQVCkm821j2oVMxGhrxjlw0345CKX
eetWF16K/PZoRAGJRd0s4kjYGeMpf7b3m4LD41n+/R8EE1uhvOesuis22aber2KWp6wb1J8SnLeiOpkijYzZqXLefdBXenLvO50kJMks2+ckthVAUJq0InsosdQPM/yLM/yLD+kckHK/ZRhSr9Rw882hcpQEQB5eqkuvyuzZKDsQg8pqM2FyJ7qpr5tefzHv/3Ld/T4x3/+85LipSxE+iwj0NqkdId4wbsJ+Jaao4IZF8DRBhhLCzzVAC3UwgS+oV6fZ+MmPkBZ8lNR59deXfQmZGMgmjpMR1VvYTxa/tcRj1QtdEjHMEYXiDKQxtKLlV7e73ljEimSEzrY3lijOk4fOa666LbRslSzuQlEVQns2X135m5ZfCucmgpgSEeXdDzsfVOFFs5SnPRrXFLcwkAVoUlGaF7NolDr7kYTdmVdCFw7OSKpaYx/lLA2OUwEHQUjNckF8m4TAZW/YIVa5K5aCNP357XeI+JzLpUcoZlLBB7RbT+87lWJkJxu3q6SYf9lXM0GuEWGVLlFXgRvD1Dr2PK/YS+CSmyzsxsG92i4UkBkn2P0iADyT38ic5kxjIKyLT618vfPeQJolL2rfPH5X/7ks18DecCU1rEjGSbD4+haG8IyBuJujmOldztQh0W/8ZczsoU7CpVkEqi58J2iO70HBXs1OarDMQpucEUv/H5VtBBQaRnr2ZRlKaDDA05XetPbzY7LBSW/coDUCdETkeM41OXq9NsiQ5irD54RP26myS+WsW/RPixYYqxQ8kRHMNP9r+Q91YP+8TlTooe26bWwFRdca882Cks+CINbAipwW633CHwurBZ6KemlyP6weFYyDfWBQGLco8cCeAZPhhhpJ2EP0oHyAqeY7PiaXAeMy8EtF5bADf8809QKp7o0RO7gOiW1zK7knkHtHaBcFas8b1u21XovyYQlG1y4FemDxnfWVbcW0INeEBVt/dtLCmB+N1WRM/cpFOZHuJBEIOrHNU6IiBdHmKgw/WCfEZFEUBFuluFR1Uer7aA7/R56qDro/FyWX/hT5w7VpZ5iRvNzeAMknou1SslaorXJGkcoKiEo5QIwxd+Uk+zr1OvxmdfKzWXj/G04nMbyT16XtW6+DepGfpb+yOAjNxeou9lpx+UwPwFcjpfX17fH5XLgJX1qHknAReQI7160as3dcWLcQnnVSh/TEikj6uQC6xBugDnnobRgA9656yICsfM6h5orMORixbaHkKEKrWVSyzEZwe2Qy5wC6VNTESgtC0ET4ixyZ/6OiTMGmvc96SOVXyBl83WljcusFwz0QFoOf1mLmPFNu3LnWhk9Xn/vgW3N9N/J25SSi2+R5sZeuB+1bI09LBxMAiJadwDazhnaeUrOxb5cm1nnbfu0nZ/jNXp7b0VqYq/hx65u74t+OCWboTa4GQBxg0tzSke3aXI6Fv2F3EkasQh2xr0G79Czz/Isz/Is39NyASAifdINADLMAlhE9kAKkDzcN9Qc4yIcS/hL1BkxNZFhupw5S+g9BfMX//5dvkjWKB6b9mXuYaL7fnJ0OzV9bYmVOT38euyNlFKhZryeIiJHbs8bBxh7svf6CHPC4NLYZciEFQAeSyjtsuUy5zZovq2M6KP5Mh6JT2eUEn0BaTTxsT6EThyYsXO+qfThku2K7zmAGrJMsoPAZeS3qsliZypurvQtMCDGBpqi/0Ybt3qKNtN6YGsknsfB3gyooR9kZ5fV9h2YccIGof8voBOpjPJKAHQ1uHtenlDRVs3oMnubndy3lT3iTkgvzZgv+pNlOAS6UqcbDzHSiV/r0O5oiJ7BQvbNXfVI94E2TtN51B4fnGYqbjxaXvCuM4vX2cachWUi2iSQPLnMIXWHaE0s1HpQzfjCl5ndvm0v/eNCmdFPDRrMk073PHMcQIrIweE9iQ7nZQndQAlI+jQGRs+Z+Phnv/zjO49s/+Hz3/zks1/Bo1kPyzs6Hwd+62AdupMkhwNgVgMBItGehFFujDheJ2gZeHnQPGct1x9dQV5Huei3wayJUdiAHnqep7uraDn4gqtPOyVZJjanJgnvdYx8KGtb4+/SHfQkuptbJlZoG4nDYdzocSggehzq7m4VGtlLhVaVC8xNoZC8S007Ia+pHp5xVPEXj5XyddQkDtnMyMtUck4/RQuE3UBb3wdHJy3I6QRdRMfwfJTHRMp8upM+rQc9X3KRvk1kkIefGOdlW1Bod/66gCntQSLks7GNxRUnDVQYP7aJ4/Ebp5kOwtpdcYa/UtXe2xGtXuW/k27seLBOpQ0R4bZL7rHR+YxwKgWI8Q5k7hbukt23cSkd2QAy6wKo5nIkDQZjoq4w5V6Xw/w8HRBRvYwcLk0pSXCU2jLey2lYBs8WUV69O92vGR6Lcxldipg5hbWZxhveUIqQ0YZx0qIoHJeqrX0c6JCOtNWdP1scrZBpayGWD1/RsM99ztlEzXAG/3JziHD2AZEEKOmR8GDymqh+cHk5Xo6vvvqjyPHxx58AOO389tuvv339Wi8uGpHa19NcKU7RXEck7w4zaU9yLQX+kGIJANBnViLPRuh6ZjXoAe/ymTNlouuqZP4McT+vV4hkyqOx+mO0qE3cVPnukOv1NecikluLHHK4QhynnUgvcGtaVrDEra7LlzBmcVPecOiQUbLLsWZI2R7eYMb8kN1RivMJ2Hb5glvRMJRXLB3aJZzGVov0lE7yDvRSR4imlPb5yj7eUQ2GUiIO2PTx6tMe/9Q4upEHLd1+3BXIPs7h/V/eyN3gWFTtgB96807rd92ks0HZJ3Z5d+4uZG1J0oDOZfmGWXler7etPMuzPMuz/CDKBbF9fa5w3xk4wN9Wlx/hzsAO7fqj84AeynKKDXFaPjxP18Stkr5bPv7pL6s/w+AvxbpqkofwNr6cqmiouEWJZs2eOduE33fTNMSm/Qp3ObTuqk1YVt6jdolOp1/9VSRuVBl5GA9fxufrgaSqxopESOUJrCiIsINAaOKJrbZGA/KInPfnrhroGMbVLGjoOsDaUNvdnNOkkMgx540COhRSBizYPcRCs8M4KvAMMIjn0BiNNure0THeMoNHmE0eHeUEp6eaPpI5PIZdkHH8hkQCAAbxQrkE8D64puJniLfj/Ed7AGWsYk7TPjjWVxl7nB3goAf24l8xBf0tyHID02H8SHPKMd+pv+ubMh1rIaXU2AODRDLUr/0rt+thWDs+XtwL+WksbJnjHz26tSWGmJymlC7eXL5YqykpWYxX00+e8qXTHDFdeaSfcNrwviPb1RC95LwBQjNTWt7ClFZBGiS3HntZq+Na883QKkchX6yjmrtp5Y4295IPpTREI/0Hsmerb7jnbozCR9BfOJTZhHscDUXtnqFXOuYP3Wx8bnZC1cqYik2zmx2AMEbUkSliRRWCuDVbAMlcfsJFfyvFl1/Lo2Pme0LBDKri8p40a+tx47V+qIXjHTHfOrHnl24neHHFO/TwnRqX1Z60WpREFKPHpARc5oXZWpT+X7QD3+JvRflqfZwoperKhuhJDw98E0IyT5osGnIBCEs3ckSDZ/IrGc11+5zWBZpR
cWCSHLlulpmUObNdUVE2wgCDHKVjK5Vy6YtgKb/Ia5i716v5aXZCIJeXi+DwuMMNM8rLgUzvu6zF4remRv1LULR+O2Ged2IZka2qnFbhVuGmDKYoi1nSQ+FLc+4GZwLW6kqxU4jb1MAtmaqH3ULLZWkP/dJ4epKYe8Z3uohKpTm+kZrvLlwUszYxe/3gwx/9y3/5Z//3P16v5+vLywHgcPnooz/5+Cc/+ejDD373n/5gp19fv3377Wl2hZnqC4YMl8H663ZPUan66e4tIVFLk+eMeQQLy7eTUfM4qveh4EV6hiiuvAewtn5OAAAgAElEQVQC6TtvBDipJ2XxFXpltabUyuB2c7jbFCYyNzslF+iAZ1Grmp0pR0iUHdHPf6Tk0FAcg8TvMIKmRJP5bpN9me5JsG2JTLXMdufeI5LRKU8oBdjUVDr9AUlf1lNmxn08ImmtlHoiZJAs48uJW+i00uXB0vDlkfvfidzQXIAOsJ509/WRPuUkK3C4X8ZKuOn+QPhT3K+DZUfGMpgaBQB909/RlH6WZ3mWZ/lelTqsDSDAnmY64bhUorErcue4lIhgZHXn+euwhKx2yXit7iJKqePKugYg4u5f/t27btP+8U9/GRj3jnWaFVFviY/Lf5esTlNRUxcOBS+b2ksweehRX7kn1AaVNL1Li83mWOImM0qlHYoj4gyNT0QZqYo8rsfMQXykbY4KD9nVTzl76AAdUQnVvcrb7dKU6yN5Pv0DGLpyaU2qE8Mc9zG8zGI+ENHQsoVvtv4n3PcxTpqK1N8uaNtL0mLoRxYqAwcyMUDlvPFRzYYKapCeNFwo7HEOukaLjsHRyuQNAVzbrVnQeGAFvjfCkOMVgrDorZVnhz2awMbrJ2krla3tpO3nmVCKU1LcVJSRfiemoydq2jnCLOb5kApEjuOQkCMc16DerJeofnXwx+xLRiH5A7g3JMpYzn4fw40nb1BqvFJp+goiV2+Tw9lH2euRMk7T4pC1Afh8fFCuKclgonWEZNJBmcE9HM+c4k8+++UXnz9OIhlHtjN4FmkT8uYhaVvCg52adAK4q6qVhelGzhVVmGEwTGD05Wxy+c1X8dG8EaTjhSZYSTxIZKZxHYqZm/O8p5jZoRmwuZtPoGW0YnmuR+xfdZNj1QnNFClJWMa2gykxc+Dw87yK6Hka4Ofp7Favppi54JaYgBBeHaA6RX07+RdDpaMQKSOEWfJFqzOYlOwkrNNkTVYY3mN0wGB7Y/rWFyxyFMhTEVRHFBQxpdKv30+oeof6Q8J3dBhH3RLQseZsq+4ErZgNRvjwzDEcF44x6oW7d0uXeoGGoK+NmbH6bQEVUl1bhnR/J4/enXpU9XAUb/lUgnocxTnxjtwjm2zi5o5q5XP1oE8vLKjP82deTiXn26td3wIwf1U/4342v16v1+tHLx9cjsshUo/nlnRyq5NzUrLX57LooBYFKK9j6R6sorw9uRlZHTNO3rbxaE1Y4a1SuKM+slxEQJXkL/9nd61hw5j4dsPmaOrDFPRDfSc1iL+E07GoQAwvz2S5faOhR9mDku0RCTT/1Zdf/cP177/55htV+cq+BnDa6Y43H3704Ycfmdl52nF5+VAux/GjDz788Hf/9J8AmJ3n1dxdNG+AaaaZs7P0HXFfjQ81i0WNS3Xe3RkCSTGRv2yP77+oKDcqlnMStRtBAjLborRXmCIJ5gmwZM5Z062JXB/62Gscem30L+Eq3/Xxdouyye3Ld3eGyu9qh3BsZMzv75R7eKg9+bJzUKdWkJJ5+axiPLq515pqc5Ng2Uu41zEv6d6yV8b5ld7vo76+U1HFwDwa9V3lvuqU9fOYv7Hvs9Q8bAbGGXwXhyTf5wrd9zAW5gDhgZeIGKPn9TstiELsxj2lDyn+LM/yLM/yPS6XhEcAqKZyyzBEfN7I0aq65a6WM6FVXOiXPr9Al0RJ2KiHVaRXM1Teu32R9RrV2Pys1SY/lbaNUqsUZOnULWEgjd5MSd562pE3rYaLNjVEq/T03TF7WdKLJnDQLy7CHBGlTmBBdE0XMMxMeaJH4oZln11KB+jsbHtRBXXMEkl5ybOO4zhGPj2N8cV95f16aUvuX/IJL7qL5DkcPjcmzEffHhWfsIUPTxs0+M7c4IIOwRNgHAAGjvGSQ8TNx9k5d/C4c7himcuneWl6YOgcIsJP9nZ4ntCbS6ZmcMwIc8ABfpocOh+ukcPblz3hcP7uZK1EaWX6NEwOVoyThx1NMIzLsgMnCC6ioGZMemK9yF7oPSBiQOJxD9KwAJNDRDhFdJ2sqJeM4zW9KyCWHlc1m+tEqsu7m2gsDWHlOV8yOBUYNt+IYpgSbCt3tvfHB+65DUPnYnt75hSAcTWUoHuoJ0nNC2FL/A620rEh36swJWh16HYES/nD57/55LNfpXiKzub1ByF/zuxMVcdgo97QSNL3GhwO43i6xs9DxqDAogwZtpiMlHwQuOty/pT9WcVQXHttFU+VBXSCb46e2qLYK9Z2QQ0mieFGVFf4I8hdCkDi0Hq56kTKDSoAzNpdxtvt6V6Xoh+PLcaCUgHgpznjdwC3047cyeEK4ObSctFr/F+rghtDjNQW3p+eWeSKpFPeQsqf6hSeobtPthAgIf0PUjVOf1JtZaFYw3xM9jvKnVXW8bLUt/WNoPwL9Oz2mkGuyimPlh74svDTKXmnM5TvoL4geVbfRzGXAAww9ElgypbqnvZxAcpRJwCQWvjZevfM2zs5+nrHH7Txem0h98elkDPkyz3C6BHdu4SPBgq4hSdWRfRqwBXAoW8vhzv89dVeX+X89ryKiKhKOEzpvxQROSR3FpOjeL98iHKjd5cvJBdb2/lNRQfA6LZRSNaQg0RlhYFybubCrkmZrUtczs4zyH72nnehk16/m/BulNDTss6DD99GxldKZiaMPjkZp18YfROUIhszXmtroMpM6FnfR3sCvHnzkZl9/eU3l5eXy8slIrIPOb55+83vfvc7d//9P//BzvM4xE0++ODNlFc+KNwRCd5NydzKKIU66BCASoTb8RJV5Vt2lioH6eAzVf2QD7XtAeTZD0KkwsQiEJfeea7JmUIvJLhHshF3OzIWvuCIJGFjrMm3pbE4b1LT5rNu2h2t+2sxzz2fjVuGWiQ5tuUOEFPeUWc3ZTJKvY6WZNXvaqWZcMzmArT6u1oXfoNnCAN8H8DsnBG4SQlN6sQN5AFo82UrAYD6Mdw0+R0IRSnSe3VpNY2qJipKKgLg1uS24h6320AqhujzBRnP8CsfBw8mA1OlcM/OW2M+y7M8y7P88MolzpRK2Z0t83xaljeFwVAsRAnubiLHbgmIYIC9/4yOfvK//yqD0UdOnDsVtcDWCSh7XKUipTacqo8Elrd9pO4N6040b5qEMiBA0hsxny9XVdgpw/LPTkxnXxTLBFKemdHmmfRGvHQMSkSfCIfta+/jUpFx1i917kjpsxLuDgTy8u0gfGyrYzH1uJ9WFxwMYk6dfEc7W6bDR5nAoesjhiWrCyzqYjAzO5S1B0ysvmz4TNoOq3EUwobjhIuobVkCfHk/p6nZIuv
JpE4D/oafIK9zFSDjgOK6DD/NLsdx4+xivE318oabzcxq657OiI3vE5uKRGb37n6D+0GN+4vvxsCqasiubBxYnLaTxYXRJWLuMOTdDm7eHMOsssM+GBMIABrr2+JulQ6z3rAhCFxnZ6uae37FGucNdzcazCoX9zgWp/ydOn3EENTzgzKsu2xRcOwzvQA/phfFM2hr1ryaLezs4rKAf/LZr774/H3pd5O54+ypIpe18YqJYwQoFDxHWn4iAA5REXVHxUs2lk6jqzqdgkUQp4nGnkayDcqiKCvu1slSlYXfS1QBO08DICrHcZjZspKWmb1LgnBHtjbYOAPF/JAR1id5xLU37bKJjFmghazqehw5LmshmjZDxvi7qvppZGax85x065Eva7sHibbd6LgLhy2PJTqs0nb4Us9SHfNhpUC7awyTGaUtRx8MTbq5RDa2rr8i/h6sy5tHe4TS7Q1S5IhHqOPUNyLp0hoCOq9s0rh7FAAk74EWieVWIbQjdUkVSwEog6Hji3OQUYqfBxCY66FkWXGnV2cckDiqj/SrSWZoBUTktOtcFKUbmwa3xKX3aCMfgD47wjU9RhY+xDNhiruqiMqbw67yFoDjazmuItATIof65TxfjuOUQ2XmWBDRQ5mruldT03RQaWPLnvCszbMOvcuWSA+0bLp61rE8HjPCe5brA4C36Tg0+XtfiCCs5TgfR0Y1nwgWZ0rxXXwjjkyDPrri4qgL/cgQ8U1JtqE7wyFkI5ZzbI7DgfO8iuDycoH4eV6P4wWA2Xm5XN5cPhIcmVAUcLO3b6/n9Q+C3huTOI2vAnPLsHSZR7YLj7cPLKcj+d+8DpMO+hBmzQlKuvSSukFODgCqFzgcZuIRspgkVznt2rrSa8tArtdrziBlg4q6OqBim3hk65ihrWtfWt/48iHKHxcfLOHtVl+0SCA+G5/P6rdSQ9vJ0u2PavYqljkgj49KykEmBGm3DY5tEZGwMpZzRlyHNxM3f2ZaZGksk19QUVKphdKRlc7ZTnr6HxDr3fofHW7u3AxaXNZdqdy+mo+Mu2/uw+vexUAzWM26374h4+9szpdpkMpdBYaWkFHfo2Gf5Vme5Vm+r0X+p3/1v1yOy+nTQ1WxH6ALUVQlDkdYHOowU1VZ9szyHwWubkqVH7s3Wbv7gYqxcW45qoh8+e/flfXsRz/9+cQ9ZYApN0g3zKiq5hbGq9V9x6lruZWXeHD0XsTNIlyijgFGDbxilSbg0BbG+EohHItIA+jibii4H/RVYUJ6VQBmbu6HatxLyK4u+DtvdFRx8+t5ok72icMViGtVzWH0Q2ZsQlSlCQbDoebH8bJs6TtUJQA6N4ytt5Wjmdwozw+PQwHPeAeHqM7meg+w4Jws/4HAo8BHfNRjr/nuEsfY45ratjuCOV/NMs4oZxzueZHOoZmtv24y2FoFA5Y8cLeInWeRZ4micjeD6rAPYmY7TDiRVGTcykqOoE/71GgDu9kZp6AimDPmNBaXuZU9tuARBzCu5WEECgQqam7uFbvc98motoP+pqQR7XCIRXCB4vDFcV2et+iTnHaqqh5KmgdKC8t6WFHL+w4RszPWnrsfquZ+8LpV0FqgDwMGc8dx6KgTZh5/k5oIY7TwZ3n8x0Z7zmYb6PQUJHuOQOmNSOFOysE50E5AqQuv2zOr5Uykf8q8fRwClWO4JxxxS4y5CPS4cAUltcuvIxU22DaTQHBwTqsP7vj957+5O81RfvLzX4f8FZFD1UXMTvh+M0ZZ3KQZzHi9rojZqaJ6aMiJcPmEZzM6oxpnqI/qcgXhZ/3btVqclfN8vRyXeMzs9Hlyf3Bw+xsZPhz8RqOmJ57C27MP9FtIsWtWQh8BY6jjYLjZNXp2HIe7+3kdc0Dvo0P0yHbrRhpqVTO/vLxEB87zpCXiogfGXmDL6pQnIiFV0/UWjlE9jUGsQvZqIX/hCIqgxbGDfD6JmNotZQggotFsXmsrYqfFQc3QXGYmcuxGantoctsx0r5Yhcup9EObGU+3Yf7lXuzByRuFy1aOw808rgCCtH/f3flW2dbTTk7KMJQv0wiriqgzmR8ANxMRc4so1/LLcJMIGvxRQqZdXZDLcc8MDu4Ir4QNZw7ATYiuIghGv2PsdYWY7ifQI6vRsiUcRwEA47XVqadFDzOzvJuYe6tp1x+J0DxkAlSgKqd9/cHL7wF8+iO/fvvVV29fz+PNVS5ffenn9UcfffTJy+Wj8+rX6qdADsGZlAnWnf3UQ9xxXq/lERYN4KLtGR8Wu+fmrBB5StYcevO41NYXIaYVEggNG7dFiarH5IazlI6Ccl473MzgOPQwuHiiQTMTlbh6OjuUTgte/DFWXrnpBThUwRu/ItC2AYiam4looOFM3+nGLZBFOgblfN6LNRvL+/qWfRSiBrHTzF0PVRE7jSFvcp7nRx9/9Omf/uk//P3fH3IAZld/eXlj19dt09qLSiKhLRvf8FiGuwFKseMtUlQC4VCSo9hfROpwvRPPgIS0gUKjaKJ2iTDjViFVROy6mjOlcE5D+i0TpATac3Ne865sKTE7HIZrHYvK57PigPk8vi2wyDJrbU5J6ppi5sSYfBsQhIg3hqf3fz3o9Eq3xiO6CTGM5rvWZ71zffO/0CHeL5CQZmm7lcOLarPWRycryakwdRiG3zgVomgK2LEyvIL4GONP9kgfpkgnrEACCwX8PM8e6rbKllMXkyV6+dyRxfPJchPK2PTfvMx+U8VW4QCB/e5mmnbrUWtVuhgkHMaeADeKSbchHRac6C2+/N1v/u17uvssz/Isz/J9Kjcn4/5blPf5In/xX60nz/Isz/Is/98uf/ibdzkrn+VZnuVZnuVZnuVZnuVZnuVZnuVZ/kuXiyA3jQFG67hUDqTa0BZG00jcb3M3KX1Ef2w7UePcgsfpS3C3U7AkCn9XWfaYZBxpxNgtqy0p520wzlCXCuOsPceOx4g959wxZdzM/bbv77O9p9M3RwwwdxJZ77LdPdvjllkGNCFuFMpYII56xpzkuGTU0kEY3K72DmpiGJqhj3FHXjZ3CJPFeyZkWg+uykjfRTrWluTSqThmmXksb3f9+LgAe6wrxszmkMlGRWCLzc6ZQHzZU4/zV2NPcmSjZ5CmO/ZJIfV2jmbDzREVoMQt/uJS3GWZdUSyfd6cnIuSlzYMrpVcP8XVDsaSjDZHDM13Z1xkdUVDhip2cOQSsEEmzGCcpBAX1xipuWvyo/RG+bpC1qUeW9+Y6yUGW9m22Arq7FiviDvFd0IU5zFMZu5JRwTMzI20bpPHcDPqSSIFH2NziqGhM19C1307JaPnkglB1w2jSZwZAVdfxYr9F5/9+t0BkpMcm4AqsdBb/eYOFxU9DvJnRg6e5xmRSrlum/M8pVyuic4sOGLIRjxZfJ1Xcdwcz8WYppx3b8Ui6zXcJE4Nq3PUharQ/P+Mm4LZSOuCeeZxW78C1QM1UM+gxuxQdaICXAFAVBTuGf/riOiuCPzZJnErIuMbCq3KLzkDKlcx4mMWwEmOFZLhdn2DliDCjIuN1oW1sEetg1u1XdEgo0XUWTtgif
ac7e0ZdxZHIagTiCqoc6sWLwMIjOEyWSI0JOhElNDlfxvDdbFcJz/xmQVWymrpG+yTPP5uOh5XwT1ZpiVXHAzQ495FCzBYHqAWDZiodUj5QXoqLimhFrhqWi6/mM/uoRa80Ecjw++YLAobabmALDQRPdvcI4g9r6XEsVhx4OP5D9cfeBM4eVMQ1kKTMBoPPP7Jmdlrz705bF3B6Ph7vr41jLBDCzQzXX22orUUQcMBGRw5/P8A+paEUbheM/JQQd1ZJ4vLB3uvJjrhX+tz7/8N+/uLjmJ3/4P/3qP/hddxzHw7ml79DghrVWLpuIyFYVJpg9UvJ4e/shthxi7vas+yjdBSr6CKPdzCxd1aojEK+0R+1BlCGAFIsJ4tsA4CxeB/V8fmG9KiJ9ybKI6FHGTRltEF7bGpzNTBjB8E83mNfBWxQ3Q1z6cLR7XBYfdFJDxrCULezuZs9oaLhWc82a1dXMXWp9Dt+bD+PaHVD95GbuvnzFijY3Y1LC4HyVIyzVVHbYwv/zOTnMzFfqFMBDetPfl5Fgkb83svda5t8N1aA05d3dFR7HKYDh0qR3EbGrB8b7DB5wTzM3pBYEh2o6NLiDxf0gJ1yJY49eSw889rjWOjR2ssgGU2cXSZ3Axd2Bx3EYJTOQnqT0FunAT8NnNec0maqFkITaaa3sFjjs0MN95SM0q90sF6I75z1rSvGbA2nXUPfI7Pllhc8gpIO5HYcqmJvQHA98bV9j4avvfvX0dfgSebi7w1QOAOKhuYyiWAJY+jhyGKSX9k2k/n08PrlXDxEaxAXupoeKa/ah3C7ua5kodSsKybWvp70PrXLqMDsHzs+di5Qiux0IDmDNwygb8Y7HY63lFrhOI1k1IEsWff3pfuFGxvRAtG+NfqPaA03XU7mGBw/mlIfIVka1zwcCsrvHBFocpjm4Oe0HjsfDnusQOFwhx6dHrskvhO/pd6MSqQUEInsN7nYAdkh1fwwOAtEH5WZ3URxYeAKIjV4vWgAC2LIB83I+NnAOB4FV+9RCUIXP18gYkjefix9agJkJY2NGMuIsB8k++JIUC/GVZZIfQW4zh6ixbAVCUVYIuY7PiHgfJM8vd6LUGHvIDjduytauiSdDMJMm97JoKbk+AJMNmtbmkLjxau1gzIq/WLYGqs7ZF4hPxbIxr2dYgHv5ywS88ttQy9AB1YPJW/CAWnRhGURUD4eJS6QOB9Fz0kQzDoC6rmVjrqynce8u+jRGvRhgPkcguTc59icQxsM4MbXNSP5uDhmx1F7iBSqKXPRe0ielMUjU7lj47mWt53E8oGrrmacJ9FjPL5/0IQozE0foJkkTiBlN/LzQ3+Vd3uVdvr3lEW662L2lUQGg8StQ9gQAbtt8WISGWf49H+c23dgVmsZkvyeSW3ynCkol16ZTOT1b0o+6CFFzUKeAylG7C2Mj9wq2zkmpw6wPX56rapHcvQ5InPW4QVVTXwuOQxmZ1w41I5BY6wsc/+F/+8dnml4Kdwz9q+MgBRwCPSLk0M2Y3os691/8u//jVMnf/s9/PZS8u5vZeuZxif/rxZMosjeOalycxiLwL/6fP823fu3vhKdrYOMBoaSmwD1wdgQwrjFHaeY1DxZhtz5IugDSQ9Fo2QFG5aQBUWTPQ+t7IdzDYLOx6RhtlP0Eb9wSQVjlsDOzaE9FjQdnC5r9n//2T88U/rVfn3BTRs6mU/nn13f/s1/fxyCEzm3U2lpO64pewRxRHmHutS9ttfGZqIq+6dttiXw/7p6KhZAuiLSgh8cr/IagWWBewSPDwYl//m/+FMB//Wt/p6RJ7isk08RK4hv8aW4R6W22REQPjUPf67lmBrQaP8Px+Gt95/hn//qfnAb53/6X/0P7chKYi7v/s3/1v5+e/O/+q7+LRostQv/p/33/JJk7p+if/svzY//Nf/HfN1SODgxh7OmS4MSfRPkY9SbgriXXa91fMdbZi2J1BH5jGAegepgtr8gUuqKAXFpBdO8D0C1aWVW5UtMEbX9Qsu2Gydl9VL9jDF63wLR1+2JUnuf36tlk7MG7qQz5i0zjm87DSMAB4NOnT1OG8MlabNz5aJ+FVE0jtDgmRYvbZXQul3b282Z6S7Bs/ECBwCpUxE0gDhVZtXfhbliyJDytEZhMIbOZbEW3Fh2tQR0i9v+x925blx3VmeA359opgyhzcF91F8qj6Psu5Z+ZEm9Q5J8HqvoVimqPwtj4KXqMsguwexjcr9ANKAX1BggpMzX6vg2SUsKj79oYg2SUO2LWxTxGrLX/TNlQSMM7lMr8/73XisOMGfMU89AaiJio966GztbbehviFKR4UFlv6GHlTqW8mzeDEtukv4yQCbuSmGCgaTgmRThdnX1jfRclcwDq7zatLfjDe9jEORpQYv1acjqzx+qkN88xEVVhaJja9LyulHRr7G0mdsxSuCl6CJfUAo4qtNCys8onEFBa+6lugK8RuRE5ARpPhoYvrFDY+e1A/hw7KCNojO0XTIytlCAMZnUcQRTzouEznZThXuBhoF1go81RirscCbb2qJqZA52NqgUyU8ntO7884rCLNRsPD88JgdQZE47RvffemuwWu00nKMcUorAsG/KFMUUSHgmhCWQUR26evAOJvGMn6zHSNOsz1iWyif7wI1hgOUTIDtPx9eXJTmzJOcTOOHMq+IDi0+6ti6Q90n3l1DCabnymmgSoXBymNQL6tpVwbGOe5BJsXjshJ5tLTgHJU6LMiFQWGxxWShxGgZr42SZS5h2Ot/mUuJ08yf+wNHHWUOQXX1eAxzn3ep7TYU2MXKmTWwfQnwy6GqlWVvOcuwjH0ZCV/Vu9gK+DzUDB8B0E0kUvbNBb9CjSd7tdaz1I1RPOtn+9pc4c27Ed27F9PJpX1k4mFzR0UPAA03iI8Mtv/8Wh7p79D19VgdZER3cDSyJt3ggUqpwLDUOUVtyDwYVLwG+FJHSzbOEwUb0o4ssQCjMGIYZxlQJ5sYcQE8sMBkhE591SbhUgic0vZDgizfEnpIYqEWRQgr313ncOgvRQIxNIqHexW18zBqliTMtCbW9i9xs/++FmJw/eeeXahVshEUqX/+f/+6+HngRwcv7UZS+2UE7PAmQlP4gevnMv3hJ4ArK4uQXE3aPI8wCKC7lmg249hBCXWEXCklCMr8WusMH6Q8ZjYsc2GyIeiZ8EePiuGWGr7dXuOYtVSQWxFKZdsCvOlPGJYXK4Z5BsGBMNwo9euXbh1jD7LTHkwdv3Nj589xWYUdL86tavSs7HRC8/NelUlUif0AcRvf7Wy/r5jct3hiuCYsAMdG8pFTm0vHULAMsp6a4UWCpCDAVkNEN/GShEeIo4ZZhWGgN101JVatdfudp6pFA9qJMF4rQLfvzm99dwBvDjn373xpW7Zf30+oEnX/3J95CmRoPnq38z2zfrk671bW99gVHJIuES9fS7nch8ddCn1i32yVwoaqoJhgCf+9rX/+6b2w6Sf/+tP//0H30dCBE/j55IN/8mrflDHMqw9Hk1eoK6tFxUMCSCEToiIrdZKAYJQMTmJ65OKBBLUTcoLRU
Qzp2qcSCJvTs4EPce7IAsBDVYSs/9SqtKZWOweaidNc5F3kOBYFcoZqA3FJ01cz9dTGzXAGQnWCCy8MLMnLUaYuGm0IWGWata+7du3y/GOU0eyL3GtqeIELkBAmf0pk1WhqTi9QaoS++ySDf/9y7SWlt2i+tR414kViddHX6YuHl+TLpiQeCOw9PJSjWIAEg3yy6dUqkjQEoyFnu1rA+YbRwGwhGp0mg6neu0EU9muXWfMWSVbVJUG3qmEWL+S4wQSBDjl+R8KX0FyRWgd9WPjWEzwMy88LJbSv657kdYIB09uxKkzSKsuoN9xUd312KfaF2XEhG38wU0mGkUMqGHY2TvqAgjM+oMie82WwiR8zNh9zQAGbp5yMnYy4jGZ215jnNwSv5YGjJWHYaYrdWxekcnsbyazEznzjFzUybrJMkuCeyImITH6ieuu09md810Pb6TvTUTs1CSoKocGiSv8vtD6yssIDmRzI/UJ/Mfck0grfflBLnRyX9RBF/TF8neXHUYc0mEqmTR+EAoN93vks0vW3lV71iYRlRVg998rUIp+gbrllirUW07C4fayOi8Vyoosf3OGmPnQWxjnHMn2FM2HHc4+io33CEo6K8KhqE3FN6hZjkj1OPs6lJXq0c/YM0zZj6TxLz5GT7HzNiHjwnQQK7ws6ja5ACkJEK2B2xxMJEcRclq239AxlgDIIEU0UWxwhI8ef2TyMWxHduxHdtHr+3IJTmjhkY4hVzd0ecOX6cNTdUuQXhwwIXCoqtFFFhlWUGwJcK7XEcjtyIWE4mEsDFINeJuh6k1zFLfeN8qoZDNNNw5yaFVRgca/0QqRyvEVAMN6VvUJWTROsVR5IHol9/+1lb/T9WC3UnvlNe06jEmHn/xZJn3/qN7Vz9/k5iJacEc67duzvFMyLMtdGVvEhhFOmdKI3vD48eyE3K505ZmJqsUV1yzCIjC1SjnykRqOhRHWgKiCBKNGY3CPr1W7OqvxUepyrKoB4NKR1ZzuYv5RbL7mMQhehJs7z+6pxZJFcHX/oebtshh2gG2MnmJs+L7MB7mdCQJU69KoMtgvwPCXCLYzxttWQygGcfJcJE6wQ2glJVkJjzxiCmDpYw6yDAD/VzjWLurCr4XUT8A8ISngIcJAx77aT2FLB2XJrYQ0CFbpLbXfvq965fvoIidZze3m+HHPz2r2/ViN/sKpdnUkUFVSChiPvcpd0t9msoDQ253Vzr8iakK9cbEgNhr7x+9i0aE2qlkgmdgKBon3MOHAIIGLBvO8MgvzBxJvddDyL4wxSSLFN1Y/Xjeifyao3xanIKd7hT6ReUCRswM50drC95+0obLNutbe0GlEsU+uIY2adU3n3AxaA68bqWvJcLENYoDQgiEyDRC5uSpDxaaJ3DYmx2jS7NiYeHYTwRI78JLZSJUf9TQdCICcWudCLwsdbZxkLd7iF6MrBUMyCTX5TUKu9UoQpQHYlCVO6QYBvQhzRkBz90moxg0ua5QCiniOmfpj+btHlb4BEri6xBByUVo4FjBiiiHGLCh7n7lXmVJvgA4ixVNYeiJ8IQYYAjDbhmkk7BzdgHCfj90uiU+iu8DpbUBCPFNubCIVsiLahPZp71skkS5KKtw93Ix5ZKF8l5zAt6W3GefpYUh9iLNA4AbS2N+btTdopuGJkwRuu+icRGHDviF1TEceUMOOiDukVMspWJ+v8/QbI5OVkU6uX3Orv+7339Qoq6kU1w9cgLAi8gpqhY7vvjCZFzBer5n8hmEsAGEfrACj+SzySoLDwxVwgSVYJtJ+il2MLQRQ8skjMNUK+Fw8UUAzUKksI+rvlEchfdvsyJHtvKExE2ZAHHBFGOa3pGJd3RlE2jseJxJaChgFccQfjhXz8YiTYZ01mZcdMk4+ogkMAj36vdHPkjoehWoAymT0gdQd2y9VP2B5mfOWL6xiAFZxsWGcByoRaOoMclvSg4DSUduIV2kNeaFiFrbRz6H1hqFvwkc5yruBZsqAGPm1aSP7diO7dg+Hm0HF+vVs744bUhRisg5/1l+fM/+h/+kVkzXNmG8r6jQXX1knJoG8SRXFSJoz8VFF2SGK0UpTJxGsTs1GmcfLnoSiA4q1epBNWoPiLWP7KCIieKZZSItu1qmyBNXAZ5LSG9M5Vf/DBNknZTbxbB4NpbeOpkZRnPn2C688bfpGnn1uZv6Q3gCwpdHxA9+9nJ8eHL+VngkPnCHxwfvvKnl7HcAACAASURBVHJy/hQhTbu4CUoMqe2Nd354cv5WDAGThe3b3juBoBPWOQOpMjjSVCnNZZ/ADZcUxS9WE6VSAG77fUzN9oJ0IB7co+JV7b8KJisVgEK4i55VWGeZCkVWJaq6Rp6c170g9TyNaZcOi3kQePgobZHXLt5yYEj0+eDdV04unJZrWTjcVNVxRRMQ9SRkArAQtd78G8Bd2AhoJfWbtt6a+UfwgjJOFcSrUuZSk3XtvgiSO6czpCWFvR62qZC9RBLWLrraX4N/Tbi2LctO09xBfUBAAumthIUaChQEwmSFsXbj4l1ahIjUh1Eb+wa9Xj588cpdEdEEoBHl/epPvvfS83dFZLJFvnTlrg4V3b76k++9+PwdckL4xS98WadUx0UtAeuGibUdQ+HASFUrXncqNb8yf1jE5ydfQ6WC50+6oYeZrHJFGCI1ocSgE1oPm4cxJ+PY4kbBnO6misUOyj5+KbbhZtXJ24tinLLEHYGHbis0jT14pk8r1cIYgXJOCy+aAMphY/lPNWDZVEldvVsniplUzaypuVmqUA5ar8/33jvXKEXoyRrgWCh2fBR3PKY5lQdcm9YjTxp7XvSt7C6WZiVK/L5wgrtzdCOv5l+ztueeoTFyarbzgxQLITcihDgzIol+qpvlgI1TpeD1qyzW2mtG6Cf6IOkGpb9SosCZizlsbDqjKV4RimUslfqASF3kOMLWgHkJ6B9YQoB0qSISyMI7oAHYneOmge3gtu/7x31Zdjoqh2meGSLEGatRgZBWLZUG4hpJT/XMwMgvqKycl1vsAUAoLs6tipchfe3ESXzcG8BcBaWuv8JkBlwF0HhPVp8UjfWP1ThhGC05UjZidA7doOTTBwcIpMTwScf8cWsdAiIGgz0pM9ClCWRZSCANnYV675AG4RBaNOpdhasigFsC5fKJ7aDHzdN0UlJ+N5up/xbTrzcBIV5uMapDF3ZBt0Sv5TGmloohZ7dcv4ehgUa4LTIFV8Bvesim4TI4zB2zqBwBlpRUzXMBueKAjSlKkIKgw9o0Z7YDy0kmpS5CBbqpuG0yxgP4nXAMQIahf7RFhthmIjaVM6bf26rtLzsmdmkVukMuSK3gto5hzutsB8NvDvR6Dx0UOCBSLhIGwB6EwxkAUks8vPj1IPKM7KHgslIf/4wsm4YFuZOFzdXrFLGST+UkgKTszObUekL92I7t2I7t49VYujn1JU1PGWuga0+kdGQ+Tumk5vql/XHfALK00KbqeTRU6m+pFYSqCP1FxK/8oqBtSJ5FDKssZxTRyD4in278bNVOQvEowojrpIgRQ3Znt8RJzDVvwrQxA/
Sr7/zFr77zrX+OO+QAauQKYiCOGrsmZzMGXyV7U9fyb/71l+IzL6iTQDt57pZ0caVb1AQ5NrFrZbN4KB+VDQwRN2yLbbILDuJ6gT3WTa2IuFyAQKUQB6yEjEjRPF3yRO+igRnMxGwbpuhtdRN8QvqKTv6QYBtyZP7j5hO1NweySiKmniNiZsuUp25HAdmt00PAtQpemU+aWAqelWbpssm1C/V1/9QBoJKM4SGpP4MfxjIH/ScUA1vLQrS4Wy988cQgatK7hbSKiDSgiTSR1rrqKqTRfHpAdLcjJJMqLPz0Oka7Plr0GcdvldvM5p6mGPse3klU+DFU62LlN/SDJCch2ts83Aaa633x8pfj+RuX78TnP37z+5NWdOPKXe1Uc5l/8QtfruCdNKgXr9zdpKQm2dtpTM0nlplBphWfA2tyn/wLO3BOKw9JsusmIDFSHvT7s1/9k0OP/8NffoNqczCHWU27sU2102IJyxDrtOsPNlwsR0fxQoI1KGVYmBceAvb9LZsDsyUyTpKfupH4TJJhBNJVtTFQxugqbA7RzWRUqNQBAKFrAZggISi4DdGyQfGLQzznIH6OldwuCy8LK8Nqre1bC7O7OBc1tubVF/xADdhk0yBiBRNz/KinTOkYMWsxG7i51ggKykYHB/LMuQFydi6rHzIvajrVmO+oHACHZsy28Ia5DYOOaEcuVqy/oCmu2s+9jUsWhS5OGyshjQHHE2T27IIpdoRl+H19x1HGf7omKOh6xlPBs8Nu8pSD+ImNLYvTwsY9SUiEhBi0CC1CCyLfgeNmcq2Y8kjpy7fb7HCd3tIFCJc3ymQhEOlN0AVK4rtvn0AJuVVBC/6cUh4Nx7BONth1SoT1gBe7lhihjBWpIxS5YOFcjIoIUTfQxW99NjEbLmRMsIDX8vJQnAKNfN/BnWfCmbr3EieFlwWkDuwQSJTJoLAyUd0UA8hAzHxuZH879+op+3thpDjpUlZHOIALY1tz0a0TCcA972bSHM+Sk3jy6xDr3/MlBxxtKV1MlBQXW4ctIYepL49yUZXo85Bbpo6DIH2bp9utgXm23fKUr1vRmJCfMM/U4RVHe2BR46wiyCyo3vSoI2mIXQ7PQF0igB3hA1X8+InUjBjJRyGO23YDRIHh5HAupG2t3yQnjQ/KjpLDgAr2FZkrNn2DaAb+zXcLA08cwZSXdeNOuNrWeiNQ7621FnXM2v5x1zzrNpJhXaFfqNS10tyjLfLYju3YPqZt5/UkXB0DYNpu8WHKT8/qSxU9SdpelLl8wNUP/9CFhrFricEh4kJCFbSs/9QifaQNjYNK+qdRyCMqxpGideRAgF6T6nVx8lXrmUE9mG5MQbUbi2z9h29/8yyo/ROby1BEvXW1tTGzSG+tAxKJ8uqWnZy/ySZ1DWK/BbHOHLM4oQwMOCQw0+y16qCaLx+8O9fAQdxEZ4HL6qFD8EKg1r8nDdQP7hfPwRc+fzPSRlUVUKCpq6SulwqnXnhRF58H7wzBztcv3pqEam1qUQJ3wC56K0rb/4EJ0KdcoFS7SXXFUIwY9Ytr50/X6KqPxkvl4KE+eXLhFCqoOcbm21q4le2d+4+G9V67dDv0fYMa0ATMfL+EJ19N+ybHEr1/UYsbgUTAPGdOvH7lruxLsG2ZdySgBHDj0i31hqi1Yl668uVUFquligg0VJV58cpdJgapXUOjVKSJTJO5ceXL0EIroqUU2SsX4/5bBpnrl++YwTSF3koh3GZDNGnKxBjkTyYWtC0Tg4wbfePyHQcNgfDS83fD/5FGxMZkySCyoo4Hhc5KgzAaYByByenh2GKK4TPmsnBi8NmiLjNXgVj/6a0ty0Je756ZAfHsooPOTuGOEoOa+uux/FqbXkQxxysgIwgAS9UVJHxRPKrVdSd0Jxim2unX4IXscRG/9tBCmrYaIg2ndI3KowkIRFSK2/uirF/qvXW3OjCpZY+LrUAEUf/Nd8k8KJwFAhoSJuLZeNW9sjUAtNtpZjdLE1E0I/PfMAcgJdiSSE6KtsUAF6aR4KmshqjcBaMBNCo/RGSBwxImJCIOk4BAlmW33+8BYeLeOzEtzK21Mb47tDUEwqRdIc1xY1T1qonGWVuHIEGvpzDjuIvVjJx5OxwkyL8fDioBFoW+B3dDIK6r9K7nekqIcaEDtj+xRedBZsTU+dyKoA+BA+Rhm8FPnEwkG65bqXydAC+sRQB1acGbpMOtuF2oA4t3K5a91Ji8WC0nDGufaVIuT/LR0Of9KA4XBfGiWMaAIFJImbVsxTSc6/TjRAZbQ6Fi4gY9J6xIccX5u/1CBjvy4pBKHfK+AY7JI/ba8QoxdjpbZSr520jdq8g9IZmfFbVlUPgBg3hh5o49LUzSpQkRKybzwlkoDIoA4sBMg1AMEZK2UleTeMpdFDB6upEMkNtodS0pkU8MdqBA+V4A3DmLC7Q6ch7plNIIKKnkQRQJZv0iM/WUALbJoiXoWHsI9hl8gEEEJrRJGqitnNHyYwwe5nE70gJojT44WkkFTy5TBjpTARjC+QjTHJ2c5xXojhJSfdGw2mmlDPhaCC68/6iNqGtwrnuw0YEJBPwLgafhq1gvNqAfZ9Y4FgzC40MUf9P4QCVsvho/NlRpF9L4TRBg4R0xS28g0ZBtOC1wkQNlFwsw7d8IJRMRjRvY2M9jO7ZjO7aPftt9qKff++u/PPTVp77y1fVN1b/k9qsPX53mt9SuPnfzaXbmQZTDfu7W2U9+qPbgnXu17nMEJp942Li2hx5x/ELx9au2SAC13ndGnfuHV88PHUYIs8dEY7JFAnj97XvqWjjVudZI9qvjDH8jTYdbqaUfrj149MrJhcFf9frFjS2bbJEA7r/18kl58lAmSt0LNUo+eHsw8N1/82UAVy/qVy+v3339p987KQV51pWptb321r3rl25N8cuv/vS7m2tZd/Ljn37vi8//u+HdVWFrAK/99LtqkYxfNyfz226v/uS7VtDm2I7t2I7t2I7t2I7t2I7t2I7t2I7t2DSeknkpYRx51SN6O56RT2f2JOgeIeHPc/yV73d3Oo9Qzy7qWEd+6+X+HHnXHReGfmFnnVaPgHF+9a5/sP2IFhfsGm/Su+bc6tLN1SWiDyLGpXRSb2LZ3aPUPUZytu995y/e++vfti3SodiFvYRL2+97sxtfEWmth2NGAa3eG9JQa7tEUwARcqFRekNULGA3tShPk91KDl9c/fzp8Mxwe69jmvfoNLp9JzLZIjdAEBetACLkXDwepAzeWrv/aMN2BrVarstRlpmER0g6TKjzpLgbBxGI9GpTL63VQZUX4mWB587Dtm8xgaiu1MPK0hfC7t9XE4ufik8FuSORrG2R2h68fS929uCqAWiyvwM31dJ7yQ+7GuLRPVgJzrPuaV9/a3uGHd0jUs56/Uc/+b+hJ6CfNdBrP/0uMy/LcvZ6jcrpfTbRS5f//UuX//0Xr/w7SkqwegUTzYFfh0Mvq196Pi2hInLj8p0bl+/cuHw7P1zv6sY4W75Ec45699Og+RX48Y+FDKSSBrowj+IzVPhq9NSnD8dr//ybf2bOM
0GeRJhZYws16ldJLnsYcKGfupmW8Tad+9IDIfxYysnTWGwvB7FetlKvysOUMZXOZTjbMHCw1paGF2cnRvgzii0y3IQ8vrW6C5H7NgDAststy6LEtMzQVgADgWcgjuXW9ZY/jx9/8PjxB/vWAPCy7HY79zltiP1l+2O/UjgwpR+Tg4Ja71MEqDGU3lq3LytsRUPqenDPCEXvGLqA1JQfXURzoIn03oiJiXo/SBYLHCjwt6DWgGZlRJjcAju3tjPFg0VxevC28yETfwEcomCOMYcIykwUNv10MHg7xaMHunQkyOnHNHzl5cngHQfm5878OgEnt9ZfLxw0/xXND6mv9ibtsez3bb/f7/d7Xtii/bkgnKxX/GRgmaOXg0bJe1P5TF2qyeOI3WVMpPcI7ZwdiHyfJn+qclhJVh6RUx8ASZSIhnqARqi0FJzRIFdnADbZs2m8U2NzW47N3BRJFCddIgvcho3nTm0TwVMgi3SIFu3d8bLoS7231h5LjzQ5PYEuxQstOaBS3nDZ8j/h5h4ZHma6yxR1NlLXcMgdhs+ZjVJWndDIP6D6YOx64eg0vhB+44VDJp65TObnVomM40+4zQHKLBUstvrUJ8Jts+KlGEWqjGpYj/cSyTT0xG0BJYWAUgMmV0vjk0PNnHJsHCzDK/FzXUOdZi4aylxGEnqQmjpXn6j5TETK8SaPwraR4xwlBZr/bHRTpg47TlPA/uafeLxuk4zT98mvhy+jMvduuVakN31uWSwtO03bHhLIGoJng/jYju3Yju0j33bSG4Bz587BmY8IVJhYiMSL5Yn0DTpYWoegW86Rjh6k3VmNiSDLuV3rZoDUF2mhZVlk77YzS+eCCEgiXjSwSwPTBAAYgmW39L73h1PZ1pc0d7fVVBSQWG568TKLzihU3kDTisDOxeGTUTMrk2UMFI+gLdl0LFGySD/De/Q324hY1Ttmbq35hwRAJecQHzWhGLwm8usrj7aT86dSXgAALYBs2ZQPB0+USEKSBegP300D05AcRoM4u5S6xiBCCwG69LoIANx/Jx0hX3juS0z8YLROunQ4wERT3fOIqCKQYoqKSOTwxyTmaxduiUjx3DyFKz3E1RKp+gYjA9ABAgNEpIuLrJF9311EJeYlFEcH6UZg+9XztyMSUyCLFUgiEfTeT86fxgwfvPMDaMS3RbD2ZVkEQssC4H5xe3zh/E0d7413zQD94O2Xr126LSuN5/qlOyJy3zHk/lv3Xnju5gvP3eSFw49SMyQuy+5+ibw+uXBLIMzL/bfM21HaHvPGAsBLz/+vre9ff3P2drxx+e5r/uGDt39w7eItiIadl3e/cBfAq39T39US9v3HZTLXL98lghC97v6SvTfPoTcOeumOSa1GRDSlQwLGSYmV1q3LefHKHVjmO2u9taa0o3fp/dy5Xf1WS7pAw1czXR5EDjqQAm4O9EaQhTSqeNE+oXVaQjnoWnhZ/0OXPplMCdQFJaWpuKakQW06ZkbVWVdGOp8cBmQVYJSKKt0vuh0Tw4psRYkkIsAKpHimSgDkgV6m2oiVR2NmAWueuO6BoWZ/Wxk2BNJEACzEdkvR3WorWqoXQR51CMr+sGMuqlrYSYwY7M6d8yQA3Hv3ME0HvS7BTTzUWpZQs+0STY9FWBYO0iBhkNQKadI7iJZl6dIJaL0zc6l34IZM6SAwYbdwWHJG/FHu6Y8He7BIWDMBG22zjeiU5Y1N8RbSetpgXvSwDAYRIkUwV8DcaKHVuJlFhHlxeEsXWZYdEfkEOPIhGEYT7fePzaComKIhlL48FTJ8exI79Ya19w6xbL66eYEkxaQXG8YC0LJ4JLyaB1j0HNn224mOVG7U3bhh2c40s0vBR5F96wpZvQKSev5tmW4fTyOpfRE/hskwTrf9L6087hIT+RKyr8HkFnkVzPKoMZP6NlPr+qalAJbWmAHCbtnZkHtZmHfnlg555twCoFNXyybTAoBIsFDQEcsQqikIADe2maqtm6kJN3oYaC1SmBZP6AkRXhYR346wNREtRPv949TeDWIB56x8FyAgynLUiQNuUGBmJ4E633qaXDDwbrNngYk4ZpCVjkZEJNS7CQZE6NJ0k6RH8cQ8RbbZJlia/UFy4LMyZkiTPAbipUbKDKUJpKuQCIAYrT1uH/TdORbqnRrAO+Jzu3Mi8DsIH7DrlT0W4v6M0hlncA5yiEh3SGZQuB0YWkZh0jHOUJiJFiWbIgKCDeFWLQOMS7kS1cb0u0KHlKoxJ2Mj0U3snkkAotayQEgiVxVsQaQ8SCBuxpRQGcjXJLQAXemiWs6NKEWBMJ2tZ0IiItHkuZ4vWKIAFLMmWyRgWVgE6C2WJj1w0Gx9DbA7FWYAFmOvy9Qz0FSucc3HNiVN8apk+RBGUVr3q87MPA4Au90S+pc+rHlvmLkbsxOSZpKJ8njpYUaDy1FxvvV4EoOJwZa4fw9ZPFpaYDS0iZt1DWXsEkAnqVvV4lgCotpo2I/9ojZQj8KiWD5y7RcEWBoW95aA38U61UXU1/QPAEAYgIvsjqUCUU+FscKc6wHSQbyc24GoQ3rfG5os3PcNpX4dVCLS0R8LQK5ohKFcVd4zCcSxHduxHdtHtalvBpmFEEBoY0A1oBDR+2fU1P7KV9OBwQjxlhmL4PppyZgPmFZjbDLVOn2nOyf2hPusgoKWJaXgrjDdT1x4iQt6GfkO/Gn3CbBvrRKrX3uSVcsZyuTFu/6DiYy/+s5fvP9//h9PAfDfTEsxxXcnZHFzfKPB/c0NrfOWXLtwy17bGITK3+XTcDoxUEPyOt3a1c/fpHpzmzaJCn5KXjsNwUv8fHL+dGFm5mtjeLLLmfmyeeiYi89U3PrAz4db3EiS+1jxoOx4Z2JCR1fXLgkno4JyRJl43g7GxuG4ev5WqMiK77Ggmre+tvvvvHL/0b37j+75iZgfOLl4Gjfp1y7dxuF27dJtAQRSo7nDRyU+6W4IyiEiNFvk2iUr9nL/7Vf0/fLY6bXzp13aBP8Xr9y9cfk2QW7U6aUDRTx2R3dW7YDaLNa7bMj1y3dg9nFcv2Lx0dVymh1evkOhLwjO8pCFEcJa4do9MgZ7gSE6sxb/GSwLbvsQSPEhGm4BtgO6N+dVRmZvILOAhM/aZme+aj/DH6KRak6f+dqfPvnZqtoVUuI/JPTU01b/V+7gTnoY/+gx6OGYJHlWRt2A5j+SL3g3jtVkRrhCn8xLLr3nPa2+j5I7MiBAXTmSBwGm+7o9KhQVN3E80cpbdTgAy7Jblh0vrFqOUTwttxAmYJ8g0QauFvuJ68kbC0FQPfXLUE9UovTK2XoHg99ruq8pQhZSkrxUDkBS52CGoeqGCQTldzXf1+qMO4EfT63WuFIoy4/JUiswU9IwrOTFfMHSwhsmqdD6ByIhZZCQVhLNBlQov5E7y7hhoRidS+91YeW7DQSrii5R+P66rc01b2NXPbBMzJkOWQ5ugBxtwV1JExW6Y+OYTx/F7MnN4iHQ+F+DZ/FGc8vEMI+DeOVQHSXdgloVZBW1nkwyEzNtlxw6Qa+mPupwRZoNilU8jNcywNxT/GEk
5AIwYcTxMeDGr4VZNxqQfWsivUp6BC0qSO42vlpArt43wuVQJ35cZijI0KukM8BAjkKbONDMolOWEvNJi2E8U49hfdGtznFz5jyj7AnFwgxVU8EZ8cT+sgp+RHZ04LzGtZ4SeOSj9x6d9jxWNkMCF/lTPLis51T86msAAg00TxJMMhjjAv/rGtIETihXa2NvGMxsARoEtcwJFHi6sqiu1mVoJxPDyonSF9JJw3i4ZcMJ+VDbxCjZ+jWFdfhS64Nb1CXEm6lTCWCNgsSynNNbOumttX34gMcSwwpvratpWy8Hc0mxNL0Mw5Min47t2I7t2D5qjZ31BMvUz6sC90SxANUWSdNVdDwSUpdJpSkmqYjuqhNS8E86a3+nsEl22Z6dYBQT5iaeyb5KJWWRgN4vikhwUv2WQjT011B4yz/81Tf/4a9+G8Vqzmpqop0VEqJwnTBYezka117mftyYdbbQPjVnlJJoon4odSIVYe4/ugeXP2svKZeU3tebF1JfLfCtQt34qklSxISiuvDCW/KGXL1w89qF02sXTtdS/vALeS3q8asiC0FE0KUI1aZjqv7cWpMSVFyEwqE9fOfew3fu+UgJF9dpiYhOzm+n9bz/6JXRRlNm72F0wxpHnejqhdNi8sjPI/BqAs6DVSR4LHp8cpDIukhrTSDXS4lqk6Qnk8xK1KuWqVrhGuSmz+Gxub8J3C9evpM6Zj4QyrN/Q7muKc0lXDheNwrVa3rcNQ+vaiIgVBMn88pqNMFwblFzvSoLSVurWpEzcdldHyhF0w8M4vqS2bTO1Mh//s3/XIR2V+IIsIppftAIIHAsuIs0Vz3Iijn7H7dueXoBXdVC+a2bv9yOUW0HzmCCO7g6BuM5+aQrfPq9GtFGPuhbEwavUvZgIAy+D45CuvsLLwsvTKH2A6lVTHSFbA6FwgfpiC3Q70mEmJgX5iW8XnNVjsSUnUjYZRUmqbWSeLdF7XYkDO8MEcleg+sPRpzU5MuiCkrFc080xoZCViDgi4+DLsO5NYe7QPzkRUWnnE4F+Qn1P4m8FIZkcRtanZ5Y8Q6XRKKYexF0yoZ2KnsR+1F4UNWBcwvCwDPymG1RBwOvl4JUw0VAoqpNVXs2d7A4UTbv1tt+3/b71iydhjkoF2/YQSCLX0OkG/q1Z8SR1WUtqlYAiUo1Dp818VEMdajYiR52/UBjnj2qDRalkHR8FHBLIruaTZwj29UUL1E/NSlU/PWBL9edU9jHjuaEDrWB6hFpYXGKiyqHoPUreo49ngMgIb/Kal167pEN7nK20owZbFUGUAKiOyvKckAi6ASJPBZO1QO3DSzTGilOYlFDjKqPMmc+n+QoqVxOciRZ9rxEnDFslV0KwEpNHk8r1b2uvM/ZsXtwULNhglu01rq0mDn5sQCRB8v3aWkBlXJwkqSMWk+V3px8+ZmCiew9tqkwBD1uBqUEVRLuxGgHhBXskShYlNRVNxgpoVfxpJCWIH8QoSiO6RiVSyknrXL5eEByF/LdeKa+qxF8EQk3UNqA4IDcUqFKdSJJUckPv6lgBSWc1Xar0O7UQHyLTNpAuIbreQvktCnkRjuG+yxin4xxDZA5tmM7tmP7WLQdTP50QUpcmAcKoX6CaFcYwYa8VN+lgRqHHbLUAxMvVucd86LXR8kIhczBaNMDyFdRJqQyr6AGnMerER8VEkKoYzp70Wq2qzX/8tvfOgyS324TsRCMWWElF3lJ4HWRVcbXT66dP1XB5GGJFJ6tSF77T5FhPfjqB9esvKm0cP3C7dcjY6PLdaVRYsNqdcNKQX2SzwAQ8Zgap0ijw+3ghLkP39E6LVqmpsSHznOo/p6SFRW3FQKZIKDihAaraYgelUyMxASQWVeJRuueS21lRlUGun7xdu/twTtzoPf9t185uXAKwgM3z51cvBVmiwn0a2RGSJZE1y7dVo/Ch49euXr+tLqaTikjTy7eCt16tiiOBaDdBEJE6fp6/dItE5/WerUMW2KKkPZURyk/l6o1vp6tx+C7k0aBFB/n7jd1wOqhWRYIwGK+jLbR+gH/iUhGE+cX/+cvwxXQ8uAKLLXPKrxvP1TXZHilGwO2sqe5eysozQt8imfK1IK4o+DvoGkwcScBPMGC64deHrq8ECvwktxwYwYAMxNtegSECuSgpNBUXKeFmzG8YwDSLacHrdbsILdvzdm/YKvQFC9lJq0AdieN7hYCk1iOAOu6rHqDayow1dgtEE2PEvaitD0OuD97ituQ2bXr9gloMvS1IGUBLG0LRSYQ06KFptqyFpkb8AgjEWyQCLes+u42UdXBdeW1jLobTLUfN68l3EmzjTjFTn3Z12f7aj1br8Qy1P4tNjX4fMVhE30RMvRanQAJAPXeC8X1zTd+QAllJxUKmmUJ2ugMO0aqVmWqsxgErNhGxMMbR9Yv2ML6UO5BHHJKychmIgIhF5nUucsV36hRTiWZ9zjcgJM5JpwlAPA8CnkQh6PvSzqIK26kiBWe3XKFaWqXjQzi1AAAIABJREFU2sFwQArUZ8bklqychkwTl96pXIiKHpYKqsHTSR/y+VnMjh4/O0XRRpNzfEnD7udBVDF7lrbtHKPtu/7YWzu3W6SLeEC/2VDSFikadj2IKPPGKHIF6REIhCW3t9htKM7gzPJmxlG+gMZpBw0vk3FhocgkpEicmOjk2C8ZujtHxpVLXUuSdN3fKg06MAkAscoAXbrNh4k9tTFEOqSF+1vSFWUojjpDfW8HXm9u6uSwb6r8X0n9gJh51BKrfNeM/ElgijNYtwrnGY1HJ7GEgugbXZQOt4oG33dCGyyOHEv7issBfipG1hcM2q2mFVvs+JmFvagrwfMrESTZRqmw/wmGk5kP+bBpRx4Jky9urn9djuJwWFzUMcM0E2dRbBFpLfA5uVJdWzJ78QXkR8d2bMd2bB+7tlNqxkxh63OeqL8CAJO89+2Dkdqf+spXp08m2WElo1pqKrgU4Uyv++shrcJVjyKeBjuRqpXYd/FIZQom8hflE0gbSDzZe9fZWKojqP+Ox8AWlvI7NERqM3ake+SSTYhIRMh1Kh+tBh8CBGMiwleGKtJxnS8ySjgAEImbyFUQlUfe+NtiICPMAl2KhflM3eZpdcNKD9vC61c9QleI6uW9YskL50/fKNknHz76AQBNGWkTqOYMsQWmcudfru2Am+3kwmnicc52WhoAdNDVC6eRyPLhO69cPX+qOyi9haReoUlEEbp+v9QEf/DolacpnJ1rzB6zZzed5CfVzihpvX/CEHZvX/pBFaTtGVdot+1u9uFLV75MbhAx7N5qT184u8r6NJyN8Tv/59US7n3j8p1NlTixxee6xlpdPC8M4EdDEky0vWViik82YDKvQWAphIzCuc1X1AVlGSVj+0FJeu3HNdRBi62zFwDo0iO336f/6E9+8a3/ctb0su88BanymEFKkDkXNdVaXfWoSEF0DuGHUKcnJGcI4k6kokN/clJ9S1UoZk+VWK+2bDQC0D1NB1xTkvj5iVAhU+CMTKY2PimhdVBYrGOxVy/ErjyZGxC7STeVpx7Pb6hhahew6Uj
qxsOZH542TDNvawqgjoRdBlCXgUee4RrU1mgjvSvzDwWsIAjZHGJjIeTWZjhSuTabvUePZlMbZiF+xmTeU4LbiwTLwi5ewBRypGxSJqiH7HD9m/WsBhJS3Kko/zkb2yRSUa4HTZe9sBM4rAC3ELnaLAQSZs0zzqBlWUBgImEqJlTfz9KdjL2n2098WAcOUlCwxvlfAY/ii7ODkWYUsfVg29oCP0UkanIfHhpe2CTrPjPAjfvke+OBpNV6kXJVimmVgo0EYGCkYReBi1kD85q3e5ytBFcNIiEApPe23zfzP1dHU6acRpGyJRlzYe62kjT/xDXL4Nrg4rrDxUV/v/WbtI94aUsAJCIx46+5zSdb8Dsrg3tc202nXuL/ZFSg8rGDOwzzwyVbmr8S7MbdCi47p/WjX9L9CJKzQSBcxEM9J4njue9BO1eUM7BW7L5TMJAc2/OSUXQmIGteZ49sEC49gqFrSGy690xjP5Iw1jkakckku5ICYR0oX9F7ykD/AIc4B3BcUcgkyhaqAyfrhtd1RY6sUepuRUoSXpndJ1Y8s217mMdD6WqBAOhtb28Sa9E/+DmI1JBrXbZeOxjtLGQEx3Zsx3ZsH8PGqasEkS5kmEzXOrMPZSBMNaRukoNEUkCn8kk+4NxIUoJx2cIrP1oGGmaV91rro5w3tPD88SCFFHrDTpLqpFmGJKRZmx4BHpcHZ8i/c1skMLBkuzQlVJbLwdQlpLKyB4Le5ernb5YOK8+DOfSEnFmaFPDk+OMzGgNeizs/ePdeYZrW62iw8s/Lz9cvDukOZ7skYezQHaxoEoU0DEdeOH8TY7v/6N7Dd3/gSmqRVyxy00NAI9nfE9JXDZNRk0vezxco+c9kIpTQMDeTfHMCAeRQRaOrk+fmRT3V9DY+8QFGCXWqq66BsU8zRCkJAwDLsoTiOmrq+cb4eqUPUsSsTTP3kxrNuJI/WvAOgvT5ZEyGfrWUmnnxyh0XLWkNRcMV/bfL5Cvk2wgBfvQ3g+X0xqXbw+CTkpldlJ9zgnGuE9MYWLZmGO9RnO+qnG0eyDonmcdat7//1p8Va0KQJZedUyuEZrv31Ghd0KXvpe2JEt2ZMhIbBMs4bPnx2X9VDfBgg2sr9k+6XnaxJPWZmUrnZqcfrr1JqGZRX9sVixV4t0BXiKZkkhGDEBluVC1cxEnw6i9pTVqD5lchD2XXg1m0vbzIGzKjjns38GqZlJnYhSEFHRG5Fk4IKpYjL5ZkjkOTjbSD+jTHarbMDNNEeVn06ZAOABAzL+z7FXpeChDGK4jMDU3EkzOuN8u+J4l9FjGpwXK4eZaNcAaKVZPvLVSVL8CpgC5MAINXDcVBVGx2y8wwwflHn+ZI0soXOTSZ5CLTvsvc53rQcv9r9wfUm/SWeV1DMIunRyyGKfgh0jn+l6mQGzvMyTW2IP+NNrL1GS7Ob6eDX9tAEGn4lLxzPTYkA4wMpyRf26atTl6c7NnZQbGwiCd+i9lJgfhEWGVe0JDGVrbYRNAZf94TywJB0mJsZfbMtH+8l47eBNDSWkiJxxNmgNjpIpNkdtRiazGzXcrUlDSC4KfIUdIZDpW16U7GWlYx4StwOwolTOJd+5YAiKWB75F82BMNaA6dnomEgj+mouIsYnKVj7lR/q4HnJiWPAmiL0OlAxrEVKeuWZZ+Q74iCIdDMhCTn4PDBvFg42yLbZFQbluwotXpMlIoRvfiJActQ5wdO/YF9fzE+7Axu5wWDZ0oeKcMXHX3Rbx0HbAxYYEz8crTCPBkAIGuLjg58c/D7cTfELTiqP1czlYw9Xg1qeHmljh2UlxQ6LpFpHctzqczZKQUPhhRfbekLJXU98IYESBYMZFjO7ZjO7aPQdspZe9WD88FKf/6CbWVt5uXdg0xIb9JKZ5SYrOPB0alzMQdogCz2uinfgcm9aqxjGG9iJF+n4WtrIwzMEPTFKTez0pqNoD88rCL6H/nRhYzMolrxg9TZC7efFef+5J/GqusotUoqNdr9jNFwlCqnmLSo/pOiViTCB6fv/72y+a2qTOu1XHE78a9MbNV7p7EqnAAsRhtevhoKNJdxJeYQ2CFz+UpIDGPSkQwf6XemgjCHfXahVtF5l6/bADlEEwpk82RuZGh6w0q49qF0/CRXOsn242mJwdFelhHTynQP3rKEUbUtIgzQY2JIxaItD5SHX/FB+pD4Pw/YS4GtaFrggh5xRkCTdVsbELVFvnS83eVStD4WA7hU9JzSMU7UfFByc+rky3y8p3QQM7ePgLJ5A8hiHLRJkjrNKwoZAeGQ+I3LKDMKvu0xN3X/rRngCzc2UYUX6DzBgCdMFonIAB6T33Jp215lcLLkVzBLS8OHp/ln7ADAnCvJNdFiMyTzws02BeqhvSYXSJoHZHsUsXqiiY1U3zLcEayrBqAqxW249J7K35Rfq3kjNIHLsoriKs/Wpp4B8NQtXAMak1QMyNoTuzJXYrcHWOlm1anZN+GgHId2hAVZnYxi6BpdkCtdEoWY77V3ORpGz3ZLoNYjXS7qr4UFNxNixJygH1NBJchVMUMccc3s1vW04nU+GSseDdAGmDCplBjdtctdpIKOOOy4+7IDH0CzGVPHPS6uM1sEm76C9I2hubbiFIO0CD7pZHZoeJHRs0fXavEi1VYV848mrnKfkgHsan3rulH7Wvl4FHY3cebrMYUYluseYrZcAHR52yvrZY9gVSSPrlXpFGKRYvpOULEPylxOdwG+ulCrVHqFB0Gu4oTxBKp47g3R9vO4ESkhaBRsLZ97DQ9HxMkgpWoVklahyZRKaKL9H3nZQGRZvSNW/xgGHG2iCjD011MdDw8xMCc+OqL3bM8QCDd4EBAxnBDAlCGYAXuevwdZmrFqpNwjmicYrpkU1f6AKKbwAoNhW2lYkOw3KSMlH3pO1xdDuEnz7sK9In98AUHZyGYZckTCKiw4IfC8F8X2I2szpRzohvxfeCKiLiAkG6EcP4HoKMeL9HwbhUreKFuK441Bm7HaSqrixEL1Sko4rMzzc6cLEfO7xLNLJcWZuxoIVJc8gOVJAYqtH24E12DrqpQMj9X5ha/SH6yEnAxcoGRt6h134SNTuAcazjeTvoGxpEztA7JM8Ti2I7t2I7t49d2ZobYdym+WMHcla6+9+2DpVqe/cp/MjWuUs/gNJPQBBGPcWNikEtF0itxz4uv1D6pS+dCy71GaU5WRh5DJvYQjfQ551m4caShscvAoPViQBDIe4cLi/8OGpFGu4/ALwJwUa79O/3G2PRuWaqQ3Ht/4fNfeuNnPwTw4N1Xrl24pd2ZNjiKOd5jSlgP300DXzpdEgDUJJXjChCi3iRWVXUkOL6gsmsfviwhLmUpAQEA0prVFnBl4eTiKZA5FjfmZqHfpsnC76W7iMZ3+82ty82SapsOzSr6i4iqsx31ytJvWwmur0+QCRnLn4eIhDXzRF0pJUTSCoT+wvmbb7gN+trFW6GTQlOqeaadeUfJTkOVyQG7iI5fmYdNuHrh1OOLRZXVZWEmFoimfI0ne+uhIKY1kkiv+Zcdr+TD1Odfe/Plk4uBVCOylC
GuXb4THbOr7XAKMCgkUCpl6uik6sQgtc4M4lQdtEkH0ZDeZSG8+hNLDakls/XrH/1kKIljZXnsFEgc581BaBBTQzEzM5zn+ROA1C6fbhZ1S4uWNOmO9SIGrsjle6ox+fuf/eOv//wbf34AEKPZNLXKYTwL+O0okCdaEA4SKCqV2oTUS6eUYVWOIBCkM+6QpS7XJq6shtaqAPBYqqI5kdlcQhtJ2T+VqJW9fSZj4+BGDLTgLc1ESte+AiNG3cZGjqQmMFxhq3ZNre/DjKCPSOUFg3IYoxaldZxRNx8ozWVmWSP1ESeoEXiZr3Z0AHm2/BwKIJ4xzPH0EMAK6EyldPOtvizSe2dis2z64LZZArFUonZczJulCxYqimQZnlxWqEhquNIl9sLhVAki3NAsvq4ushTXnlWC64JncDAaVNasL15wJiEWBjsSfl24wyHWIEjbUxnajC8ufvkmmfYPs4mkbAUCxZLcCA+qQRgbJ0BQbUWOfpHTzVlXl9xigRizNloUoiD1mYpPzREkT3ikEt1+ttq4g9Y9/X1eLnLz8+SxBBATNOePMW23FlfTyNzTet55xz+0stvzp9k7ifJltcjYeSKouMvEH/TGCxFR77I7x33fnW86qe0u45ghfxRafBa9N4XC4CngtMrxKRCmxNF2xY4UfzZ3WzxJXpJ0Ty+eexLHO34odJzUS9RwBM7WBsnHDrlAINzNzEtABAbbdjgrkSQhMOGcFAiWTtal005aSTm2TfyiVpMCq3+Gp4fK/nsH632+RYITEfPSe5uX7GtdVkUAxIHqZ6qsZPiXnECH+UwSmHH3UCWJkESDt1JhVVTIV8otFjBhd7e2EfpuilLDxvuLROqeC3aR5axja+bMOv7GwwfejxRB2du4+ARed/zGRAvTppyjOGnS/R4jskXQBcuqHGKYxTP8O24MBCuJ5NiO7diO7WPUdiDZt715nCTPNnY8Va5YN4nsw8mviMREe/swvERU1Oihp4SEkte6xEQEjUYgkGZvBGS34x5MVdC7EBN1G0gzx/OyKJ3XgAxlggKYX54yhbQ+uBHJ0qyI2xo8W7/x5I+YIVJb7wsln9NsdESsoSemy7sbnbY3fvbDk/M3VThedgC4txbfEmjZpdNNKgy2b2Vo/0oEvfWFh/LWVz9/080jK9YoXGWG3tpipcwHJ8eUxuJ3vXwe70jNClOebPs9Lbz2l+WF7z8y89zJ+VO3dpTS27rwatkMoZ8dysUiIV2EhE2A8ukQmuQ7zQVXFeNBwpzgffDOK9cu3gLQu6irU5GlETnsiMlCDAVUnL+6q2pNul8f1KlXMIFM+iZp8rrnQLx+6XaNCWIidBEIMxHw4zfNZKaZKGtCQxCh95OLtzRr5MNHthCIaJTR629ahP71i7cqSHW4hVhEsooUBExkriitDsLENRaJwb1uw9ittvtvfl+te+GKF7ViXnz+zlDdyC2yokKkVnXkxENZldJ+6fm7brTcVB61NozRmx0TCtBaV+Msporb1y/dNnrlgizxQoQ4v7N8KTVC0/7prXmoucQuAACztG6oPtiQRaYiYGp8cJqcVg8n3KaYiTtuuEUCB9rPv/mfP/e1r8fcQ5nhpViJAQY6ES2lWxP9qbeuJ7S1xoDWjF54EXJTXMjhrkySFTCB9C5E6vKjK1qWXdvvwcRMsm/iziyeLJgQVM0VPeGmJFXEzXIO9C5gpt25c93CpQlEvNuN3rXOUFUJ4TDwQCAizfKBCc4t50QjBpWusKeIJJIuWvm8mxVRTJUtJg/VixpER+9tNGMw4ne9DwjiYhaALoAw62WAqUYIS4DHeBLzfr8n0PLMM1bjS6DKsDsMsUhTnZuWJItm9zFm6h6TVqpILREkUfUlcNsTPYj7ThGG9Fhili4vlOBLLg6T0psVf2AmEZbehTm09zQrm+pNy27pvfUop6tTFfTWedlVkQWeTpeKYAO/CQOwTPyFff2OrRIYb7TEb7yG4z6hUzE1qI2AjBX5UgikBieHWD7qmrxfRbkhh4ym5JajW/RsLssg0fuyIwB76WDpHbwQMT/+9a93n9yhA0LSpaEB4KUTGEmNBFhlzXMuG5wxjnRyepHuRY0Hq0Iq/gg9XCWFtP5Id0HCs9QxE7EGnGuytrgkgJsG3OR6ULEvZi+jlmEmYKyIogZWCmctvsB/1jubWlaqSCAIi3NYz8RpRM/FF56Ydhw9wmFwI4hgWZ5prQlkWTjQkxd+BuhC0rEjfOKZhaCRwcte9uc8eCA8vVU2E0+8YAZkxGkEyPPGuuGr3ioxdb2Q4MwiDWaWppBh+KkmWEhrVwf/gLg6bxoC6LaSez/68SNw3NiIaybDroo7CfokfEUBQyLW7hfe7bsm+IvznYatJt0pEzVzpSSWxaQBZTGqVhATwLyjto9DH/9o/hF0kd6dMEhHJAyGgK3UOWhhO94a3iuh4jgASYSIO5qS+0AqNklA41FIMVdb7hpb7STbQXa7GcSuCQRdWsKvdxBpWgvmIMCijEar+EisVip62JZGXpQQr6QnDzKWRIozS/AH3+tCrpX9tNg/i8bygtaF3hIALBIYY6phTMlxOjm6e84kh0kvBJ+5J3cJBdP4bJ/9+PUVEcQhN3/eIOTw3fE3nEbpP1rawCz5wa1iLQdp17Ed27Ed20e2PW0uvH+x7f2PoC3yw7RrFw7mFnz4s4PugQ/eGb66/+jMoihP1x68e7CTp5zJNKuzxjpQcObpe/httFpnZmoPHn2Iib0xOpw+oWTNU7TX3375yQ99ZNr9t4b1Xj8M1X9yW9siP+wrP/7JRl2dH/2//1f8fOPy7fUDx3Zsx3Zsx3Zsx3Zsx3Zsx3Zsx3Zs/xLaTm/u7P6KPAinxHud0T4519Q2f7B6qaM3RHGrKN1CVIt3TQ7ubxGR3keRPxa+lv4CAFDvLbzG/GqbAIB767KRZEjfzis8+6R6mIn5u9Cv/upgiPrvvBWPiLkgaLR1/IIa6a4+dyoiD0fjoIa/jQ+/cvLcKYHuvzM/qSObi8o0Snre2NMnF24+eLRhHHzwzg+vX7qFLpOJsGts3Mbzs8FOPEpO2xt/+19Pzt+EyP2VLfL6xdthcXPHz1zsnBHcnkqnibjkN9eN3tVTo3h10uiVlL40tk9Mfudp7f7b9wCcXDhl4tfeGqyBfucqDItJWW/wG+/+4OqFUyKabZGjs+T9t+5dv3Rb/dNee6vYy+p5AwC8/vbLNy7dRnGNROx1GT7uf+tCosz362Uyc9B3+JIgA3eIqHeBdJkDh8frauD1t++dXLqF0RZ57eIpMNwGv/bm969fui0CXqjaB1f4tHWF7HflNV+ktilqW9vaRvnam99/8fIdjDC0vgWvjQbK197cMAHfuHwH7pQ8OAABAHo4UFV65ZHLFF5WgAhYPUDIsnRVYouaj2KFXDTjW7jASObyJwHoc1/707/75p+tV4E6cymjjP1KQUKLW1SvAwHcY1ScNIlIl0a03rbS4TrxAYA44JIRkjZoMgy4r5Okj1YMn+xQHBxroBVfhojSTP+L7JQkI9mJ0HuzASiD0GN+ubKtR
uYW48F+7joiNOK3JH2avnIPKimPbg9lPj46K4uwk4RJehZthYwVF5LSZXH6cI4coZDsXRvgHDDqpeXdOQKNMYkxZcy/Sjq00/xE7+ajGtPTQEtzHKqZyYKajVnpzsJNCcxxHKuk1bySfyvRdluSnEt6BoSB4TIFrpjPla69tWaFX1vXOG/1siTSpBxCQbkQK3saKfIJjRIuLs4doFju3+Y4XR6TLuZCnnkqw9lN+6Da2ZnbEATSjh/SsWumADGVGTPJGeFhQfvQHCiR0FdXCM2qB1L5dr9/DA+uF59pb/2DD/ZNPjj3zDnq/fHjx8zM4L5vu90OTSdrSUQ81ChxPqOIgoyE3LOxiNHDay2bG/eyZATmuEomjZenKsEx8dfi7stpsrGKP+RBmIZDMaXyYiByD898l7K7mL8IwM4cicJNbhxDBUanAzGQ4o+IeGbNoIWApVGVWQTN08WRQncIH3afdRDNW0C+ovhKyGefTHALSNWdM3bRSsiTbYyjpvsr83haKaFGdSHlhNAYBaXBABY2oSWH0lHentdNih665t4KsjZtegpQSaKmHPxBH4asu+l67NIZRTxBwG3sZ0WryieU7raJu7VF5YRBdiufUCAMVWj+JqjusR3bsR3bf+e2M/4oSsI9f6JyBQGA9/76qdwDnRIbwwuNtxJIAnrhAoPYMkqGhbM56w+KmzqsaLm5cMUPpjYW9UWZmvJwrL5NbUE7/yjbIpEymYO3/FOhpDHqtdQJMOR51PbCv/6SMsSrz92sqR4frJ68+vkvFanMpOA3/jYfC1OVGxMG5jgJOq+/teHWp0h49bnTmOemS+Nki/QnN50iB6lgncvyoEowaPQmbJpOq9I8ZSYdRe7yloRtCAB58M6Ny3deK7aqtVPkyYXTovRadK/Wkzk5f7Mu8OHWuzpgTR/5+luz2UsTg64l88kqeu3CaY06yVUBILp+6Xb0fH9jJl+Cp9b2lvreJCJr7GfdBYuLGfflwRpbRHSQCtX1el+6cvdMU8E/uYkIvviFL9cy2WtD5I0rd7GFq5stb1zWkdqhiMaeCOCVnuEp7uA5n4SEkp4NZonSuQx2tLObZHemzRwG6t99488/98dff6pOp2Vq3NGydEt/aeSELGvH6m1sK5nTo0pV2I8xMryzgFn1TyrrlKk/slBpmUnZgL0ANHtpVTMiPtAXWa0mRMQWV5eqfmGBg6XK0SBtp0H+bepbzNVtICNY0iq90TLQktIcaeqg0t8oQ1FyVK708FGNqmLBIdtPWMOQiQ9Xb1gyjHEbvAO2fJcDEESsTkd0Vd5UizckFqHJDJVyxyDDaOSxgn5ltc1K8mbLxKrZeqoy03Rfte7l4FeH30nr3NDRbKmFx6zDswOTWWE9upKIiYUXAMtuJ9KXZdHVL8tiL3sguK91EAqBEOye0GbsQUhnqOgk5VOnhqHfW0oDfZKJWm+MBV6zkYhF+qjJP22Ls+eccJhumSbswZhWEO8kAWObdqYQe+tcRCA8ZsMxA3214eTtVOyFJQEn1isQy4Yg0h9/8GssQmhN5PfO7RbeYdkRES9WaNvmKhTbJ4AnMKxLd3zRkOFR9ivUUsatLJY7pSgU+1ZRpbxlqxYRTzwhSm4rXThrT4dZTyIKeZ4IRe4O8jBdTxlQrW4IOkUgYdQweoTZ1GAjssVpg/4afyh4iyDY3UOFUS2WOgHPzaLUw9UvexgTk7Z3FVRiQfEFWwPCfveYr/WMhYcpi/YLxWJzPQNfme7qBvAHlkpAcgJNQZ0w4oYt1RmUo6PjdaGwNW9GPVPVQj2hS2xvACGufNOC6EesHAPfo5jvtIDaeeUKZsYfcn1uXKumzOA7VU5PnptNeejYju3Yju2j3Xj0BimEPEn/wUa1Ge+k+BxwHqPaeL0CAyCFH1B2BX8DolUu5qK6882YT0QnDelhjHR5sUh02wvyefgM3vv2t85e+O+82d1x910SyywU3yaXF4jg5Pzpoa5OnjtlZq0MRERnPHn1uZt69wstncAUppB4wCVhE1EG3AIevvvKGf1rcx34rDnHozhzwtFh73LosavnTyvuxSu8sDaXOkS9tHrvIVAUm2ssWNDHP5oaUwylEdVLttq1C6eeZ4iYNXsRE7EXuJer57906N2Ti7dApJk8CXjh/HaQ/rWLp2o3faInjkCWZVmWhUvaxa5ZXVuHHIySPrnwb4MQZG/EIB7MAXBxURsP53SU87YWcuU2MYcqra6d6/bS83fXCz1L639K25wTE4G89IWDodw3rtyFU8GnbE/QjiflguYXUgsMYreyhTraGlE2Rbfbn0HHHyZGRMRWhPrJIi/Nrhl1krKGsz9Pev7yQyIRMTcQTWyH8Y9xFl3VFqSls+GXGAUjUgsLV/5lGoaT0qqnBX0wv7g+VF+r8CUAUFtksMSoQaAladn3jSyfnV1wUPdxpW7fACsqbLaq5shXnCfk8l0x9j/rmIHc8oEdM5GlBmabKEL9iwMwgHxGjPCKLkql/yFPmEVrXHGXqEIUQjI4aAqPnuvANmURAi3KtjjMq5q9OG0jHJ+77q0bz5aHcz4UkUlwEoXyXiGWW87moHL7tvzmr0w2joL4gXFUc4RLSULiFBkZ6ZAuwsQMZhi+EhEE+8ePpfdYFLEmaPV7KJqxwwy346ZPn4zzNTHST3pYqiZNP7ErJ1MA2+GWJxF4ljcJOpgvPwWdDswYVkVueJJy/mayYI8aZhaylesVt+xQxbZC6+oXZEMH8ZwMH3kFoT6srbXWpQeSg4iZP/eDIT+EAAAgAElEQVQHf6AV0pdl2bfH+w8ei0jbN8zryN8NvDKAX8pUx+2Qui3ikqHYHkggQlxNbN3fGXL2LvongZ1WGdo049RNmM+Y1BlKzNSMjuoYSkkXgEIyyh/x+xjbjipSCix9YeCvlMkE4ZtwQaljh2ZlLc8hYKgQCLI+cgopODIAUY+lCNCh2pUxEEESgRUUC2i8P9/MHnvlW0D16zhoQWlUzOhSsT1IZ2Lc5kYKAEquL45CoQmWaWxu+GZLdPM3AuWjC53SvLFI0SmZmAxdr6bhXDS1V+c3gWRFrxsmSkkact4T6m4JV8d2bMd2bB/xRv/mf/qSrK7LAQRZPOQd+ex//KONT0WklpcIPuS/F9qeUjvpNWc8RoBYln3VRZ3FZ5f2ZHf9ZA5zQ2s9WEf4Eurr8eT4uTCzyEfRL/Jf/cevAbP6Cedo0v2WDM5KfV0uUyVkqqfhyXOnJhDRIFDq0zVG++pzN23HNLrWy5tY2LIg3EQWr2MgolnI1b0kejKB5+HPBhfFk+duhriXii7NE65unlH7G4B0qR2enL9JrjvrA33fFBDVNfLk/GmKIBig5Egc2DuI3a7mZo1qFSf6qpZFbpLmoAfMzkKQ3qtTYcQ7h4wKqwilr2txJq16Sq3Jw+ImefXCqcth8OLwEiCsT16/eGolnkBE/Prb5sp349IdQRa6uXHptg637M4BxfbtfnVEbhySIa/otYu39Oz6uhkgIQrfKSIzaIrqhGz1LDnK+HqVFVJnbebwdL5fKqFfv3wbZDWRTbkm
EKg6eGrcNIrHDrZUfjssspXXwYmOSPFs23jI/nm1uEneuHLXj6FF91uhEomYLFkTXtN7fNqGmeVsrqcg7gUmVtEr3dvJjRxmdoxXxkLF47kHRY3RHL/OwB7V3/7um9vFtQH8wR//KQBRKuAj14GYwp94WFUTIqK23wNgJhLpvS/L0sVKLQGbm0XSusEBxExdzO7A1cyhpzEsdMWWWJYpOWfJ4czJSdyDhpmIrNgCM0kz2BZS7JrmoF5N6/XDFbOzo2LmPldTyVWttPC5BhlD9dR+4rP4n4Yh1/CL1cY8ndSHwqkYpeSAiZtXoSls2vm6IfdI+qskUDi2qWVmE1R7p+tVHu9XjrD47NbhnvYU8zKNp9/Vqly2J4naQzwHBcyJWTpESRL6cHDQeq92yCLuFFeyZGdULbmDFk3+VUJ/wBLHEBkfUNJifTgMbTQvjF46yVfLh74NFoubUFCDkyu5ff/BBx8A+OXf/7JLW5ZFpP368a8/87nPfvL3nyUsJLvInCNQazPDD8WWgHmWpWAUGodZD9b5DZos83dCj/ePeVkWXlrbE9Fu2bXe1jp7HvvDcyMmTyQh4lCnESv8zBlXCmmtrC4sU4dZE4dglsHLItgty/RgJWF6sjD8rpZ2dIiI2C0yM4DW9++998vP/A+fef9X733w/gef/MS5f3zv17x75hOf/BQY/TEALOz0TLiLqKeesDHrOD9+BzACdWJw487lQk2YVOUB+nN9bJLYvTdlYkFunZmREPcBtPOgRvhtSv6vVvRqreuUyEuaLKw2MrEpJGDhAmsxswsRWNAw4oS4Qy3RMp3zEJ+jVgzWVi0HrwvJNh0f187bxMjjhbJNepp7oGiRtfUVz1xE5BzPIL8+a+I7XuFaoW3cSfKF1Z5UrjR/MMtcsWtpcYvLgVqHM+YTvSYHCZ7Tyy4QTCYLcjyzDy3EFRkC4vz73gHgVS3sOqXNFVOZ1cApt3Xy2knZVphAX1f09//lfz/r9WM7tmM7to9Yo//lf/wSAe46EqKz0cJffeegn+C2OVI7rTJD/AcgTJViwnTcZvUiRttkOgAQoze3BeirkqoyievePGjdBGp2kRWakulHkzkyrBAi8v53/vLDgu+31z7zh39SRFYBIFwyvmmxVSIiGgpkmyohVaE2bhuM23rsnvUJzAw16YSyH6lYhrtHiEDrIjMzSOsGMkSIzYoUPjS9d1M5maoTgN6lutbt3JTd8NHNhBqR2ifnT6tQMpgjXQ7Q4shVGpiUw/2+af0+hEABEKG5GQsYscLF4oDYIBu0KsEh7zbjeTJ12z4QdPQitsaeOn76qCFtk5lRFh9IArdF0MfHait3q+Ug68pJCG49AaFUsLl+8Xa+TpS6z2S4h6e3M9lHkIaUjAB2OwMRUaY79AmK9N66Sv/qR0MmFpttSnoz0waBmVU9iNsKKpfJisDixXBhFRpBRD2ku1EB4pXEeIY5soB3OE5TB4AbpOPyH0CE3qg5klnEqkWT32XP2hmBkeZIm4CYON+DAg7KZmpotmMeSMjlhma0hpnJzLqSlPpLxyHmUvxaf0bp75BFctMcKeXqpNxTDLBtAkLW/tZq3GR2uLqKcTyh3jsBvYsQvLI2A1jIi346XonXjB40XNcf/aAlECIoEFoQvDUBFl7EYm+FiBlDsfLxemMOSS6U3OKErRM7r0xaqtW3FVQKd1qfjkMibnY+EAxLPqOim220iWvDqqoqJD31nuUKFOnM3FoLe4x4Ddmgi3WZttZeiwIH2hk2inIlhNFT4caD2QBwi6yl3hosMcUcaaCJpDEiIliWUmNekHYUI+NMFLvfFVdJbykd5uMJxF7yhmYA4mpaYbcqxDkWZMypyWG/dfnQ5kgp2UNyARgvV3RWBW06xJLOancQw7Xe3n//PQDv/fJ9SOeFgf7rx7/+7B989tnPfEo6U18iD46IkLDVW59sFGer2TnNJ3w/Q6lcmgTmBTfc7/fEzER6YcPLAhHpB4YZaOHqy0ARnwxWm59vuzw1fIgQ6aYJjKDhWCMZoxUBhEdzJE1ZTWQc3Si/VUI3l0KSZbcAaG3/i1/84hOffuaZ5Zl/fO8fF4I0/N4nniXeCUQLSquRTrMlKeoyc0dzMccGDR4xbEMCROD3CGs+u/ACDbwQK0Pv0gfJypRdyDKn2yDpcN3mcshF0ghNmiNdMpUusjCLuCWU2BZusocTC1tbBX4XjaAnKm6jve68W6T1L863ncU7767C5haaIZmULyBQz7QF5QMkEJ6eBwB1ZVYgU+kmqIFYxtBhjgr31ZlYCYcp8NpfQUTCbbCgp9Ol6SiUUaaDqLe5kN6DHRCp5Mm0jIfJPgfQnS+vh56to/7bEik1CtNRzttr3Llkb5hPb+l1hYx1XTkKJfsWZ7ibLe9ZkBjsCo+1n3/jaI48tmM7to9T2w1ynautZ2XYPtiK9gW3BhgzzjHM9ODXZsbkxBkeQvogstANCgtCyPDOV1RT6CLWbXfDBGWuyaoEzIyh3gx+FGyRv/+HX1vUAmVCY8kzNTbbJ9j66i0ZXFwO6I68XmD+lM6/usq6CM0N5ttAxQxRLBv2iMofZiZYzy4kOSrOVuI/pSzhDwso8a+0B++8cnL+VD+snpIaxU/B2d09J5yHesnk6DJema1DCMUgHfj6BI2pqJG+ioQykbjfHnxpBqkq/DHT1jkb9ZGFu/kMeJYfEWljEh+OjQHS+FPRXxUvWbCAicTmVpUxOzWpo5owl3bPooZLF5Awkd74FwlP3CZpJ9aUg/DqhRojXW62fs1KagjDUBOPokOFz3pPrLRC9FW0o+Yulg6KMBk8FWELbC+fydYU7JIjEMsm47bacfLea2qJQ+8b48j0jectLeuoBpfonKqDms8zlkYhiBvl9OkZsZhBpKNNe7FWsaf2/3/jz85OH2lmttC5U/OhLt1cZsRLZEDd1Nrh/sbOh186sAB+1g/48tiC3Ftl/D7A7dYzAQAGWkK2dFXQPjuowPK9CY5l6JmnOV6QmPFwpRQ0P9CKYDnUyv4We5v2OyptTvDPaKroMkhAIl1NVOo9FIw1e3ZLUXCioqV54Rxbr5dSyTyFCSqbuu/bcFVTZo8A4qy6uoovykqpjByzpXl7STdXXatIrwAo3Hjd+lnqTRRGEStVrbXCuVDPAfwDSpgDG3zvzmZBT9NGEcz+kZDx9NLRh64IS+lWlZ3wsmg3C7OhGVvSFiZ2C9BosSHzjgxA0LhDVI6/Y+3WvVCyeGdv6ye6iZZq3gmxwObBLJJGTOmdmSdTV9BPKgLVVrMr7vhVYlsHCpwnSwbmhMBDoen+YETjhIRxZwORlbcLfpakKUAaQHNcpd6b3s4wk5B1wgt/8pOf+NSnnkWn93/1ngC9E5q03oQ60zk4QdTmzubc0fzsCtIpu+wnOb9PfHCZMSHl4KwCjUvrehOALD1ke5TkbqAEBBdNp60btjjgEr9XqhJGysAbMk9wYNiLKvQEtRqMRJVhZP8QAN1uJ3g9WwkPDRhBUMCqKJEpgf18xp2JAQcEIS4TobJYp81ASmix1GAheetx4BZ
BbCRj13ptlqvzkfwoExl5yak7fSuYOxDLqVEcIKe7RMibcr2nyx7slBcmuNkzpfCDPKbIX3XvaXw3L0QpYTJJeS46xCOzEFvnUb+uzv60SgBdW4oGSYUAveA9k5cf27Ed27F9VNuOACYWlBSNxi7Pkoc/9ZWvyoEHCOg9DVs08JFBP3JqT+KJ3H18va8XdIAsdXcyDKXFqm9rEhm4+5cTewa1WaYnjMwp1Cchev/bT1Wu57fUfv8Pv+Y/Oj+ror1rYl2dFty9wrher9KdORCJeVRR5VnBmwWQLrxzS6FBziyCNpALDQbs1JVFVRidSYG5+EBSuWpuf8qTEnKApce2Wn8iYiLKhFibpWx674GkGprd2j5MYS4NpZjlwHUAqpg0l8cwfI2cmKbOSiILAE1uTrl+A1QRO1RrIteIZbezDIzNfb7Myi5QV8HYf5fmVaft0ntHZxAx0cK9CUj0BjtEexPfA7/FvQQR+oCIdKLFb81ZIFJqiwfQVejVla0dE2zLABFZdrtBCE/qUSbkCmaCqJvU5RAk0XV6Rs60cUvcWhxs/m0xBaZ2ZETE/oLgUP3RJ7cIf664KeWHbTKj/w61JclU6lQuayf5V6qwq+PgD9kvrqfoyK7xiXj95k0VDWatd6E3dLGzoNC7BNq7wP0Eu/2HaIPWJL13PSBqcV4W6tKXyekl7Wxh2yumDBmwWvtNhPWrr2LkIvur+oJL9uQjViMOfJOEV5tURvVDXTVdwM+IlyalhKYmRgDtNrdENn5yRYmc6usU3R3KYVGMk85qPvQeSu7+NJFwaFIGEnruWQMM3+W9FZAGRIipt/p81Wy9E/JA49pX+WGwmYYKWi0MW93C+ZwIS/V93ViSwMp7+O+jHXZcYJzuOpas8LcO8886aCuiFQQz5TRfN5vgQM4J7cqHQRLXP116b+ro3tqemVqHU5vEcgL6iF4jOZvgMx2Pcfp2i0KJycMTpGWIVDZyplyKephdyQ+vSbgzDRM/h2fxHb+Ail8Juat+PAiSZ8DlhmlFYKK+32BJko8lCfMBwhDqlVHKJdf4w0D5z+2eebx/jC58btdl39oewLJbnn32E8z897/4xQe/fvzpf/XsP+4/eLx/vPu9TzAvYUl2oNudCUU552DAZc75ZYWKPpaVit2CVOlzcH/7jMhFw3LjMlBs7d5IP60qET91E88JCI9FwEQH4DREidoWqpbsDEHbB7ICU1mGWXIY/ZPNGC9KeqasXZJR2ysjzw5Sa0SYql0sVgGTZ8rdI/lIVGjT1GLvLNYsxhsmlYMYLLpVR/VoozgrGGOly4EauNrYbblpo/JivR3YQIFDpLMA0rr0awinH2UH4orGAnqU0gwMhWKK48jD+DPOrFhnEGdaPz7AwSl4rkR//NDYf2zHdmzH9hFoO5CAulRVU+WcQ5EsAAAhJrdMuF1KvxD1WQtRz80h+h6pb5epwea3pH8vqqe7OxhMPSSLL0CxvlR9QrVWD6Ja9Da1iTifojIfli7k0VrMpLLH7ypZ5LP/2x/pulpydtm37iyeHAYIG5kK5eqU1sx8p+Gdi66otQaQoFtUmuQN3yB8M3pvzHDeR0zUOZib+SWZIY5ATXeiShAGcvVS5IUoKj5CAPBOA0IoQiald4B4F4/A67JqfI6L8IQOvPDczTdWJbCjvfD5f4siIUnrvcvyDJtGZ/tOArNWM9GyMEC99940s1yGkKfcKZrASBqwO8cqPhqPd1FTAOJumoL7D6rf57LbYZIwfJKCvm/puSBdWre8qGagtC13JZ5ENP4RYFoIGuQuIsIs1K2sgmlpdsKEBLQsqM5EAmhVHOyE0KWDNCSQqtW4oxORO2tY5JFAPXXDHCtulSMiT5rGZm1z6zARkaabJ5B0WZadiKiRWERoITU2dRG01qmrdkO8xKFW3VizEUi3jEpVaRMxq6jO08iIAJavFCKidXNA0OpDtIS/KhcbbonX8yDpoYXytGFtqvMRaUo/PQiKsShpTRVSBFDnPmpg1gXKYrFpLAImrlWDFFNGJdOBVMKcm7QWMjRh8WeaaPCsH/muZoSwTnfVli01X/GWGCRqAv4be2+3bVlWnIl9EWufzPqBtmlfiwKELnytoqDRG/Sg+NFDmIYxCiTZj2FLokePIckP0RI/foRGVEFf+0IISqgv7WZYNEVlnr1m+CLii4i51trnnMrMQoLekyLPOXuvNX9ixn/EjClxBk4X7eZlYATsX3/zj//rt/5sB7sA1OApY+aELFTiPQ6WhJVH6GwRLCy84BUY/M2Kck0wAc0H05PCYIub6kWJkke/JaWGxBL4VBjPadXogtg0iDGjWAHg1lZxZgIzrM4yV1vF4mA4qcDKgJqnW4zWN0O2dhcAiJ7Pt5F8Fp55Am73cFoyY4xm0hdnkyKstLcDEcygeuLvftIduiyqKlEwxZFqRKDCt0yjNGHm36fXQE6LxBdmcZFXQEE5WxQrBOhYUNHYBAstJLz2IossPTjHIKSkEsGacySQOCzZXLwQxPnXskKdp7pIVVGz4aUhuAPhED+JrAI/LjuQZBP6SW4MFRY18eunY0fq3LczzDGSCZVTwDtZzxBBFPdTQ7spNnAck8kKTcj4lTIpmfzdCdTN8tZyOEZnSfXOS7jLBo9sCG7HihsFsP7ydjmJqpjq40evPlnH++/dnpYbFdXl5FtwPt/qSTHWmF4PLCV3SdhtXSobN0HjfRkjtOapEVH4EVMnBJJ2cl+BiCw84+wTGTzl3n0cfHU6JtInLKh7eisWllrURJedjdr0pb9hdM42/36LSFgIVHghndVJzAu2AliWRUUNGOvqRSogIjZU1aVtYK/Z8LAmnj56/Hhdz0+fPNVl8W+XRdb1/Hi5Eaw21vU8bpYbPZ1UxZKlRAlwGe7yAwbW4VRqbX8k3GVjYNgQgdec9I2yMTl7QuU0C0VA1gTMGFHmW2Bmq9mIS6QaITslRmGTPPhU9VlUNSIUEvM3w4BAJRgdDBbbmbGPYAV+jN1QKp/Ofm3k2W2PQmkmpLsOZ2ZDT0u8PqiThp6Ik+tCuZqYjQGGExVvqpe1Zl3QdQ6uteVC1sO+5Cin2LbJyFiHDTH1KiYiIhhRmAOrYLEZc4V6fuMXYDVd52BZK5w+21CDbF2HGy1C6QGNUqHnsYL2H8nDSZ7FgnPiRczNV9wwxmCCE6FAFsG30z/ePZ4uDcdY/SNdFjd09LSo6tOnT4RvRIdBwtQYyb/9kJA/PIgCBbyIotVxCrQQMYBBBYikv6keeqE1yRkD+R6tDz07cm3Xdm3X9i+tneCiF6nHInnvpUtsQPvPfz34UmkpAIiybFSIWSXQvR80mCeVFDRdXOdxMzsdLPEAmXgMaa4rNK9F2ptdlrRRXL79s/giX/3aN2tybUImXtVlDnpi+5fw2AaL1GVpdcC8DmMCwsKpifiRNqjViVKDDgMzV2MiTfv3T6xZBf5VqQS9NA5blKI0MC+NpnfqHekhzV5D9Pszb3zii++8+x3s2hsff1N14XrDilhOU1FAq3nmdKoiQEbYJdWK/IcaVfoRJhvDK2VuDa
XYlF4ZjZRk+bsru+lupPsxPSPh3onqRukJBWCer4ocd6GPht+Xq0Jr/mn+xWEZZanQcPvM29UmXUpbXc5D/z74v4B94kALULsh7Xjqhx+56eZ+lXQhZLN1in70jpF5lmWmBoHIhNYAEnroBnZnUV0h7bC4AIp8K1XSbUuEAWB1hAi8BSLte5+15xZkerB0evIODmfLSXW0ir5VZPvcvB35CbzuoSvIyqHDa1KPO3H1t5BVHSZamDXiS23G3qhN53g4lCakATQfNxT38BakVnys+RHi3w2kZPOzd2bplqrsJ1rTaU0Ku7WsDsIEv8K9fbrC4GE5AzKvUhzFLYYUmC6LhEljxrhIGVdzC4oi5QR3suTbNBB3rwXMaDmRygvbpCEbf0rvIEHRnCkgckhSbiQPTuZa/DhaUWT3et50d0OLiOhiG4Xg/macThBkunYtRJjFJP3p2DiBhG5kJS9ap53HNIEoSR5SfDtt6j0Icm3+j2yy9juOzJ/mR0I5mqU4G3xLVqQHBki3GyVaoLcIZECA4mYqMqi7edTEAFHxsM+TJ0/Gjb30+BVLDz4kAh6pU+xaDbDXhLYLb0/F7jV8yOJ0jVfN2zS5WhtbmM82ZgfxUOxhobsByaQ28ybo+vrk0sp3H8lmSeye2ZzxI4JLsWdggNIAD6epQgYPYtta1YY9fXWsw8yrrPDWKZFh49GjR6fT6dFLJ1Gs5yE4P70dj156KfG1KTmca1CDbXGZ+BvAStVwz2ILa/sZ1RwoQTcfdm1YmnIEyWtsegaWV10iDsJsWdkW3jWHRhGN0XSDYiPvW6Mve/678kFHZ9F8pIg5eUdNWFKLm+dueYs9e5OsGGsdd4tP5VGaimlWqYy7qxTUlCOUCSr1lBg9Q9T6e8bFCJN6+SoCG7truzFVa4yC8cricfHyBJJCtrmQEGhMJkqgoGdjmEekbF3XlO7SzCfCrwm8zXynHe0AKLzasoyp9TjMXa3xGWtc0XRfGf3aru3aru03pJ0AMIIZlkf5au5ozqqtDqyRk9IVVpq7oalwPQBMn8ykZst0UiblSLc9dgopJnHBPyvIiPQ5xFghDn/5Fxcv6vmQ2itf/UbmxkRz+U1bFjQP3DrUdAEBK4aUQ8hFOyyfBtKCFtXajhlUFjkefkIQ4ckaTfID5lfS+mAmYwzPNOTW0eyhCZAntIz2VD5gNhn2TYYfbSIb04Hwmde+GFaiSuR6GEQiXwg2oNR/VescdKqn7H5ZAlVFskq1iMhYV0vrLGcpcco6l2gbS4NqXp0SDT1pTzgER5qk1A6tUF36t3QKwGcY2l1zrXV/RH7M3RIV8cuVW/nuyNwhTlV7/bU3QTW+W1wxj244l/5DoJJdTOsMKPlhcdoUVmXJ1jWdC+F4d8Bm7D3dlCBOpb6eQ6VmGuSMrLJU0Xn1K48N5cFpgY0+bcu+L6uBdyjpMr+cxlId7hR4lTJMX0X6yYSyXSsHjIvPiz7mKVH77TfCWCGqtJVKszM810OXJU0qTjse8IyViBWVuhumZacDsoOLkuLn3/rT/+Gbf+zlQkUVBm5KZO5kdQYn7Pslz72qOtIEbmuaHA39gxI8iao5ioUQE0vSdp+dZgYqbRFpvMZBPYI6nCrNNk6oYoWzY6UOzZlflWPDPNWF5GHAtAdzJylz27ZCfRVlzLoI7CKCLznppms8KXDyOxTHTLyqMMWWmzbhLpw/IPMV1Zv1cI49dIUCmE8qodqtxWJNMneHkqzThx1mvT5kBlELjCHnWvQscALFFenKDfs23xXpI+3nltMXzp8/2i4d2K/AirXOqJrnnJWzKVl/X6skHDJ5rbEdvxk+j2SSo7uPdAGgp5MqRG2IQSGLPX3/CaAvPY4DEP74GENbldU+51ptg27Co7BuhlRjUdVRuMgCDVKrKCreDtDUlfmL7vDOcxv++YyBCe2ccYPvNBJR804fT7rIp3dz+WZUsiQPsySRj0gUFcpeVVfXULFqvx9LRXQdqwhVKQCAqp7P6y9+8cuxmsoJprAhUDOMdcqGm1aTUNi3KIwSKk96itJBJztkKF9Y8pAO49D05tgiSSxVhQP2l+q/MYeNGYDcshngPdxZFEE6MDT1xgoGU0ykGI11nMkDyxAYxqQrtlpJPkycF59u7/THJ8du6pqN8xUFmx+LStAmzhgiWzBy17ktFuqK9dS9vq4spjwJfQvNtMhvw1+7shilcwpzLPChW3xCmMT3gfob6ZMrF1AHP8LH6qa5TmvrxsiiRsMT31UAq0MYza1sbfMdDLkbObnqum2GYVp091RPuzLNm3w5IbVbF9FXGhcTsohru7Zru7bfvHayYdCqZGc7xWjfXv7qWzx3WjpjamWhKjTeP9n/ymwEdlAMNTXvkv+QOJqBiSeHWy6trvgspV/rLjVWfhQy7tedF/nK196yOFKD5itiI4z8FEzK8qFY8l6UdGkYAKhhZXyfqg1VFJngtTWG3X4SnSCKUN4AMBcyfWMSB1pi45qBBaoH/s+8RZC46iH9e2E2cIH9hGj16TbV2nBGfElmXhEv3Vu5qNJo2+gG61Z0+iAFPOonHa8Y8GVZbl66bc3acWdAUkhuBu3i1NTK/2HUSDEAbYRhgLifJnTfpmGE9ScNWA77/Az5j3FMJdm0S9K9VyVNNiW2WaAdN2gNeNQ6rrkEYHm23czcYb1IU+LzzCbxl4aBXy9FBXPYKp4DTLV7DOPJQO5cht5p0nD5ubV9m7n7PgOelvadNL9eUhU2HCjlu0+AtrJiBYeis4DX1KY/hTXPbTZy6m2yoECj2oUZ8jOndPMk6Q3ccABoDn4Ib6jOlaO8s40LNlNN4oQT++t6OrAs9EVOc0wbqQHJJqfnhZZ5ZwDylH1xrNzWSRY8exOJu0caa0wCRllI/nsrz9X2EIiATUI2wwLDevIo/5Hs2KLuo8mw7Rn/gkgx214vv9swZnbSvAyKkYbA3rnb5sOW7X4UUwnXoaQEdB6ok5C22nqzrWEaPL7ZWgVOSe4kg7fKQjZI2OIoiaBNXZjwFemRkU4ndA3T3VBj9PSXyRYAACAASURBVDbjkEwfcixuWaVfdxk4U2f6H1w1SSdLUmL4gwrN1nU02ipWk+Rw2CZ8bODocbxWk5OvNAI2Slt+Iu6BqrW3284cDTezIS+jtA1RYQaDDAB+k40uirGaU5sOFfFT6WR1QtQqdGlBgWLuMCoxbQIlQfuHG0h1VNk4/C7zkO6mn/tA1uZzIRrj0FEUfxQu5Eyeh2EVstW//kFINWijE0ElNk6I7OAcBi82s1uTwVR1nFdZRBc9jzNXLufz+l//n58/enQjUFHRRXRZFLKu65KFdYo0Cjv7KJQ5beoxp/kZflf8T8pXQ1Q3/qjIrGwET/XYX00Kr24DRBHDlhUrRz/sbTNl/pUcKJ2rjG8m74zcjPJXzoKc6kkbh/xNEFmkfMThLDMHtgQGO+Kej2KUnLwrbK1QZ/ycLljsABPbrnsDg4bkpfJRfxBo7p7/6CVwbFpTQ4s6IrCTVzb91TBArM7aNaKcs0iD31ame6LwvHiDraOqrHNqk
qMFuDfSNrac+NfyBzYDTG/t/rKWK8Kw2qHcyt9qNQl5CpX++SF2X9u1Xdu1/QtvF7O77zip/VvQfp2+yJe/+o1XvvbWr224a7u2a7u2X2f719+86wbta7u2a7u2a7u2a7u2a7u2a7u2a7u2TTt5KKXud9sHovetB2umeFueQKiI96Y3PzXmgSCtWKJHwVo6gDExSrLXjIZliTHJmx77kbH4xWrI6FEi7fK//bp8kVEjkvkmwpPF26glUFkUPaInAGSIKSFjqMijhS/ZD6UagOH38+RZLYt0sHiLkBGFxM0dIoBoHMROUAuPqBUIRURkjYrpMXzcVCw+E6/P3SOdm+zEPOUBHl3Zh/Esjvu1eGee2MmjEH56EZIlCPPMiCZkCl7MgrG8TtAkxres9B+7oZ4KIYDfI5y71AOQGUdlxDrxb5tel9mmxg9G+8STfIZhiTQ4DUg3eDjCRsQ4dxC8pjBTPcxP21he2VGgzcA3g7q7RIN8srKSyQymDIgpmTSC95k7opG+KswnC6SyljMAiN/5nWlEVhmbLSAO5pcyFwEZJE8IbUiHX7ecmszu88X4hFH17m1O8+opE9t4Pfy89fxZB5+Qdo13dM3gE8CgElQoqplp2Lr1cvq1UuPZpo5F+ylkblxujCQotgUkK4fCkICNXdeEGU9cApXo6ns8ZalnYgip7LD9f9/604++9cfwziunIFo/f8gzic+RIMntF0DjNhNpHzfSjb+q2AgzNiJxL1i1YbTcwyoNW1A8Xnj0TwTWKqSBLY9ss5mWwnqOwaL8HFZcBXcBFbzMhdQQFp+Id5endy2Ll8bsmOBBmZACmEAryPJ43eG659VnHkmur/PRKfuQ4NqBs7J6uXN58JFJJYdbMGNk7T8x0LKQZeB/JCRtU9QEgpVisVM7GvGmhCcqmdVx323FzMZmpg9DNPWRwKJpLTO6J0ealz+L2xUqCx2oLKGDVB1YJu62ExeEfqRQmsFMFCMO7Z/PT8/rGX5RxhAIbMBERPH4pZdubh7FjT1+85iJ6nIet4rK7My8pspO7Kn/3LM7UIvSdysB+iMXX+5PEKdaeTkygYQsOSnxA6ExjU1njRYucsHjiRg7P+Qj0uDGH2aIEwAcTcgpPFGuSFVU2wmJqL7jj6cKGjf7+Y1q6uU0IAoIpB1Pccps2GeQTeZ3R2oK1kZjEObtchF1GKE4vvWNSd7oz2VdPGPTTuLMEGY3QlKtfO+WPrilpE3GPBJJkh30xW7yTzMr+ehCdiH46kebwO4Rqz/zfvgdTheSQqwUexjr0oQqX0yvZhOKFWuCpMF2BwpvFEHhYiMLO6fs8yErZbK9tlKRPX28Z9PuBpT+M1GiPdnZdNfjap5C3jlNvkNSVfzoDsDKojw2ZbIdvXrYCm/w2BC2XJ5MDpPsS4J3fa8h5Iw8+VUzUA5W0RuVkmfWn67t2q7t2v7Z2glw70wpLvc2lgRKm44CKw0PaQq4tAp7ADWNYsuuvEO31bUQD0szSdBHHOF3E1Ysmo9LpEBIO8YMIr8eX+RHv/5Ny8qaIlsJUoe95m/MrwGZLA4gDhcj1M78LtXVvAiyznZaOhYp9HJM10vNcm4l6uOtZjN0Q4GKW+mX1NiE57lqPVFiOo+WtMupkR3KEc4d1oHhq9LO2/aLG8ysnz5vZ2/DkpCmooYi4urTDMimssaShaBA4i7vsyF6xRKbbVWQ4Gfcusna5eVFVuTBmQBmfjWi+NHTmiRY6AfgMSpQQzPua76QdDh5BumeiEeCYKW2BgKx5tzMQ39xaHfUmHyC9qXSCSgiY4w0N7yuGTdDCAFHnknpDAwT2xzVrF3puCOJ7U0hNBow872Kk74acEE7SsW1pAEYMLfprWmn0tszu0A2FC8ATGkoWILOEj/D2O6GwQV2TM3ecNmlCkxsZF5ZmBGxeuewSZx+6VMWc0wGsdB9U9MmWR5w7mntcvDr7u3Osu/o7K6B8n+k0t3X3rtMPGbD6Ob18Kh59c+3uxVUPsJgoAaI+KWw0qrn1lRs/hMsf1KGiQyMQBTuNgxLd6nvRGM62ZMAYZtNk1hVM5Hy2GfKkTCYJhMqJ13Sq5XhawW/LB+e5jaQaCMiG99jrHx3WUOArjC5fOjWrl9qZD//vcWXSRans71iPQfoNbGMpkvwW4oWKT+wVay1z8DVlgv+Ux/+wtFfYSWRQrQWiwvQVugiXlKl/kA2LoCIQk1KbOVWxbss6Wop0mEQOZ10XRTAuihsjIHbJ2c5id3qqx97ZVnUhgniWmd3VMKKCJ3RWNRZSZJPpQSJHJ3BFBDDAz7oQp5ZcL0wAY0PSI3VeLsl3cVkYp58d4OenQT6G84PNvt2N0dsi219TSRjERKZXV8ChBtuFLRS8JoNg6qXkFPRvNjOH9VFw2dNxFjX1QZefuVVG6uY2bD1PFQHoKebR+GcAZAhjIOZH8g8HzJwtZzy5hEtQ6ABj4AbVRnRkget0q1sdICdYDyMcmVsO2KEsJJafH6KXsYe1/5TgvpxaElB0c/E9phKXcliKHerShRzcZUKRtB0BdUwjQtstt6i9MHmIWkF3QmljCojtZqEKRrRhxJSZIEtWKdWYaFtTd/511nEJkukeHWSKxZgB6/Wbgr/jECotL1rpopVdM2ketgIhSDpxKuFV6IDUJEVZoYxzIPFm3UJoVmyzydwCDKpd/Kvdlw8UYm0udUdAYtasYeMRIOrFmnzpTs28Nqu7dqu7V908+owsDEmDewyW3v1a38UihOFAOuFFKuuyDxon3ljVhoFFRC3nRjMuimSGmbaIbKx4MID5YMpH65yKUwNokAwgdiv54z2K199y1StiYdWbIrz2wipbszR+gBgUR0GQKoyYIw7FbnSikRlrF42zYGDNPzKsvRh3MWpQlXWPx8ARJZ4KDfCe9BCkVI8hf9QyJZhwFBpM5x2GRAtl6f13hEoyjjG2wpdpOVhxkui0m1jzCXUWN49Y7diZmMM9SuqS70si07SRxddEhqEeS+4E2Ff61Dp8PObSWdrNz2QTgzhLGUP/nL42SMLkX7YDOIGbLVpiorwN5f66I7QKvxXnoqAnkpWNQWd1FSqYwNn1a6p0a2wFmjBjjHcqw76QCfWUIgRqrPkPRv+CFJ3T/PevZg+KqtOSvid2kZT6XU/mqpAho1Uhb18JKi+a2ymSbGjabvvaOmUH2OA3u5MuCy/T/sZuwF4/kl7PJ5rYZqYQ9f6qd6DqFMLIapJwcphkR6MNm/vcDmdSqNNqoHB7xiaUd8p/0gBDuS8u3n4KnP9OMcGEpHtRC60u4eipWRlb81Ln34RcaM+KKGTMsA6r4MvEO5ZKLTROi0h24wA2ACUqDra6ix99IXQBcn4ZdBqNRvpmFuUqeVdmrCNAb/tKvk8i0In5/fhe7TCnBxQfo3GfROszYAyeEim8UvQUExe1PkphYhoPdu2+8hDV3hPNKdELKfmZvF8s+ro1kz8Mb8hOiCTARyjK2GCZv3NrSl/di1ZkiTJsgKAhGBy1pyNbup79lYBoniXsvRSGyPS2Jga2Rh7pJE2IkjRYMTmLsNR
71vk1AokFj/OFkMZxCCLLCfVmwWLOs9f5EZESU22rmeE4pNJkbE8H+lcDjUehJG25iPQuF6wtdzzH5TWsHXTE5qpJPpOCUsXtmLZgeLWXco1USQRbKeAzm26ADhoW4l18AeJx0Lby7xxEoJQWwPgmC1e0LkhjxX6GVSXYedhQ0Tj9u1hqrLo8uTprY3VdDnfrqpDdNGhgjPhNqF8/bKJLgVbijxB15U0fU/hm66wdEbAJKkrPJIRRSkFAJ2VGxl2dyU7SxIEa40jMD42A+e1Z/3dnLzM+7LdsqR9s4IwGCgC0CwOrq5hY2g8BUIyMbdUMpARIqzigMVmSkNIwhWZcthdMg3vbQGdeERcBiGob0lTrNhHy/7tAElwIliNhOw3c1wayJhqkSQzWbmwHkBVYE4pbeKgf8LttATwCHLA9NTBnm3YdVumz7JyfAN1RVrOR4OJ/1aSypJdBLOs0YgalYsx2rcNifLvtsf7+W60rfhwtyTbPHFt13Zt1/Yb1k42xoAfHmQI/rJGCLimFvJ9Zp+pgPspnjypWwKvxHBYBJFlaYO8PnmzmVmeADU6a4BSl1NCCzDibug2cQPjVxJz/tVffej3aL/61bfc8TUwplvOuCr/nRpMWkll1K3rUBHjIUa3mvxbHeHGRfbM71T6KAno1BmmDQ3zy7qBFRqb+7luTjw6koa3DYjIskwmWgwpMBON/Us1vKRvGpKT+TgJ/A26bYy1QBb3HKhQp2taQ9Qpt40nPHsSiaLavqL01vnZ9nR0EapIjWW3g5xJwJ2PhDWVKOq6jpuxZoAsou2op/XwK/X6MA+pGBmCLiRJJlxvxovTU5ui0meoU3ho34wxzJZFOT1XpCbTl0H22j6DKF9J/ZfOCh5nnXfJf67h6jZVjLEKIgqtftCPHk/CTESEaRPYW5u7EVzFluZg3TanLxWFiAzh6eNQoptLz9JxKLLBwXnk9EckxGZbJq+/nmcRL9Ma24Jq8oHK9Eg+mMQxDdqcSZub1vMVasx2CFMVtcxpbujjfVfaMbFOIItM+z1RO/Cxb/7Jz7/1p3vQJSgiR1CCeOGMA81/GJ/Pdwo9uFmzEsuIwvY3JuqXXVh5FmkF07hz/lc7T4Mrcd0IYqk0Ssll+iPDcNouKBlAXPxSNNfm6125bekMW3URXdCT3dDfwLDVL6wvo5VuaiLbZPt1Ce6cY5DLzEZsmZmTq/2gmfqt2eWpLPM7vYRW3xnhuusIAKCy9A8s3Yf9xMXc5tTPsL8DpCpYZwZJYqllhfWYM7PsYj9YbPaGNKzAFWT7MJyeSQohBxKCd78b3FQp3WQdZw9X7dSQwYhIG7IYjdR83UEkQxSn5XTWE4CxrLBAQ10gC27P50Xt9Phx4Qww1uH56ROvnIDYyNS2PGqjA2ycxXTN5bPNQyRdkPr30sYoJ+imkXdSOdv6ajp2tL77JDcdbvuPmW8e2u9tOG3IkkvWhX859m24OiIqogoMVZUlcXmUe1pEZZzPy7LYgJmpygh2MVT1PJ6arGOcsQCymg1Ve/r0/ZtHp92iLOk3PDf7yc+cIpj/vFBr8WXU6XPisEvCSY2bXncVcBQnks51BeGRIxKU5nQX32oqU047hls6Rpk17mowk7qaUhp6bDQG/1v8OrI+ALEqkI5j9lmnJk8twXtmRgJn77n0qRmzWMYcJwskJ9kXnYQ8ysBMYw7xfV7iknPyuaw2ZDpXRJjnb0Y0bt7b5thrzHfTTTxAj7KVHJoLx9y9sWUz9jUNYyEUYB1+SZeqyDp4HVRGwGo3G58pB3YLU9UiqNHAw4rMEs0u6SouZaIrKYetnuRvqcPHAXMpsX1t1/YhtB//4f8M4NP/8f/+557Itf0WttNyWkYW+aMO8MvL99gYRiTba0TF/XNXelwWyilyBBThaAn+PgZE3AdkANwyEEBNVdNqUVWzqF4kfsakiTfeiuy/r5Qc4cGZDAo3xvjXh90+8u++YQxXLqorunchjUNXMsoV1LVcgYnoMA81mohEZM1XsCzei6SiEuF8lpNUWdezARgiIusYp5OqyLquDlIAzFzDzc3pfHuGGVT9FK2K6o0COK9DFVgAEwyRISYGtZ5MBHfwtTJBy3KCYF3X2BBVwM7ns+picUEsQpfi6m00g63SaCAQbXk04TpbxIaNYTaGcs4Er92uZ1laTkyc1KlkDdfLlKqkqvqR7cQMKz8uUt92/W1C8kWxUuUbUK/QZGZikHKvx4lXUVERUbMzA8WFjbQU3WMoqgqO5QrcEDstJxHFsGGmqiJYYWON+Sg99YlDcWcxJ2xc7AKMYV64MQwBgs9sNcN5DBVdlkVEht/PmXmXhjGG10bQRW0Mv5dZRES1AszDzFaDKRRjNcO6DrcbUq9rVvmkTa7nMal5GiqzKE9wWQEtiSoVPOv75ixCsNpqayGeGFS8JNZAoLJfJirt1FifVLWyLKjhl0IYl4/ntpYJxVm1pL3ArBUID7/67aUCEajaWEemlkRRPzMH8Aook5nFtwOAyKC73wnLClBgSnCVmHLsUBWsAzDL2kn9ZHf6Ot3nfOF66Hw8bIPL/pZ1Pau4yQw6khxSgf7DhiQpQby2ZqaLJPwjkWTQxCyqVGOeRSYOi4rSEWOjEv5UTJkidvYEyjCLjDipMNyuq2+KqCZzgh+UNnXAOfp7qpGN4VsZnsMMobkMmk1TGjIGZ/hjOLeyNSINuug67CTDF21iInLyjRjnQvcwsMp/pKeTxa6WxbI7JVuADQ4hMLFhGTARgYfUhqNBILeFvDEz6Kj+29IEGCnaUiwlstS0IvEpyiJr2X0krxh1rOdaJPzZ3NYRo5SrMKbfUp1Mc0TBOIczkqcCqDBopuaF39wcPy1cNkZdxwMobSATw+gmPwyCDOR0Z1ykusVqOoMIyORbhISlc2cRoigpwjd9JP9l5TizQYElIXE6aYpTQehGzmHQ2NWwTPcOySJQMcjJXnrpEQDDWW9UILdPbvFkQCAvG8zO7z+5uXnkSziPdVkWgPoZrXuZLPoEUSlBFqFlMiSSSfADesC8hCrfFmVRVKpE3WRHOgECZsQuRkYM9MK7604XaeeUm5cgvQ2pgPmcuMdHHNB239Ryk26TKEo7EFtEAb/ZHB6VDp/M0wFAF12WZbhvHlEF2zGkegmIA8Ow6NPxVIBluRHY+nQF8JFXXnn/F++JnuzWMHRd9XS6MbNFZb09i50CQMPz6jSXrxmI3SwUEF1CrDCcEnOBOpGoaig4mfSXhMCD1dl5BdvyEYKpC1oDQ41wDrE45ZLHEpunvv0bkUUFMs7moaOkJoGZDLMhQyGJgiUfDTLWVVWXZXG+OAbPiJmm9y5SCOPmczKnxIIRO2t5CCZqFEtGlQxQUFU1gStIvmBZ6lAE0/BCe/bz3d1pTO8mQMV7s38CUW1qULqN4YIxfmN1LD+ApRBR1SkKKDCMYTJkcfLT8OcaBi0Rxcw2A66D+5gJE6ELYl0HmSdSYckgm8Q5jOBqxSpD8QCivofBozEelREAWATDYGNdI0zOmE6yEQtqQq+pmrhE9lX6YZpJ5inLWzg
jiUN237pWFH3WcCUC6wbaFJkDFgntB/rrtV3bczd3RPbfr07Ja3ux7eTKCx05U3ndS00ozcKuKgsGETH05yyU9/zapFwceVeISDqJLPsREZ4skIqqTcpAPCaV7T/Fa8tJYQbgV//nf3geMN3dXvlf3oo0hhRHl1O3ABdSANxxkvIpo3FZWcamzyWyJa0JKdef6TYSU7W4iELUT6dq6c4cB/B6lFJ/h4gfI35vSUuh6ElswYwe4QupzzPtJ9fh6+VxC38mpW7sLoghVlA8hptRe6MnJfrZ6sZ1vhBtSLOS1xs0l5aiYa1OzJYcvM8wXCS9UUa3kfshVECXaIEYtIGEaO6OBofjXs2RmkJarD5CXWUTN3bEhCT1/AmIblsrvbIk3zLSpMDkI3HXas0FYoEAaxTZSV+FiEBV3VNZWtW8NadloT/DQHYDgduuqOkQI9s60giYzb55pf07wkxA5z3AU+TJTWAwMRk2GEKn6/S5G3XUVCKDwWViiFvfnMYA+hGyNslAxuFpTRrUFDCX/sIWOBfbnG4SQ+TKOyDaE9PfR2k9x+0X/+HP/sdv/K/hBGADfNO3G2gwjApf8CvydtkscbPayTUdbAAkPZpZ7hWCyTGzoZgpc2AznncmM3gy/WEnPfeSIHhDOAjT9PGqAcSWAksH0L0wt6NnyM6avTj/1ozCovJdR8L1SLO8vIishKiwcs5s3GBt7f6z53F0PtMkDz8go9pM/C5U3yB3OcX2czmcZ3tqBtaGKMjP63MLbNt3umVV890XuUXJf0rKX5haMFKy54Li5fCA0/nkCRUylFwR597Ixnn+CuD26VMddvPo0aKLB3TGGH5lxVrS3/+njD2T9gWyoajSmeJHMEgiGxIVUlrt4XAHjAJOIdDoAHPkIz+OJ0jc2+5qT3YMIzHi0vCpR6T8Ki3WLPXlA2ob1OQ4ScadxoQzxdX2kziSlWMMnq7A+XwL4MnTp7YOCMYIffSR4ebmhssNxGw1C1r/+evk8+JkNh4v23BNQxH5tu35KLYw2i03mL0BCsnq1vu85kYrFg5BsFByfeFTa0Jxv0lexWRYxJN4I4pBjXcT+iRC3TJrQCiO0eFBSbxjVlJpmCjJVfhVysPDVABYVzf8RRPTvkiZxBfRryspdwskEQxykXCpU8cUndSdmnkGewDzQCc9/1JKyrxIATX7ZJuz9kvRLNwM2gG525Y8q1Zq/CqAE1nl2avMv8y/m3XUye7mtqOIy609VlCfMftY4l7btT1f647I/edXp+S1vah2EpVF4piu3CdeXvnaN/yXiMVKWU3lPBDJcxRbVUDyTuBUHYQxpyPxm2/BraCmQwAAPSyN96dymQ/Kh+yLfPWr35hqvB0rzFNjQHhyqE2GKLtxfS2fcXHK6HeAfJGFXgvzmzZTuI4xXDvcK31mI32MMZpx35hTKJnKIKUfxY9RvkaJ+7t7qJM2f7h44sn0REQ3uqTSSo3Ct2/0nkodasp391lBRU336qt1B+jGPj3wGzARr2mDE6j5iWzsUmOKQpodktc98Qkvm8NkherS4l4d0A+YZ3k0bx3xRTCwe5FERQTTNRc5x04aeQ+7m4vxhXhCpRuMntpQC9jBTvaU7d+EwWLTw6DPTADVxcyEF7dyiqbLKaFJYt4aadtohNWH/YF4pt1zYck7Gp0qMWfTCyHW2cizNmqhrmAb3QBmraxhIbzBrNKjaBo1vZjzN1YHjNSS3YgPaZlDMx1izDkdrXzT+QeCTTCageAAnan1bfGOrbjc3NTEDj09OW+nv42r0LuNb40ZI8Ek0z3t5kl8I6dcpLULi30vhcn8UojaIh7bdW/aFtkQXMFTXjhX2KCb8iBWMc2ndU0Ea9+mwyoSSLvRmi54dLNJePFIIB9nwUmTexJk7MIi37WWficBNYk5kfn0eUsJmdfNaVNA2tFTjf+mfrLtZv9HjWhS8hcZOdt2IZu/Oam5P2MXBz1wb6ZJb56aOALChJZJVJajuLG1PjvShg2bXREpDMASOn1kagWBM2q2DvMc+bGInpbTzemky0l5j4o/bGIpB9OBH/ehNSUtHiD9xYRCQYzEclaC3WwRthb55sua/mz2C0SQedZIgT4h+tTHrsPJTT0j6U6xOOjP5u8aIfEL81S70JDpLJX5ZqymEm2wNeVnA6l7wAdFPp7cPvWHzufb26e3p5tHL7/88rIs4ghike7toHHmCXQ3Tl8Qp8IIJSZukI/V+dRDR8yGY1wQwZL/uHYU3sBp8YAjGzMDbN4yxNocHoy6I9SSwBmFio7UkF18FBch1eaxX9HmrtvNXERZOYZ68WZJ8Vj8OwE6ltw2P52onAxgvOkImUd8BwB3jfvCPw8nyOBAarBoUrUkWEp1FxUefJOJpbdtTG4+TyRJwne5F8Ca+f3koaPdR+lW0tBomEJ4kqiU4UCAyak3Odlr0qS5TOE+5kdUdC5L77aQDgyk7/uys3Lil0Rn2VHQtV3b87RLvsj+wNUjeW0vpJ2cC/NY30XeF41SPjl+at209Ng87GtTWcP+JStLumJ6RzgfND0O/DCT2TX1n7aW/eqvPsy8yK+9RWuSoJHUk/ZiQfIZegEm8QO4slcCqvds+b/+vMHTE6bRHZ7K+oPtMJvxKoYMzccZPT/7HHp2M90kFbHQxEoxQiwzLNMxo48BVbWfwpby3t/VnHOOym/nonop5N0PowmeiPOLq+mT09EonQmtWbFr65uU46ai5G+7RKgo0jNYtwCCH/3se7ivvfHaF7OHQGpXmvww7eyoSQVbEpfKhx2zEj8NnupRZAJu7eNulZYfsWlQCe2iVlcvM3lvQ32CRZaylAAAY0Tim7J/I20U3gOCKPRqtUflAHN1lYN0FXBWja3r6rWHXFB/2tirGLCOlRstf/v3f4P72r/53S/f+8zdreveLZwg1M79mSR7J81uSJgTStYEDDpJW2DHNos3lA92i8LdF+DfqZJIravBNcMjRl59APjYH/3Jz//8uHxkN8+W3NdLDL/5IjN9IHzKjjFNdnAAmUi15ejFt2GLeDnWxJvZzeemaDBPH50cdztUcqv2djp0yciac+NOE4GzncvzTywruX03iyTpp9X8SsMQZNLNUWC0cifHzn4f+l43dtmoHZkMapwL4mJcmfqZaJEevcmKOrKgJLeoz7VDgjEL8oGJydnRog72oHkHD18pxifY883p1W3vsvk5aUuHT+9IbmtlO6OYbtKY+2kCZOv4TLKI3myzVfmbilrBdYDxZeMpxUePHw07w+y8rrYa5HQjJgtP0NNfMmyFyZBROYWMPTuirmO03bSO2Ae6oMxo4tGNcAAAIABJREFUMC97gmL7Y6YfK+9NcJLSbqbXkpZ5o29NoXrmWL5zEzcoMuzDdyzf6CL8dN4xun/RlJk5KrwRyjLpD9Elk5ld3pLze1nPJ0+fmMjp5mY9365j6BjLaVlON6IqNhIfi1YzfL3di3yEIBB/sUEwopwf9DCpdUTPDUimBzrBZtJsp2AZo7ISqU2jczh62puVHOFmiF+MnUxwG28N8Padc7WsHFRNZwekI1
fodxIA/Gi1ItblF00849YbzxqTfxMvfVvPHpL1uyk3FYtF3NJloqrFuyKkG3OxT3AKUX6FFhl7LKnN2vQPK2tQci5Mt7E1dOu2eQP8eAUgibO40GZPxTc8KEGDhELEWlI0v3MOi4QdUJ2yWdJY8Xs3OIJfiK1stykGi71tOYrWKYTYjaSpgWJsJUpCHfItEsi5iNWT1IatwptvQ60ZbgBIlrZGU3pp2eLOi3pnRVRJIMgUX8UsgJofn8NXIBA56cTijEzWLgu0HAjJe2nvJ5x4vOZepdnICKtxBF44H5HNmJa2mRBU1Qvi+EYt/vpG5KoGDOnAVCU1YX3FZdqSdq3BSmXReJoTD+VTnz6c7C4Qz3pyxJkMqhZsuSIdGVufVHcscgzvpyPn2TdVlAZqaxVn3aXT4qXMmiJl+3LdLBKpfinNfHK1MCELowwLBdLuN6BaCic9qwQcal8HQ5fVQCbSH1KZbd/xRbPM3IVztXdCbvssOcCnK+WheDzkhj57xs68ToWrvXuy75e8zZIsdi5njxeajImtk+qjg7HzPbdgiMHBPARew/BKdK2BEQA7YHGTY2iKrMGduTl4eHy8PDfPYoqpfLRUTnR3DZLj+VZ9cxKIKFAspXv820mIulT7JB2p4YO/XU5+mMqPWUT3mpJSAY/MYrIS52smJ53OrB1pu+/Pl9n6T+za7n/S0zaGu9y7zz3pzKNZzy247Z7yBP/nJI9nIk2s5GPOaXy1d3SfFdB9t4JGRTAnjVk65pi0d3KnTXTa63ZdegD5mvy2K/5ogQ/IC1030CKMvzAyykdHvOqKmdHJKZ3IsyYnNCaS2E48rdIRU0VFtEAJW9eGrOrvhWoJ8oxG66c3uanZVlBloW3HiW8/bKAwueCp6CIj8wpZMogcieIvPTn3j93b4xfHfFzVM2yVdVbt/DYOEu+s8VcgWowAGrKdCd3grfIDdoCGmm3aJiZgEFWuw22Qke2cpdX++FSjOrmv+Z+qic124D8j8qKnBz2EppSiZJEXGl4iem57Q4rxQDEJIncBGai/LZj7+ZY0ywxuhA5T/Rl8Vpp7lkpYEM+OzHvzSJ5m7Y6yd/3+BHNLmvngncpnGY8tlPvJnIaYSOuAcbl0IAPptzmsgmujOpzMxvg5nGXUWwuzximvpe20UTorqAVhXKAt10Xofb7T4X/tjnXv9iKHV/w3LDPKqZgXmgA4GhxAGI/MrHvwSBiGyqruqZA3Nn0gJmc04VOLEczfnc629um44xAHz7rRM4+HOvf+kYwdE8E6E51dgOAPDZT3zJMBuisBzESOOMHq+pqh/OThw2PBRTehGJt7d9cRiAz3/yzfLxvNpti7lkSWdtxkN0TNJ3WQeGxUqMbv7qZ74aDyTPHZyZNldC5zoGPucMX7Hvlux2yQAAIABJREFUEHdvLlymkj0WfCTprX3hM1+RyMA4T+/1/rX/5ase8Cciuunk0pAWk9v8/fCSvfzqL39lzkjAaoAvnpo1TkR3Jk5MSMGv/vJX228SJ5kiOK8c/HqF68p/S1Zun1nasbEuPeCnzV6guongc/7mtIQ1hMh+rhAhhTVAYUnyHkfWSCGcppSy1oUcYVfUvoJFn7uIALDRQTDEgkm5l31z1pcCYnpX4nVwYiwEt1R3YtIM6em16ekXbe1K3OjkMXYFSxxixHa/BUEFM2dBRETtAiCPtbmC8geutmeq1MPVRPvEjzNPgxIxiLEX/lUaxMIxXzwsEsZai13I7MjRfmvdiMmlR+i0deEmhdA5yZC/tA2llR245qWYPt6q7AY1C6HamXGT8qT6nxhRxc/wEyNbRt/2vv46pfEQbxSSVo9zXzH5SrEwlyjtDKSQALOmoKNwEpyazLuvM38srEIAQDdN7WOAQiZsPF4xxOM+LwoAc07uYsrDw0VV4xj39RHAw+XBJhy7ZmNJvAAXzGQ5ry01LWvhJFqgANJ3TCjOCp/qVLClBuFhoFj+Mf2dPHyhjD4n0Um3zunZ3s+mDxCL9PZctqUZiVLIJwLFDGNeAb8OLYycsCeDu/suozGy0mTbDJFTYs7x7NkzANdnV3ltg5kKtu1yvV7HuI5xNTOpHRsn8ixZ4rvCkq2wn7Fic/QlQ6IrghKrTibXLBnBnCYKLQteznObwH0pF0lvlOMWSKuRtg07b+tztn9HPENDn6GOr+4Eb6/igKTGlpTtp/u03Bxdr/AGFZr0k2YIdlbHpD3jW3EUJVKGhPlv1qb/TmejseRryqsGkvoa3qF/nWKxTlIyrcPxrYZYp3LGAZJ3c4GbNb7BUjNB8U3WjMrCGuJiA1ORGZ0s9428F1Zt0eIQmTbQ5Mxq+gmlUa4GBCM2VSixO5hybMUiARqByjqfyisrH/brs99jqO5dLadY5PtSnq64eTfK3Zu1/aISKguLrVaA+6UwzGXfK14LkCu0qyGdrnAGYVYXlqba66FaRvUTT7zDUumWJcPn3YQqvQtqiLk2mWEKItyA6j4V4zLY9aBM89iO5k3gc92rowuYH1l55S0eBvlIGjs79VyIUXlHZ0+CrUjkbZ/GPfMIaEprnjiOikwzokIBOkwbfgTLCC52FCAtFquRlUXi0IoQbzp2L5+Bx4XCdNMaiIUanlFlz8oHwuIAYHNiUzH6/zmhk2fVvcMaQQhz3uhPWCAqKjZoYJnBbNCO/Nwnv5S9cJNmTiPm06GZ4MK8HWi24Dg3cjAlDmwK0rdMww7NrmcRvWzzep1zYvr5wvjctJzO4qv0vG+VxrIEi7KpsKoTLOZqFvd50npM56qe2JHAzDHooLDQXXSo+vLgTihtPkBC6Mw5QPfHlrEIZi7jECWBRbLxJtnwxqe/rKLX61VVk9f9GYNCsOlFMAIBNErCoHjRY5kJEVsoE5NRWE5DzIMmO2dp/1tcXFtutqhuOsdI+RP1UkqrSmC9PJ4b6A4DxfL0YUhmO2m3zZyFdZyWc+GeIXFEBbwZV0Wm9TtBTgpJEkZ5Lux0Uar5FhS061f+6H0gBy3NxClEh82tJXmQ4pLuHMlGn7FXZICIbiR09pae5EmJyvumUQwzrkav5ZF6KJ7hjgux+czquexQAdYkQvy+LOkGyzD8EYVim2SPFn+PcUF8JP00pupASRNUNJJBNHok2LHSbTmTa5dt0PWjfj2Jgz9UXwutU4K1+u+TY02SC+fA6APupOKipcs84ScoQI/kvT1S2fd7lb3hfBYSugeyyA52Ovy93bLwXX/Y3eklEqxMioRFfBNIVUwhMsf1OgXAeNCHy8UMl4eLqcwRm4nX66NNPD57BDA/8lHRyyYP1+sjKObaHLbJtEMPDivduzQ9vt0Ce2AQ82w9xy0jQpq1V7KFbAFgJQaJZObWazwuJzx9qzG+0M0gZxLKaV8yc4ZcmhHcGEOZJsa75bNCN+5a9c2QQmw9JvlW8TXGFRc4h18uDz6icZ2vQfRygdn1ev3JT38yxtRt2y6bWkTMdYxXtYnmXTyaiCZ0sy4xflTbN7Ino+2eB9JANiyc092C85nI3aLjV9144f+3TtjI+uRO13QGJgOeQem71b4PiV6
eFDPrHINSD7f4GofaStKf0YAmVpG2vePfLdqw6UYKxnyC9v7NAfUeLgKruyUGSK7lJgIAMowgI/ubhIyq7Uj0u+sz9iNh67RzzUSMMa3IVTtTghTdzELqm9YnubWQ8Yy7ieAo/WNbPuMAjIZwdQbcjoFBeXqBJgUEML+H6nkXQjyVlygfdizyTvnQ4ZL3scj35Q7xU1DyCZF82+UmHOmhPZFpq2/nGbjxkxATHQeX1WIKDTXlCibD1lTaDphpmGYR89PMT8pUM5s3Tw6/eOlORGZb9w/SQ5yG7aJ+UonbVRALKFM0gICMsKJFYpORJSKYwwZ3xmwaPU5XLgYzUVGox9R3W85jxLyffro2EBkV4ybmlqrH37CiZNxpiwyVC0uUD0kaumbwe/HGHPkCEFBChn+IyJxx5aLNCdj1etVNFISNHJcZA2ZmV5uXuPQage6JYgBmpoVGkxq0Y6aZbtumWgwguedeqtxxG584iUDOqGtO/98nlWFZCzW8rxBzdMl/i/gv2TZRAnU+AK9JIf5xWcR8JgIHJo+4B3rru8o1P4jHnZkriVszAcO8IEx2OIAmotigc0bVIg41DRUGkxI4SxzZbKp4Znvy53V6QK5tCkR4W4AzEET4qgEORpvXI+IneLg88qhY8JRtstHPTZNUzDBtAKaR1VCS5XJGMl6DTmLsNYfZmvu79BsTxBkJbHFaddN1jgW1AAgbOnoMmYJpc5M4/07JBlW/RATbtiFkApPABlCG6/XqU5S4eYaxKKYLhER9PPvWVf2SIm/IYJ4fwVmCrimFHjlE5hhcAbL33jOUNQEkGzZmSrQ8qOcL1mxeR1HL0rZnayY2xxQZomKYooo5fJomnG8jMayDGh957bXrGB7MOhnSKiqXbbteH4MRGvg4bAoK6z8tZoYKlganJuHsmgIf/9WmAEoEKNczgElhG1sKZqVsxLcEJsFjXxGWPIDw3yqsR6BmiPs6gCWy+eC9eYzhpXEi2TmKTk/d5kIvLu02s3m9bpuyA85RuRJifVhKnsBhZuEMVCMkFUQjFV3vrYokkEAPK0SNi+wLdaIBagbzq3WCA3NFCEElMz+Db4kACeiXgb4Re1nHFs11JSriNpBhx61zJtoaxlRktGOaIux86mmjBxdK1WTzvP+KPKdNMIit8Bp477DVGNL3d1bXijg5GiMyEEkgzPz6NO+imkVEq+WkJlKgJZwiOSqvqPUlw64gbmX1N+cUwbTpAnyaH02ZqhKmxIb56BYOZKrZFHrruQ1Tpx9iaYhA5oQAKkqwYsYcbQrgOq6Pz55tDxeZfkAbj4/jn//5J2OaCWzMhwcbjwYz1U2gl+0BgGAbc5gYtsq4SR5LdvBTDBJ6mdaY6uaJaJ1azYZJeJ5cQM6HtRXKzyv2WNIWWhav3zcNyhAffSip3DhUyZ1JsnSrYi3c7KmHAnucVqflZ5Ag2E96moqWkl0Em0yqL4njGGYwVaZwoQgwE49oHWaT8iFUsMb9NtPsInrFnDJ/8V//6//5//y/AEZ0zsacj2N89Od//oc/+NEYQ6FzPoZ5MIeobKqAzjlt4nJ5mGMwMXMuiLD9ON2Ais0Qsb5dqLKlVkf914w+Dj60gsUktmW7TGC+FIbIDIqoataaU+MEzr7GHekF9UJaSoOQwnWEqrgtbRMRsUg2a7FiTWECE52UBYgN9zRx9ttzGdUsqAvgs1v5gYb9wf7F3knlYGjyzT+bYzhHJiF8BRHF6jvuDLGGxc12UbeICYbAZGrTlXBbz3nPNrzm47T2QNmlXKHVM7eLeHeWu47cCzKoJqIcyJpJ7NY7M4zp+VYQNzhBdTPMlsXAkMAgXAvkpoWoiC8Rw9QtGGBi+g2V/tuV3lgqK6ZKNYfpzUacngoBNWMCwyO4AJaHY+a1yx1nVQlNShcgtCz5yqUcU+QjBaD7UjZGHQWrmsUjG/BUXkX5FwxE/osv73EQ6NPZ7VdVbkdHpiLG0cFyHNFWA7/hBzThTOLWhJCSE9jCFwntFuZ77ANVVWiuwuoBvJ1Cb0AMtPg5yLAJ0zezprQDMDLYtm0VlxTXGcZoN01cBqqS560X8yrsm6Yud0/0ni6w434UyzurzWJxtiKCIy2yQaX17/+aCNKtqjb6DHNSEpwEckfy0OsAlMMzphNNw15Oxljt2LQZVn6gvQDSEo1qGEDmrhSPku2J0Zg1qSVrU3mWglFcOmvkyFkJu4DG5JH9LI6tJms6Z2E3Zd0wuI7Rj+tGpF+RuF7qY8romCU+SJLENLuqRc4hIZxyRpVH8IibtZbSHpZGiyWjkCXYEDS9tybFM22XvylF2tqNkCJYbVgsEyo7gh4LOT7Hkf3zw4Pp9mVUC02+6mv+A6jFIdFqIBDNG+OVrK4t+jQtq5cAYTX3ZXowGd/2sXDJ5YvFV2ZJ+CZ+oQcGFS5YntXcZX5Yns3qax0IpM1Tfh57ACoANlUAw7esdmhivpRe960iaDNcs62i6xzUf7novOa9zxarJJwyQ6mhoIkjEiyqy91jBUbmoEkGotMAHS1OTh4PQ5+rXH6xpmICisAE6vusrgLY0vsUKlpGhuVyanzGyMnUncVdgXwdCV1jOqir/luOYCm3gpOSBqypBlXSznKpuvInaFHgc9kWRyaK+sgnJb6awnJFV7KGGuqcH7vG3T9xVwaRO05lVa+P7LKrsC3eCXEpnS/XfiNnVNCpuVTVgeydSDjoxlzMyfSH/lv7Cydu9GeMMed0GT+GXcdV5eK4IlPRQlQ3g8hmlathH3+46eZDnHM6ZuO3rZiDXsHHyTWiHkF/kHZNNS7935mTB1rkgrP2JSXKqYZtcpsvrEKPSeCWfpx0DVh50Rg0ydPouQVzfBudo+J/weT+PaGMYIXAW0UEOodtoti2H/7oB57y5WP/6ucfHjb4DWQw+IEAg0CaEDZuAM8A0S0F3aJPUm7KApMlqeLYBJKkOzhyN0jKjufYAKREoOtoB6Ba5Tt9fNricxqoGlPSG2WNj1rCrvJWSaRimZTONxo5jrSCp01Q29teq8hCnJUr8pBTVMPeBDBH40D6193OCdEb/JPZKGgdUKamkD303bpEZvKOkmetLevjBFLbFqVoKmVsaTecrehvtVUrpTgaYctgyNlJ86JpU2VkcJ1W8M7nddy5QFPvosn4FRSsDTRWVfC7SAZ7tkWTrYQVTLi2pBPh9UUnlBv2VN5Z+ReGRe7Oa7/yyt95Da+kV+9LgKSXpzDJV1JuX2UDlMFLLbLXX3sdRGMl4tWjDtqWMDNl4kTXEoa9FvV3+Gbm/HvnxdEAq4yOln57GjRUZM1Ycc1yvV7Zvbz5hRQS+Fao91rVzSERTaRAID2A/tR1IUks4vMNEUXZ/cJm7pwbaeVAdDcaVo06GfppvgUfAqjPdvuzjlCcYUSSgWGi/jIthiD1guh17CJvWxbHTdOj9gSR+evi7Ume1Kojhz0eJ54pk9wbikgE0Lp2PEKYKacIWKZ2EHrxawVptlj6Jwjv59T/aSUBKL8cgG70LfYub7Tb9+lJetjanDOtxEgpGC69ALAZV1ao6pgRsJ
PH3ZudRgNwxQzQJit/kmUy+vu0zyRiIH0SS4o4qbkmylzMuCYasVlvRMTcIlB5QJ3VnTEyBssW0pWl2MzIgH4IQrcoORKeXLBmNy/w3/hryZLFKeo0nuD5JEueCKOT74ds3Lm/3gEleht97AEITkfaqp5cfQWSd2hst5xTDkjY21Kfmt8abPVowKEByArdkzqpF7xzR3gv/N+kkPaIumgluaa2AaRNV1CbwR/g9BoQUQacFfguiAvpSoOb/fH29KIGDpjVha8iES6aXlODEw8rhgLLkquqpsZmKStbLfT6DfWVmAUWPEvqsW4+tQQgGjc12Dh72HTcOgG1MPyDHFLXjFx7x7J85nwdkoHympNKX25RRrnOzhlIklqinpw0N23Y7wTOUmoeUHg0MXfWbf9AjpN5eIjnDNf3d5W1JWaVsq1VnbaEqzvs8w0E+9DfzrG2IxZi6JuP2JOt/exRlqGgM+UiZ8H3GHLxpFGnORDrG0s2zMymwvcW+5KROJaAhhOUxLY5HH905Mvm9A0kT15Jnm3ZJ80Kr8shWReaN/OOLNPjsayFF3UBKPAdLqVEL/gtJMOqj/YSfgnabPsPJeb6IuWXxmclGXZdhMUvNcKyjHJLxCJfMDmCu2W0B2LFPXv2E5chP/fRj14evDlRz5qtKiYyYZY5QBj8G93kmvM+pAxPach9E7eJu0RMZDABmqYDTzXF/iNZf7F12ZHCy1MLNHls4eVKyi1WKCEnjJJW6sHFbIptypw0UmwBt1EUzcprHNNo4CFCCkWx5P9B31EOaqN4vNtLjY/2RD1YCamVcoobOUpQrGQKSsV7h9V/MhknFa+UoT9IAcgHQzCkEm6rrnfHB5s2qDN8jdpjGbPeFesNXTJDCfaZSbFDwnp+h+pkHLQKkjjW6YtUNJJmLHERzlOekShptsqD4r+FRwAAd4+jPJXnlPcXiHwfAbX3tzwX03zZrJHvS5bMHSj5FCb5suVedKQqrjOtGQDpErmdSMndFBuNqXSvYr8y/YI4nQqe2DU6JbKvRJiV6SV3Mk/HIulkddvEjeKKe9c8s4U2JizGZVeDRz9HagTNZqHb2F7k6930KD8qzE4P4kvS1KZhmh3HOrv9ghaEUodMww+xQd/CqreJYwA5K/liWrfZDbRAJHf6lUf8sjHY2NEokuRE47Qf8idB93vDZGO/d5Z5Gh5LBFtYvQeiFPAqcc3H3LswBQEGEQ7+jfDK+bU5kT0zHO1gegVIW6dHYaydleXT3QqQ7Jy5A6jKDJT+aNyYC1WZE6qqogMGx7htHyYkNIFyiOwKXTb+I+Fv1HjWEmiY0S+VtBeBaVOFRwuF7kTuTrMn7vgcsJsbxc7M2Mi4iVhD7K1nDqhA1t56wUvVcPJUBpnkqIsy7WYhRnfd7mxyV05prrgdB5jlDjwrB01y/hrGaFmzDp+qxoKxfjYt+CJMcK9EBV1YlYgwq6PvdCEswmNiKiGwaaIHDt5JiOcVIqikSamVYp2kKlVDE8idrE1G920PtMfcq3NaqS6Hm5oMbk6POec4MwvA61AWuFl6W8clvQPV62nj1Bisv5AuXPUrPa1SlqEcygMTm7wLopNIDDPCiKmgFnqGjEXi4TH1/QBg79zSJt2lm9O9U6CtAtjB60vRUtMXgmelHrWfmBolYdEupWdQVpISL2JLlFIphXvvaatV0kfBg5O1p7DOI4ToU8bcNEmLmvem7pNbUFIq1wsyGsmzm/IMx450/pIvq4UzkxmK5d1ZFoi6ZIkVEmLI4PDc9TpUMK5DJW7hnmPM6Zk34vRuiriSXjFsbg0gIiw9CWHf3POFOxEbij2rCenJY4/nMwTitrSHmjFGq6JptbKawB2qpmAaC+1atFVYRbV8ZXmWggzSxcj6LfgMSsTlFKcpRvCP53lydZtNTAfPYqNXdJrNaR957aM/1mcAHh9/iohgtTnH9WpjDLVt2nx8vH70tYfoIxFOMjGEXMDlWIKKBO+nOcCljiR4frCIzrNy+K7z8wHSQ9iXu3c6Bpa7RyF4X0xR9QG14RYmWg/sep1UC4E64tPiQdZmaNJibZ2n3kU4xY5KtmWyM5TmoRa+EPRxjg9vjvOQm7KMgQy9EceTObSuHlcZ01VxRv7lG0DvcpLE3+gm/UpC0FukV5lK0OhLyPrmOvZcbtVbHlrnxpWZGQ+tay0l9FkNqW778zJNOhik0qDHvlnzR/oaCF2caG1pZZFQyGwwZFx0/URitBefytsu/8KCIt+b8mG/HueVl+PZ7SdE8gXLU9bbp/JUnspTeSpP5ak8lafyVJ7KU3kqT+Wp/AyVHRb5+NOfvC9YpEN7H2SA772P33zx0MgPCN12zPPdr/77I9L9VI7lXnRkFsv4idiey6AO7Ldpaiu5Mgu3PTye1mpPCjAjJuNVDOh0KLV91/fN9hujKhpHc5aQmdiOi+w+rCRPEca2lh8InJEAbom7aTvvSZlly731TqRHRWA5eFs70f7ZLobtfOi5v5YN5T53fZR7x7EvegCpGZNQoSM1x7WTaNxt9aHkHQ122L72rs/BuhnGaYDFPQn7QakIL2ooTskTjo1mbdTLEFe6+IvSwgx8oBOI2CLu8ctSkeXdNfvqKhiNJGu/MIyJ0xpBZao6xnXtWY/72HNIbo9WvQBgEgeheVS+pbyJe2n8ApBIEn7oG/f5lefZAb9F3dDatdbBtoR3QS9Lx8n7traVfV/qX38MnnvxVDhZuQHDpr+rgPMNYHH1p4ulPG2aF1HX5nRU6GEK6/IyjhDt4TiGum5Oz4oDje5F4ezXSZu+bd/CPaIt5RKLbyyCKERENc79I2Wr1+dxyW3V7wp7FdcV9c1+iapqajx4oJ2GajW2CFNEbMdCljul12QpYXyELd6gxRMsK/kQTbYfX34uOYamxSAYY/ZIA5UiYYTD5s+7bhtDkaVHlO57UPTXpTsMQFnET1MQOfIuCvIn3jIXBDAPZItumPnx2yS8giIzZq8UIQ61H77qH5ZMtZT/xXD52d3C6azgTQ/u7R86BzJ+rY6t7gWERfKJyHErs1gRWyidnfy818M6LL8sheeMKYRY9W8xNfJ/6lghp/dvwPzJ/qGkxGkCMloTtnaQFbX21Gh05b1b7E1jottLs5YOBYtwgjWUD9W8BHdV0GGGGErKWtsbfft1WydCRHq2a/T3I1gzjLASiEnlg8RoZFvqy+F30q7aXDph05gwH/BJ3PWx7HqxPpx6pvTlTstIBGm3eMM2kcnLEckdqtJYdVK3yRm/XmmMYWYf+/lf+If/7x8B/OTH/wx5QBwTwcAQkcu2zRFSiq1HNcKg2XbcaBlwCD1GlEqdLmEqmbOyLLimFnE3cWRErlmLJ7wxJ0VdSh1pHNRGcGzjpoLpL3l2qjAAzFbKNGN/ZTJq7ZvV93BLykGJ+1JEgL3G2Q0/0rb6VLkFGHJBRtxZ58ur2ExEmJCFYYEibuzObh+lVCkGWT2c6H5dkQN0L+x8uMw6kIqPpz+E2RVaDgO2JiV9sp69imOCceuppKCiJohbaCLgMWqdNpk2UqX3CJXEkavN9
sMyv4C0nfsO72SR1iRyuJFhcaUxASrFrGXvitwi4lN56fJBC4p8NzC13THwDwhs966W9z1s8ylM8mXLbTjSrKkYl5KLce+XgyAVQ7oqYUU1Q9Rj3qvmeIFaa3UBVl/0OSrsxcqclQptcVEEHVxiN8uPCWNHZDIjO88M81EJr0gYYH/WYUuV5JZdOyrUSpycUoTRl9qIZrFVp8ufq8scd0VE4oZB83Q/TRN2VAiphdPQjrOaSk3Pppru5eQJ3SAp/LnwKsQ9sBJOSwKRAZBt22Jppysk1Z04FOY2lYofMT8Q+bbp7x9p3GZK34kjaDxN/CY9J3Juh2IsjGzkCNniwVJoRzQCE1z9onSYOeTFVw6SWK8LdP3aVBp4GbXXIsK1yReFlqsf8JH97LsXsXgEO09+pSmBqsZNbNzJUOizwSSOjOUoDgep0ptiXdqwGpxO51m/gllIuLjk3YT33rOySjjAQadr5VsIATMZAGb4N03xYUGAViU5y+/BFXo/hobZ1AKb0zY1EoNTnHAEUW0hC+SSwZRVNHHZ9bSbfYfC/GaMWESckhheAKIqc5Qjl/XkXHB+ci54wf2mAgyZXCagW8zpyCrX5FYnk9eXfE1lX4B83leUWFH4tMoTsz98634eX9KxzzdFktoxx7zRvnuzEoBIzZiEmG1H46u/AOIO33SZE4bkPV0lAcv/bTPSWvMGKB3q7nB65SU71x4He3YyrT+mh9nJljsGi9QIGXJQYLb8z/7b8k0/M05Os5KegnVWCtBYusb9UZ8dTeEpKaXJLED7BHLY89zz0AFBlwVXvlmaV7zsVpy0UlInf4l72ky3LdbzxE7MCzF1EtV4Bj3xDgp/V2vWNiQOSECIYkRNHZ0h1sAzkALf9gBih9WmTc8JYCIq49G2y3a5yOXhssnmTW4bc0Fb5sldzngDcDTZ927ZEfOLntP29E1eDYhdTmTJGaHb8lw1dWMzELHNr5bj3olFeg2ada36a6/1qYRIziDlqRZreYs8oWh1T+qJXvUCRyy5jJfcL5m8wzvtk6aK67gC8N1J3wQdj+PxpybQKZCLCNTMJuaYY8zhRu+cQ0R5NbFQIQi3rLzFHGJK852A2AvzHS3OP26GQXsuF3WKQdKMsrD0YGC0h2SIxuWR/z6vLCYTf6bz0tZybEwl/9RQ4rPdXrYgp79Lq3prabZ2+Xci6TjGuVxfJgAicb4Q5mMPk36I/V+lZZPvW9YWqqC2GAAu7gRQ221CnKPWsS7QF53Ba11keYQjyHzWVuTuR72XBtxS8N+y1j7mMIXMtXYhjFWPRIfWjifSniKzbI6gQM987b1cbfbotppUdcBueQTdJDOdtW9Xc71+3nl5T+VFyjEo8v3qyXtZ3neo7mekPF26/VLlXnSktd31jrPVbjVSJqfd7l6e0Eih9UlrSXnrQFY4AZhtDVOhX3fiIL29Yu0/s7mYe5F0x23oDWmOh4Xvr5RC899RHlGibza5EZlay1LZcC85nNDFSimqhm2ShjxjNd28FOUOeNNLyHfYaFXogIc7j7J/N21Gdn9n11fMg6OxLfxnZwG4rlSzuHFF2bV1OzwIEhcBeaPTiPBF1ywbbe04qmVmlpFgnICduQbyqq2mWaHJpIhAoHK6YT8RnisJKkgDoxuMZbcwn8zeckHNkOMc7FfEVsHm1LSSkhJHAAAgAElEQVTAWihLGDGIxGP7EpVMg2DbueCSZhYAiG4+cT6DsqcYyYsyeDpy1e0eMrP1t1qEmmWs7CyIKr7OOJCk9h4tETpUNPFjIwO4n1BCmDZOdll+GtDslQVR6loTPkeQpRC1JIFIJlMiRr8P2JgzITmZKuqBqmhzsPNkQP5uYQscaYyCd5jmeJYFv3TBgLzOteRlwKqqWw00BBonMQLp4mGpdJFRaZGIJOTS8aYkGwyY21YQO4h+D8mRDCGXfIdUP2H8oGfzCWwXFN0Nd74vRVXGNSQpWeOymhtiadwzyOmJNw1raIpBultQFCoVgtrdo+uY+wNNRfgEMate9HNPN2ROvmnUCin0sQhzSwrUWr4xIzudlKpB3csR2T9l9Yy1Wiyld2zZ1TMZYJvOn3cufMLUAmRXl4q7flqsUyt5ZeJES28494JSOACyV7wrMQo7Bo2XFylWXMEw6+dZLUm1lMcinpF0dhm7aJxmw5wHW6XrCxAVVyIUwjqNj4nw/EFtLnWzIVhAJDhHBDZzlRoMNlKvQVRERVVUVHUmMc0vi5PeiiUEuG2x/RP3BYvopqI6rtedvCwqSH3Uxg2gD/U5E0AuwzIFSDDksECkLW4Rw7g/vVhUp+z+orRtVN1T5qWihMiMBJqcpVAyOU1mKOnkr9O6Lko47ec0UwC2bWrTfvSjH47HK/xyc4ObZKp6eXhQnWYGMbmoW01mZsxaF5ZTYZEpakpU0dLcc+iOw55Tbjxhx0eEQD0/4/ZhzAI3LSL5YPaTqzBUQKzI/aLfiYImoYK4FLCh1A1uRhzjsJuIDF4wVr5s0ADJ041PmfKZ/suq+E5L7aHmKOO2FqEK8k+lLDPfHzM0/Wb8E4AktUPId/NzS248kDS+znPKS541TStr4mbSMOwRADl1SZkk8Z6gSOgRRRZS1ZPOYgF8UZERDeuLS2DMLPYfhJ4VzKZfawXbkpu8lbLUJI+x9aWROfcta491bxU1zkXeB0ulveO/oFBJ48YG1snyVO6XD1pQ5LtdPqT35HzYgzpPL93GEyh5Vu5ERwIwECbc2cBCT8HKQuqb86lXwL8sBCpThLvnnrEbzYsvy+YdluyfANDtUGcohNQfImWYOgwmsr+vjGaYybZZoWgiVD8dv/vhH//Brb79q9/8r2tXw7cH9b/wyINYghf2g6/drBDAL/zWb3O3ctF3OToRm3O9JFLww6/90c1O/sbv+DMZJ0VdGk6hWOA2IviHP/xvd/oG4Bd/+/e8XU3MKK0rETHeLtQsjGnTvazNb+HkBUi0hTPAAAn4od7Gphp+ntsrhCFEynsTiInwBCDNf0P5CnxuzpWBEllfvaj+kKOBxWHopE8SLKfQ48Nm1dGlY9b6uh213BJJh6QWkfilpdu2sZWDHbsikYK8tKS5oGvxY/PaOSvSFhC9Q4w4XUhHTiLKlk5C0I04X4I/+YPcRiMbMVdcizXT+XAhZHPO7fKwg58oirizHCYwO+wsMm6G+CEgchOoTkY1eaGNagIRXdchqRLWMMFc4YiK5ilpiAvUA0CL3JRNatXH31n2KmuOE5eTMTwGm2F+03w+pbWJ6KQwn3H7wRSVwMZQnggAh9Ih9+zj1S3twUkVQVdi2SmqFUJSztbe2yCLN989Yk7bgS/qKWurLjsW/x8AZUapsQ8GTEQwyepKLFWtHlS6rwV0NMexieQblCvGCESi+FlV0S9Arx0NyRVW4oIlV5h/rLLwDEJqNLC6daJGRhLVQKzeRbTM0FY6xgI4+g4QDzKuh7Zxt4pAdCjUsqU4TJzH3No87HDr+yWW7Qs+vX+5ifBcpK2U3dO/dSTfbE440JRMIvneTAHil3MLxdWuST/6XYBYMZ1H+7rQ
0ZiIXkfaSbn9KuUWX4dtF39KJyKqeruIynY1i4vUxHzvdNqccwCiF6XACtGfkXtGCEPTUDGzMVU3o1JGKM3QaSLbwpeNvItcfaHCAVIepOahFYu+VEFx0tbngc1QKQICkSzl1FW7tTrq/HM3ipdae8nP1mj06pKVQHD7KnTfuG6byqZjjh//+EdjTgC66bbpHGPC1PThchEZ4zoguFwy1w4ZwcsmQB5SaYJEwB0X2ZtM0eG0ydZ9zr0i8GeWEZ+vRWvHV/cyTZIYe0Syk7BPy72ykrpYTaaD9CHG4ktZyCK7OqTU20Kjs+ksc2ETx4lplZmobvcdJNUt5GTOBi1XP+KQvd1hWtmVaSah0KeINraU+4RrpJA2iK6pSJy1/7nzi1Ti5UEechdkJVQylcth3XPmWo7mw1E1M5sZ+1KVCzKmImkX1pSZYrPGOYufcZOdDgESVFrB25FwpVSnwbTJn6bjAPeG+KxwRFXzU3mB8rMZFPmzUD6YqOsuTBJPZ7fPyk04MhyEkdciZ8SBzAkze9i2ZrMhnF8AYYxk4FR3umRisT4EHt7VovsBVZUwYCE4hke8dHEjNg5g9o1mT5Wiqn4w2Qx+rje8XThGMadsG/3b3HelceE2ghud4zo2Vd10jKmqqvJPX/vv9/v2gz/+AwD/9rd+BwZMmzYjaEZ1U8w5/d5DoLzG+1gkgB9+7Q8B/Jvf/j0A8zoNJurDghnmnG6DaACv+Kc/ek6FP/jffx+OcqZ/bnHmxH8fmNI2XO8XywvyVGwG5ADEbciigsdhkgQOlewwJDa1MabZP/3Rcwj73PKL//V3zbDpVmCBmW7qkzwDkDObmGbKDsNs27Y5ps+UO9TEinx0CKSgrBNRUVHMQbB0g4iajTHHBm2gXVrsIHDgVI1v1Q3BiJ6STdXMVDBsONzwV9/9y+NIv/CpX982R+ELVjME3rERrTDMOQZoNm6XzZ/8h98PfPkXf+f3hgsEFTNa3WkXCSCYPKskkrelxhN/9bf/57Fvb3zqzWnwjGdrie5uITCIIcNsetiO5NYGnXkbDOh47SMPERLrx4XNBHK5PMgo+9HSd6TV/M3v/emuE7/2y//JYKpamSnMIlEncjIMss1xHQJVTRclISez8Kg23bZts1myMU3kDMctZAaNdqjlL2SviJ9p+/yT10aLqKoG5tgj40Bb2g9IGlQ3hgIkypWbBPmbo6me1JVQAkQ35o7LQ83SzGYD7OR+zyxjXG/43YUCeBRBuLA+k9PiFwks1GxOTI1wQxUVMTFHvXzhzOn+qobbaH4RfYAPdFw8pmbOOYddXrtQI4QrBLox8zpFBPDY05iLOaZuvCG5eco+06pqElQy+lwx7BnBrS3xCf+nf2QU/y5RBiLnrrDMOd3TFJ1o+JS4E2m1RWGAxMlggM6WlpTlAADAxnDUEOIgdrpAItgEs5S5peenWw9FN0SSgahRQV/RegzoRbc+8WSfYPJGCrafTnGiAFaUsqg7Mom4sp42jYfvWV+gRf7fDO+0dJOzCgxMeiANoLYa9Ypn+xzl9mqPt4t51OicpdFjoaA9wd8EXdMM8DGoXuacgKhHSplNMxXZNo5d5hyAn8qfoswwQ5Hiz8RG65gjNl2lLDWCCH6Dtq/kaXOOcR3jelG4dhCBqsiGkIMyHqAXuaip6rZtAuDx2eODXChBYI5SCgSOagQhwG0bUYmNtDnralpXiCKbXCCiojZDLdfWnotLXoWLBDRALD6oK8ke0NgyUy1h5eOPKF2HB40Aah6M4esSE5vxZUhkvHiGKiq2AbqBmy0C4xrU9tPQyczTZqYpEM4aDDaimstlM7OB4Qm1zcxhXRFRbGzCAHPjQ2zotl0fh1702RjPHgcAseu0zWDj2RDFj3/8ozknDPY48Cj2GgCMOXUzvahN32RXmzzIUQeHo1fTJtPt+bCKq0pdJyVT4Oz2dzu8Esu4acKUDz1Z7PKVjHlNtWjVGPRCkZ19SKoXwsOe8VdPRDvN3IoIieEMZomPWwOItr7k2kQ4j9VHyQa5Eo/D9xLZHkVENUg9h2uWUhqUMi7rVbc6cpJdM5IOtKu866kOprk5CB5IVgigVwyOxflREIaFYFzDfgLt28BMuTxjpFJLaelNKEv6ZbPFpsTGeDQVAsLYd8+CYhNQqIulIHH0JBZs9MkVeXTfeDYrWwI39ww2VDi6nCITM1xxRfC1u2k1XzYQTduVn4lhACL9UA7VOjDGuDY1jpJD8KDp4VPjZFNhxq8W2EA9Subbh808lX35gAdFOpT2HoQB/iyc1/5AjfEpTPK55SYcGTZ7V4yhM+j85uel7g9VlKUu/fFzkUmtxecgcZT7eYN4XvnhH/9BBiHaahe6oTxlmtllU1gkjEslJKKbH3kkEHnsdWjo2A93bS4AnotFZvmHr/3+v/2t3w0EAhFLkD1Ib+YHt6Msj+Uf//C//Zv/8nulT1uPK+wB8k9/9PsvWOEPv/aHv/Cb/4W/GRGUFllxK57nWJIDrFkw7mdMQsKpYsMkV+HJ3BsM9HLF+nlOdMvEvvX9r99/97Of+GKvyf/91vf//P5bn3v9S2HhiUIFU//vt/5s90CjIb1uGIDvvPUX1frr/xHAppuqjuvjnKq6feN7N1v/xvf+BMAbn/z1qNVrL3gBecIyvXQfVkfDEKZ7xHoI0/HkGcFEB4KqtFcF+MZ39zBflm9+7+sAPvvJL+cn3/n+n916+LT86md+3Y23b77Yi//rZ76Sw0lj+a/+9ryH/9ff/B/eBDfUAQCCb3y32vq1T3+Zwy7jW7A884VP/7rP51/9TWGyv/rLXz0ysi/3b/zNn7QBfiW+AIHIgEEMhjGnAxyqqqoEnW6VLg6W2bUTSVFfSoWfxGoVCa8mg7ZLajZm+sf/flNknYf/JMcfvxDBrKNtFYQjlUBXEHF1EReYbwoyP1sBNG1EFQNF8ctOLCrQEMNemT0PetGtK8p5O5bshv+fvXfdtqw4zgW/iFy7QFwk+7xAW9zcfgALKKTzu8ewgJLeQiWQ7fMg3bZuPk9hISH36L/HEgVID9BtcRH+2aPHOLYEtkTtlRn9I66Zc65du4pCKopKNrXXXnPOnJmRkXH5MjKyXiOyoG5/WuIpER8sm5TiXlCAsOL3SNm27qNgDRwOysDGqpI5XS1/uwMiTljvmflf0R1mHsxzewWAjFFiVPc6VQyB8mkZAv+7QKshZ4rqyEpCGPml2PcmUT+QAEqlQbQlVk3h3qZyk0hFKpYSw1Q8zdpI/9tCTk9y9kyHkMGBd3lHAzpUbDmROwrBY1LCTZMgQ7bMemdEm5AezRUIEhJR7JCImOn4+3N9Fx9oyACBQa2xDMiQ3vvxeBTfEcI4WOOHj6RBBQxfHRm9R+4V7Y6aZhSmD2wqi9HPIGMnaP4ilK37tIxqSCefJtYiTUEz3WN4iU+74IYQI0AiKpQnb5Th2g6uwM/n8b3P03zkiSXKSGQTgi/KLaoGDKDwFk/WKVkGUYUFGx2OfXBjQX/4oYcOZwcAx5u9tTYwCEcFl5kZgo5+PJ4fzq4A6L03HoBgpJj
nZc+ClHcu3kCZ/bHBe/vg3S2xWOGcUQcZeVJhaWmxheb2B9dtTX+38/1FkRHkrnamvK000eP8Uzy6boubioXnBoPd74Jedrq70cqef4vzjnkrt2PTUgSgb/6uzEBL3SHHiuZ1q8Zr0awmhZvIJWHwIpHKCd+HEt5Saa//zlbPXxXHzUyHHdJMWRYAT6kVbzRZEI/ajUuQhpslvp8kB9D3Akj+X8hVpYbVRNAMlCmn1lRCD8pSPitBkfcUjlbLHzjw8B6Mc/yEZReUfIBIajm9E9KhMYeu2GJPbH1wtc93lbCpoc29t5KZ4mJXs/Kd3il5G0UXzISZ5oL80bZ5lIT6kBYLYJonf8qDfkwyMlCJiD784WVhPi3//oO/A/IcZG2FkYoAwm1hkVp+8/3/I1U2uQeijTTA9PYa+dEPy4bucOZFGBIH/l2mKBGbbhbS4lzla31hRFhz7buCE3yS8qVX/9aNC1siTrLfkj2BX3zwUzNJ7EduzdTA279+/Ze/ft14ZS+S9O1fv25YiW1s1FvX2aZ8NmSMMRSlvQCLjPLm+z9STi1r24jlZIEmjUqTa2ehVRDjMYbIGGPofhexDxJRWtBsYjQjd6fKL97/sU2r2x/creV2i1KYSz/cOIFFRnnjnR+5RT8gm1fFnLfCMdfKLZuvwgh2BhQ34itkCWD42RHkqLxGoGjCNo7nAZ0+DRqH4Ttut6/1twatxR6nxPhWktnUCMGY4cP2BeaZSbhwdeK//O3fZpCF2A9pZE24MhpwYXPLpguRhn665a48yRwaSDwiY/IVjdjuYqhDoK6Pvt3bTMwaATT5ECGFvF3iSJetH2WwmwdYeO1OokJsq0b/iNgU99UDLiUJvgg2F1c9ThNPqwAyPWXLBQGehArJoBF/mwbCpFaLj8ZLJphV9aczJ5pPw7VkRcQUAwl2iKGzoYTJC8j0In0t+TeBylFh7TprZYNjhKVBjiIa4efbtDOcho0PASS+q5dK3djHTjJGzmhDpYXOiFaNqrdgieg+OReryodLEPjA2/GvtTHu0Q6U2UMMYiEaenxfeNkAGHqol0Awn70+w3fi7Iu0gSAC9OGlj1AXZqKI9D5unp+fH897771364hNZItO6t0CLfs49nEsIEW49LTTGOUYDVr25emkeChvB0UmD95NNUcdJNvkk6dMUMdwg90qz5UBN7FXlL9zsD9GlBxoWjHIaYxNxM1y+xr3TwaFsxawyNWoNyaR0oEz7NSbTcHqRExNL7bGX3jkEZ3arfGhHZjb4ezscHbl7HCFiSFgIj40buzx714xEyCJRe5M/6ScBNn9Vy5ypP7ZN+fWeVtH4TIGQpi4wRYo3xR1GRfX9xT+MssmBz1vSoETot/voPoyBIcjcKjbKkX/WH17KyQ5+tmf4OdweKb+5Wqi3+8xeP6l9mlykqDjpipgkOtTyle6vLZ0MZOCWJuxitxscZAxJZm4kqgVRodJCuuEosutbLVXK4WdQ4iWRB0Uush0h7qQXDyerULKZxefQpPnynpLTHfXxMV8clqZrS2ut9UCtJERYIw+xH52W/N5Lu9+8y8+K1jkH6x8tsC+exOfvbOy8N6WOT+f5SI40sW3m3PmgVWFlEaZfoXlA61KeXYQ6uetvxEq5K4VITDA5D/qAWsnxXYCYYIatTERe0KzR4dJZ5SO3i4WqeXff/B380kbbr8W/b0tj11/5bHrrz52/dXdqx/+8LtG9QC61F0luuWm792SiOTkslFaEbcqAxADXxLQGGG6+zd2d/gJbthfoPwvXyavxeEIch69TA2//NefkkOYcKfnMkXGgO6u3H2R+zWFAVfb00w0EREhojfevUUsZ5Qb7/7InKzaWtsEk8zGrGep7vdIm8aKCjmYrKcQJHKRDb0sWX7x/k9oevjTKgtMcOOdS8VU3njnR1TlUinhCCuMQHqowx72uBVnVeid7HkyQhE2Xg6H1pgbM6V7eBqxr8iXG/qgCfuI6VWdmtlVVk4xnzl+a6kO/gXT1Hit9D5diqniHToyT/5L9iCckfm95Ewb/pJtmBOkn2XnwxBAHAdhS1bgZEh3ymsLlztgpoAoxHsqkj2KVkNdpVL15Cinr2UUkyHSDZox+D9v8k6u/VcvZ5ukq7qtCRfK1ELnDkoTwMEqWaVXBbiwlDQDsj4noFJl+K68C/S+TD/7Utppt2ldcTZTnWpFA5aLyyhSLJ+t/JoFp4/+trUBxgaml6Qp4AxN42DXkg62CGEAECGGwpxiBwJQB1I2TeKJr1CGFnUG6L3M8TmRtD5aYz4wH5ibjZ9gDBl9dFdGxpi6vUTbLSIg2FK2u9CNuJlfDyLbqW38wSvNBYZ6jj3WyM9lxMNuDVZzcBD+CYr1l8eDEwfGPFkDJRBri2vLUhJDoOWpGMRqxen3wxfat6y8yPuppGwKKQyy/pATwCesz9Uuwsy998PZ2ZUrV3SomKlxYyZqdvpLH6OPIbDVREubROSnFZnaX5qdiCQ4hOMkWkJrbMTQKR0hbqju2JRyavov1POhBpbW2E0zEBn6ZJqmqLzEKR/qwmlpDE1VxcuruRm3344dW6e54W97tpIdp0Xl/o3JMQtq6+p0Y2puib+lEk+mKpwAq0EQE256uZW9offtVl4t5Z6ARfCiTiei2ow6LNWPQNroKQnceggNGj2yJ4p8VruX1brj7EbNYYJSWa3OGxMNErfew4oo2hAm88pwmT016h5zl5o215OjL210f17KFoh8gEU+KH/csmXCB4jk6aNsACLq3U1nNbzJg0ASopOi1ebHUyliSqFUb7no9Wat3BXZalpNMHRh2dskQ5Ndk8ggbmo9S8Y4pEKMs2elmhfqkRC5e71xX0p5/NuvFm+RiHY2SoeuCrBJFdJuGOPj335VDRAl72PXXyXgwx9+b7nto3/43uPXv2MWg9UpH+5V+Ni3XgEsIYs28cMfrLVtSnh21vMvvvI3sGZBREC8s2ndXKa03YkQJxubX+2KOihjeeRuwTeXKpE9R0fc37BTnn/ymmOgePvXcxCijr7XFF9/5ctfD/4gprfem+DCX/7rPz33xEtuQqzlFx/89Ct/9vWLbXZ1Yxo3ImyxyKtPvAhYcsABenOzUVoHxogOCNBFGLbXK5wN2hx5YfZT2IBaNisakVoRQssm6KtPfyPtLWBpm5Lw+Sdf9j8W0WGM9uZ7M4C4R8bnnnxROzJEmuf5ExFutvivduEWi3zh6WuVLd94JzdN33j3teefukYlpTqCEhJzn8B+UsdOWb9c/qa96NA3333thWe+oUd4RyiO2c5CIE4WkWnCTH67I+fhsEr4rN4SH7Ti6phPuM64EIv+l5G0duwiMNJFpUnU7Y2yNam1sZ7YUnwIvQsSyfbjC2w9WYqcWD5t0x8lc+bUnB/Zu+KyQmxm6e3hicrkIS8OmNRVPAQ5zWOxmtzDsNqkZp+caRsn3pIH5jvnufQs61iFJNmLTMMgxa9OhqgOrQ+l91VCe3lmPtShGHkqXfS2fC6UKY0LukVnJk7a+YVkEUmNHAMlNimnF9SMA65oS31EMHT4dtQLBf
3S1SUpknOqLO4UMW1HCEupuKJ61SFHMLMdhhajZwMnRBxdGtkfirEU53l7v+1GtEY4ZWqOEOjMjJQQunDGTKYzGQxQD/edMKCgYhkCxRlZNgMZe98rK9TrBD0iLa8FZE81PYBWoizgR/SEL54SPN8uqUrc/Z+tUudoH43ceZtUjUHKqXKaX8Rw2eXbaUaU3/N92+dcZHh3UNA0z19tAwwDIk2Ck/TRubFo5mvpfWgKvEFMh8PZsY/j8ZypaT5YZgYdXYZImAyWWzMh75TdMfMyun5dElg6IvOft1dkK9rr1T2FUlo53bS9Up8Sm4aSiW+9EAIz8jrIf7miNQsmcrSuDZrKhZ6OjXqKa1fTdfWhDMbCZMVCKN2g+Z86MuKbh/WvSBiK7KR9jhW4rH9u9X4JnVEsklR4bt7MBpHqhlWbmolSl302TXDFaupMYTwpcZAqNoaZWPH1Ru3Mbq/L8nIlHz/V+aKLw3naKRSJR6UqZ31TkXLW+wvNrc9tucczRf7Ry53tCr83j4v5LJbliJvPeTbJk9GRlBuJKHVgrILpITOuqtZYiVQzaZ2a/lw8eQDF35uq2Fcud1jKC2S5EJdys1tExY/hJp9aZggPcK1NAD8wcIu+PX79lcevv6JomkbmKPrzxVf+ernzf373f4+oIvNUad9O0TSOlbxK78e//Z3tzYJYnTtpJX7xW99hhEtv4/74t7/z+Cbu8qMfft/CGnxbqPaLSxyGNetigDYWEYmIqBEdmA/cWmvNtgrpYiTroqQbIxeujF+uDOljdCOJRxqZjTu3V5C4zrNffrFeevv916PxVKZSnCdMAIk8/8RLBrF50d60k5SxTZMngr0QNN4O5VefflmDVqKiZ5+Y2nzj3R9Zh2G0DJtmZGcudg/I05L5nDZAyvd6kgWXLWDf1ae+Eb3Ttz83k+Wt938MBJPawDTiBl2PtqDmWp5/6lodoKmREcrlH2im5EK7q09fu/r0tWLvQww/zcLkUT5zqZ7iCThjWqqPx0rwiL122altnbHjAVDCHkiHq/fj6H30jiFEsMij6OsJF9cDVcqPTJDa5N7kV8UsD9VAVC+FmMGFE5V836iLOI2WLh6vsVltDgAMkTFkSJ6RE4EGkoEBNgNdSYXH4MRz6vnmXaZ6mFJuu1xmmaWWS+g5eL5W7HSe5pHkHfOL3K+kJJ5YIOSws9FtJ27GK6XcJwvU0BcMe6lH8esmX3WeHC1yT17c/9vSOIa/TKHiBdO8KS5a4lQt88ODq2ZaR0SHNbMaGqj4UHBn8On0wggPCZpQfWXoDvIULUPjMKGrkmVIrLdLfgHv/tLTwvTe4+20FyGxE5VSGeRkCbfVepwuuD09z8bJULKBdJfW2zpBQ0Tkp56IG2s204J99THKCinXQwiECAEiAOe9H4fKmqEn/lhyhWJCRiofjseIiIIbRUTGEN3QDdEUH95bU0pSJEvMtTQdLTmFycsA3Nxh3zFD6+dk6yqoyvwlO9iwCkZ/i68QM4GTNBSECyZKhMblqohYEK4RCrSmnKhyy3BAbEuMlHhileRQ/7Cxm70BYxCTYByPR9sB30cc+AaiDLlmbofW++h9EGUib0U89YQ058U628OMij5MBCymrAO9BtjO1Uz9pXVixWhf8FN6vTy62I/l3vrd9OO6wCcdBXvm79rg+mWkd5xFV7Bv/HW5slmtDBEm00qP/+uGZBXf239CErtGNk4S18DZ3JAytm4nORkj10yS01C/QiIpZaKKP7lozCJ9DW5fGEUriuXfaX3pJAVTo8zEdHWkKkmnbnEOppZqbXHAIU1X678hIEKWZsPs71jb9JjtokRtQpnG1nQcPqTik8waLCN+9vr+uSufuaDITxXj+/wAiJ+tbd0PwiSjnIQjZRKhNMlUPQZxawDoc/k/9sx0LIZWWNoh1VeFsqtWbrNEO9N1SYvZUt2ZmUMIpVQHhfcAACAASURBVKFHckqEE1Tb0/WFRLort/33Xi+priTys0NEvvjtFZEcYxgy5tp3e4r0Y9dfMc+WCjndydgikh9l3kmZtKeXx6//NaqHF2N3yk5kIxOnOwYU707bLiesai29e0IqsZz3MLhWW8FwVxvpMghBfnOro8AvUcI/ClfDdk/T3GJzodyI/cv/pR5iE0jK1E238cRN59X0ufHuaxcgNb/44HWIxhlNnlgU9a+zTV6++uSLQ7rY+cN9SB8YRHjuyQmRTKiPoD9G5nQdxVCf+Q3ix84CQr5L28Fi/dWYGwEb2FCxyJmwAESee+Kltf+zDNAZExDAG2smSs90tIIBts3QaCh2tK7VTwDkzRktJSrCYWplvae68rWF4dzK9imkT7Y0chTTeMc+zjujI+EG29Y5rs5yvINkelttcjWIZ9/RbwjXC7P/Foie/3hD4FNnr+eneiRIU9/RU8tIZ2MdKZ8C1iDfBW+nfwbSBIC4LcIqoOjhwnZyE2oPfALAJbPSN4GRMn7kR8fay4I1/ZuJIatYNOfWRKuyi8u/2SF2Fy69iqyHbJY5sJWSC+6zkZ3X4yEtGe3uHlC2KStfeCZXVZxmxQRwsRl362joaWN6OI+HsYalsM7+wKiW/BsSXjZKKj+q9C06Kp/w8YbkllGKKe3tZhEWaW7fKAIFZmeNnMlzc/cZ2xlg2rRnXFudy63ZQBIs4UPni0PeeB8Ubc9wmgXFVEtY+kak42vcZbbGlI83xsCEVTK101F80lWBA9Bmf/IwNKv3MUYfXXNKunRiECygW7OdEVpr7aDl7HA4C7byBsHxYeOoOpdd7soel8QtMesEyIba6Dn3Oq1j1lolxrVIJrO8sLIwrrNu5uYtWiNO2zOyqmhi9tU0kAkZTf2cPbFaXSR6i7fm01YH+bAlQjHd6ZpaidN7F9v0KdT4/Pz85s2Pu4PMffTj8ajbQVQ0tdbCChMR5oaQktXY885XKk3miQ1omaUu6ust++UCI/Lk7aXarb1bLkUJXeeolP1Y3s+cqqkzZ4m/1kxLI8JN2JMml+mhLL93r84biLaUS9eC8rmpb5O4iBHmkMR6wY73kZFuXgo654cUSUCdkzEzZ6WsiRVdhUSbfP66Q4IYLB0fQexUSLZch2PiCZH6Kz5au8glc3KBxAv9JcU2CSOhKlTXX3IBAxJIIzqk0NofZ1PBEq0iBmtsuk8w6OZtNVC4lN3XfX7Kg0yR90G5K/jpZwWEXUDJz2c2yc+G2NJNxJ/R8vjdbvyj3/r2J6zh8kd+a9mNuHxQbqssUYp3pbz5fh63/fyX/+qST93y3PB7tvx83vr9/FMvn7rzdsvVp09W5UGdAPDzvdDFB+VBeVAelAflQXlQHpQH5UF5UP6I5TMXFPmHLO9+8OslePCzAth9Hsrn/Iib05u19d8s7P/OEXNloa5EOJzeBVaCR2hew1qXkCgXxy4bcnO6fOQZFS01+xAZXWz53hb+9VRgjbmhxpreW2SM3nMhnuZmRsCCr5XvHWKzNr/sU9np3G9/+N3cyLAuhHsN/lQs+uVqKBGA7Q7r2t7lyy9++28iyBHAEM1HPzCcNptHKI7AttiJ7
Gks3meGshPFlhY97sm28jF7eEo5vHVoMMFd24NAHIfx5lKth7XMjfYzozXMY+2QMfHufmFbr/WIljncxg+h3i1vf/D68FgRj74oLRoyhixLoESs+69AAEWDByBEdPWZCW4b3iBbkyYqcajaPJEh65REspqPUYQ4GTOPMUJ4zM+VRfKJDhMR8ihQD2zSaTrmk44BXH3qxYhTIFonmQ+pRpKyBwBcWpDIdtZa4TlsB0bMDHGyebNW6JuAdr4vP6V89c+/GZ/f+NWPtscnqNQ5HPQUIU2UaSFLsqmtjJz+U8ILBLURmEMJZKGchwtQkYcZQpDv2WyrmkucSUWxS6kGZyV9pr80fYNKi9Za5nMogWeeG7QEloU4nVmPCq/lcABSc8y7xIywi5mi/jRRHHCshx0PP6Azz0GjrDXKsfcYMiOHN0spmaddmDAerXFrTGzz3EUloDGRFlZqBFCqiHimxZ3NCv5NMkgGkUQkFPmIkqfz90hVe0w0yeAYhRVKvMo0pHatjM7SGCKJaJeM0FEpUpI2eE0LM4uFPnrcrDe+JthKFg3OmSKErIFlyu7KA5peAe+zZpewebmYDAsD6N/JnWQ7eYkyNGYoVZ07sylBQNvUwXbahuTBQKg0VDNFptgjTFE5JnZzL7LTuJ0dDq0dlPmU+mob6RaH4+hdo9Et3hQifQxlUQ8wsyAjO/uVInpK36tpMGLDdQ5+6I6gwTIEULaosZF6+refq1Mf9NrFo6LZIr+Mo8RyH2vIdokItsfLefKThJktYW2Zj1JmXdApiTjzJTghxsdaC9qbqiiWs4V2CuB7Oa2v2jKy2aInzRE3ZtKYTCaMgTFwOByOx6OOUR9HAcgjr4fYzhWKMGeRIYOInKKenJdyaIbTdkfVaeqCQiO1tk7EjX2y4pS3X8uxfDZni34ispC8+gP7beczjRjpGG2p/BuadCsp6lBeaILQyR/aIdV6uHb+SZRK+RRtSj35KaeeRKyySeRUdd5JE04WtpgxohQPUsxwCsYM02VpXOqeMmHFZoJe2sYEm9y3/Q+hb07YelPPpSg6V0gx/71pc6gsUK3dlb4usCaqSiGdT/iJF7ctU+ZUjvT3jfCVZtc7G4LUqKup/PkpD4IiL1k+W9uZP1fl87x3++RRNrpbw+W0Rua7ggFYJa9A4OmASdwvtSQx4cZMhVltVriL47exJbwTEOupjYMA25R6N0oDABzTGiGAGtwCB8bo6srAjSXSXDnaQN9iA0v1DQAE6iIM4ahhLo9df1W0BhlmATHLEE3zpdrkseuv5HHVWnSjE6HxmqjuseuvmGOszZbEYtQ4mGyDWiUBwG6+wiFdwTKlg6Y8j8FdUONsIsMNNjQg9/G6HcNmGWyHz5IoMZibJgiPKzIAthT4adUT05ABwbiFPXfZMvrQEyKLS+ZZkpZ9xpyOjoi0Q1vqYWaA+jGHnlpbWjlGX9BDERFM6OqzT7z49vt5VI4eYKpm8EJ9pgZgzQ+jPNl5ssTEXfv55qY3j/AISGT4sT6DiKgxfCjLG3j0AYAae/YsVp5noiFyPB4tHxbRm+9lJOPVp78JAkEaSCEL6JEgJETy3JMvvvXe606WMUTOWmvEox9FhJoiHePGez+tjSGhQ2sOva1Nbcxqv45u+KiIkBBuHrnZpMemZCXxe4Mzyxjrk0N0no4xPEsdL4MjIsy8zAViX5MwIAI//9WPS619qcLYML1c0U7pzONDU5+o27nLo6GpMEhz2mEWQrCxBCSzeZ2da2Siujh1Cir4BTo7HATSew+fQQAiltPrB9wIRGIb6gFgDBBRY66rDgKRLmGhK4hpJnqcKRR8GHtF4d4Pm2vk0kMcY9FsU5oEz2pnJtHjj8bQgwvU4VTu0W4PEY5caokKEQCmg9NG96gOlaEDdGhcRbJlvpQhgsOhBQOHwyV2FoqykrKuIo0gUO8dsHEd3V6t4MSBFaA0EQFgDCGi1hjoqsbJ4VrEsAqYmcAio48O6WBmZnFOj0NNdMSHgEA6TMZ7RiMVVaMyadyjX1iCUBnk3lXptdIsLAOoYIoNzHBXDa4CARUh7hujaz0Nze2VuBEiwn7MjsQAuvM2RlcgxtMZDqXhsM2zCssQEYGTfDQ4pY8YKKgWRLw7/EMiYuKBQUQQLqgGCAL2zd1iTA1IHrhKdoIZKbuJgFQgpy+quVuUa2wzv/fcOI8IY/TR2YlpFECx1ATUFFsxMQIIH/h481z1Yx9H49XGh0OTwZDRb3Zq0hrpeTc3P745cDi7ctZlQATEjRoEfXRxYwmsI20pC3Wsej/3sSYAvQuRaWAZwm5S6OEiRiah5ofv2SgYtaHDTcLENscA+L5pgp/LaOQzK89khxHMhK4kF9k5iIi7AhYgR0YDsXGp4Wk8DWegOIOq904FZzJ5KxDIiGnu/7k6tzEdow8Bkcigjq54yEDBtlXOOYIkN49nVw7C9B+/7w8//BCA//zPD9tDZ2cDxHyQM+qQ3kXQO8mRzpT/b/azh6+IHZ1H58ebNuMFA+Yg2ERW3KQdlJykymACWtgnpR6w5JjPpLu9r0RkG3EnKy3+jad9NgT/Qron0bVplJYkNCJBd76CIEPNy348rjaBTl8mFm82EQRdugk9Jl0nKM03vCkyqupeWphZrjAxTz1KKaSJ2ylenUZIyAEXxfZZyp+UELQxsVt3ObULPo7dQiTdYW4ihflceMUjycmqOjzdc9avThOpQ+QgJtmE8+Ews9pkOABVN+Say+4m5wBB70MbRs48amYwMYnm/scYUPCOAig3PSyiGTLMPsWo89wWsQQEMDU+hDNVEL+qx2ZzUxeDeweEIL5a44fsiMCXddzXtPYMM97Y4G9XGUTc8TGhgZjQzKigoQmuGw6qLU3oEdh0OiO553NXHgCRF5SLQyAveaDNgzjKP1j5fB5xc9HJ2sXwXxxhVY9uGoSySwM3bGoVxVnHUV3WEM0RhpIWs1cRJjJNyvsTljj3k/wdapMuIjywtbljUcw+EFuOLuc0rq+DpaX0tT4/BfJCnZF0xm+/t3MEdrai3i93qImYeWCo7QEdvfKGHWhzbgR5au6KtgxIpFjelrQd9YDROIowvOTSOYRBInfnmHWY50CTsauniRLR2NyZ3o7UY52ffeIlc/4ZzJVoaWw7B5NsuGMXACp/xm07jV++ufrUy1ZBTp+sNay3pX7H3IkYNlyG9VgN2xw0pHaV+o8kHICa2/4G3699Mb6SSCfq8MISTSAijRQK0UxkLGMQU2sTCvzVp1/m1vQepdFKE21n9DvMwaDsmhGs0j9nP/Mme/zeNE93ARGZshAOmNPNLxe3Vwj01T//xs//5UflrvK7emgVXICtoBNni+r4O1emsEgHSOLv3YZSsJaEq0OCyK7lmQqtDXwrKRfshElNTA/l9+L2vcIZpfJgWW+ea5SEGn1VCUkpo4kF84pMt6lz7QRLEZiEk2kgxGd4hI3Y7Yr1O+LkvdZHGX6yZwxcLdaloLT71yNXPsgDUEij8LYrgNVfhUUNTqoV7joG6bzb5XgNStfNF+z0qxhu8hH03P7ZrTIo
Rp2pj6ohAUAX7gR2BIIOwzxRlSbqJKomDcFa4EpnKl0r9SHfnZSqvVJUTx+yoUHoOlEw/HSFoJvfloeJnRbjpRmSw2H0lQrUKm0oOm9dW5UrlX9B6H1UqAsAx7KN+8Tz84SVYWyERTBENL9mQGa9n58fpR9xdmjtwCA+P+9nZw3Aw1fOWjvTw7V80UBC7FqniA3jEDczUjE4OW1hgRzr3qGi+CnjO3S1F0tMJMrZNyuetdIg4/JGIorT132uxCg5cGkdNZTFpn9KkwJU7r7cJyitVqlX4B+i296VCTCBmwMqfORwdugyZIw+jvr4lYceEtuLMkQGEQMDAuIDN5Zu1rGJUixz0btpZEVMWCliqJIxYMfJas2/5y7uvW23SPnXalW0t9qPMk8eGzcxuW0oVTGUxM3NEUJubXl8JkSF1vDExhGgVNLh4r5U9XsRg+Zr81bPVegDNwsNWpXDTqHUC5jEL02tcZavQ7hb9QVf+puUPf1EpaLVRadKDOX2HVQmAKmECNvTNbgtQdnax8SBU4Xk08ZWPVX41UFGEUgk0+S1yVgppG8kU2sxBVJPwRiu8FaSU5gOINIDVc15TCNktrDdetAjuE4Ilfu5PDg++74vf8hwzkBd/7gxpMrDCyh5fyOSF8ORUoSx2uIeCiC6umUWOFTYuoMi6VxWvwfAhMqEYWrmV5osWe36/CcoaiJw49BIHqlhF7coTxp+MpkdUlS+YZnmV+0axfCVXn1036qem3pi8y/g0AKlRRu0dSv61i9YqyQmHjR80C6h0QrQ7HEHSR79Z+iVzZbkaJ04HrC4g7D09/aHvcE6S7/9/vdup3M75YuvfMcWkhc6xdplKUyIHvzyg3+arp0AUaqdeMpV2rIbET33xItveYDk279+/bknX9q70UoNP3RS6UA4Q1sLF0syHongDt83PgWIIJ3v0sY0mGXAI6T8UffNT3JPYINl2osAuPr0y7513aIuxcwrNej558uxM/DQiRK5tDQUC47uTs5CMwAvPHNNwa4UZVkLiPDC098A2TGK27GYoCqBkMjEGHatujD7dWyJXcqNd378wjMv71RBKWpt0gHukBmHpj/kfDkM9YGa7QEjAptJ4ZWrT1tnswVwpcye/QW5SI4wscYqhkIxRM1lgSM0hheJugAY6Rj4zX7ItkWBMtQFlCKQQwYn3E5Esf89ulMgp3CP8h+fUx7POfkjEg2WldNy0QXugagfxYuXKnGPvVI73gq4LEM8Sk47JdluDZSahGkqYgJLCKZ0ux1oFOfRdP4kK3H6uIRQ2s5M45H6QS17r0sRAsaIXfmFT4q4NDFPtj5FJBR2RlIztYEqU9amR5AI/PQiYHLBCTJyXjr3+MQklMY7bfTEdmOYYg8Eu7uw9JNQCmC5P+HFRqPwCHk1gYBXWCkIrLhWYUQnb95Ebsl4PwznHcEE1UihoplSKe/bDouGNtaU7ptYWiMiPdkGUOxSzo/Hs8OV5ONQERq0SBhEEBqFSIWnUhT7QQ9+IHE0uXyKeT7xYPkUMy0HwBjJpQGcco7r66urNUVAhlcVus8zhQwxd4uMFBV340h0JHPWpY1QrIaotbbeWgRku10227yuEKa4RDHpOUaX/l+++KdfeOQL//b//n8AmCDcfE/74MYk3GUAAxYlbFuVNZaV6naSGK4cJ8p5bzZ8pqpAsStpHpsd40TFwC3M2IlaRYbUCVCwSOd8mUfPJsXMNmFQaTh9WktajSucOapNLwuBRu5GEVs1IL/vtE4MRpjaMHVUkLwgsOD3bLhzdwg3qapKDXKiIv7n4uOnIFqMoEUNpqAsPEcaebB2JETMStEiKhHXtFUCFDqHGCBALNg0V3+wBibo0WBSJIELMvI1cz8mJ4PLS/PIedWC4MWXogN3VR1TD0SMESnK1LcGAkXQFkmbMLkTIHH8IsSs2XDfSiTOhIP3Usr7najYjsX9Xu6FoMht2OC9uRv6U2rVJYMr7/2yjKP+eU+Bkvd3mOQFcKSuTrnpZUhNhFDZeY0EhCUEmMiH63FM6hUAWuMwGxyVmHTj5OiZzXir+LxLFxGwmGqYDi2ddfP2C198K1/Xj2GrEz12/RVyhWGIKuUdGg26WGUf/nDF10JRbUso8d3LxSqWx7/96oc/2EfuHr/+qhnfcRwqiQEYNbDxdPGhFffdpVib4TVMJva25BqyL0S67Qd4OANCx17coNspJVZ1ageVcJooN959ba8OPP/kS8Ts2/kn1DX4QX9rv97+dR4g89wTL+ujtULNA7C0FJuvUEzn+p0jgrAgsgKIDggRrj710o13fxK3p+Unlhkg1o6J9FRhWkhOZTLzGvsmIqJ5yzabmZ1rBb7xPNzMAYBEhgXWaTIBkTE04WPvnVtbYK2vPXMNhDEGMdlOHBkLc9xYD+DO8sLmAJwpbk4pUskEwOJHNSHFyoY/O/2umQCbMgdl//xfktNe+PNrEIx1U5VNu9UpMZmpUynvnTg5ZY6ERKrhV8u9tWGVk8NT5AK8sJGHohmqIrbJK0rXEzcxOJKB2J039SwfCfc1nlsK6zVH4a3+YXv3DL6uYPEUxQz1ORg0pLsPQYULHGfYKwmyW+ulBMnIcEixwP123+RPAoBirprWw+N/NfPAGDKGY1Wqlf0xibcuLqCpahYfu7rFD0HSQeHzKc1in1wFD9wjdRJOfO2tCH9SrOaIEsPEu1mDNdf7oF+RCNDIgt9yfSUcMRKyjCDxLheB3rXCyZOZUcOFEkj2VIF2j3effHB9iTQbs4js2HS8OOQUH+FufNZtjaPS8/JsbfzKMfNNqs8CUoCnA/bFAljAUODMqa6LYLfniQIzJUBEWjP7QLdv60JUOxwO3M7ODjoedMZKmuPx2LidHQ4+ezxwl2aJQwRAgyRdFGS74qYck2mFw+eFLlcEpTdT1DZ2S6yl63PrLI51jvnPaYlZMK3cwDtnVtDEwFkx1ag7TF1xm2sVufrc9HYfrdDa+qfRJzgkAtocn4iHxxhj9EceefSRRx+x4dOUK4fWCUTtypWHIDg/HqUDNI7j3No5DMMCkW0oHjTFe890jOaJ7cgPrvKN7oJqxcSHRZ5vV/Xmq9MLqx/h2LfzBDs4ZfwGIPLKWgqM0IUBNU6d0yHOCeO/V22gM9uCMqIBrrWmzu70CFi4V+aL5bakQrQmPYvpFauSFAERL+F9RQcJ5pgRZSipd1abY+nO+rbd3hZJA3chPCa81hWqXB9hSiE4v9SprEH6OuV8t7prheDLmCFzZeZ5kHimrHQQ7F6SWEgsL5Yysut8kHrBdJNzfn371qgoS7yKlHrvvBLfUSaGPScz7k3K+7LcI0GRu1uY732E7t0Pfl1bfq81+C5uDL8PtqJvQcn7EpE8eZSNRGLs6uHeChgUAYn4El2V5VqPZPr7tJT07og8iiz2fnFHzN9J+e0/fA8gsZTUEkucGvh5IgQN2lCpR2mQ0aFSo7i37hiJDM0Yrhn+xQDA3AB74oWPXX/VDHdstTvMuCnPSuhNyh8hWQxumfUU5RYG8QNPoNn34XfuWUXxTVgGokMdW6Is4Eg
iUu/k4Dk4oGaYN6WAtW4E24toq7XvsKT5Ej/xktt4Q6w9T7hOjJDHWhEqFhl3LSynh/lYRCQAIDIqnuSV2hZMdnKlXa4L1Ncjh84nazVyw4GZELGIw2Ju9QQREYEMgsgYnlB0blzGaQDw+DWHTVnsx8KLlBUscm688e5EvQjx4dY0i6vIyu0XUkog8sYGQ3S0qLba+DlS8uxsub902Q5hRLLQ5qrFze29bvaSIHrkU8y7FJ079+tDOtH19CjPdUmN7Kc6BSn57W+Z/oyPPuPnvlxEK41ZmPwtEwDBIAkMQH0BeOSBiJ8EJeq0EIFVmbmn4xBBzET9PsSRAEJMngJ1Ld5Z52SS8uypLlW6SP3K+FNCSkZU1s6T0OlQxHmp0d1rbbavGYgMO3FhUVEe16KejOhpM0HoGc7VNhVVGHzlm8W8qYQ4Qse1z8j6qgpwvWcdDu+MkFgkhSC2TjHbwDAT7/uf1kUug4Xo0/BZMK2pCczV1P35Km2gbCOcbrYE3Sk/BHaBOmzmFJooD6htYpIdi0k24z2zgpM53gHYdgGpJHSUbtbq9jtG2E9hG3Ee2MgJPs0Grz7qoPp+OZydcTtwO3BT2UHE3NrhcHYmwID0OI4JGAJufHblSlTk7S4zRDRWi6gRs5l5yUMToV1HjTR3wr7YMHIho2mKYAQna9EXQhsQBM7G0QSXIogwfL+rzKVq+cANXsXUEFMyVebG4ozH5i6sRc+oGylwvVKLXaToAEU1RCJohzakn5///vzm73RyXzmcBRkMtNGTwRoRMLqMXuPhiz1hCTiruLRGRxcx84+YweiNohAEaatM7BG/t+bEreyhW1lLNH9wEkzH5YVAyvsd17LxC4ULuKA1iSvE8J+JGsG3uz/R8YkPtuXUhcsZJ8HTMxVmvV1+JL+aHthpzAonh3GYP4vfBGTIqqt8H3Eq78r5vLyDqITIFGxWVN7Hlu3AJDVBWGjmifQp+sWriJbLGGP0YeMs4pJgQH1l2/wW7Q3HhYqhB0NYg+dXwe+iDRh9DEtHAj07zg3j6A2MoG4wY2ei3Lfl3j8++96Bt+6dltyzpZJIDx8PBPPeoV7l8Pvy0O0LN2tLwJVps2ixRV0zddLtUzE5iu1ewvwBSyNtFeXSp5SNGWrkwZJKil2jx7716kf/8El36Tpuon+AQcPMQnUaHXkJ5edhHZasmaa+2D2++O7WlJAhuTUPjT/upDI3Yt9uUvtm5yyRuGrO9s4Sv8dObdcts2ogttqThg4V42/P6/rwB9/dVlW2a0NCSU7OGia7ozTSxzrNfUJGwIQTERWoJfsJsKCd97udmKNUOPDW5c33fvKXf/ZXTMzciEBckhsSaN5MXcvzT14rd2WxPZirbySR7TTv3DshhNTWUXQvvKZKtWpPlsVxgjkWAPRE7sj0vnmPqDUukqnMnF0GIBquqHklNw8mR1D+bXhuY9ZdfhjErUGkj67t1IM7olx98kWy80IdGCWSsqH+lmVnRpXJKts7oo/YoeSli+xNBuHIvbV5JxMJ0dee+cbPfvWj/B4pviJk6HS7Ql6tV9Q5zEWFOnXJdj1vG0XLpniXiKMPnb6Kbno1dAGlRNFEMWkmgRIYw28eFOt0RZgm5ZKgR7bXNVQKtVh3gE0ZP7kqPevqKlXxY76L7wgO0QfdohVAajQj1IXINm9X4q7miU8KiJREhfeIoOqYbR85udNj9BRnZHWX8iH4TvFCnYlQlMSvl8TVdHRk6cPEWSJGB0X70mkKeFwq0xrbV13jCKLfWuJti+S3vmQlyJ7lhr/aTvcqYYZG6bNzlV8TiZWACDcvLOGzpC4j+KEkGWVpB/1NSEaaUKW52+k62xc5+KX3BWcHkP6nb6IsA+36meqc1GWAnSIuLshTIpD+QYCIMDetuCmrCJ0fBSJDxpAjMTAOdEBrDKBxE5Kb5zdF7PBqTUTo1HYjz808sB8hL9Ajv5JLPBlxTrcyBLlZ3XSS8npu0gHcGLP+6OuEyKKnXEaUESnDm3Sf6ORaQgq3xp/GulnfpHn9L6rQQcAK1X7K2lYxNzeo1CuF+ckGjxQ606M2CO3QmvPAGD0EmIj00RuDlUkOBDqHVQQiw91hsfXTLuDJDmXYKSFOrPp7VoFSe7Yw5XSBNpd2S3gitqbu6yVFpClh2YdJQspZHtiqSwkCPSVlFjIe6D2W+RniwiSDfVlNiwtL8obeT+v3m1pKaH3cnFM9OG3qxOz3dQAAIABJREFUfWiMShWa3rzbNDjLJ2gsU+UbVB/bv6twC20lBCL3WKaEOFUCetoWGO8VrollOFMVygamiKdFBa8Kqe92mExpRzVvuMOyvmaQAY8oe1z8WpFeISzKTCFXhcN2LoVULJNEpRyp0GJIRmrEkYgGUJr+cfa/W27SvVrukaBILbt5Bu8dAOu2yr0WIPlHKXdMgT/Mhv3tETf3U5jkSTiSJnsKIhKZrsxJYM/QG9+6TmjDI6GK6QnVNCMtJrM1NbWOwE9BtbyBxdurO5U+YVkBRUYsX8ctm8+WPcXTd1XTVMT9EZjl5+nbmDzjSsHZZMigXqs4tZ/aXcS9S+4nwHeRL50TyKkgv6LofT1ccU9L3mynJC9G/W5VYqqvaPHQufmyObnV3IZo8mTOxjs3Ay4iHy5HkN9pmS1gNxjUTp27++yXXwyb9Rdz7shffvBPz375Je/HwjcX2gRpipVnbFvinEHy/Z8+98RLe0+v3RHHGCmAC6QvJ4vPAJQgsAyJElhWAjOZNkNXvT+RMWSYVBi6hMBkPLnbfcOdiCabySHFfJmfZguBvPXr/3OvKgAGaUFjdS5thOlrrj750o33StCllBFbel2t7zu19Jzf5udnc74WJgzCmI95f+NXr119et5pXhnNJfDsddT3Q70jAjpALp7XqVYA46WNiFFy0EOdk+FSqMqC7VrJ/BIKz4Ibi0c5KScW+VWwgcClaJo7yinkU1dAnlijEER2mjMMDJ1pZKKQzJ/1qGknlQAHUG4rIPM44e6osXboRUWWA/G3JrtDxOa50oLduTdJTIibAUCPmyhuD5FHwgsJOohIWEInwSc5ljBn+zcQhvJ2+PBL6Uhtt5Mr8MRwug3npqr10mEydytPk7NJNWgiimsWEWEynLWwqAF9w842qS5renaVhesOSP9V+VXfSGzHVhihdWZI3UIyq0OdcOJrg/bvFAZmRtNQREgkkHcnQDWwJoG7qN6g4ITTFJYOgks8loFrRj8mi/kaXTV8rYFKPaURpIMhkGHx46xpItHHcYhI78T63eFwdmBuAEhIRHofrEFiYpFAyue28EZNpzQohoNyucD4k2IWBuArBdp3ernS8UUyqRQL6zPstOibj0Bd5xARqmK3DoM/mPwKycnr7Qk2dOqXeUX5LqpVz4sBkiJuEecFgI19ron4SzKVARS2l5wIx+ORGFcOZ4eDHp2O8+ORGx+PffQ+ZBCRNCGwCPrxqGeAMLMeQi0REKiqHkvxbeMSarKoDgLmIIYch7kEx+pfFygRyYrtw7rQNN+AIt9Ky0KwTzfY1+wWlXdLYKsNcb+xwt
zyMncnEXMrqIgXMGntT4qDWK1fVHeZGi4co3eERPWXhvj8O03vrFlCRAbhTicsJJPwMZ4pGnXZSmLYLBcOzU9Hz2MtCCXbRcqKigkKUa5pmtixjN7Z6LpIRy4KxbfaKLNP3bLpPgACLedyZ1cp3qkddgJMxRSOp/6kzQ2wVRk/aq8wLupB3CKpM0IknhyM+6B8GpkiL0YP7xuEbtuRZb/2Hddz35Sla0Gfi4HaUzT8lFJP3scbty+AIwGago5kI/ood9ChwW06mvUfinD2/CDpayacViyEbARKTXeh/PYfvvulb33HXp26ZDEm9X1pW3j0A1CMwTD93VYJ91PVj6CZ16UokcI3ZszxwczGjVH3xVdeHaUlu9aBuBuK8Jmy2SU64gLThyzgk+BBqMCQYUNxONSXf7RBAL/46qvlnYJi1m/16haOjJvKiq7ZJYpJeOeNyfz7k725rfKlV/86NtloXsLAEnR9dtveQNqffeLFMfovP0iAzDJvDhlzMtKdWoDnvvxyWq8byNg4apfTF+doQ4tqHU7IeUJpKx4TRiJVb9DZ0iDCbTN8oMd8DAiKL7ntgbcndjSl1+vwk2hEAwHuGjER9X5e63n+ia83ZuYmkN4HmsZjaarLqVx95poYVGYn2VvIZ5FnEzGIxBNbbQLZjIDx1NLF//rMN8fw4FDBkKFc8bN/qVGNWDcFVvqA/sc7/1gq/IbFoI6xJsXEBGQTqEM451H0hoqrNDk4ektT1CAGo8QZ9pMTbRJXGjnLBFLLGfOR9CIiNRHkWroIV+ROR8ilPZUJX6tQn5k985cDAhIIuJiLEyiH9i95lcixP4eHUOF7/X5EsJJ3Jj+U9ZWi1Lwa+958V/G1N+Vyd50064S4xKmSdHJiKVLLBQhgsI7f4XLUwR5EvT6ejstROn1xxWohJn9oIjZVLNxFpMmRZkGDU9vDy1yHLVy0EA8T6AAFKH33vruIRKjgxlSIKKOTKORcQWqmiU7OF/tzWysRxcY8+lVHRzgoDrcV8n0Q6PE4zocuJaaeynZg08eG6x2E7plAkTqlKzWl1DfRsvTIXibDVY560g7u1vx/E1H8hlKl9DHUomHSHb1MeciPLnoNkdGPAND7+eGMDoezfuwQIWKpkcw27SX64ctzFbE34oHAmoahGC1TNwvNosHLGFP8b3SQ+RomM7C84hQ8Y2pAIIaE5uSPmZ43T6H7oWAsKxyCGUqLYnfLDrPm3CkyygXcer9PdxLwoQnkP373UR9XbM2SLeoKIALbv0Qy5HjsmlbIZZfjNIXQ1bQzzJN1jKxTNVR+l4xJzPWb+YHtcIEsSXVCvwlQTSrPRQTI9m35DCab4RDdJoUyWe21w7NLWSsEBbITX+aobGt3elLZ5filk9InWl4s6Wh95RIAATBKzU+ykKc8O31JC68t7TbOL2xPLtUlRUXIktsoG3ElITZpytheCLAaa2ooSukZTGo3Xe3zdXAPKiQAo+d6kUsaNRhUfLGSxZbfRaC5sM0yKGNvkqqXwEg3oc10X93j0qFcMZhZe2gqINXghYWJmUWGJ+Wfwk5C4vo7nGQX2Fuf/fIpBUXeEo+7AFQ6dQTzJWGsP0z5jIZq/iHLHZPoMg/eCzzwWSkn4cjwDJGWGlBkbKyVqijsFiWY0JzHjNRCMrrZAmr0DyEBER0HmYNLwiBzTQYgNFiK9vmkRZgMHInGCwSDicUVUvdNqgE2iUQIGUVMCszeysW0UDbMDe6Niwwyq073OY3IyPzRZhO0DOjy6+ijUHsmot4pdvYrU6bzr+jQ8rDbsGKPMmOIuhkMsKBpQiDAtseK5GNzYZD42SMyRmvtPM+by9NivUsbC9kOQsEYbD4qE2T00dXZkz45lq6jt1bfnZQ+RESYeWDIzfOolI2BN8CnDBniNtlo7fCXf/a//fKD/0svvvnea8898TKQ5xNpSwXy/JPX6n7t5554OcaEGovMpxTHjBMB8JU/+/ovPvipfv/W+z95/skpJk6p9+yXX8qUlMbLwsyGXIwBgJgwRPMX3Xg/MlGi90FEjQ8AmPg4jua2EImBExhjtDbJh8DozXWXwbaVCR1CoKMIiJnW49R1dzkzc7Hk1AAzWKgfEwwVX9zehEbqJL1581w9JRlqOBIJxrynG91dbmarExDbcohoQ2miEJsZG8lTST1JAR0iJxBt4ZHB6DIO3CDSWkMX6eve8cOhGUhW6cnJ5PX7fy67szfNNOvWPSkUuMRd/holCxfGZSYT+UZFKvsOs9cyAlnW4YFxFHPigDYxmYghXQBhRkwfpfdv/n4nyYN3WZlNnQN7ipnGGL2LNztOTDIHnsFjjI7OzK1xWOMM6l3PqwloWuVxN/qGh1Nw+UM7rGMpFugy+uDGCXM7aCJ9CA9DI5LsAoifYkx6bAxI8xMPIcEgGWIHN6kOUZlnWPwY1mVL3av1jeFyh5DIheZmtcYqiAAizfpJvQduMKzVhqvSAIEO6olFkMzoo2MwWjCAzyeEU09QyZiQBBTcFxD8KBcygsQLBQMelKjSQHf7TtiNtU1smCadQ9oJJh4VQgqXUgTUtJ3dYhKDWsKH1uAZLQsJT3juBKB7qoECihuvD9/IWRchAVDJbem+IStFRnAZmZq25RayfJxRSbTBcCAf2zHjAjrQgiHSiVhIs/RSm1J5mKdsb9b4GjHUye8Z5IwRhlDFb37/+5u6m5c0kQuNfjzSwNnZ2eFKTBy++TuMI40hhzMeHeMIbscxSDdrEw0F2s8OD4mIriKo205gFcKDOly2MLNyFzHItiiafTjcfELsIp8AP5uZflpJoh4h0LgxBMojuSZhhpsPn0JUDAugmjLYzBhPhGEHxBEWD63WshZmjlSPeupakc0WfrvYHMgvK6InARjlrVJ4BGYAAGDiARn9qMlPIIPPDvJx50ZnV670jwkAP8rH84/5rPXjOLty5cqVMz60m+c3r1x5+Lx3OQqA481xvNJbQz+KprCTTu1gp2y5lFYmGtJJ/Fg80x7WtlG6QkWeAiLcMtY87nCY2dYHQtaqZiMSbpEeR2KsAY+kDbmUMiDeUWQO2YCuS6+pmqErmnDMSDkIQGuMkWKFXBOEep17gpjNYf+UJgFqRPlqGYlWIkDKNV88IxekGk9ZiBDvBPro5CXfJEbCiv5tf5doDNcC3muvUaFBiMJpotMz1nIGaAwMRkvlTvMios09N7hMe9Rjul3kk3mMx370SigBWQYR9+MxpLK1PxrcDEYvXELuvnF23EBqliHE4oQa/iJrczsc9Bj6EKIZRinDYExdRMuMJbpryAdJe0wikEZmw4ipZSX+AOiIo4+69UhTkcLYzI3HnPtAY4yxcPv9UT6l47MXOGmBje7lk17uVrmzAMlP8uB9Uy5mnnrP3WWeulkb99cp2yePsrkHy6Me2Hh/lI82B2o//u17roP/8Q8/XL75k1f/aI387fc/afLQ+7W8+d6tD3e+l8uN9396473Xb+TRPTvlhSfXTet3q7z57vagoan8869ei59PqQ2XLG/dqqkPyoPyoDwoD8qD8qA8KA/Kg/JZL38AL
LIeXXLqywV+OhUaeVfac9fLJRv5eUYYd0l06kCbWzLPxY/fcbmPsUhcDEcusWgRBGCLZrb8ZMEXcdGWrmMFizJAg6YbregKIDeOYsnP/dBTBjgOTv3E5bc/+Ht7ZVmjzM2P3oWM34yQm7IoaIttIrqKPgU9zURDRov4mhkIoC0WGdRI8myq2i87i2EXhBGSL+mVRzeL+drZj/77ikUCdrCjRQRNy9I7jaCyry1r8HM+QdCcgbnqCtqe3HLBK+6gKCOR/uvLqbFCKmtkg7XMe6JxTVMLbXGzPCbOI899uRyT/X4ihnXFudRTZg7Rs19+MS4taGONSs5+MVGL0FTJ4tN1bbPfNGSKBLEZ7uNbO6YML3qqsUcMBJFERLpYlOJGdJD32yIismJ5452CQoroedki8vN39qE3P62SGrchAyKtHQ6Hs4VEvkM3Tv0VAMq8Y+6XNVHK9PPwHKJpyypS4k0vKwvVFsq9ZB/zgK79ckE45E73qR6tXJb3M05tKTFNPRzFY3uWOI3oGEfmRO3SfrtTDEYktliwK3DhlP2T//bfkBEB9U6VtcoCuiG0KIXcPAhQvt67EDFVgMY6RCRHBli4EDc1JIPsR0jsjF2KQBDrxTTZyEN78pJfGRoJVyacuIokfzZJLZ5Pz7ptA5BRVOJxIHnQKBdVpUrFDvzsffRjH+dJEGtxECG7g2QUp0Zc9AiOicUpqkq6LFxG072y3hPBbuIHbWcZosedMIH0qHfm1rhx44NGZ2fEFC3vi2ERp6FRq/BVsLxkDzHRMdqIwj9GiBz4EkqXN6B0d2raFJc0E3/6NFdWGG/qNNkZgDa4RLpjunE9P22nRo/G2kxgNWjsmkwWTTs0C4KO6FCRIXI4tEcffeTRRx85HA7t0MBydqU99IUrrTUwcePWDhg8jhhH9KPIECKM0T1TpJPXzngvEcuxswQAoY8+9IAczYBBuWmSYjIVq1P/3J76M9Gh9t3/cVmlcbUay+xhVxFnVYfGPp+OPpqfitft2RT2b7209Eny8WKgICZnCSRc+qZxzLP+IW7H8+PHH3/8J1/604cfeuTmzZs3b97sx37lypXGTQSND0MgA4d2BsHZ2ZlSJgLEmf0AOaaJIqoAt0TZTtvoRFg52lwTmeW+Df1PfdiZTNMXG0W/V255y04nEKreXnrJoLTZbELI/uWG6cUXmPN7Ly2voJ3vaSP7dipwoXmyU2kZhN4l2LwVF/S1E1GtfsmsZ7N7kN8Ymov8gmY1ZppHwuaJCLm1EKoGkLD1vN9kZhPg3BY99bZJJhsz7zMtfyJwMK+EVBKXUCZH/WfoSQkyt7lov/UoJL9Digk66aOk3/7RAGnq3kWH6Y9c/jBY5EUNOI1IXvKp+6P8EZHKPy4xt5Di5Zln9/E7K2cPPVyxyKf+8f++z7BI3OJk7dCDWztc/MDCcHSkHBJGboO5vZEPun2VpihA1QO0p1KVbfGsT1pUiep+x+yfKRQsBoJorL3uM3LvZ8S2C1mqjS/UtgVAsD1TwxOU72KRj19/1T8SLrCut9aXq6b5CVpvrY5YPKNNzntV1zER/fb7Oxstv3j9Vff/wlWLxGOzYbzvtwK6LU4EQPONRQjMgGOn7J499YnLF1/567SSBQHLap8qxpodKX3zc1g2Rh7RzAi6c2nnGJ8Z3ZsuLTdfyPUnr5HxtXOykOVhXG4rJpIjKhJnF/nE3WmSiKaHpS5dnw1IF761d4gQ8PxTL0XU4Y13f3z16ZfTRK1zZC7cDmMMIeEZlf7qUy+7kcZcGqYQ1ZJiEgrOGqeTZeVRNj11AjelS1x6HSnOt9TLopLOUo677cszZm0Ta//dU/nq09cKXmbW6413Eo+e82TGyRn+tzfRQNK4LC5iB+Cpmmj6p4hkcSZCfl13WqnFq/3VdQV7qnT3os66wyR2HK6qEhABg8QSvIdNbu+tXkURO75cYGxILsvhnda9hO75iif88sOoCxTo2ig7C191i3lBDm+FlA6NJgAwyDdw5gEq3qD0E8XaggmRlDzBzf07B/B9SjITqAHehhgOrbS8JmgdI0WAHRWQw+28bANmOSgd9bK2pDCKuS7ugU7MWPlcE+tlAlqTLgmXZg6uOHZZW8qmCCSrjIRh0T/CDIHUScm6yS79spgVMXe2ksfTFtByPbJvemtm4wCkdgQ5hQHRtBIh6xL+X4cGy5/iLIxYL9TxRZzgLf5iEdk7KG6pztvvvdL7PU+0n6lCFC8YMsKfN5K7xnzoysMA/vM/ft9aOz/e1CyUx/PB3EYfMgiNDwddM2CAR0/WtXy2E8+XtqsxRkSgIeJ7pv3dhl9GVtJpa20R0PNgh7VWM2ESCQl7Ioq4yQRO3fFNdQZvakeySJGbRfCeKMHEbiR7ettCDxt/rz8tjmpnRfODRv6oHVskLvtFQJbMRESYDx9//HHi2tyOoxO18/NzHiwiQ8bZgUcfh0MDcDgcWuNghN5FhtCBg7ZKVEnu3WNJmrbGl68TkS25Hqw7Ekhl0lxfmkl060pgvavIVa8M5Fk2SqMq+5wsMS7OSdtHKKevCAQrEXLcZl1dcVdVOabdUrnUtxT1sF1jWHjVhN1FeriKsunXel+hUkjQ1CGszmBM7cm9SN0xOSoi4onvdQ86A2PLO7WFzDxGNNG1k4oUy1jl6hCoo0QmkaauigvDUux0nXJSnWu4uFW8vtBFcwrsUMGuwEGW4ibbZ9aFsTsZMeH+aBV93tzKpuw5fzfEYtBYbf0HZVNuC07C3sbky4dGXnKj7ifEqj6lU1M+D+W2KH+ZDdoXP35nY3R/B0VGOR1xSK5LSnAF3AhGWa53DGJ0GV1T48BXjFJiqp0SejdULiJZjxbRBTavOYRwNdc+aRE3fwESsp/avIiF9FeHC5UOOlmf6lq/34VQMRQKx2/Z7cRj118Vb9dl7KP9bs3PbeM/si0lqNV9jfifiOg3e1jko9/69oDojx4KoPFEmwVNqopyjM31XIscSTwJprEVzloL3d2d2uoVh71Hxu1mL8wrtKLHxLK1S7+aektTWBbg6MBmoN96/8fuUgTjT/Wg/oC+UgIkN2+kxXS78e5rgbUFCw2fQ2/+OiMQn3/qGnGEwAaWU7CpYO75LV67iNj0H90DXd3+0uQ7Cw9oq+eoqJML79yYuf3snTUgtDjI1rIxuox+PB5v3rx5PB6nl5WG+nKxQCQYeDki5s13fjyvJgCw1YOfvZsbtP/rM9/Aepe9j3yNxmzMGU69/LzmiZ2MWM8/lWG2P/+X18oifJFFc/fD+NVqTLiMFGALnDPNBL1nFKnm9nEQk0r7UN8vgvWUpk2hqc3myhQBmmxiYVIWlIsMU3ZdJJDkPXK5Yb+dgSPonihSWFEAktEJ74dfp2yIvoWo5BasrY+Oi+WMCpwn61rEbVI2dFyOuoOIRD6YUZ3TLmHU/EWa75KDhXLO6VIChXZKn6pohJWRaoOVNxybOqWKJ69tsgDsJfmddiqwlkIEha9Qg8eSCPEwhRoxopHHvvNGq+c/8ZqdlpMFzvswOFuWCFKq10LxQwKEqjNmVf4Iqbed
HtM3RQpnDcVayrHNpm9rdMYt88Sa58cl523J6P6ljkWzyEeGEHEjbufnR+am8csGII4OiCVEGzZjReBnQoiTa4gnzpRYXyBSnCIEC0fnlqaNCMKjokSj+5Pk1h/y8zdiTGZKSV6UqUqfVKcNTpmqncrCZuQols9anwECTS+eYEYRjPP6ZjBdCEig9KRYbxLUhng9Kl/H4ezsykNnv/vP3/3Pf/u3s4cOZw8dWjtAhKlpRKRyXIyehpZZFRCQxdQSFx1hI+vqTYoZs7FvNhbB5Fwowzn1Mwxhy5wA8vP+GJRhKjpwGf3bt7STLxaTL6sEFgNxUrYJbjmB5va7HVHEHsWFRTRTeV7Wdikd7eM+G59m7rhD7GfTT+2s6bkRqm8EI1QjxnSWz7OtBehHHC4sQpWJxJZkdNWICvNEHkmnNOWKIgFlhjF5rsbgXb8sgmFCgzbchsqr+r/KTwYYwuoegJiowVI8su9h8aDNOrdjAhPJGsgcutjNmYmZxPg4/sJ8lWCK734pC/KyxIjdSYW3iUUudz71Z1/+DO1ovkxTL8iYecub75ty+QjZSz5y6s7b5Z/PQ1BklAs2QKd9FlYbAMThBzNi5E40AX7CarFJ1DuIW+HwQrxK8iNSl24smMev//Wd9jTLb374996/KUjQnRfaqsbqlYZfSnb68GmbZjZb9PeHP7gYVqukxmVshjpS7pqn2tttVXbN77IjBwlC+Pfv/f32oceuv+IrkBQWt6loZwavWGJ8RWSMvlQV3nI6GpaBebEAaifuqn4lb+LinGDTBlhcjvEDgYC33//pbp21Ju0/Ea4+NZ1CMxmPSwUT2KCvPsVaVsdzT7y8e5nmD2+///pyNXCB+moE53vxHXP+VtuFIzJsHVt8HcFngYbd7WCNN955LcgiYQgCS77IMYb0sUDYzz/xdYFbxgHsQLg1CETGjqRIKkVjHEvyAagYn9+fYQn21WyLh0CcvhzCfpSRnlNQvayLSuWG+K5Kmtx8M9000um/aHMTlX/jG7oACa4N0zmdFr7DkRsKSMAcqQRoxQfWhrlYDBSs1NYaxw813ZPaWuOmZxeQnXqRXjCpGxxt95U0k9DsL+HwiefzUmMQHJWocc2FXCe8usmvNJobPm90rLI4Kg5n21pH4Tpd7GHDANoeq3ehMgigbWgW+cIYS/jDLmQoPeQcl5xNRRoV99+xmILS1TorrUytkJCepcOKafhHY5jyAcE5lecp1jcLngpHD6cb48dZ0XfV8S6AMBMK8KHgktYjLZv4s/iU5feMFiVn2QOSkIS7mUH/qFGmCm3gABld9zNKoBUez1ibTzGWczk56QuvRQejF4o9MTM3unnz5se///jj33988+Nz8cOyZYDpwGwbx6mJcBfuoKEbN3WHLxD7CCGBJezNJwnzRADBGKOAFiHOdwo81cLpfs6T0LVIAXCCzy8Uj7NAoDQGTzPXNErVIlxum8hSLkZE9qbe6W6zvsgkt6Aw1+FwRsDDX/jC+fnx499/fOWhsysPnY3RbXRkELEiSo3b8XgOCavAoGUCzPqdVqmCLKXR1Y5Z5ce2cyl2/C1wE+qisjRhemEsibkVCpPG5YeWyi5T3F1weL9qEaNNZVebjNlSCuJsiWFqNVuyb4/uNkhryWrVWPW1lapRqryt9ciJdxmkrz3xowfzzbmsMLl0BNJMMi49ials6CeA2HZqs8p0X8dYnIWqUlzhg4Tm5Sa1VL0h/kLL85JicaqRgAK0SvD08N75Qk7cYCMiKdA3gyE1GZA/LpoQyX8015HazbVVpVZKUZCzIpvvXJiv1fYfRx9jLEb7fVC2+MsdIJIKA90ZFnnq/jvAsB6Uz2jZHp7+SR7fcuNu+ZwERUY5CUemXRGay9WtStnMJGNpO8hTDiF96rIOZv4jkQCWNRCIILsM1XIn0pKj+aYtbcGl7YZbF5fzlDpbzXBbHBsisTkIqZ+COKb0yubiUgTVUhF38eijH35/25LHrr8yOcW5Wnax95TVT6jLrR8o3nroPFXuTL89jZb6/le1JszlIztMbutdZEDIxa132GUYAIN1Ffu0j3H7xXg43eSV8qd8VjcUFvjs+SevufdezCYkay/df/v9n6S5PV+qf4hYhMKzewGSZnJkWhwrb73/Y/cb0sP/5b/+U73n6lPXYBu6xe0rH8lq+BBo7xTpaGfjxtyKUUtmjQUFRJ6bD595872flLuRixVevvbUywT0Md4oB7a88NSLdVDCuBRgjE5M7XBgbktCT7Zl6QATHJ2yLITO+5V6770eeNLQH8LP5rNrQp5tqOJhHSLuFE/3VBcxayMA+Od3MnHk156+VlZFLEpbBez0tpVt13GSibIFoSLKf+2pItVFxHc1m1zgQibXAuHehbiekKOLPfO1kQ4v6ZgWTaFMDsdkdVKxZo/yMTCxI3BY1h2jHRKZHpJ4n276Lx6PCdEhAfLuSh0qAxTPhpdnNdl2rxWIJN21WvzS4kFlI4JAEihvgiYGP3ltAAAgAElEQVQe/sDukTGDmbhR49hWEG9kolb4fdUsUnRVQMm24hjEyWAokdqmmSxlXGXmQK2H7czbcAYD+JzblgsWogeYzrVnu4VokNoSfosI9KRXokWLrCw5z08HQai019rHMdAJyq+Sq3rM+ttwtHyJh/DI1B2tp7Rk1qIenWQZzDjGB65ATqpFn+FLp+25uoZERIQQi5mzG44w6GUZFnL60MNnIn30cX5+PB6PIqLrlGoyEYMY3IgawY/TdaPQ2FfqoIoRjbxXgAaZJx87uEATTJAIwsR7LhUKfqx1mKnj/CxO/pgCYpIyY7slbpormwh9oazbCEafaMBWHVQxPEbIA++pjXxBOlyWLRXAxWVMU4KIHI/H1vj8/CgCbsyNj8d+PJ73PogsBwu3ptFd8TOGKkMVs7IA4k7DRPcoZkvtGuk/VdYShQDfp+wJ+Vuvio3Z1vSMf92sd47bG8hbWZcSbLqdcwWQRC5zn+6TP7YqHv/gGJZvhnZZusd6qHK7dF5iQCYTbjUTvOknu38i64C3Sqa/YVIenm7R+EDD9cEBO9enSFL2nlqiiKIxCyvPCMgyh/kEnqds4cckAAEkDFDBWwMuNTdEMMqPqEkYrEWFgK4nd3+KnPM7J8MpWkSlwVyGM8xXV5RIJpyHdDUm7p+yDQr7hGGSd4YV1qcuX8MtUadPKdzy8jvK73Hk9NMLR72tmvXImgsOrrnM45e8+ZNHAX8Wy8nckW48D6BYN+5HoUajuHuij3mC3mKIF80FcttktcKlHq6SzoK4RXZXJSyFFyCmzyAkMkBkrrDeplajiLvN4WdJSVqj+j88S38FkxtCZrp9tLfd+LHrr0x/i5jCOaGY9ywHkZ2b12/SugUEwg6geDzbRYbA49dfdWc646WiLSJgtlRZuV/YtPb+wDmgEX2y1oUvvJTf3CKk9FLl8euvgjkSAFFkStRhclNoae5bH/wTTpf0rqtVYFYyaQKzZ5/4+iag8oQBKMH/SKN/+9Ly+9knXnr7/QTv3nz3NQDPPfmyGjRLXKQ+pfyM8Bl0em/
O1alv8s5CQAyLGmLQ8Dv08BmdO6ShgiTbU4neev+n2jwQ3Xh3c0o1EQFnZ9OhNG+8e9Fx2wC+9vS1LTJ7wRHYV595Ofr4wjPX3ih3vvneTwC88Mw3QHjjX9bjZb761MsxOtuWGxk939qyQL1gdt6AlcJ2NkUyojBkm++SmOdGpOixuRSePAAL8PX7mWZehUUE+JeBGInziFu/VMxg8a1PUAQP81K+45onijoHY2hUDi3TZwwJEeGo6DBTnd3nHhUR8/wASTyxlmcn7Vfgnyi7DuFOhoczEGsgbtFJXlihrhRgQkoPrznRwOJmVoFZJrAdn5FtsIbac0T+bMpoCWx6Wh0DEVEXV0+ujsssPzke5YGpabOkUeFkCWAVbNq6QrOfqHrdAQGgWaMDTCE3KBxCLGC4jeA46tBUNaIVsg6nmBsaArxTUD1Ui8jwtzlYDa/XTBoNaPXXUFEU0I8KCRERZNgpFo4SZatslIcYnORepDVIdP7N3QnXWWkcGtbW/PQVB13KFQHY8okMARHyNBsimsQCTTO9FtbTqhJdGRZiDktMSmQGmUGWTF945At/+idfAvCFRx7+z999BBIRIaLW+Hh+BAGDxgD6AAAeuILDWRNxxovG+dl1QoCQJJ4MQHdMUujmXCbTWWkZOWuvKCzPuSzqbLrgI1DsVDdKE9BYKJnPF0G5+8pFzs+vqoaz8brD8oHACoYM0Z3UNsvJw+8vEqpmNlSzioiGgEEqosfov/vd7zTKFkBrbYzRR2/trB/Pmc9ERh+dhBUSBXDepR1aaweVQdyYpImMCVwkwhCTUy7NXSCKEyHoSmVuChYsbf69rnXpZKzW41Y6wGmWVmhIp5V8W6W8LTMnTRhysIAxlTdkw2/xuczJIpBlvlcyaWlKeKQsyMSiqq7glKFICw3AzLC5k24QpK5Ze5mt2uX6Wgz4dofFxRVy+cH+IiKRQQ6UEmAIorbYzIbUMvtcLsOshbp05d5V/FOaLKUbyQX2QC6zxUVyle8oLgUz2w3kloXLqOTluDoVIlcy1owB0ZUVRT0HILGjQY12rbwod9fp8+jMfAVAA0a2Yun+KYpI1sNtFLK5zPk2dwtx+/SQuyVD5R3kGbzj1ISfUj0PyrbckrAXoJDvfvMv7u8AyQs3a5cFeFvFcXNJP4pkmkctkT5S3NSNJ2QjqotScS8FvshuJ4aO0Xs+KIDcnf3a//7Dv9MmhCkU/yqK5NGR5svEGpt3vNJmv7gHae7Sh3vZGFcsUsutDaT19mjM2MT7zbfFqKQpWSwB/PZ7O418/Nvf8Y11cF9hMf5yvbLqQu05Y9WOZXmf3DwTQA+4kXD0CZsnP1lJt8g9xTR+ChtcXp0//+S1Wn1+9NAYsvCuCZkKiHB9TbhVyjVe4bNPvLTeGAcrMzHRc5sb3nrvx2+///oWi3zh6W/WkBMthm4UjolpawvD0S2x6Ag9WdvNOragwylzupmpV5+uJMrmvbnBIr/29DUAArqtY6btTViJfEGJyA+d0F99Zm3hG7/60RaLhIuo4bKwXiHzrNLxWf2cyy1Z08T0IkNGt903X/tfvxkXbvzqNebYijR7FvW1XpWbz/5DFXvWhf8MzbEnPJgnFvgZYBH9IeNB38069GBqxBI/E/Gt5JiH+Lgp76/Tc7V1szZz48bMfGittRb+gauhlMmlN4WP69tQ+udAx+xxibeq4HzbXnigQoYrmBiLPFvpPbLTf6P71heYY1+6pQ0pmIXfUWR39EogBikRYT6A1JYblt7W1/kbw6fb9jmZOy2AuYasUar4qnSbbIViOozeR+9D/7PSh/QRZ9zsl8Vddt3mAjReYJlAJA8+35krQrwmBQZIl19ctznZaRpBr2pqjmcFlCV6q9xY30aF8lLu9BmrIwyVM5ogtDHvHVswjfLmuljYT545S4ALRQ/YyZvNJ2fmduBHHn34kUcf/tM/fRzUj8fzSBnJrWFQv4njTenn6OcYAzLG8XiM6cYgS8AMO8Y+4IfqaAvguq2KaYhIH4NiIMoY0SmenYgSQMn2W78mlJkOTlh2lA/R5sqEFEyiBxKGk4O+3luvVMqjnrKxVmFyhWYmK81CUrC+2dVOa4dDa733j2/ehG0VHVceunI4HA6HAxHa4aBC8dAOIqNZlgwG/HxzqPZbVFvpeIam7ZBCygMLRbemnluwhSqbnvpkn0kR3a51nSiXwSKntu5Jtb0GyPRJdvpeb5dSeaDJYaXdirmzouQOCQx704XNh+0Nl7K9C+0JiO1t5botV8SQ2VXy6nWjijOta4SLrLgwTRAp+4cpwLoUGm+PNuS64qSYCJG6RsLMUUQvCB/axDSIOy5ud2mFZv/Fbg7F+4fnXx2SO7VnHqp/h3Axq8Ingbc2bbJdumS3L/RLP/vlruzdvnfKp52J8g8DLH6G8mluy72Dvd6Sk9/95l8sZ83fT+VkdKRGvowuAJi5nZFnjlMbnWgE0hg2gge6eHiUfj0d3OxbEExPEGRA+mAWxa2ESWRgyNDNI4zRpZ4tc7cWf7pmfIOH5Om/Q8QTsKP6tnE8h+os96JD49YPgO5IH4b1Eu+eDPP4K9+pHogqN6aG4ujtqhURYWbdVWhgUi7SujtWVkm16NGKlrfRhpdCGRLxv39/J2Xkl175GyE/xFaYqRk8rZ4RIGKHM0JTXPmDpltFiDc8Jt7gPOY1jSnlnLrevUu9OygcnYeG8gHqFWlsjjUtEYuLy9WnvmHG1hSSAO2LsZYMgBq3r3z5r37x64yyFNGwkLmUDXR69MaQIxGwuZFszAmCLmMIvvLES78oMZK75bknXrb5QwSiMfGXMq4QwJZMSsIBLsXcV2ESIRKSIa1pajAMEW7NzhuRjAd54amX33j3x7iwfO2paz7gd5b1xsIKL1M89NnCN0TG80+/9OY7t6De1adfAhMReDMtiaiPDoEeDan7NA+YGiS9A6szQ0z/4//5x/jzq09fY4EmYhsifYyux3MohTcYqMw9Vx+EbbGBCIUtqxWrv3tXKSV9kO7Ri5gJg10NDYFn+etjMLGGxVHu90fwjIlGh80uiI6UfgRAw8Bs+CGgehj9GAqrxVKZMaNiJUTUuBFx5EhiogNzxzCnSDFS64Wx0xhCTE3PYR9DZDATkGkXTer4uFjXmJkFkMozA0KCslhl8JROaGYm1m1ldmyQQCSlun5DCP+Jm2GN9fxfg4xNxTCR1saNdSXg/2fvzdIsO5Izsd/Mz02gqAJQVLf0zObQ3TsgRbWoDTRbpHZBAIki2esQyQIKoFahdUgr0Iu+T3rW8LFQqCEz47iZHmz0c+6NTCADQCIrvLIQEfee44O5uU1ugwPFFSdWJUt3sbFyvB4o4Y2ZY6VphCr4Io+r2daZAHKWkXqeOaZw6PHdKhfkkxTCFttJuci6Z3F9jGA7n8YSHb0XkylsXEs93PX7ILq0y2QeFMnBKPiPqEBg6cpUjRSrQpmIAisjGi6sb6LCniW4IBP3VYM4ttwMaQQflPf9zrRHjcs6c2waIUIQEaK+H3yzkMom5apNtwaRFYK3XRH1mH8FM6ZMFW
qGvTuU6YETeFuayG2B3iNYp+eD6q168++SQ11U1nFCeHMLxyj+Oe8TWNOBxjigvOVTZI0S0BGO492ot1K13wd7yXrmmoRlqicJAcVadJTB3P1P2taQnmP2noD2XYcSTGoRtNDhLp6J7bZEDgpGyqZ2MXNNWUtdgjW2RlJLwuj8ZZiM44OIboWsyejt65yitKYY7CPu5YOErqkROUR0zCHLB2fsBrmm/kxKg7DoZeHBXTqGVQi+EUsZ2npPrDs1cscQhynvs4rAVMLKMxqcRnQo1MxPLlQkQQgoPwMgPWIBdhor8B3w18baOgwp+fBjWuigP+5ri0C8IhKuy5XBRs2Zygoo8e/fjaK68A4LZwa/v9uiw7AknvT24eXy3XfZWrW7tbtwHg//iP/9d/+v+++u5P3/2b/+J9Ilah3a3r1pYnT56sfY3sELnHZ1vERANLt6nuw7KOfr7VYjWQ6JHLAbWdrEarEzCuBkioD4NQrP5p860jCOSnQctDg3P5o6qqAmG0PLNDvgmRIraGBsfEAagQh85+j23mtud2xu2izwwYp4vEpHIiqvub/X5dl7b0vgcAZlXpApDJIaIA00LMFiEVwNiSGLS3IKo5TxTNFTqYmzP6jqgrZ46ijymcT3x+Dvw0dvZ2g2C8p1mLhvEDKQkVpWO0MPp68NHEoY+XvDSNvlK8N28MQ5NyqMcK1QQ447OnMfO5O6UCOvgquwkPRmTZ4WicPaaBSBa2NVZqVJEE0ynXtp/GKTUp2/g8psvFtpCJUngKKK86a89DOFLOmJSeAIIPuzZVJ9oZbBGkbbDMLbNojV1c688r/tiJRGru2FP3y6ZWEaM25TI27hc3hCK4DKKupCjZ4AmSdik/u/LTprj5SyOPf4l2L0aRv4RyxjqSiNxKC0FmEZzDuSsVopgRo1QPeGGpNuwLsqbTxTw7LGsIJZnXvJ/dqm/egP0I/9qiLLoCM8TVxB+3LKlwsNc/vIOS7fpS/kLl689fcoIdA+AmRZeIItaSqMBd0sYNaOYGFBXmViTKAD7McFK3HuVExObcDbjzW2Tl80TTRKoq4tvPt7SlpSaz2DUDRk+okjqAqbNMTMyu+TKor/DdGzgJu5mhhpe3Ww7awFrAix64jYEhRqcSNNsjVRvGIXe7KheGRuoh72zGmEAiyi0F1rwq9kq8Q5LVWUg71UT1oONJhff0QE1AqENDG0+FP/XPY8RliG6FYk2qQyFLRY0fypFJmIlAcLQ42leFp7KJmfRN4iiIudR5fd732KEHcr1ptVlBIWj5QBK0M/K6jhbjk6rfHd6qq4rlTwZFXiwU+8nTUSPHNABlcYr9WyqYCR/l4KLyU4PwRMwUkzBWHiS2v0GpRW6U10ALjJsVnjbaDcuqNFRwD9Ix9RTe/rmviwFDNDc1P3Wl9md+u9r4b7ff3MetgubILHJY86RpOUCTbl9qHPqSncrQEWOOKU9kGSLRUCKpNJ36ZgR23thlAxMBTRKR+mdMWfSKxg/7TYOUbeALqr/ECUUwemKgyzQTVGjL9JtX6QanxAqIdtHeRbmxQ5REkI74FXATlo1tIjKXmm8midnOtD8oXfY/tjzGOdMgNT7FESsQYQFqWzzqHMvrM+NLN8SpoEv+lNHzNAAEeSiS77//3hr97rvvpIt0EcG63yvxK7dfWVeBypPHq2V2vn371R++ffzuW+88evzj7VuvMdO63ogKMwHc+wqAaVfPjKZH9TCkcoIZ5kJ5bacT5Z4gvHoax/kv4/fWyl37mF2UU1zewXb/Oid2A9cx3wVEcH981QTmcxkUmzZKz/2uTCNQRwRcLKNwA3yop8BJPk0qXeu9TN4eEtQ8JNYuRNxak77mpDFxJ3tAAerSGwGRdq+1RrS6wTw3FTDxJu1bva7d4q3lgQSVI/5lmDDqmFNUOlyYWlmXagl2YpniXMXaqBuJDqVgy3CO3JzCOfRJfuRNHdLpCq/HuQ5wLi9yzlZ7sEdOlEKTx10DHfvWag19R8eWsl3nC+jB04fNYALPB7dih9dkh31O+U4neSMW0LHoshxp6B/VBo89cUdZhnZkwrzJ8cWzGjuoqkLYDrmPxNidigfHdYLknMgeUd/TdnCdn8q2SR07V1QIDFjUIyoePJSiWlzIV3W3MgdnLATy+i7lb6H83FLc/JzLBYv8hRR++iOXArz10Z2fuguX8jMt79y97I1LuZRLuZRLuZRLuZRLuZRLuZRLeUr5r//n//0FkbLd9a2X4sX88ywvProXn+FL+auVc87a40YqfaqK2YGMy7dxD/i0uye/AdV4+vhVzrgV87Yp7qYAuwgCynURxWV+XFqV67bqSkburlH9eWjEWVa2XGtHjfuJ3vzoDoDvfn8xk/zbKN9+8bvXP/x1CysSwGwYGxE6wMxM1Hs/vDAlIoudBIAIzJTX+cy5+0apOW0AMDdV7X3d7a6Yee1KhHVdmZlb80iLqtoziaES07rvzKxQsxzZLVddOoPTwZw87LyKehad1hZis5WwnKEQ7czNUoG7E7eitdZ7X9qipNKFI+Mzc/NkzYSlLQoxq087VlK8ZHz2VEGUEQ2YSC1hTuTVISImFpaShWcqZoDgJh+URAVp7QSkpVcxnvAjrMWqbvj41Oqrn1yQmekRAkCcIQGNbKjdVmu5mjkkZEmRVMNhLTN3cxhqqBvkKhTqWYCOlWEKFqMusQ+DNE1R9uaf01gPzXlONpvt6PTObAYLtNbKNwcuQ09r5v2HD9P6g0rMrDGKTAuTdvQUpl2AEJqe5iUa6ZxoW6k3qmaloIfTMuxnhqXmqINmXzmvIdhbWrEx+Y50UyYdWwPT/pzsQM6VMPZEmEXkQKvVyKF1yGS1M+2GycQ2bINwUMF4dfJXzxnSTG5e14LyDXdQCIIWLpx+NHSY0uYG0DK9xQBm7Obhazasm9Pg+cRUujlsnDukbZJHnggzHwYTCeXmKz2x350Wusli9bvsKkRmAOp5g8MMWpPMuBnWgY1V2Rq5WeP857DdQmzM9DTfAAhsfCjqH/bHZXVyTOYfPk5XsSTTCHEXlqFmCKQKdw1Qpa6yu1pUOzG48breqOqy7HZtB+Dx48f7/bq/2V/fumZuvCxPHu+Xq4Ymu3btJnU73G6vSF+ZF1VR4tYWprauvTXe7W4DuNnv07xdI3odzXmfImIEYTjg62zI5rFXbQr9ZBuhZzMwhPQDguJtVEI8HlFEkpdjgq2apbCqspKHC4kFlmmJEdNLjVhJStp7pyLp9gwFLFgKxtLoRA4wlxCKSY3zh4SDCAQDX3VLXKeq0riJ6NVydbOuC+8A7LsQUTPf7bDi7Gv4XjQAaMwcdtDMJGIG4n1LUMKGa8P463GjEhSEEOwXmSwkbNfnDT6WDOe4nU7LO5jd4An5TPyebjHZwXpS4svzLlzbPgyfk8FqsuQOm6KLPHP1x8opRnn8q5x8M8WjEdVCATOuo3bkhdyx9rsPzamaHGzO8boqRa6sUZXTtNP2jrXCQqjPluFdkCQ/mhvHoVAO3b6eZBgQVYt6qaqRIwuiwsQSsoETJExh2Qvh8nN
+OqxAsCoTeY5tbM2I9uVfBGXIk2INDu73lIm6lJ9d+clT3Pw8y4sDkS+rJ5fy1ymnM2sraMh3AZyUf1hdY0odQoz5mGufl+AP9pJLNKmDpPRlXkzecsr19jc31i69azhIhYcMRESJGRFQn4PHi4onXoiOO6E2bDNbElGj8iYKGVIzwguVMQfk9OaHH393ybj9N1Iae4JDFhWRLitYlt1uWXb7db+X3gAQcXPVRLqAAGaxaKWqIOK2AFh1D6DLKiLX17ek93fv3WNmFVkBVVXRLh3A7uqaVYHFw3gLQG1ZmkhPTKB3IYBb62snZhG1EJPcGhMDEJGr3XVEnYcqukjKccuys2ekd1Gxj5nb1dXVzc0TZiYGCQEQaO+GUbqvuAlVrvlaum3V/fqEua3rfll29gsRKUS6KTnMjZe2iHRRaW3X++rSfEBZFk8hwCJXg0DERCCOAHFs0eMiyKau0s3lvO0W1/BFzHuw+Tm0EJyT8zoxmTKvgEpXKDODCJ6mFRygCeB4w1DUPWkPRiBPDSVIAJrNxUN3ULaWwUGyIoOEeqr0cAujwMIqSB3rTu7zVHXp9PQkgjsCh0e4txCOgEgfTweEXA6l2s70SXELSwjC/+5rN4dHAsBUM6RLF42oYcScgUeDFwDAl7/59zhRGOOxBESGpp/AHiCNE9YJN2xTbyhZjz3KG89ocs3dNHxjHj3urNQclihmG+BIpzNc1Eo3fK7UYQIQUQuMAzEP47FteDpXPIKBRTiIAQ+oaGtl9oHh06gQlcg3UyfY7s7IHyYoINZGQOF2eSaihlWpqlEVAL4lMcFjing/BoXYsUwshrgRqQgRMZGoMo8OS8yzQSxMU1ptWIhHQiE1YGbNeC8Aml+uMDgc3uDs/kAV84ysADEroBEkVy3LCsFjGlpMDCIwDfCcWRORE0EgNFRiM+ahSlSQMLK7RGEGoLQ0XvsqIgRitsAaAECW2kNtcUhCtWduyuwxPVQsoB+DmImpISjXmDw7aXY9FmqssquztjlFpTHbbDKTmPtts+E4lVaVxsuqXY1ssmKcI43JBqDo0Jz5uALyLdEE2rr2W9etS9/vb5Zlefyv6/XV1dtvvwHg9dde+8OP3yzL7mp3zYvs1xsC71pjXPfVeC6WhaWj6TURM6P3dce3iIhIbm5WS4mDPPvqMURs/ZrtN8qB5zajjMPG3CqWxMT7dQ3ImByGyyQ/DIESgRogakkOJaN1TBhNnjenJHZGk7ASkWgnv9j2qtSZB1SUmCwkX0aF47aIrAl9kR9sb0YSYEVkJ4KTSaZgDGQO6zrdM9iei2NtM7ksS/Hd9ogqTKCrK+W2v1nXH5/s+/7HJ08Wo0giyqoNrbVbt69BtF9XEb9zbCYe7HTdd+I99ysVEPPN/rHD8pMDaZIWH6cPJDmWIjAchGDga6zB2muh8Z8BZzEzAdrW6BG+fUaxGxEt2CzgAUanNsqXsSFp+2WE0J0gTTjxcB7gNxNxwCBjbmKkakEPfIuWldSYpwD4NKEmqy31miOIVTXOiA56uvZDkCx+J48kY6IWZ7+Zl6w2J9Z+kXIPTwyYmDd4y2Ce9ojPZYhD2RONPpTNEjSu0OLR31KrGJ1AGZyqJX/Usl5D2cspGkBeEg0i3UxnykMWabXHRBUep11oLDTBRcJYgqZG7WHpMf2+p+Yxyj7Zz7Zrvvaez9FrsiCe5h3uezNA4PKP/0/HVc0WZL2Uv5XyslLc4O8ClLx4Z/8yy2nrSPV8nABQ7FGSUx4jfEXtHJ8VqQFQybzY5f5zw4N0RHXLyigVmBAAoSBm15yZ4dd0LpoDOvei9jLFIOeizmGOXixtqLzijV9/DOi//ocXzf58KX+F0iKjS+NGyrb3RFb1NDFuYmAPhzzCUG1tCQPDor1TY27ruk7p3U2lb80M4tb9frcsu93OdMvWFteNRAXKmfiFXBRUtQytRMSW7QEAEa/rym6B5CJkKkUWuanCCn6gFGw5w03tBSCCSQk3fXva5wRQa55snHJSyDG+gGMUFt1eVqzSu90mqOo3X7i98Hv37oPGsQu5LwLIWfHXEJIfMZG4SuLvRXB/5Itx9qduu5ibsEpRJLT8h6zLR6upBqvqV5/+1j7+1Sf/JL0fivuoM3byOt9pxFBpyiRb/zRlUwVHktyiUIXylkL20Gx0o4sohmTqJOwpRcs/YxeZtutY1yYXKGfHVCAj3cJzyrvZQx+V70GdwUR/lCjE8cNoS/GPH9bE1OzbUPGn5zM7po02WE2p8HzPnz6t8wtJQ3x3DxNJgmPsWhcyMAQbgU9V2GNGNMU4DOT7tlwLFpPHwpm95gmAj31XVKJxHIYqWfivmwAfbvjkvWPVpuETTHluTrg0z/MQBGhbYSVI1XJvmtyNekwZUTUHVWENzVODgDWtIZEAffPW09VojWOQi6QIyilKMNBW4wwJgC69oelMeOKXjeKZwF8Ous6Gz0GYrivcPm+amdA6XXBhZlNbdUwZg4jBDs/IXMNEwMbw8wvHCnhRgaj2bolNGFBQ/+6771+9/QqAH77/obV282QvKk8e7xWsuu+9rTeyrn3ZLQCur69k7dQM+BMCK7Svq10djasaBRzXsC6xepaNWBajH3GLQtR0PgV+s3UQaXOz0ca8Byccav2RYttnEPXKB8s1STIfL9yMEJmlkgGS3YSG0h8NcTqzWMVxH7CNbSDyjTACHj5PifsUY8Aiva/rkyePiNCWZvjpsiytLQDt15UIXTpUl2XpXZibyRD7de19vearZbdYjdw4o6om7BhTEvOtw+j7YHLLb7Gw0wPVcNA2+8TNi/XnyytOBIZNd+nPhu8Nw4mxqElkYipmajq41QFFdau+svany1FuP0sHtebn3THnykFlVb062qlMEvDSlokSGQ4ioACpKJGKwZJhhQJJo5U6D3HSTpQg5Sm7WY5Kb1KAYr+f7PFwWR25TtPIIv1EN4Ls2/nXoHTTYAdvwthaBMO6oUFDMpS2ncmXueiX8lcuP3ne7Z+8XIDIX3I5DUdi4iMzkaOhKW34lA4imq9VyxrVxDFqwojBx4GZpuajeXMVSi5CO3X5zpqytsd9+ulil5ZD3NXpAnDqWpUz/Ps3PvwYwL9eLCV/xqV6OzE3JhIz3VEF0cILdU/bas8QtVhtk0JIwzM6HmBm7uvqWELvIL/pB9KtY0W8SyBqDSJhqDVgOXIAhlUFquR5YITC2m7tq1neITaodQkod81x3c8Oq5In4S6gk/vhAUSJgsX0RGFiMlgQKppe5Hm21TR5cSlQiclcwmdl13ESCqDU16C4Hbudw8BLyK4OZEKgQlhTBZSPSdWhvxmpSIJR77t1gs8UPIav8XWZAWZZ1zRQmEc0Go1PB2VxM7iystl8og++9uTNFg0uX4pObYjpkR8HE3Fa3C9a3fgs7CDMJiKUvUnjinxHZrWocwVnyweffFK6tplA8gZjuRiMgRo6ZEapnVadM60qUzdAUQgAYorldvibPKulPU8e6mOgEQcj2urOZ4ZsXan1AaF+WZfCaCpesPVN0wuvRSOFQEnP47cL8VZUML
LXDyjWZjKg2Bz9AavG6GfZpIM1Fx4enSyAj302kANr1qwyB4JcRup2HoBomCaqAnbpwuTpWOpbh+kiNHS0MY7CkeNYuYdaBdJTRy3ih3cMgNZMH+xLQOHCF7XkzMSxVojZE1msDCYSx8I6M5d1KwMQNcgyyRwckKzbJBeEglmNyxgGZd4afzTucWG1EZioF1Anx2nr0p2Sjw3vam/MmVImFNNDyHddO+xkoVNTIv32uz8CEJXr692y0JP9D2AsvAC0riIKbq0tDQC33bruicEihAbiMOylzAWlQQrCIL4apk8C3HZbU30ilfrpe53/nI+Bo+Q07xstYl7YReZSOf8lM1wK8NC+SfPNuNzRlDwU24ueaN+XIMCooGuURy7nwWsnm7xDoy7r4QH8RyB1G1AfsEh/cvMYQOPWVwux4usiHjrGpWGTB2xQu+X66vqGqUG1r2tri4pw2wFlAo0s2AR78uEZX5nFhDoXMcPlgfoebW0SN4f1pZWgbGlAPPrgp2UQ0LxYQ/wYyz0+KxXU26q5ciBI5JF1LVX45iyn4pj6dWxc89wea//ZSmVnz/Ha0y4xc9PSwSelkunQFw6ePgVhUEzhGyNhfu6Cd7LpoLUpUhwZpHNVSganIacU65phUT4PNjoc7Pq0hEYbYTJnIhoCMO7dEipnIolgHzzSl5Od2ZeKQl/KT1B+sb7bFyzyF17Ox46c5bVB51TJVecThHYwX9ecCulPyW6WUSZlYEiQcEEn5PlRTHwii6nkbteOzohIm6TbIxwOEaxugDxDBh4yn8lGzKm0TA+8+fEd7XKxlPx5lu9+989v3/kYQO+9tYXIltjklJCDadzwpxeze4WZFuBSg8X9wAUAACAASURBVG8WmC+eqqqIulpquq2FfTRP6v3+BlBiVhFXDDRuWD2sgVqkUmJqrVnfyCwpAFVhGRgkqpg7JZuMb8Ozb3ixpQzj0nsoduO7+Q8Tm7ooqfSV2Dx4d16Likg35DRtOeFYYhTXwrSe5WqVZRUV4kCgcERKkQrFPSY8XicTjChucOizkCRGAQyYNBRD0+KIKJZsPuRuZRMCP02zHRSrjHNSqSZxe1Kb7GdItKG3lo4e0KSM7HnEVII2P59ethp8bSi9qLRSPn+WQ0727jrqxQAY+Jd/f9JTe1rtWB1UfbnGzGJSzaigw9M/wNsqpOtQb2M1ZgVVRSQwBz/UnFiZxmPbdzc4wfG5OxxjfWq7UFQGEx8pTfthvG3GDePPuXUH/10N2SisubG9ljApS36GMYkDairt2dni2OqUTx1rC3X6a6zDMahUFdMTUyTwUgKgXSrWQqPm3DU0voAalOyxcxFq1uamRGNQea7gIGWtK4SFSVczhJdiSijvfqLiaoGLgjDnyY1g00O3DXQfUMnOlXXJ6yXMFqw5joQ1HGomzMowQTV0ziAoI5ylatduB3pEX9BxRcyxdlXjdS6j3rvehRsxkaxdFY0Awu6KAPnhhz8BuH3r9pObx8TEjFu3X9nf6HojxPz6q7fXdf3TD38C0JYdCH2/7gW3bl2rOz8SEXp3A1WB3+MhEXGbocno0NfN/E+5aWVssfnK35VIzshFpdPJOEYdB/R2omRlozsgIY6txhcjmIGTrwQyNtVSeBuTxqGtDGUMJvhhPRYDmgsD0pnhaL49kE07GSIiIvv93hBH8xphJiKSLgCt656ZAerorTVVvXlyA+Bqd8XEIqLSlcDNEHnYeHOyKil0LGYj4g8e73do0eEDUjvIFaZKY10oD8PLLH5vQkc4c70Ymns8USHMNGe8Shl6f7qrHSs83Wsd7VzWB9DZJ+uADnoz9/jllHpMtkyj0sbT5VnXUURizvyfsF60hNcUB8SEAI8SThHLKylLkOrBULLn6oQ9AqHWeQ8qrmNEsbrzcQgLmaQI82jz5ObjSAv7FA3zm8KJk/hRchAXaLxLWntxKX/D5ZdmJnkBIi8FZ+DIlJpKKWLTqRufU5xlqEDFHavoRVryCGzqSCv0kLWCx6jzF1f2Ji2xhOtwseVEz2ijHE2DYx3fUeoEKPHgAQXe+PVHAC6g5M+2mDjurDwkGDN2GbKvwiIiqYQYENaJ1dgQADeGqqEfw/I3wMZlYVXpsjKxinSR3bJzdCS2JAcQQGyJaJqqSBeOaHoi2riF6OLyKsXGS4PN+Ja0q5lPtLaQO3ZKDLeZZqsSdhvHj24GKVSFQsVC4SAlIpPb2KTAZuH8vvztb0YFFBkATiZxGbpVasLFsJHqzCjkqL5RVTcdNZrHn1QqQOGASeOxId0R6MtPR+d7t/BehxK86ZbzJ2fk56KpUBCegvPgYAFmzTiQnkNTiXItP8jdEX13eieGUxXv+GOAYTUKl/UiVGqoUtoP65FenS9lqZNij1U9yAHFtgKuusVs+Z/eHZf/YevrQUjnKdXDJdzOUQzaH0ddpWcaVf0reE1SiWmUZf9stlHYN076B20emIA2lEfVoLTRjuVZoVrRFgopPx0DTljM9bScu1Co8o1kshSTR8ChdeRg9ZM9iv1MODjaigH7GkwXJwNssYkok0BxG7qdlBy8TtjjsC2d+hq2XFAAywaGiGOjgMMxcZUi4rEjGy+iMtvuhQOdTjVRTp9yyXgyhAg3cZG0WAQsSZmP1ylzWuLbEooj+EG3Aw9TVQJP0lpQvUpCpjlLu7ACM7fGy9IEWJbl0aMf7TKNtclK+3V/dX1FxNxkt9s9evzjKrv92p882QN47XW9/ertP375nTbtvVt+DDNgygsZXprjX9INT7dbBRAnQSoKeB7amN5AOhwIH8mPfEWH1ZCi4j2TnDh+Tf+a+WmnHvNsBe2l8Vep0UYXKLzv5lgCGhRtML68vMjYDHHSaEDSSQxyJVUHV8hmomIK61rbFLvdstvtloUIIBELiyratVucu4SNBOJE30JX7wGRVXVp1wzixmzRPSdZYjqax9hjWn0a+aAQ6ysF20zlaRTrYD1eQtlip+VPrQ3mvQtimmGH0Fc+oNJNjyf7yEoGMfYoHZKoo6W0Pn04KPRZoWAe5LMWOunmfKa1PxsYOzxYc1OH0hHz4BEeM5TYr39MyA9K71jeZpXqX0TMbj8g0OQeBIujTS5qIq4eDodMh/UeeACQi+VaNkSVYryL9SYujhtHdoZB2M3JCM+x+Jfysy+/EDPJCxZ5KVbOWUemMJVxdGad+Mgrm3LqybhjRtrGbHSf8YG1Z2zFBVBNUS7jEBE3c6wNcl2Z54bvaH7qKhflX9Y3ytaHOGmatIT8lQrImBB649cfKdH3F/ftn1Ox+EfL4vmOBMjc1ubOwUBKKoRGESa1m1XjbABoga1apE0IiNGsO6iLAFjXlYia5de2XDBhOCkT3qBpj0dEaokqxGFE7cLL4dnUotdMxXvg1boDlr0xvk7V52i9ABkYqjlkWj3Jpus2RI4ZtcZFFwGADx48FKcPFD8RAmIcQNeuAl/KhuNCIbpqSB4rp8xYSESVI9XTfBKF/WneWWSLqcnqHBFvA6No9QqfKMYkRx5AkVvv2GFWXa7crYUcRwBfwzA7/jXTzclMK14Fo
OUjpOv4aeHzEM4tlyiVPMYgA/3xJYytHe0oFP/ym99sK43ywcNPpqkzxdv9nhy4N1k+zJ8GbfVfKC0JKztw7Z1y8XVSjxSqk7FZKtbelbYNPubnoaoOWtfljCocE0k5h5Q5KA7VJ++T3ww40pH9TGh1u05UXtdhGxcNO9lQUWXHXBTR/2MVzjYqWvaBTasoeVQHJaYx4/H4eNF/WJabsr/iIQKpyrx4Us4wxg2B6c+l//kNl15v5uQ8QrHBE7KG2PQq8wOV5Q+j5TKs0PW8rrEe1k9mWdfaiTiSusnml1iE7dUtMA0HaHhCl0LaGX30zkiaYooioptBlUwcSvqmDuuFFfQ5gCc3cGsMdBvt7mrZ7drjmxXg/c1NawvKLQIxrfsbJeLGV1eNG/ebdV1de3/19dvfffOvqiQqu7Yjbr2vTESLewpTa5aCiZRU1OzGpAs3Qtp85rgdRhOa90HYV/qkGQqrSlwopNC0qBRSXzkP+U9uGCdZ8YgTRrU7PaPElBvdf3g23jGrqkbVeGonbvoH+Yq2Bnm3s5whXfw545zHaM3YHFo/JGPKhkg25taMgZOnqfF3SFVaaxaOhZi0d27cFgLw2quvXt+6/tN3j0RlaTs3ELOBaRUBAn/dCty1g+nREOJDDA1lq0+vkfWfxrDP3gb++SVtoU/0ov5VWAZq8JWkxBnM1O8hcgWLqFPlCq0w57Fy2K9jk7x5YmY3Xs9TGNvZyk707VDOKF+pFs+G6avjH05i5WgeSmjcVNQPoPfKDxzTon4vqcjAzYUQOgEHUYbLmFuuvxAitivC+N0sAdhhzXDgmPt4eOHl/2wv7XLsqfzWmwVfW6eMGCJ7NhjcPgZlQnvE1/nLnI9L+UnK37eZ5AWIvJRazsaOrGXiHkE3T1D0WUizX4fsleYv8cxxRpcSctHKnat4ntvaelxYPoXdbnl+Wl8AQDquhY43dwcZFW/IHWa4mVIIFK9/+DGIvv/9F+e6cSl/rfL155+9c/eeCSJi1hdhJMjE4ik3AdNSAMTGcycLJaikaKMOajkIbsYdESyNQgUW84SyVDacuHqxQnKpCCAzKOydidEMwRTvRRE4tWw9mHlmSJv+lOF8xJ4MG+VUkeMLzK7hmNg2tOQYon1i0p4abkfRcyKKNDtdOylDNtZtFkyWmJtKR2gt1Xwi0EgiNwZx8VF1TD5cmESZpflEh+xfCcrQjUxYNANSIiImeJ5QVYl455sJwvsPHuhxkZpqDsyCKYy+0PaFMR05DEmzzaGLgDTtkfyDQb6GtKuksUPjR1QUXT5D8OpXSaXs5UoGaXrGY8D7nuaU5g2GOd1YNhorE6YDnuU4XOHc3jJjRyLMJoGwc5rupob6SY6UpnlUWQgKMNECiQABOkSto5IQ96fFq5L+M2pswy7y+LeHKGP5oJjY+AkZPrW+CezaQ6GAhBXrOLUO9JdJaJHYrZZ5PIVr54DtuMf24LiPqciYu0fSOHhs1iibjeTgM2uEtRyHHyALlYCwipqQiTJpBa2LtR6VRMcp0WaNqZ7Ws5DyJFLmxe+dSYzXb2jKUsYRzLnu0kEepSveUwBd1mAVAR1n6/V4DdYQ7+ZK+lLmVOi0b9RxbrtZClIURIIZ8HzoAATKRMxMzJkGh8jXLsId2rrG4EYfyyKoiHYzDSIWBd08uRHtClzdWgD0fiM3N9yg2rm1m3VP3BT4059+WFd59923Abz55ttvvvbGl9dfySMjBMRgc91nblgIwKrKcY6Tpg0EofRtLLn0EU/ZRsM+wq6+aYmCoORqJNnxbLXzHZLO/9grHMQk8Q0AKij8wNGeIhZrBlVQ46R+xXhgQVwxeWRDQTJjbHG6u07gx0Z+Ta7kThEI8jo2odp+Z2aoMrOq9A4AbWGIRgY7ZLrs3lfRvuwagNdff701evTDf96vT5gagVUUrawbEMxinOlTJXtWj/NMRRV17TyTUkaDPUVvn5FmnyxDnAiegc1o6k1ypaMUs+6klkp1tiOLrpMlCJFfaIe4+ZTuBSsORn2ylO09PaXR+ulXn1qmS+JTnVafQBMDjz514sPT00DEmgF1BxMETE4ixC140HPvA7uEC8UwLchYp17PhA5LCEB10BSiqrqsO0SdvMoyAuM0OTZEvd9yENE3OXvKVb8Qq9PgiPaQ2lwW9F+JmSBqJhIRGuIpm+hS/lbLi4OSPzczyRcHInHBIv/uymlnbaqxFA++tR967DtsCWLBIhH61aQ46FY+nPSnjRBQZTARywECEYGaOEWA5e8+COp7UJFDLlqUg9CcQicJQZUAD/uTYv0x8TDUuDc+vkOC7/75Akr+9EVEAFUyoXuolGBSy8hQt1toAuYW98cvxgq+c/eu+885WOeHRFTjBlgAtMbMzbFIbgBUUslM3YE0Lna/KU28/fHHFCLUdhhaz9EQgxRKykXKt/1JEQptIwkNA57am97XwAlhZoaikm6AzK7kWSX26dp7hSOt10zUiP/Tpx5b8IMHDzCfdiIa8nkAVJneGsB79x/QfLkd+sERKUtTj9dEVRTAH34zLJTffXDP1DoryzGKZ9hK7/1pUvoERMYvw9QlFKZZAfANp/Ut/2YoNuooXaKlw+UuLJvii42j6pmyJcPR86HwpVpzMHIHTXjgygl8PEfRpO9aWtXw/pexo0OZCJJLiSPpuPOhwIfz9PiIBkxQFEXUPRTgzLwIJ4X3szjj6WAEpd76V522or66aolhgoHiyGeGvyCPVTIWaVRnBhSmVxERWNHr90P1Ce2rwusHXc4thiBoNH0XE2xXMo7+z5iwNWhexI0ZRGZRDoCYT8oLobvG3/7pUWzDtPeDOUYKEUajomex7Sr+VytzEzbJOZsWCyDCXqRRI+IuPeiRAiMMYuk0BllCthib0ABcCao1mqwHYz7go6IRAU1FPTAqMSX/0oykwdI7AlYOnCrBkiPWZam6A1j7qtoJFnJk32W9eXKjKrvrq74CwKMnj5mX3uXmZr26vloarTdPepfGy5sfvHHr1i0A3377DRO//fa7X37/FYDee7cOiHRZGxusuU/wwCeKiCVDMtoZcH5rMzQS3camMhcDg/w0riR4EJ4YXl4dxFzNmFgpyUqJhud9zBOFgVvOYcY3ttE5WYpDbGBEMuXRSG4Q281FsgWK8aaiWvRmMroT3T5OzQxOXfd7ERGxDDa69g5gt7sFFmJwa/ubfWsNRCqdm+XrAwCRdbfbqXRRERWSntcM2WC9t8xLuOmJTVfrI5gW4tia0AB+TsoCL1xi3eYGNuwYhx+rRxGZzu2WPBGNE7itw+aDzqKLtZ/bbp/CFlW3g8jCYSG4KXWiT7Zva5cjPPbOYM1E22FPzT2fOOHReMrtYvH2EBMfEv+bTXfjqdrlukh1XYg8PHrKzF5bjpSSTyfbts8nWRcDPjwcujHSoxNj7Zmy4h9QsB2jM+VeUsluSZxr64k6L+XvoPzd+G5fjCIv5Wih/+Z//b+PfvH6r+/MqvXgtPaLHH0tHw40022yQt+wlBElyBUcmOGZboftuiLsc0IXVoV0IWZupHZjpWKcgzOa
2MSlU6IHPPBTyEqUFjzJYpyhBNtxBdZbIWJTsbTnzZv0zm3J8FIpLouI3eS3pf3xi8/Pzdal/CXLW3c+NrbelqWvqzrU6JpaKBCZDQkmVn/7u5NQ8jsf31MoM3/9+af2ydt3PmJmCxXA3MgFDRKRBDTfvvMxNJzLQF+f3hLv3LljgMtudy0qX3/6aX717oO7X//2+Ivv3P84tDMGqMUAv/rtp0eff//+/VRivgxA8J07d5jbHz77NP/0p4mI8PVn3vS79+8q9JtPvyi1PaioYi0fPHhg6Kplridi6auofv35Z6dm4L1792G9Dxwru/T+g4df/XbyF37/wUOb1C9P+xEDeO/BfRHZcbOj/eVvR28/ePhJSI/+iSuQQw217gBDJoT/Ndt2IfGPgt2ZdaQ6IRRSz/Wh4hI5cwOqkeDow5DZj1zaGGIYkb/Gf9GR6Gylrfb76gAW1OHGHCVYSVSMThq5I4WoMLf//O/+7anp/eDhw6Of29IThmCeQQTE0Jwk6/Y8e2CkhBKs0z6ksLMQ1MUCAOmrK8QH5kHN0iKPqQs0s7WcESp9gKpntfLnAqwhIpA4dLVVNLTkQnH1PGGgRtlR+yesrVHcO1PVMa2qqIzkustALRCkKv40yMbvP6JL6SZNoLWvUdm25425KFrROiHRYpvTguq7MkYBpYiKhY0lohhrctZoUd3bNIY/bIO1azyXoDcNdcuCWGi8S2EDl5N2qPP5ntLArIauH9MdYQRMABHhhQ1qTUU3AD9SG6C4jXOYzkHNBtpXgv0jE2e4paWq3QzZn0RkAcgcBR2qYy7nGBQRGxrVpTMRQNI7sfvbsseIXDyQmQjSOFBERKgtSpAugJqfNQDpAgKLyWAWv8TSF6thVWvv11c70bXL3gIR7/c3682emnE27Pd7ombGQrvrRamrLoobKNYb8xgAc7va3WKmr/7fP95+4/brr72mavC6KlHDAqCPI2wykwPZux1TiY+sqiLqq5B2QHlzHJNoRn8hN/oeo4iQ6GgZSMh9BTZgAM0/Q14FpugmWp6MDV4ANzdTAomoixMgYoIjpXZYxag/LPsWt1r5IRg0yL8fc/WrSirHSjWWPhyzR50CiEJ2V+3NN18H5MdHP/a+X/seAKksyxVI9zd7A3nWvu6WKwCPHz9+7dXXAawk6ypPvn/EvNvdfoVATVi5zbhHOYFiKZ4gOm5kCSTSwVwvjY6MscA36iSAEbzOp2H8PAYmeYXHboy29CgQHet1vdyaB1Vvf+f+IjjOoIyOyeaNRLpj2I7QPNo0XZAQ4LGAYjjl12gpu5SDpwMKMoZ0CpU6cyWFeedn90JxG0/Nr5SJKJWo28iPl6rTmWgnJ2POKYzK8+nQ42e6zSAihpskw38RAdAy9ofqWPhjUYs0BLDj+wcAQLaHVQcXiylIa8YN3eDWtuCoEtTOyGo5NsVdsPxE9y7gzskLwzDfuyc0TPvJt5iNN/fbH/7t/3hqCJfyt15eEJTET4dIXoDISzlTzht6nKT+z1EORAWMCF2HaoTLieNhUNgvDEWImYlRbopJ0y7DKjjd8ZRmhzbneTAQZm+TlGViJDExN9cr4grKtJSKBJRWwMzW/ZubJ298+NGbH3742j/+D882ZZfyMsu3X/xOVLqIy4NFgXXTBmZi5mVpy8Jt+e73vzuDRQL45nefwTEmL0Qc2bpJVbuIiNJsXMnM5kn39RdfnMEiAXzzxRdDeSlb+e27H2+wyLfufjT+IIDIsiv0vtpwz8h2AA7PSgK1UWWo/aqJRdpzm3N9CosE8OVvf6tFR1KIEp3BImHgY+AoKFgkgA0WGaN4ChYJ4A+//ZSJ/aQepQ9niMZMqrT8f4vuUCI6GiqvQxRJYigIRpHUdbPWtS/HG8Lxj6YBpdpdeqx+V1PaVu+oqI6wdIESpnnQGWXgKZ0oFDPnIeCgpMaB/9i4MC/TgC10/iT/8DsGMxOIfUtVR9rWlW1oLBCcLxCTAiLdkmwQEXEjbkQkaYVB+TRN++MoTxuD9ofygwGBZk4SVzRS+chqUt9MTIZAZLm57J+yL6MvZl873ore1v/rdjYPY55ssZtUPINj8lAt/RkniAXq8TnL32IFchaP7+f8imJwtqNmOEJrEbFcY2TTgnGhGKMuWEPhCWM7xkgEBb+yB+r02vRpWsYFMBQdrj3czuRmjgmpkJaHcz3nIyKpnNZVmapVWIYczclB7nEKgFUBQEQ04AZ221gyfuW/LMutW7eWZVmWhSjMlKC9rzdPbkT2Tx7JzZN+fXuxDfHkxxso1nV97c3Xbl1fMVMo7EoqSqIk0gUAE7OBoSogbQvbuRP7v8Y4CUhf5ljAQTrGwA0miF0CAzjKxKmj2z722Tk+57tuECKMRRizHXBT7HIUMpA7iByyTxqjDrtE6b1bzuuQRLy01pJgptxLExKWw/Fujo2S/4/zTgATG9pr1S+L/X9hYiY2YFohRGAGM9rCva+9rwRqzG1Z7Fi1tjRu2dBh2Z7iZIIBrMxWoqBCiJx8Ix880sJZjnf2kUKPgsmNhXh6rT75Ez9JtUPL3xM5TTHgoAUK9WPoLfM3m1/rOF6KZna0HMz6C7B+ez/Ej8FW1G4lmDxYLhUXJdLT5XzHS3+Djua9RRJvpQhOfbhJdKrn5HiQC1zGNNWp8wrVrm+ICFPLk5pfKhQkJZQ5yv4oTCJPWspnCgtue7GQ/PsuLw7J7a5vvRR36edt9AVruGCRf9/lmWNHXsrLKG9+/BEU3NofP7/YS/4cyx8PYLK37twFoKrfncYov/n8i7fvfnym2rfvfmzmM3/4bFv/u3fumBvgH383XIy/+fyLt+58tHnyj5+/UJak9x88gJ7DDQGcRwmtvPfg/nlh5/0HD7767Wl08gA3/NXDT0zeqhaLf/j00/fuP3hqZ96//wC0rfP9B/cVUJWvP336Kfvg4cNn0nEu5VIu5VIu5VIu5VIu5VIu5VJ+ovKyUtzgr2IpeQEiL+VZynPAkfrcWvt0jzh/OL6xRGXhKJ1Pl6shtz+IDyfbBb/uVzMFyIuotO6pl2Y098nMRqqNGxdTsqjHvAIJw22EoGCiDH9mz00xv0hBKmIdXpad2Ru0tqiIqDLzGx9+qNDWlm+/+PnGl3zr44/N6gaKLnIGj/uZl8ZNocRMqrnVLMJZNTb4ZjZafPvuXQ1XCCi9dffet4nWWWS0UpjY4kVSa5akcnZvARRfzxj0e3fvqUrYFNE7d+7UOJKNF9E+J4P1YkaRm8OY5pRUYiGcyYN8whRv6p5taJljFapnrTlS/s3DTwDtB77baXq26fUHDx5Wv5gPHjyoiOQZavP+/fvqN9P0hxn6/ODhQxCrCBTv3b+niq8TAtYzNKzaJ4QFx5FYkVMZpjElss8Y8iBRafPhHuEUj1gRSGRnzoj4pV9pcoOwNEGhhgedOuzgYZn2rkY79oulwxaJXDtOVP/l3/+7o5OA057ao/taJzP6EBuiGCwTJTE9qCicfc0Wb0QL8Z1PHm/U/Zum9iiiRtbPsum0dhtpVLp2M4qEas8IiURpbjzMtNw4Yj4Rivr
JscwcXjL9d41WooDOxn/JBMtD4a7pBkfDb3Q7zjDXqg1sTEHiT81/MOVZio+G5Z1Tm6QjZGmPiCHutZabr5yDud6cvq0ZlL9hbnfYFPXAlkykYVSSjH9bdfg/UuY3nw8E+URCRKicNimT3cLfD5uACWaUFmsTplM08pd5A6qYV0WjTzHvniZhZLeyAzPVQm5KWM9CHXJMRdrVQkDkQWu1TAaImKSvgEZWNJFM5qBQNSs5NufZxgt21HtXjKyyQAfQaNeWqx+fPCFa/uEf/kFk/frxvwJQkR9//NNrr7+2vLKY3ySRMrde2Bkzq2hHNwNMWqAiop1K3LQ0pA0HeTpkPWEmbESBzLExt6w94SmpzfEfbpec4x3zN+qM7R3rU77L9bAFLHbmVL4OSz9zIp3MGiNSIBFV9ldckiEqYWmXFMr9Mm0RPVtO2LprbJkUQ33VCZ6el7n3FW7/SKTsEyPEzVz1V8vRt1/3APq6rmAAr9y+vV/30vuyXFn0yV27xtqnJRinO+lEEv6Y+kJxZwvDeTmRlmc563XByc9NKgBBfsva4XzR/NfDHPjk+pSNTh1hnkeoEULdoDlQYiGps3SR3GHMGdL8dd7c/uX0+XFp7eWVaYQ2oGP0+Wg3ZjZHmy/CThsAwNSiibpDzo3uoBu1n3Py9RFwGUFyYjuRp7YPoXQ66TG6M8X0QT1Yz3zRdwRSZBv23f52oRMcngJG9Y7msRxb1PvJY55yK6lqiOp/6R1yKT+X8jcBSl6wyEt5xnIOjjyktxN/pu1DqZEP56dUEZwTGFJXzdDLc0XFq1UySFKgThJf5ZAMNHmeqWQjg1cM1mCuKMEkhlcQ+6vJTgIYpWjZRSdNAcN0qGVZwjmK1nUPRVsWUSzFz1elv/nxh9m3737/z9s+/9XL2x/fWXtvrfmU9BVAYybC6x/eMW8VAAR897u/GQNPzTUHwGRpakTMMdm3zTczLvzW3btDzU3xIYr5f717715CXRKbR1V6F1X99ne/rz3YQCzv3rljm0ukA+a6pm999NG3v/99vgEVnWO0vnP/YwDclnhgbPivf/vFOw/uiOjSGjHt9/vNJBzKKXkg3rt77w8HdpGqrodL1wrUqih4e7g+ePAwFXubtvfvyuZRoAAAIABJREFU3/8qol7GwaSvPh3w6Hv37oOZ5khGGxyz6OejvP/goQuVoI2x5wefPLTHmQnaMFdOI0bedqymOZZZQczNEZ3kyNuBH6RAXfAClxVDoQ6p/EB4VShRK/VO0jdtiKXTmcl1dINMTDc/R9e+PF8bjI1xKKn/OeUY8yjfbvqvJzo8nOz8JxfhHYUm+8gLhEBARW9S5ZyHphaZzlA+bqyqoqs5Sqbu27UvPDPNaOaYrqSbnwdfYKAs2wdHtuvUnawEUqaB+OkAIo5tX2dq6Q9WF2LjV+Zd2ITkd1oTjBfBO2clmxwu6dObUz1aEJuZnddJiDc0BquqW+bunYzjk7+OvsAc10ZTHmwRB8VuN5NCj02kfja1+HO77hi9UEyYrObd6JjXcRYjLWqdtBjPRAzG91GvqtE7AoEcQ2ECRr5BJyt1WcasxkPuSk6GSak6HSJihoSQpgGfkvYu5L1bezdhyF4Uh/WUiF659XrbyQ8//OmH7x+v+z0AapA9//j9I6aFG3bLjqiNSAE2WvFAk0RQ7aogVYYxaESuCIrLhiFhUazPtIg2d/ms5slAbOFBXiQJyWYnbH472Cx+iBLdnMRAoEN5bEUbAGARoykk1nTzhmokbUM9HjZHdnla3FQjce54KBbeFl9ViUuF8YyoQFW03+xFZSW2/SDWGRHh1izaaGtETMRQUW7Ulgagy9raop4DRwhY1zXPwBGaN/J7lCkc0PoRUnfst4OPxoqMk3TQ8rFPp8q29O9Ia7T5WWwbTr/k6DaR04tJbslDOa5DJ7JLsUFpLCud2oInPnvOcqaKY2zsCNk8FCePyFYzH5lc0plYg2JNJ/vPG54TleQExmcKp8ldQ+W8nA2jebRobompbftJNL7ZpJ7ZzM6YFWc0/kBwdookBOMtKoKIkw/vUhJ+H/ul/LLKi6e4AbC7vvXSEckLEHkpz1VOwpGKklSQDr46ydM1I0RVSlqUzpD5rYFCVef6ke0SEytpxDky2wiYhlBCbky1nBUfxnUcESAWf8su1rMD8Y+Y0G3xhl0rqaFALCFuTJTZSXhEo7jC6n1tbQF0XfetNWIiJeVN3GoF8MaHv7Zw8URMTF8fOPb+Jcq79+6qqmV/JgsJ7eGcjFmLxu2o2VHYW0T09p27wdvpm8+P50v5mZQ/fvHFmx99aLEjDdYQgUjfLVfVJCHLe/fv98x2CqWD62EFQkD3sq776yC+oqp9fPvuvXtK9E0xjXzv3j14lEbL4gdiZtXEIrOebz8fCPXb9z7yQNqHam10qxFDLRbY9OU7d+5qq1AX/vDZp54x5oQIQ0yel6CYtLxz766qQvHNZyWPzYP0d3awXo+Z5VXo8IOHn2gJ53X0mTBzm3pnhniTxF7qPDRjQTE/IbBqB+irz854rNP2zyJ4H5PBh8oydAxXNAeykclIsxY/VkNtGyHPSt2hqADK5LSnoDwCtKENb7o25hcI9CZrDhXrGHYkZDq9k1gAdCY05wcPHlZ6u5k+3fwxrU8CJUY8Q2Y/Jli7xaShJ0ZxyRUaCU2+1B3AF01VZLW5MpMUT3AQmWBAtmUnU4DB3uGjdj1n1bBNofKvnteN44HZ5F/N7JrKSBwkKX3LwMocZqNhbQW3HgsNqszM8a7mBkd9IxXv+HBjajbXUhsrH4c9UezFI8YxlkN5EiUGpmMdy+4dbpgB5dhg5kR2yJoogjHWAx6ZVIsSTdvVKngXYfSpCkjqeaGHdDHki6zW+xp7OH0y8ngYgzZSsmFGA163zeJvKYXBpqYwpkrUPAVr7KWI+EkWzLEtC1QZJNrXtStEoSK9LZ5Xx15txEpQlXUvjx4/evX1674+7isby5C173a7rqus2PGCyFG9rquqQhuAZblVBS5SsePI5Y4qqZwblVPlFmUGFYwIMm5pH3wBy4MDoCAOUS33QT4Yx5N8FcszZuTouEmaYyXogKAeUfE4NjKas/fi9Hh6nw0ns1rDxNadfhy30BI5LsdFY/sk7mkUzMBMVbV46wlQG8vg1kTQ+6oBhdDoggeeXvf7q+v25MnN7vr2siyqKtIjA89sBFxeH6Qp/qbtEE8WLRN6yI1LN3X71dNKkpGcccKx2cdGw3EgKIHjUQ/iNiLNGrYtbqsfXSm0V+kZen+kgnPT+OeVYwrZyWezL4e9PyIpRf3jkYiwXW1NzrR+BjosupsSyCmwiQc2wRin5jzPPdNc7ug8+nURinkm8rnxQeyrcsTiPzLKnNRGB3uD8+roPQF5PVm5ahDI595Hl/L3UH6GZpIXLPJSnrecto6sxHii34WExp/xyTFiOOsYm5vbpOnbxgffCq2SU6RGkblDFYsvxltHJJaQI1P3pCHgq3ryuCJ3hIZH4QauW82tsg0KwwhjUr2vhTG5fGY40bJcWcRhaovZmXURAnhp2i
VESn3n7l01pFAUMHNLgso3f5Z/97v37gJY15WZW1v2+xsAxNQBqDI3AhlEutstxNTXlYha2wFKxOu6z3Hk6E1ZJOJ3Pr7jyphKjYH48ylsWpemQmnmsB0YyQ+tvHPnbocSh0pCRKBvZuPBImlPH8HyLAHflMQvRCQzOGh7ybYOkWVJoq9KBu137t4h5gNpMwHuHrXgzTsffvdFQJZKxJZcnpZlqXC2aYmb+qiesbn86sEnmZJwE+9ywAv1E++AdYJIaWPkeFDy+jfz907ZrqNeqrlrfvXwE4xs26gRKt9/8ABuIgY75kR04KteBcPxER2hFQcv1U7VP069xWV2VSPjqw5qEEgSXLU4nKMQWItaRNPXzyZ7HlOSMB/ldFXzz13CpdC0zioop7FIFE2jYAJV50VhA0O3PdRK0u6ASoc1LB2SRcVq5nA2mqYOXGPsllBwo6ei6M7QXOE0e3ZubWRMLgsWWAOPdsYsWkfKGjivCt5Ax3cfEYXtp71Q1mxgb4Tw5KuGGTCI1i+QDAgKFDH1m7lMLPpw1qIDcUNABPTCe/MsFxDJRoEiPcy8gyqYMUkFU+MHnvuJx8WUHNEuNdRcD7GSRoSjRVsh9czY5GQzaxuol+m1MbAqWOiQbgYh9T00NgeM1hMMGQzE2+UPr8vgdYObdGjMlOLOMOSpU3XsKhehqSaVYQfRrMOqDst6BhsCtcYaThskqiqgppbSunFrWFcxXgmAwI7BmGRCtLviJ4/Wq+vdchvrYwUgwpbPOtC3BhCUQLxwMxtwEwAthQ4TKTGkGzwalAGSkh6Rilo/Y07HPCCS2Oci1YkZW8jgOoVlp4467KWZrlM2XDY0zdszZp9Keznn8wc8yKvXT9n3tLAsnMHJTpzb6UCNDhh5Rt5qjWEiZinbUBWmtoooOhNEV/tYRMCer9xPr4ixK8u0jmYXnB2irbW+SuOrI3y0sAE/LJFCanRXBQPJGU8XKXqq56CJOrrCUaZjMf96vtTJLRcLXvlw3C4CDiW78ccISVw99g/FcTs2CPUNOm5KQgLAtLrTxnv55WTFRxG/81aERyGweTbtk4kLJEXeiB5/ZvFTr8GnVN26HYUY26Imx9zwqWeabMK0K9IpIWq3XzQfRuyRch2FcoiDs1hIi6RIG7ce5x7jYG+m1xjTkbhOl/JLKi8LlHxxRPIFscgLEPnLLC8hlc1zsRA26lw1yGOlapYiym7BYG/FPRBHqIxULsdtNQ2GEDwpqqbyS0Rqc92WQrYLxdTkUQtTFUqCh/shAGjUpPfUYOJ+y6Uxl++Z1nVV6LIsqsLE0nvjRaFdujVkJgl9XZkbEamopQFl5q4ksgIQYW6ti77x63+0kI6iZtpZ2SqT5fs29kfaeNmvN0wNrQFgbsysCiZe+wrFsjSA1r5nahbPRfpKyiKdicVlYu29M7fKaCmcngiaPSHQO3fuIpjnoQvwT1VC+gETgRmqJNp7JyJW+rogvN+cTXttxR3eyvbdLbt1XTFLOQB+9fCfVtFNHMmvn5bIyDQoLq67lipHARWlFtjiRn4jN/JprU0o3iefCJPBKO/du5+Jqqt8czBCEHjjKv7e/XtgDmDeywcPPplUEcAczabKDiTWpybC/uDBw4OZ/KTUt+3ymeQ5Vt6/94C4bUb0/oMHdEwU1c3PUSa90uxAKe2HUxo1k6R4OKP6VW2WpvpMjeFEmrKZgd1Vvc6V+xmfiPoPsJn6dszb/FAgWa7sGyH0hw8sW7YzctiP+XsUonHkiyp1ew830JF3quo400iN/MefRcWv4nrFrox6GXw4QEnxts0XllS7eorhFqixrn2/o8WmrEJOR4c/dQkHWxZIExcqKk0dYWF4Y0IixvEwq6K8Nhtw4KhpY0dDs547ARl12vyyKfqZqp2bGxOb1RdxdlahKsrE2yVKR3kJBj2sk4Yy7j2g8F0jgyrYOXwo7CC/MjRrvlhbmgZV/dqguXsK0SYAKqKkDLJ1tuH51aEZw4WaTOFUjzgZuRYiChUNe9ysO2dUJRw5yyrGv3Xk+a+N3trJBXZXUHW5yNNDRzr5hDYz0GmorYZHsh0SM9sWNuxJHYdyzmi28A0qxMzdLYMJ5tK7YzIbYaXe13W9MbiKqHGjteu67huDaIFH29R13S/tdmsu0ogoFNQalAF0WYmIG5OoBLLZ2uKwTrEa09xBNEyIxpzF2TxC2sh3nct26kdlzPSE/hwSzlnxD6EHcx+GHfyG4viCGBg5WMDArEC9XIFz2jQTCCTFwSJvE+vR9l2SZtEUZyGX3+0wicDMfHV1JbKu696vzAFAmanbenMzcdpyZxORrAIATEvjV165/fjHR7tbt5kbO2I5C9cufZOlDYcl7AZUO4JU+ZLarOTsj9v/5yjjaPhIafPd0VLFkuz//HjZBjndfpnITm4HWYCYc1W+rO7ykcxhUj7yWG9lgdL40Q+eS8X6CcqpGa+sn4KjBRnUuJAHxmPP7TydN6mUFVlE1Twpbueq4Yo3XKuzk89RSnguDKbjBPT4gkUPh8Cu415QQyhh0jF4ChIz2JW3R4AHynCG5p2Iq5LjF9uX8gsqL+67/SJmkhejyEv5s8sLwJF+z3zAiTafuKuS/QoMMw4N6em0+BAqpDKSi0ABNurMcIF40N/wxjnJ1ogtCLvLhRKS7rA4ATZOOM4sRJXcuUZVPZUNe/yR0BGKIpS3viK7qytAzd4QzlG62dsbezN7N8MxKMwxiLm1hVgsGnNblr6uBCzLTlRUtVGzLhraVQz4PQBal30qQRZMUER2u2uoEvNCSwacYmKC24eK7he+CrPNLiILLxg3dr6MVahtbaHwP8plJqL37t23D4np689+Sofub3//+3fu3lWL9ixdREV640azMvzU8u69+73vtS1FMwcAIlbdA2BevikpsNd133t/rqig79y9o6aBTD0jd7wSXF012LkQqSkXhpY1HyjmdrO/aVNcQu+0Pd/79Pz79x+oa7xSbTZNsyKm2q9Qg8uJq5kcPPn180lI7z94GFLkyynv3/Vc2xvyVLQ/AGfpEA2aNV4NG68MCwlgAJRUJFca3zsC5LlcQt8GHdggTFqWmQi56B1Sd9W/t32n0lQe2/jFHJ3TVtxsBqhUVzWJL39zEur91Sf/dE7/m2l97RpQQOWgGEfMvQ7GNH4PDcRuilrE/0BUVtCkMqLoCiUjygEDMO9s1Q5lZr8cUhgFNnROB55X9NBxEI53O3YQBtlUVbObKotUKnb1IutLA5qAlkjHs77ZzOotVa7Jumrq2HQEEv0pvS02K2P/RD15BgrK5tRfxSLYTSSgbtNEAI7Ff8v9nSqjp27BAH/LPLuCtzVtAwxbQRjfKRN7vGDkxYHlb4Fqj2xiUEx2d4NAju5PlIysgyJhlANy7ElFjDkksj4ljxrTmIOJCvJU0JBsjJkCZMnXYieriqJlfQRMHOPINQKBCF111xbtK+zCkyNuZmsARPZdhZSgYOLdbtcW7tqXhdZVAYiIefcSWEQUXfa49eqOiB//ab8sCmC5btL16vp6fdJFZO1dOhqziAh17QqAm
qhS72IBYqgxHMkiJlJuCBJBgKUHStV87J482qrDWpaQ8dhQNuwRLjSoeTVM8xWZV2s8HdKsApFSxusvDQwjfvUQAXkkFczqgm1jiouKcWVkhKZ3shjVlk7Q/mcSADAim8Z99IZ8uiAZ1+et8W53ta43va+wyzKAiMFgUTA6de1dFOaEw8wdK4BXbr9G4N3u6smjR713grbdlUjnkoZrmimdtzb5ZNgAKdOFFcJ2nvkelpPLSOXvo3S4LpDvnEnwf7ZGabwsAQjZgGxsRFtD1fImoQQKON7AAeU8UZ5Hbn32Mm11HD01z14294JlXQzrH3s3mN0ZNG2gdRv02RsKAgBUTcVbS6YycdvKw59xQPGfUWKiqZUjYph74FVShdQw2Q+7GWvPpsSWhir5wLhJCxEiAVXfgiRPd2W5lF9C+UnMJC9A5KW8YDkDR1Lc67qHm1P5oIosUCaoXzmqCBGJCCmG92uIl9wI5pIDFz/8rkpN+CZOqyUCLKBjdkQDCXTGw+4TpmKXxqhEHCYxK7el9+66RuRwJSbZ92a5CMhAQAWToAcqKVpwASJiYktOwsSeTFkh5CYI0gUsQ/hiMuROVffrvrUFQOMmXUS7SZbMLL2LioEj7FfnFNwpTFIIve973zOzzee6X039s8u/xotNeJcbkziZmZnXdb8sOxERWQESMfGemRXAsuxUFWTGlyRdukhrbbe7Euk3+xsAjLbuVwCtQUSWtgDUWtMIdQSgi/UfTMYEu/R0bWcAAlYRAkDe/3fv3fP+i/x1wmJuiu8WEKDcICLc+NlBrzfvfAjFCmI0FcuLMt78+rPP3r17H4DOkRaJeNntnqX+d+7e8U4yEzNEPS6+FXERZmk77a55qnKVPr7+9PP3799nbl8W08j3791fgeamjtTaFpRc1848fbj2dWmLzOrNe/fvx96bXo9H8vxSJImOD55l8AAsCmG8YTHj5pw5ofRhtnM4Xd67f98JSfMrgrwPiM76iXbpLlNj+iN5OF3bNSfw7IwGuhGhyL2WeEWybiNLpNrsEsebHZ5K5rfI1E5pwbbkVP8kApFIH3JqqKTW3XNWJ6oIRT8NCx06UUmrO56n67BQIwBYx00IsssmrTsugDBhQ1a42mUSHKoZltc0PVarDZMuaIHQootTKC01aMzIOLPzr+BECoikAYsA6A72g6gxw43Tic2ezNRptgkRtylzHTu1gwgmO2FYwc4G0GWYVUAllnfXdQ8dnYeZkIEkA92HD6uxIREhkN1EMdBFmLkRxwT63AR0gjThjbmO2fY+EVCUaG8rkpI2QnjuY3BiUNyESay/GTMeAGG2AeIUJuwXtNINqWLfVl2RD5Gh+KNHXzXAGT/kJVkcmK0SmwtGpZfUuGVH/3/23qRJsiNJE/tU7blHLgWgUFiK0j0iJEWGMvwBI/xTDSATmeBP4IXT1d01zd8xJx4pFP6LufEwMgdWVxVqBSLC/ZkqD7qY2vPnkUAVumem202AyAj392xRM9N9sR4Y86UJfoU80XXekXjp3ImwoCl5bVxVFekLs7qNR914ahSX+Lxzw31MK4dGTI0brLyyAsDCTbpYQLPC/c/i9Ee2i3BldWUVADbeSlWcXWlEItKYFUJEvHA4uWmXviwMgOXQGtZ+Eu3PXzznRg8P9+vjCix2m0TlcGhN+eHh3qKniRXSjsfDicT27vR4fv7sxXk9P3vvAJXj3Z2IykrPjs9Eus20Ebs+mwiK1fdemRaN8ynSzXRaIuYBOFoxwPmn1GgykyFJcWvNji1Ci5DexYYksk/TS4cik6J0++iQFBaTQ0C3+rxVMRJe7hoaB9dYdgF5XTiihrACqaJRUxWIp4NwDoqJiFo7qIha2g8i1bA62yEXsSmDLBpGYWWA8jCpdOkNIIKIPJ7Xl9Tvnj2/f7hvDEYDcD6fGU2od1EVgrbDoTGzdO3r+s03JwC/+vo//Zt/8z+9/5MPvv7lPTOD9L7fM+5y0flDAfIUx3akSrCIeXEyYuUObHvNUtlUtDlYD0LFW/7dgHjQQXtD/NvGTns2k/Ro9FBaje8imeagtPaboeg2xJnN+MzOuUugP1v8dsaO3LkwGAOH0vbxokwrvMagDglRgHnDjlGopwnaKzyn5TKnMn1jgiEffcR7qDo1xF4jgkgJuB4IPlgATJMvL44q0oVlrnOpcIlbzoVTAQKzKpMnOGBnlMbKmSKoy+wcwUWEc3eFS8zI+YlqHnaUq+hMDGWAwQyIopuhn4XHtpFCYWm22BM/WRhBJIlUVQiBjccQiRi7GI2J4wT4JkhsFEUKKV+rJTwKe9YFpG/tX2j7p3STvOkib+3Pb+/wjpxzlX0fRKdOkzKuAUlfiyBJA9nueksAcCciexwph1+KPP6M/xDpRlPD8wGqqr2bijBXVJncxqH+oUK+nUXOVOmVUQIRpKaFK6tgZlfUAlCxsXpX6d14cOZFJesS2FQ59L5k8o/7C8SqNHKlIzNMQS3+GoCIiIipllS1tWVdzzbdUctb3f2HiH0WDFX03n2ZlaMhatwy+M5jwAeoKX/mHAunJ6qmZzbxPU18UODDL76Ac2L4+l2Ryz9UM9mjSwfUIpJEQgSN9uPPPwes4HMDoa+roDOz2zCZiVi6GEx5dnkxIP+mFqFWsWwC9bGPXr1O/1lVgDwXne2En7HepffflNrcIp25mZjqLj9xAKY5iFStzKdv3nQV41FsrBo17tu90C9+9u9qJ0TUpTPzNvskOQx/VVwmS/5u45Jo4zF96X7wyZs3GKyqLz4WNWkDrsZ0z899+tVXdRhS7T2MBKRx+BwJgPDp27cmcF7v8qlhx/qeeGHEJU2SgL8cOUwHXK6iv/1J+IuqobEJRGZaEUolzJUmOgrDpuRR5xm/X+TfnLvpXlaFNnvsM916Kdich6QZw+2+XmeTshj8nGkGLdv8N+lZ6wBIXBWySvoX1LlmVyY1wImIiS8EKwYa9GTIkJ4KcI53nnvdX9JYT32qnpbp7eE0F1AjDDlulxiOFW7nsd2ReM7O5MZDMGFH48/NgkZKLmwgsN9oksmHmlR31j1hivxS8xxpECFMyxgD1e4SHiXUXhXvmLTtfswwJq6qKkIcBGQ479jvLpjW1XmKsL0Wi1Gfng47AYOEEli5q5dXa2KsQPPSS2Nm0S4q0pUEzNRaW5YFwP3DNwsvxKq9KwnzQg0gWcO8KiKGO5n5eHdc11W69HU9M07nx7t2BNAWfni454Xuv1mPx+ft+YFIpHeFgMPGFJZtUYWUk+NG6gF3Cuw9bckGX1WNh29o8p5KiojYLkkTyE1MA/oIof7iWpZxOS5MIpexIfHeuAsR5T/NnzLmO2Lri5tYrmL+dzsduvyz6nWcpTSCzNSYmKhDG7PIee0dAMNU8o0APi6qaoYbcyJQ6gA+/fQTAB99+OGvP/id9HU5HqVPapoY3diA5HinZRBgbtvF0XpHRaVzjZnt1xftElUAQx9Tj9T4+8mWe59MjI4Or7yvkZ43jtx1THJ1Blev9OUb4RytITLki2HKCb1UzK68mrsUNZewh3AvCPM7HSQ1
3fJ0/ng7f8IOqdqbQDy828usjcyF7CxhdB+MiIaxztnD4ChqL1ymqBNKhb9EBKuIVRkB1FAl8+YeAm4kGaaCH66BdZzZsuYYGrhUOfouFSbn1m4N/yQlbm6KyFv7odpTImslG9dxZ+HJYOoI5LND4Db6OShEaAgIIMub8dQ03Iaez8N/CWeKGMyRPwGeKW9QCgLMjAy3clnV1IACM3MXEemWxkhVXHM3Fp+VCtXcmsKyz65cUFJ4BjJI+uLZ6MTcIikPh3IkGa8EF2lQNHJN7hgUMFO+OOdpvxc6mcAYS3MNINEFC8DExGw5gwDt/SzSjXtlbm1ZPCkYs0K7iFpxRk+sqbZr/okqWXet5apjAuDGaQ5Uld67uJ+IS/YffvHFT169+vFnn33w2V998NlnV8/Bn9167xouKg433dbf/e3f/73pHEOcVIJXGY8DaFZ9Fe2i/aPXr/NdVam6yA9ff6YQgSjEMj9aI45sAyaaulOHivQuXdSqYk884IdffOEcFoHI3WAdfMTmVmlNRGrlGTsJoqoqbk4tnMz/99f/u6hsdJEAvv75z5mp6hw/efOGykUcn3/5Bilf++UnALVPDcPygADG9fcT+j1MHTHexLVX6UgV5pLrtcs1JmHCZ9y37ZWYQFOVBvH7n8zlEYKBJSKwKkMjvFsJmhWWtsoU2nYSE1Lkf/lMSMKujwycud8kOxkTq5g2se1T7dO3bxFQ3cw2eH+oFh597q+IBPFlxYgTSPzX1pbWmrsbDHQs4mhXty6WYQZJAS1UWMPK45QjVR5EBLTWzMzTiJlbtqUtFGLneLHAbnfyCZKdFiYu1XLmiIjKGawSUZ7kekSprngAMeY4JLvNRtgDfkGQF1VTcLskHH5kdtTM+5rK+d0xwMxdZPRoyWxJMAcSnVv2EQQkGnP+y2zUzYjMeGosLdfiOBXY7lzp2VZctzvWqwMV+cZsUVlcKAUsuQbFauvODB4GcJsPNNyXMWvjJixROQlVjNHnJY29AwCs61lUmXhZlsPhwI1VZF3P63r24AZRZpa+ntfz0g7M3LjdHQ53h8OyLHZmWmuH5UCErv0Pf/zDcuDnL5+BBSxCvT1rp/Xx8fR4Oj0+ns+qIAvm6F16l24+3eo2QqRSzsOYB4jjalKocssKKeEL92GOw1VYwaRAKt0ZPOcA1aEWw1kZ8QT+5vQOV7y5pHEqHUqLUzwwgx/ycsGc6rs2fDql0NhL7zk9qy+bjrs4H3X7VlRVRJZlIabHx4e1r+d1DdpB5/Opd5WuCmVuCuq9K/R4XP7yL//iL//yL168ePH7P/z+N3/4ejk2aqwK0ha83/gv7tuFQJGcvwRIB6HS+NU/psRDwWADuOrvNYelDliN467BrcaxqJsSF8hGuhhjoONCbK+0yy++O69unHixAAAgAElEQVSQyLcMqeP37dxr9078gsFLYWXg85xZ7IL3lYCZLlvpPqlnIc77039ycyYNKaLLbQ9TF+XmTt/6L5cINudRJKXsipwGUuSSDCbBl+jImMb+DupQYBH8kN1SLv0G5XVlO8W9InOebATWZPZAiJiMS9p6CZjpN6KYXX08AOVyTLBt79Qf39q/pPav/8N//PNVfrtqx5su8tZ+wHbVO5JmIuv0j+yHMUjX3oyE68kvBFUs4uB4OMMXrrciOZZPUkibJjlkBSV3NxNVr3dMFNX9bDXO5keqK4sK9/jvYP8UqlqTYRFtIOMRYIQ0mhOViagrg2wEld4Ph4OoqAgpeQA4xVQzhz1RyXesgOXss+IBxmFHtA4oi6gws3GTBPS+MlssgD1nsdUsVgTDgxVAADNrFm0ApIuLChK5MkWU2RnyALsFcqY+wEQuVe1qMZikQFMomVulspJ5PTD7QAwSFbayPCIWtvDBZ5+RcedMP6zj5G///u8//PxzIjIGPbb34kwmW2e+o7aPor/5954R8sNXn1vEHAwq5Shk+8mbz1OuVqiUCirq+cUUAFztKSr4bfGF/Oj1KypdE6G1BoWING6eaVQ1vAm0PDkm8fHr12oRf37zjGfhj7/8MlWNIiNffm1FmY6Pv3wDGv1cKB2Ci4tIpQt4XkgUofjZgG2KMX/ztg7z07dvdwbNF3/2s0/fvs1xNBw/a5+ffvkGGNeyohTFDMQySsk5FBMvbz7F9I33aAzgSCXVC0WzMS9r6lnrI0V8iW8TD07huHM3G5t5OCuDij9HvDP88J5GzKXchzjiKy8UKVOLzF7OKnYO0+4wZaXpcDnlbCdQXLFJlEihqDLo4ciyJ4WlxFqIS6g0kag4vsh1hraqDJCP6egtPimehFrIX5mipoQc3SvMY24Mq2UWRl4miJQRdyjsBkw+UnmUCIIJCAWKAwih2KOpWs72Zuv0T/APUbppaAOSjOarOhcBGxAaSkQaPynQT2IYZ0RMfQ2aK1lNQIlKFcOQN5ZQT1A1fXhGSwOOqHrGL89Ak+9TvqcquFDZzMhHg4uwYxPr8O3xk+PIHFoLKMTU4aC4iDixjiVuo59Fv+1+SBlNutipV6X1vPbeCXx3d1zaAYCIPHaRvvbev73/9vHhgejAdFiWZ7//3S9+/JP3DJqP9+eXL98/Ho+nx7PI2kmBRsSNGGJFTmQoc2OXbI0D2hSak3o5yv/ObQJ2EpUSUqnXiIrhEyA0DvTYA4UOn7gdFYmNSOIqB0myl2AOrhcV2xDSwbPssB2GyNLghrPi5w71RCyB5hwZjI4cGU4n1XRSORQRiNjiA7jRixcv7u8fmOi0rtwagMNy6H2Vrl0EqtrsZOnd3d1777339W+/BvDNHx9evnhBRHfPD98+3D+7e9mWQ19X3xWNueRNKfPbtDjn5q6qhZssvVS852/RZReXLYwNG7w+usflvlT6GCvYdD9j72IsKd0bbuFZLijUIfrZn3hc7FHc5wJf5u+b+RPWfi4XHsjzQUQqurMkrVSszvSHbTPPkiaySzA8xWDUeVLkQNh9+0JFOZ0aO4uCkkWIMDEQKCjFvnaLQCykHA+yj1N21LTHJvql+M8vOY2T4iTCZ70Ffd5cmgLk46pvyHY5McbRkW5XdWu35u2Hjd2+KSJv7QdvTwb03dqt/RdtP3n16qPXr37y6osPv/j83U//eQM9/UDqIp9ov/0OzwD49d/s5M2susgPX/3pLqJflxLh36VVF8ifvB5elk9UL/le7ePXX15+uBuCXXWRP1Tb7fMXP/vZL372s3/42V//4MP982u/uH4MPhmFzm/t1m7t1m7t1m7t1m7t1m7tv8b2Q7lJ/pm6yB9kGrf2z69d9Y6sdRC2Ho2lCuumFTeLcLAZBntQGMSLa8lTxjHvYDbyuaeB5jibwefX/UXN6WSAFRG5+2T4iLXWKOxVgnCS9PHEMgralGlM3PwHI+l1GMwwxh1m4vTYEB3B1CNwmCgCLjQi19LdIRY9IDu5XMRibU0chbZluCWleT1+Nm69r6Ji2SGVNH0GRbtVdoDl6lY0ZuYmkUTfAavpfaLgBpjXH5Gyw0IBZlVx8yiNVZjPSwepiK03DKEKzz1P9XEAH37xeQRl/enpJtu
yiKhqt2BM8+zcPGMavY9fvyGCgn81V9358eefC7pIVzXvnh2F/o9ff6aN3deBSthPtF/+7G8/fvMKUAuE/92/n+pum3PN13/38/IJc3qvNI7aBR6qT3sn/+M3X4LJfDgyLsz2gPdsEB+/+fIy+R5Mn1hd7mhfmehOBESEKYNO7iOAT9+8Tf3gL372s5++fZs+Cr/460k5+Ombt9fwQvat0E++fPPLv3V9mc3qk7dvAJBuFZE/ffuVLXvHZ2P05xOe3R90eFxMYIiHrzlrDJ+zyf1bAea2fWvr1ZiDVUjaB1v/EEkreqAS866RESNEQEk6a/9weIaPRcYVZHrKCyUaUwbWbktIXAAiLn/8X2A9FvmdbPrhTGPzzYArq+seENJcWpR9SDRK8W/dQ8ePw1OKqPconJmOHTAvcUs1qygufeq74h3Uor/Fyb6CZHykF9/kaNlZAGvr1TK3dA/a+O9ceacMZQTNnMnK/aqui3WL1bLx73VaQix3vy4uPvWnlgfGBPZGKE7m7pViGynzor04hu/T1VwQm2g5D02YPZAQB22EDWLMkuOIaOkFUE8sthlMFAR2V53x0uSTY6gUyuYjmNfW3B2zGH0yNHtLokGfL9ZSLp3FeWbaD24M4NRPBG1L69IbNwU/PD4srT3cPygerPN1XY2HOZ/OAJ1OJwAvX/7ocPz1e++/B2A5PPvP/+k/d3SFlSNTqz7egj8CoF1AkcTCPQERjpAlOsX+UvXHMNDV+B1oTO5U6OscB9GCTaJKrX/kZ0YLYMrtSe4sQRpNNep/BPM1PJwqwzx+D5/rgoGrd/SEh4qvICS+spzKkex3dBxPxnMU9d8GEohAJhVmfvnixde/+U1b2kEWBNupimVpthvWGXM7LIdnd8+smNY3f7hvfPjgxx8srI/r6Xg8nB7Ol4RPoYWgmJ/+dOsczxTX5Xn5Uys7lzB8uhXERztfxHUZN7p8Q6CrQ1BB0I5sLhB28OWXRPy7zVmDvDgAE24DRWwiGEai4hFfYYCteBoTWLbj7rcrhP9yn67s3LY3NkrpbGheLUx4dur30tGxYmQXwXTzvSah9DiBUfvUosj8wlo+B9oZv1LXGlweq9WMGQlv7FxOHJHIJTk8hCXxeaFn4dSY2Sr216tABPBZaMTFDsS5AZIC3Vwjb+2d7QdJKPlnjn5rt3bZ3hGsPTFiBMDIS340U4UhOI0e4HxRkt7KCIyeZPOJjt81eILCwzjDGhk86ixThFJWJSZEFEWwkKl1MZGcIaJOM3V0GmuyLPIaZMhUftfFwqAJhNRzmahnNbVNGuh9ZW7MLTWOFu3MxIpOQfFiLkme1CWfWSmoCqtwQmzM5WFdzypC3CSihFMJ6wKfalsWtW+N4MnIOGkTa0SZ/R1EmisqMI+MRmS1z1Vc7HBYkEcPsacaYhXplj8SHcDCSwZSqQpR8+6n2CZNIQ0hXn70+rWIMFGXXv0K39l+9bd/++EXr0z560JByBYfvXr9658PzeOv/m7HL+zjV687uEjdO1L5B68+Q8TN+P6rguiD15/97u/GVH/1N/sa1Q9ffV5EEm/MrCKWnROmrrVijhox/XP79Ku3efgipMML74jIZfjjJ292fBgB/OSL12LqS9Ob0+bKV6lELQumbm7H9eiRazVSPn1jakqtKkUtQ8WOQYGqkQTwyz1vvhrovdGlDqESO7L7Rp2AISi/WzwaUoXGexv912Bjccnc701gyCb12OVm8kAU4gki7MqEgAggos0oMOcYJ5EKIkD9iSI2P/3qq0kkL3PezJxrNsBg6DMsc/yBkFYvATtfBWToV8Y8Rr+afY73THpJje3obTPOjPUDoxJz4ArA87BFcNZUBEND6Nj2PGkPtu1Cwpvr4cSJGwqHcrk0FUsxQdIiOz9p56syp1Y9SGxkpZj18b2qBobrDEC14yH5X7yjAA0Ktv1q7FB2t6k65X9JHKSMy7264EFDY5xyTsziME5N3NP4YTUKnPZiIAsPjI3bmFhftcjdWcBDvezBiGaNHzlzu9ikXmpihFsTmOFFkzzlRjJoZaPHpQ5jWa5mTAaq6OvJOBDyUGGkjfBwsJJlnZjWfjocDh988P7D4/36cLYkIctyAIHByugix8Oy6uPzF8+FTj/68QvFGUAXvP/jl72vv/yHXz9/9uLly5fMra/SRYm5RwU5mxoTacrmQ4kYP5WS4aG6ZbmTsZ86LoutW8OiWXHO9FY9NJHMBMPQEG/kEHYYgE0IZh6AOBUXCpN64cYN8RMTWzQ4J2NEBzh2LjOFnbNqWZkm66ovX0XlfH4E9P7+G5W+HA6mr5du9QZBzKxd1UoB47yu9w/3P/3kvwPw61/87vR4Pp/Of/zDw7O75+fzGbQQDpi4n53Lp+T8wJhOxQX1jbHnurMvG/sgYS9qd7PoeLj+rthh1+ypKWp/elfJ9NbXlGflLbpU0u49G8aN8jHpNG519oiP0/Y28wq8tLoMURjPDUUboW+bSUX2pxyiQvv6lHeWfb1RoElC3JQZgk+a1va6887oym0IjiKNOGngKOy4Op4wi//VCRjGdjHBETUNptNueSDncRb9+sedNUFTlJjrplECf3ukim6ZRm2kOq1k5p2ZC0PHRM/fzZve2q39ALHbf8KI/5TD3dp/c+16Ze2NA6QqzNRVP9t5B556LJhgpkqLCtIM3L7NfncpKIJnfxY1GnEhgxSbtCpHrVPTxLmPHjO6DLWaenJ0y4IvvSvUyqfGjCw52iAnGn4XUXzN5bAhlROlLDfMXKoi3eiTqnjybyYQmW7IXRND2xh8mQLuuACgy1q9dVwxKqYaC+bdtHvExGT85pZ/CcLeey9SFcgzSAJAa+za0SiF4sAhLQuPRKLslkhPtUJRr9l4USZZu9NpEYn0hc6rMJGQKSwY3FrLsWxHYsJj1SpKXKBPFHHcxIRf//zdocrcPBt1X7vlZzSaz8yfvv3qiRjej169Ym6qorQwNCXPvWfVj1YI+cYgvf/qs9///Kry9INXnylUiDGX9vvo9WslElFlgRABJhZ6+lFiYvrwiy9+U8K006XU+DdzPvGDoLKjUSDuve+lblCffyR70uJC+embN0NT46sGkVb9oE/GGELVj1+/3nib1vbJnN5xmuHexOyOffLmzS//5mpM8U/ffuXz29mpqp2Y1KYT5gtxYWbah2yw22alb3L/oS+4sGioSsob5JoHN8O4rDtJKGMujESDLqU5aCjrLmsipcCJ6gmMak7caTJXVlXWl1x5zGqSqQpkQiaOv8z2E3DY63tvdBcOpYeBxhJX2qeEyBBX5aaivKPyM9RDNHYEF1s5VMYKRjnfVWMyc/5PwIzKI3nOAi5ONbbPjkk5KvVTlKL7DDpSTbezC5lye3Tq3zpJhmOdU+dP+FykBHXd8di73iTwjFmXP2sPmaJ6s87trMpJKtOUSd1etmrD0zyxbZ4WL2TgHN6os8RkRggDg819L+6hwivYT91mXYMUK8cdz/8olVPBXtAeupnRyNByFNsNK/WK5VSViHkxAtu7mwOD+8DxuAj62hWKdV1b4xfPnz
+c7s13EgAxsdjdIya0xvLt/fNnLx4eHqE4nc8ATt+eGh9B9Py5HpYDc2OmThCVhdrhsADQ1aeVHvyXInWqgJO3C51grrDI8Elv/aTJ5vDM/OK0L2nYCHckpVlJNOAXBtqREpiQFwexoZfDbu/HuMkoCHw8Y7cdkXMcrnh0h9l8sNgwIJLGEpQqT0rCSnj54iWMgyXpqwAgxbIsvXcr6SHdE5KfTqdv/qjrsw7g4fGeG3/zzTfffnt+bzk+Pp6Oh+YZXbc6YVvNdMnDTkEAoij5tOh6CfPGJWnYTxw7ADvj7PnjAkqNj7V8MPdVz1F5O9jnSkfGK8UzIOfwJFKJA7o5V8Olf/tsDsbjDhRspNtV5lJ1kJt5oaPrwu7kQxf48fpCLu/H9D0l95iD1anscc47QIaDKw1Podysz6q7YozmV7LyAoYgKNAtXZqVx+Y5EMk5DRujoqiwYGngAPus8FKGQHS4t49DSRoybBm+UDOK2VPpzMG34RPyrFLisOuk+tZubWr/lG6SN13krb2z0f/8f/6/u1+891dfAIN8Gk3gITq5z4tCTVmmpgE0vDjQqIbh20ajXnwGyyyQHDMVRFsFrwvr3MSH21hTl6156IoHR0tjbm1REQvNNq89gDx+nIjdmjvxjgRzEvQxRRVQ8/UDQMRduhEwTaaLACLVlYpiDgpiNj1EWO+QSjcRkb6ua18WLp6MMK9Kag3Aejqlb6aqMjciEukphXBr0l3311pb11WlMzcQKKoGcGsmU2tQZ4oIp0p5e++tNftcRLi13lcrCU6mtHJOyE4BddOmsvHBrlATKBFYiZnVClt7vHCoI4lFujGpImIjEtC7cON0+Qw1gDOrNk/pnZhy5tyaiFj/AET67/6PKQI620evX6uxL73XY9iWhaNuT1Wo/fTtW1OkAmBufe1uN2UmgqhI777LTAB6X4kQYWXwzWrNXasAAL8v0dkffvmFeygQo+ShFxESyycAFUmdRGuN22JrFBU7bjUF5IevP2duyfZrPCPSzdOktdaWRSXq+biCh3rvBLAphY2hE2dwJskNALR5kR/fAj/hBCJ22akKV6EcUFeXY6OU/PTt21R0hplBYRfEBg32OupzDiGAQpKono+fukfkEPUQ/UziUOI3JPO5lRT8IQobRdEfMbODKAET75caQWQosajQCcg6Dn7VM4KICgqd1JHzj50ZIq5kKqMjk0BR/FEy1247Cka2lsAmwj9c1/AC+PTtl9atCTKyF/+U4Ci2mjqFSece76iLORf9lLddKM9l+nkrIKR5s7qI2X/sg1TWWzGuIqANTSkTWzjqqLtpkBUvBMJhBYoZ7MgBKS7zLH2FmUp17Fz6oQ8YFVE8lzJkKC+PpgqgEasqM+cWFklzM+6lEYVcuRCBwOXa2g8Lks3VgOBZIDBWna6qGtV1nWoPb5Fxb6pqMgWqzaxcVguGgfLTXJtKzzMcrsHZ6WVmBjuEUv4anwKOsckF6RB/Pc4ulY9jsXWSIFJVq+wNkF3uSJmS10ItKLrOamwxQQHWqYaSZr0EdtkWGuobsg6LetwlY4ijD7VTR8Q9vQuJRJWZF1YihsKL4LGTLQDLsigEpI+ne2Y6HJfz+WSDGK4T1fPpbBV9bQIdejqf//v/4V/98Y/3j+cHAMtyR1i+vb9/78WL0+PK3I6HZ7JiXXtry9KOtmJrEVBRca/6djARuROfI+dZLZBaDluslRiGEzQEc5Uf+F2v0S5Dh6kKY7eIVlfO+p1MpbkqhEMfOU6QDt31oE6+gwC48VaXkTgwr1VRbRliYOJgfckWZiE26upIAxxymdlFkhlmNIZqP6+P/+p//Iv/5d/+2//7//m/vv3mG25uVpTezXGcGKLSV4E2tgKIjWwrf/frb168fHm8a3/4zamLvv/BB89fvDw/btSRqIubWmA1dS/coUki5GbOWCsoOoE6ZKOm03BUu3RjSAwaZWEqhfX9ian6LUo9oxdmjKGqytdgNeMrH0/LzbK65hdD5i/JI8ThiO8Dy1bl2Ewxk0pBxyoAhZqPgj/BNOFeGZiCak+IvD2EgONkt5nmZVPfhrjUPTQkE38ke1EmHNCafrm01xpo7LJgSw6AqK9ovEpF2qpojcv9qRCDdK9ISaQCUZURR4GE/jwVJSLipCpArV3uNhKtki8D6CT2sN9bdZREfEiOrJ4DAqx4VBl8wLH31e47QFF30++FWAhdwjs5yXJEv/53/xtu7da+W/tH1UjeFJG39h3b9WBtQchmgSBDaNch1hRsaGTJxDxuGkouY1tSZjEdhm5ojZrTltdbdN4AlnlQmSkYzeBahr4nqVYhjwCIxM3+5GwcEUBdtI2kTqFHUSGjPtpGp/6MKMQjqABufGAWsYBvJ43MxMzSuxmkU79GaM7HMIm4Aw8RtbZ0Fe3dtIdGYJbWziuW1iiiEbucW1vs2/V0MsgLpNEi2pe2iErvQu4GCQDSu/nKAZAuzAyrat2FGItF+JoHKDdTPxzaYgo1IpLg/FpbFOrSOGE5LETc+2ospWmUGjcCdREilt6h0oKfE1kB8HKU3pmow2Qe88B0FtvERVGJoDOgcc/6vEdWxRkdAHUTSYi5EcjcNc09EN1leGaGgKlp476ugC7L8tGXr3pfTT5fcPj67z04WkVFOjpaaxTqJADuv6oK0o9fv1by2MwTEGwBFMoLQ6X3LivI9p6bVcdEPwFoRK01ha7SzQN2Xc8iYHLdIoAPXv2V8YFExGjOV5goEbobBhNrMIwMy13aTHuwAuC2NCy9S62O/dGbV/AK7IWjVVWgo4tq49Yhsp6hHp1nSnlmsoumYg6rIxvfJA0gQ6XIhO68yO506ipdzUse2iHDGBYJiE++fFOlMg0u1ThHURFz+1WMAvA2emRuIONcHX0AwE+/emu+yypA+PL4LHzOIGZbtdkGRDtADBbp7bAUpcgQHuxhG9yTMxZG3eusRuiP5XcDaGTojKFVwxGcPJwqHahdQa9QEljoonVmMrn67TY2WQvT6nMom+PihbPM5jCI6iJh0xR/dMDeUiUAsXnX20dvP+8gd1q3sxry9sbFVA3hu0wfH6femWnbtWt5zUTi2hzEKdJIUxBQVfeIVFVIoxZjqoDtuiX5UqAn+WCYa6WfhCID5GZChHPWA8KWYK+Ig5FUlNzJy0Vkc8a340xEXfqQls0yZPLIUDhtQKHzuN6z/SQ722qxj+qQInRVCndXHUALGh7AcB/m9JxVy+6X63DKriPJrEtfA6O4zqOKqCGZK9D8nKcazq+GEUsHAomXJi5C6VDb+XI1KHjAE8F/KABO514/yoN4V91kcaTRtIQVvYMOJQVNN8V0bUSAm1jGvUy1EZMfcOdZbEJEUGUuKAhAFH/XcBzi3FO/1cYjGJH1/C5uwVIY5rS9NSIeAMtTznZ3KbSzhnBUZWEfSkWX1ohMQS+qaK2ZRmM5NBOqz+uJG9b1LF1evHhpIQ2HQxPgsDQA335z37uq6vl8unt2JGbq8vL5s971dD5LJwAP68OHP/7Jen5UyPnU33vvpQitq9wdX6hBAlDVLmKU3
Vz8iAFCX2VZmmkr+rpya8wkoqJi+UZm/UpsHjeoeMJFAoViEYAlwCEQFF3EBiImVsoENaZ7EHHWgloLqjFGUs/pSaIC7WDjtljV0jiDmZTcvu3dgplZ1kjFDTCBQ3ViukXb6yCR9gyLVtdOjZwXKtoDCKTc3JAUNMt1KMaEk6mrBBBRUcb7P/7xw+M9oI8Pj8+e3Vm/y6FBcO6dsBwO3GUV6QCp4PyoANrSel8fv5Xz4+lHH7x/eHY4y0lHoJTtBsGM6MFwOCJMRVf5hwJjJ47jUms+WHPNq7OxhmakEhGgDuzQLsbY1GyooaIKmphbCQ8GiZyLPtw01kDY/lr1rQBM7VmQoAxd8EbIMHAE01fOb+IeTD3lGU+cVo1Tmg8N0JGHgw3iZFr4CaX5tz0A5dtxEdjgKr+B4ud1IiwhfsgHqGJs2N4Rt6QaYWcpezvWGQSIVFZJGhHm4QR/+LIMCDr3ksrNFBKThzGO0pkuAjcTOXRKtjCCfpKpiVMVYqJ44inEfY2DDIiuAFrsxVCLEtCglYNNvt9WS2veIB2PqKoSLwm03AQj243cY4DY/FFAIFHhi3zFt3Zr36X947lJ3nSRt/bd2xPB2pVqapGevIUlkAalTK4I9bVCbq7jytSjQc3HgFJWT6Es+xhCyBNtuK4kaQrmJjgjVysas66hJnBqEjSTQghQEYECKqLiohpZZLEL4qGcUIFq45bsjGUqUVWBaE/5pDT3nXSGhkyWNudJalgMDhblDRGixrSqqjamLmKOhPaMiinaqPfuBSu4MbOrVFRVRHpvSwNolZWJ2rKsfSV1ad8kLw2HHRGBxbBD0kJo+kAVi9J2yzfiSMDZBHvGmRkqZyAExOQphggKNs1L9kNQY/pFQaJCxI1bnA3vrqPLquYZYdwJt7Ysh26esKt++PkXocYmPhxCq2XnwHgDRhYT2hRdqpZUIqamgu7p2iEqUEhY1pnCTusnRkWEOQ+s9UEuIJH5GYUGe/Bszo65yiqPso7JJCx//fPhGkkEVVLIALQBH9R4IXJD7hxZ6Qwo0sVMgiWqjKztpk2snN+Lmz26ni3q8VgqNXLLqTg6pE0DqoqQL+tQIWm745VOnY+pzgiCCvAxBqq/2DkfuKIAJwZJr9EilZRFoG5x6to0/otOmZjUr5J1zeShdV3FMxtpxaJD/JhBWdn8uekkhFy2p3Hn03XVKY5UXKD9bpO3ztmiYu96UC6bat1OCuFE9cp2p6YXRW7SGTQ07UJJaVm+j040JNTpzbqizSR21p6iVIArJjVJfldBcL0NXeVAS9llSLYXV899fBIPFfxw2Wjv45wu0dATj28NX+nVPv12GVYbbyE2ilQ3L5K5+HlqgBSMncUYN+5CmTuNOH1yAe8CqWkIhBJhOJ3k4cizPVxrc6c3y3dpdoi4hmND/5TrdKK7V2UMjqGz54ybxrQHCiCtk6qb6zXwdK4rqaeSqR8EgIgohKlx08fTI7t1DefTScQSqtKzu7suQgxinB5Py+FwOp1Oj6fT6WR5IY/Hw7ff/PHh/uHZs+dEKxGYQURdOxH1vgIgJREXqv1kgABtSxNRMyczMzP3dRWRZWn1wE3oJY9QarQLBOJcOeV1SM5c5aaJYXpiIh7UUiLT4kh26YSy7EU1fsXO1mHKqS2baDzHOKXalRdW9/ocnYeCPGumc7UAACAASURBVO5BuZCbJcRLRk61dzmfT9LNIj5qqanCtqN3AXqyfIfDcjgeAPS1M7dlWZZjb8yaKtpcY9mRKYvdRbssGxhMwECOFzfZFjuYx7xr07i4gHA8EqCh8R5N/E+5weO1eoPzT71AqpeHpxCXMonNVK+0Yjh5ikDPuDnnlFxPIjGd11jn80T3BW6DTavvxl/vol5KoYlD7CBRKqilIvGZ3bqYHs33bJpV2bENvc+vu9UPC+65sAoRGxd3GZHPemCXMjMK5O0L0h0A7zSisrWZS2fLzQ1yPiMMyn/K1evOr0tGNkA1DPdPMn63dmvX2g+rlLwpIm/t+7br3pE0CS3keaiH+8umIkaRDwaaLkbCeCiUPJOsZ08UZGzJwM09LfRok5Axp8XeaRaJSgCZMYo8xFj6Sl6mwE1i5uAgIkFwBj8QgjcpmTXKWLshTRCRiLgSKNxejFRI8l5GgdnVJ+Es41QvjW/enYfkkHaGQlSZfeVMLCLUmocMqzDz2lebf4BISSkNxo1YaSgWUYibqhLTel4tNAcCYu49d0tztqamaW2xXXVZSKFQbmQ5pzyazzPwlxDRPE0xdBwBhWu0VYcJG+mYkychDa4+Ojd2mI/lmPaaG2scT+mdmbk14zbaYSEiVbHNImIVWdfzsix5bik2FqGJS42nqqp2AGoVrtNHQxF5pE2ONB9GNnVz40WhIrK0pfJhSIHQ9sHleNIRfmpnw28T55l0CwF5yKEqgF/+7YhQ/uTNaw3epyjxiU2LDSLKM+nRpgBaW9bzOZ/ORuN+UdWgBCKYb3ZssDoLN8tgIQZXlrucSE3ebojsfi2EMgVEmSBFeari5TdkjBys6Dlp6LtzTqquHSHFCCPbSDqDz6/saR7SSMiwlWJyRZMa3se1vwmWMireG4zwkJZNDHac6V1p1reoDP1FC3li72u9+LNu1VPs7CdfvY7cr1uksjNU/Wb2UrsyMZ/BVqwwhWQWbXeIZnpBR/IxCsVuFpm7SjxF13gFeHYofTJDQvOzGeeinsnyx1CW53K35y5GHgRyHw77H1cVVJKrPKUXdNGwmblbOXWdhbnvJL7MMnxBZQXRgDCrjC9WUzDgxeCXM9FQGle3j7zTxJxAjhNRFm0dxNa7HDhmS6O3sdHl/sbRegIMk+CnhgU0Dt80G+tcNyrgfF81lI0Vp07jewk+KaRhM+PapaqrrYguniraM9FMoaZBUltbuGnvIl2WZbHoB1Mk9bUD6KtIF7Pmrmc5nc5Wi/nh8eF4PK69A2iNnz9/fno4e0A3tHdZDkeo+W8GvoNTIvUtgtmEKTJ/q6iQuClCppO9RWHDcyoYDYVENPxGu64KKymOuvt+qSnwqxO1kYZzKJ/NeKyhI8ltFOQdK7yW0dq89k6KVEEk0lWVOXRmwVVyE3PkjF0EGSOYWC130Te52HYLiCIsHSLdquGt64ni+Dfi8/kMtzML3Ek0mB8CXE1JBCLGihXrGWhtJ8104ECafp1QvU7qxh29ybv0fYUjSUHBP9fAs+VsBfK9IGlZDyehtZnMhIuCp9rBk5dKO399Fwe+A9HmltKFT2MpwFmpbvBwMQG7QTZfi2cedOGpVoe6BAlNH5c/aZC53U6zmnxQ5gJzqgzmNOy8hZkpC1STW1wfdtssSgwAWYRdRFS0Nsqyqm6IWwJNp4/IY2vGk6Vc0wVxTGI3oZm6NJ2P3aSIHCaGYHLi1WpO8XyYIZvSzB7c2q193/aDlLi56SJv7U9oV9WRhtQyikudRiYjDRm53vzfTAaWUiCwcTMprfAu07gxPFRNseaeQsna5Kvqyeom0jSoM0f0SDFVupxQfP4DyZPHWdubPEt8G05k8GqutzL6oDq4Awvv
SqrmFvWYgusubZUCoEdyqmIGJIl84daPhXGJWM4mhpJot5jEUA17GL1lY6LgilQtraJvDjkRFQhZyeau0trCvJxOjwDYQpBSsFRzKWVzGFx4STAA0PMZVQaAV+SAgsBD3ZPKiEKbl2VxZ80RiwdElrcNVTclL1m0dyrdyh4xE3gBtK+rqKy9K5z5ALeQfQgADD5s04skiMSm+DGWhbi5IBB+bADAiDAlV4i5lZLgbiXORahCiZtp7z3/Y0oqyTvA9ZLOTqgmwxUrDJVkMiR2YhUAfjFX3fnw1eeayeOK7nPDjybUhlw04to8QJsz81py/xi/lHqlPu+cgw7eMd/a3HMa2xkBiqmcta1x3V4msjO+Fqid2i/mUuQgDDg6gDLl3wj3CahnuYlAD1pAO9jhKqL4Ed6cOgCZwiJXkEhmdospO2LutETI+OgAp9YVDt8lQwdFACSNgS2Mb2ZAh5bjXfLPGL0kCH2imlN2Xj38UmBPbDp6Le+ZYK9awXptDJCWojvxIRC7felFl7vmZXpVUPIIzF4MGuiIh2odwwV2o7zcm952eZv1EA3aVD6YO40z9/1lh0qbNmNfbIGf/NB2ZQ/zqFc2pBxH2i7XvOJDLkoj4zu2doA7ux/ekhtITKNnVrb4mPJK5XNB6lUmb0Q4AEBOQwN9FQsFUSRUGfe0LjkR+1TUd3sIFbm6K446dkQRKGrQxIoCHZY5i9KbRspX/1O0XHZNm5c6ZTIr4aQ0MJRp4c95KChUawDUMgpKl957d9IgADGb+83a1/XUj8dja60L7u7uVJUbc+PTeT0ejwCeP3t+f3///PkLDl9Ikd6aKkFFnL5T5PcI25JrAbTrishrbLk7lEDL4bD2YYcbQA3LVSjRiu7PIZNoPDN3atocaz8ET83DTA4QlVJHAyBSsaDy2JhBx4yQFWNDwf3EznU6+wWP1WdTqZfe/ChFQrowswDinGFmBcA8hnMYyR8kHQEAPLu7c5O8KhG5sZyaqi5tkfVsjqhmz/acGKSwHNZgWJqKcWanqzF0ZwH6XbY/SOOl6/WEny++2sEpFWGk7+sVVEq5GXWYYQL1XpJrmeCGAuE9wjAbB/IMlykG7tlDzlea5n7mTS8n1bvT6WOHt9vzaSz7yRbHug494OS9JGc4xp8f33AsU/9jr0lDTrI/Jz2qcRSRhiPf8Fid0cc7VzRejomJiue7VyJ4Fh3yhejeydOhCqzGtgvFbugiLxmCzVSmb8fT5awYN5APGpviq6hWdp9UITLuMqP4XtC5tVu73v4cN8mbIvLW/uR2PVg7fAftjy0xJ3ArjmwAtMh/hRcpOswNvh5BUAAo8kWyRPWSdE/DEHd2G22o4sVKMgzWmNUh4XptGfeY63ImkPtKFYKlsMRjagSISj5uEfFaMZr9EGBZn3wYZu5dVb0KB0E9Q7vPLf5RVfLIdxEmcBoDLTuUuS+ogr3GS+RuK8mv1GPGA/SWrWvoCj0bNBGLrqrSWuuqIHjhGuN7pfPCDOohEhCgql06AR6xheRUjI4SEPbYnBX5Wo2Nk9C0cfAi5hpgWaJLHQ0wt5lF0oidNifFRkQEVlJzHBORta8LluXQpKuqNm4K9N6XtgBY+8rw2HcR6SLEdFiW3oVoZDat51NEmL26AQ3nBasGDmJ2WcVkclVB1nCAw4Rc7UHmD5seIAjYmXzTNc9oaszsKc8D5bKiy9mmmq+1awD85PUXcQDEO89gYXLhi7lZqiWTFyRUB0Q5mWCj47z4uTJ5drrquHbbQlynqaMEi4ttqSxMjbff8ZDHkpkkTR5/wzbPG0b+mktHyA2rvPX8i7j2JHUCV5bk0kwiET/fsV6HzuAbZ7eLyVZi/4ur7Pe8zQZaMCSY8l+a8ql0qxde6sirNxj93TVtv7vi0DG1T796U2Q7k40BkEIuBBstBvsNABJB7cObs5gskpSoUQRf89zv4OpNzDdXddsRww8DbikjDnjr9G+d6RWIDAW0P+i2oIH9XDynsFgYcdtshBYN6H67Kk9OMlSZyPaZSzvdxWEIGXY79CyVRp6toJ6aABqKMn/UT8ZG0YO4YJoewShvlINNFfIhco8dG8d0UubmhbeH6MreXRXRdy7JwEdui625xmKSYU+gbT+b/nKBoSwLXO0rYQCmsqJITlZ7ITBzohUnEuMO+MGb1DwT/Mv6w2d60gYMoyqk692zQ1/5TCeO0Iy7u+PjebXul4OS8nJYRHRdz31dAWqHF8fDAcSn0wlAX8+NlrWvL16+d3r8HYFa4/P51JYDE4nVz0HkZ4zE4LZkZqbGVr5Bulh6R1VZT+eRkDc3Ik+i2wKNpvpXuW7VAhoKgDkZCpIU9dgRWMeQBZUD67yfvxusW3VMvLCPO+oKakqh9rWHLJ2L0etB84hAkZwxRhtHnMcMgTB7lYswg8hZh/d+9CMrb8jMTDifTwAaH1pblmUxULXGZioL7apdP4Oq305mJlrQc2o2qA6okqtQyg75k6lrjY0KPnAEcVfUPFY/EycnIlniauzZdovnDwuU/JGi7Bn2mh3qdA0PFwXShMPGS4653k1avZstEo9VZNm3YaS9mFtSQfvjypiZraouYPp74Pmr694Z/eJvAtAtqMw2mie700RHfCKmf7zcsZqB5BpNnj4fhiYYsh4X2EU8QvAFNkq5YH42Nlhjuujfq2k5H9Z9WpqTzUg2J5YZ2727XCdNCKkhbSB/yvRu7dZ225/gJnnTRd7an9OuqiMl8++EZKNJGgqzMregyNvvdPvMxWvpdGDjEZW80HusFu0R72lIHVnngh4mzavMvKN2JqvAY0yiDsrA5jXnJVaNUGV9BqhyyxIKgEa5Vmb1BIjOL5LrIolAFg6jmjANeU+VebGlMTcL1tFCkrp0ImrU1vVMzI3buq4KMTcrsko1RKavFOlEStSqOjLlSQJLpH5v3ER0XU+uvMsooYjprqEtXouTUullfqiufLRIK1u1Fap2DZQU0hrbFg62FPHmg4FOttT4UQndn2XJVN0WWE6+0tweW2si0nsPUKzwmCmoSu8rK2sk9wweycq/mAeMcrI+Os3ZXE1zjT44EcVJ8FfKA/Ba6un3mlo5AOKlqJMhj2WrKnFy+erQUsUoXuDtkzevJfincBJJlwBjVQSRirsKNrEfQ56JiMCQijk8eRzEmiva6GsoHMIcVkOWGrALOQVzc0ANpi1PAlGUTipcbGEh6/gTu2e3lAZfiajxoZDQGnspECipqhVp19G1TcJCL5V52glMgxUvPH9580ic0wzGZ0tFocVTlQzVlJTqyRnrGHNGeRRXGk+2XT3U4O/rF7p1ub1smThs5qjrxPILDdGxnhNc5Ae7HGJnET52yH86+o5fEqsAhKE+s8cLJdj2ujvorhQwq4iuvJi53XKmqpGd1udSH37n9u1OpM6nfHJxtcr2kovGV6XZ6rmUhHxvcEIkWh2Iw4kBgUhFN7O5WOKkH5seqB5LOh5NgxBc0ZFeWMijtoHL4BDGAd0ux+nyIDygTVrd+g2SakeI6AS/HdRWWtIP89ShLA7rAKmKtnw2tiNzIms
67qi3KHpe3oi7XZRYiUzShghEYbkAuH++tOV0Ojfmtizn83nBcliW8+kMQvdA7AUHJW5MsiyLKphZRM7r+fHxdDwcATC39bQ+fnt6+d6Peu+9r3muFAhPb1ILkrQmap+fz+fj3dEviwgxt8aqPGIUYn0YJyc4xVjLRN8mdpGCJmhcVT+BxdOUpFtkOFd4mSkPoanx/JHOUuYsaMAerjBQEWQNpULrbTs05idxoElIR3py/8zZjIurhJ3m969qd1prUO1r50ODjmohZhtflkWkg/TQli5dujC3Hhys193unUWcLQGXSNwykesIrVzGQVsHFRlzn3W509mM+05+c5XomoqoYLPBuum4CHEaJ55kwiIDcw6cuYH2VdRdFNTlkg+SeOWtgs8rEgwUqUBAneqcnZmcDsd1VJR2vXkqNP7NsTVxztTdlUs3DWvwZat3l8pUeL7+6GSCRfLGEyPmvRkMxmKfXqcr0p2MtUEfI/e6qkbNJ5uHe2c6AEKsqOAwhu366R7zv5iM1s8VO3reIscgghJp/rKivnCjUTXhy/D6nkh+a7f2p7fv7iZ5U0Te2p/frqoj2ZLZDTyrhRcGirtUmH0R8qJFGV+Sm/02pEiNytqZeFgpJElT4hWu4Ul/SIQboM8+1ChipCmUQWoaHiIr5quepq+Iy+SmNRZW1yGCoOl6Rsw+hHP6IFjhxWAtAbEMSGbyIgKH1qFQKafXos3ibaXQW5Ee9Ii5ifROQkSqcl5lYwSNbkNggbCSKqyCMJC6AGKyQtXOFHZZFV5/o2GxoDam5rwzQVWsJl14Y9luiPuBqnN7mvAzSh9kXQuXkSzralKKx1eFWAK1XIe5w+GeSIhKtWJVYkIxx8Roi6is6yoinB4oliwSgIrFJZmA11ornozOcIjroEd2RQxOAnkoRASiDW3ILsRklahFAXTpICvWLKKdgA5wa/VIBMejqtrM7cWkMSbyQGkRVVj2ADj/FNP1cT99+9aOq7PcydD4K861+hyJUhkZQulYAMUe1yHKwbIe1XcYCHVtvYOVIeP5T00UoaGiD/RgriezZBlxoKZA1gi8THZO46e/O1jgcnHTXuA/NVKCUXWmIyKFuFMSosq7jjVp+uZVtruoUxx+rpnRXIpuHipvqZovoRYBKfddM5hTBws7GFMMWOabW6lgz0g+R9jH61NP72JlP3nzpqbjsPVaAOsTb+ncse1jag12XympHfJUI5TZdnWoCkixoRXeBE8VAcS5qIFjQ8t0rZWpbXWYoxzMVsmL+LAoxBwCMXkEqrvy8vduqVKuIN3peey8FuXo5RNhbtzq5XK8ePry0Oj4tpzBoYDWzUO2b9UZBDGx0HSqqoA4Av3gYLMdCEpE5CbCgXBcxgQ4VNeX7MKlgiEwTVlRWpCMAVEZQisANBoEd+5ItsMFWIz2+6Ed0HR6SRY2zm7+yf2UKHSblNb7q4M4SQEAJirJCur4RFbqzfAymZ3UYohHkm7zSVwOS2tNQNT4wItrEoj6aoHcANHheDid13Vdn//o+cPjyWvHiTZu0uX0+NjQTo+nu+PdsiwEUgnaEbauoTglQImZpDvL05ZmyStFdFmWVdYkHHXPEAUJA8s5K2JIoiQfN5wa6X67VL9GxDEkQLSegfFjoC+F6SqZWCSsShEHqnbqnXhovUjJzxZykOdzaOdVqDlvmdzcZsU5baJxfmqjuEoA8PB4v55PvcvhwAo9Hg4AgHY622ES0U4aacjJriEDOBwPd8cjM0O8T1VhyzVE42gZdGdt5OAB4vBNUy+2+OCyYgUUkw/SPLckXWn8ybcuiNn09sjAGP+G2nLufXqoih2br3Y/o83Hmh/pjiJqavvfFoKSJ2dPC2vCSaa0qjuxpTX0rj+3RHt8R6PrIDnhrDCc4kN/SIw6VxXVqH2ae+U0LEYNOm1mn1R6zwuue3SVjPqivTpgWYtpqd1P1Dm+8Bo36lLiS7TQI5U6jRR75kF1jwELhjy/qtdeI4VVIrdiSNRYS3K6TI3I3MeDSbT0X1gDJrd2az9Ye6dS8qaLvLUfpF2p5Hhrt3Zr/xW3j19/+V96Crf2z6290zXy1m7t1m7t1m7t1m7t1m7t1v6FtF2d47/+D//xpou8tR+qXc8dycO0Smlzr99zmLeKqx8kTcHFSDnFjF2zy7G5qLOw+x5qSSjuyXfC7qc1j9N+mx1w3DeFidQdFUFeNBCtmWOFivR0X/FOoOYNuHBzjzVVQBvlQtSirmJyBioRIW4t8xnZc2YbZTGXIrcoGpjNti/axSrLiHitGyC9NVs7ACDipbXTuRNZVsozt2Z5GTWyuXvPzNyo8SIi2iW8PNg944gJjN6JsPZVVY7Ho9ehXpb1vFoPIuYREp5GaT4nUkCkgyzbu5e+0Sj1EmUwR2m9APuw9VN87k4f6R9plc9pGFjdF06hIlYqzzNS52RUAep9JaLWDsTMQtSW4gIIwBNYtmWJQ1QtnwqMgt0qIpE3yWq9JIRVOghgQk+XneFqIyqMFieOvNC2iMVKD/t/5KUEzEHPvYTCwk+qsvZuCUIjJp48808M5qZV+yym619pXBUbRq2DsPpquSMqRIvZgRVC7uI836b4Mfxv7Ee6qZXnx3VPZwn/kRn0QV741eva5164tdsui6OBeiMR9miNZy+qxidk3AoeflZIR8y0yYe3t2VZbRZFOaX0Cw+T4chB6bACAMp5wotrJGpQUpiwAwawU8p+VX3CAqFSsWtsbvoHpLtV+obU7YgdqW3XSl+W9j3aT//Xr8IY71OA+y9o2aXyFdzZKEJSp0MRkNnH4NVrLT1LDBfw/G0+n73TQJiZsKCWzxo/4gxvHTV1AxkaX/qi6jrniccb0UHskE1xHNOgXnox2nbkvZYAvf7IHnXcc8Z0lHP5lTkDbjqpvXqkwHB5x8CFV+ZUywN4wYVEzgPCxSukYrm6CvUbO/CpwTR3huK8DLiX4M0xzeniUHrFjLPnFBg1T4KjkoDIBQC33VNAjoITkNi6dNEPZ9B0bR+9j37Ji1AnbQEb7cCY/mYnbWoV1O6JVPzY7BIrB5bVbg77K5GKnFVAtByW83ruhr0Fp9OpNYbI2rv52b380cv33nv//uHBpvPw8HC3POPWVABapK9teSkrqQgRr2sHcDwuiE3K0PW47NX92amzl4rLzCeBDT3AYrp05h85+Qwa/qdCZswF3whx0mbr3DKoZLGfwCPkdcP8EFpabgNoDuRMTmJIMs4142kwvNfsC/Z0z2PD1fG9RbLnufCABpHVX88FJ6rbc1S3b06Pp9PptK4r8x1ROIwKGpFC13VVKCvWdSUQc/N6icCyHI7Ho/nmmhOtQJvfsrJDpV3BASO5djmmsevTTR+w0LrMeYFPoc5p/aCRrVUxUBfi0lx7M6ld0gjdcLXx+cSBBFNU6f93mnUyPOPZ/DUgRoOsAVQqbiO5vt0zYJhnf9hr87GBkdnoLx4c/NWV3jXFAGejuNzrHljaS9bEF/EreWxclcfqP5tfrzWrluXogFxeg2pbljKmZez1Ux2iGZcjqSqaqagqeAo09mc1GB
opJVmVnCpO5VOAAmy1zpefdEcmvLNecli3x0KEI8RUoJHadSDTG5/kkWJK4gA0sAFXfoImJSISCheGahjLoKMS6mwkRiLqyuqEkWBRiab3XIKJqzjuJM3MXDoyJVVYnZVKmiO0+nM2Z2LctSy6uqCFLkWcMFnaDwSS9DCctxaNPbpS7rICKGnSUkFWViyUKJ7YmG4GUVwxjInWCB0P8Aqz1OWYo7hKUUzACllACwWqHNHMOJCEkWyb1ljFJSskI4FHKhY2I0dmErXpmWq7PRGF3q0LrlbIarb6VhUnC7J9QBEU8rBqhAishexOSKF3jssZdohAKaVUStpj2xKmUr71nktSw5cYIXKgkBqPQNxWxcV62csyIFGsTQyoUVUwlJmpQ8mD4gRMNzONxsq0OSpVtsPlfR0zVHW1tFHK5AmOMCAoEE9nCf0Go1IDAmlucxQ6AI3+nUD31iq+tttR2USFQ154G566xiFlGhA0VitQrnrirEMWRy3xytBy0kUYYFmsWps24SEQuyUwGqpVkcdiNwSsMwJGZiVpEudV7MmlVEDXeGaw5rIZzr7dZv/10hMkgkUgTa8lhPGYarKgDNZFaJOqjYDbbeGose9SVDoxMVO6Ra9OdyX5CdajsIKxBC8fC0aP6b3dzguBUe0sBzRv2y6SNEwFJ5sB9NV5oCOPYn2RbNvpr+RGkUHzcc1MQT8BeNYLCiZdRvm16teWhoqL8SVJFIQUijifbLFTDPeFRsA+HSPDqIGH0u89h8bZ+852M1zigoOTdoOfkn6Jesvh4FtC5THbqUM5qaecHAmdFT20nDhh8QhLHxV257+ELtq5khv3pE88YXlZ+q+ceO6jp0OEYz/But29zzkYT6WIUloIow5cfR6vj8RJ9C9489FPS/nC33zaUUBCEokP0rCln1eTJJXZdOz06WqwWA3b0dZlqtVp/7/Js3bt74/Z/+dHd/7+Da/h/+/r88fOXm1//y1wF8/NG9f/rHv/9x//wvfO4vPnjv3d2ta//Of/gf3f3wve988503PvMGgNuv31qu9OnRk4d3nvSarx6m+c5en7efPDo6fvL8nSffAjDb4kVefvDDD37j139tb3//4w8+vPPDD26/9ebZ6emjOw8mu1MAi/PV3Qf3r+zsv3Lr9unxc5UsJCJ05er+wf71k+NTAMOgXeqGPATwbxxxnE2g2QYt2SrTa/7mkdO7mgM4pYKEaHlInW+ti9FwumJNASI8IpY/8lc2kipU49hppUHxKqKo9hbEAQpAhKDKzF7Y17vs52A6nQIQXU5AO/Otfnm6O58k4Mu//IWDWzd+/19959GjYwDzvd07H93vZgmct3cmlLqzo5NeYezvydOnAL7w5uuzSX/96sHj47O7T54tF0ti7ld9mk51EGORW7tzFe6oU+qJ5Pz0/OTk+auvXNe8nO/sTmcdAGLsX90/Pz1/9vx4PpuCOA95Ou2KqAZoSkmy1gloDmdjAogVC5LdLMmoUVmMILhrUkQRgahSt7EFoFDp8YPjCaOXrb17nTYWduhdWbOUoRUGEYEldVw2EqKiMBTVqAz2AjLZsKSWfpW9R+WYUBlA1R3G3aKNU7zOAjQiu9YHtcm/s/5VSGjjvmJ8sPS4pYTjN76Aum/o6/qFCqxbAlqkE4CZY0vXw4z2CYm4qzmhZfXxUxEXK72ui90uqHN4hbi24WUZiogy5tNjbl8fTZabhpgkeGRrIrxsl+2yXbZfmPbS3JEIxbD4QRQKvK6ujjlKJEerTDjkN20YY6hO7j/UoiRecdr1HUO7qHlYYBZU4EUDcpzM5zyw18sNtRZFjKAQautYVKEilDoRMeROSqK6qLja8NdG0YzviZlyFpezGCBHQFyzJfMls7eBKQkjEUVZCROTRaSbTFsVTUUa/odwBquc/BPMkiEBayvfeIUVE340NCSvGFNWi2KqTbBjQh4y4AK0iKgKSZtRtAiTdZUknkiBWgEoKR1zzsRJVURlOpkR0ZD7yWRim0NksJm3cjnECSAipgCGLBV9LBMXGM8WTLweUbIlIKJclYyyoUUkcwEE3FTJzLCJL9NW1EIFqWbXjfxyd9eQ1SiJVLsyzIQaHl1W19zAyBbIhsTJb+KYMFVYkHrsZDKHysRdHoSQCJolz6aTQbKqpJQISHCvhDxkmrDKYG/wki4eKG69sOUu8lK1szcHeDyg9vgHIhzaIkkE27oalYomJuYrEc/U5tH1iQEmwSVp4pI1Mn53I0AAnnaytdg5iFAOGyngiRMYqqGCAlG0HdoxUUqdiLl3UhWQyd/KlJiT7XkmUi9ZA+aUkUWEZBAFlJhZVFnAXGid2pYnMBEzEgxpJzWzCVBibIlIWZN5x1iVVVVN3AHI0osqckYgG49+93/AJ7TqdxCO2Ozxf+6gGXNIZNg8mt5cbLZHbWY0KKOL8ghlOxbL0z0EeFp2V6MUKhBl0an+xlEsor43dokW2GykWpHv5ULXdV3jbTQNW1QCkNjooPU6Ela6UYsQec7i5DXPAir27R0LXG58ikLdat2oVFBqGQHVqboqae2L2H0g6ll8ubax4dfyelUoMdtyO/+wsicBmYyeNNIyA3aEc7iRruerog4/1rHa8ass4UWd3cTDtNkFo+f515u01ZfxwvKAzY02fIKzjAsvKjspsh9XlgJAIMhBiygwZEejG0niAkBg0DDCOFheRGRIZ+1TIdiVjpvRwgxDzEpE0ufVzs7uV7/yV05OnwF47yfvZum5w5Vre6fnx9eu7ifG7vbW7tb2b/761x48vg9guss3f+mLb6a8Gla//Ov/7nxC//v/8j++/sqvvPb6a8fPFgBuvk6f+tRrDx880GHYnk8evv/4/Z98dOvg+vUb+9cOdu/ffQTg5us3Dq5fe3L07P/5Z/94uTz/yq9+bb6/vTpH3/fd1uQKbQH41Nuf/if/+J+dDSevX3vt9U+/9uDBU1Vsb0+yDrP59OHjxwAYCUSrYbW7t9f3K9JglHGajRTWFCDcZI2JeXWfxwY6j/lXZ1swI4Ef/QqGrUEPtuP0wg4KzmfF7wrY0LK3jbuOCMQskhOz0edSkIeZPSuvevaOIFQkgwLoV/3ubJuHDPSro2dPnx7vbM9u7l9fHD3fYgJw3q/ms3R2vlgu+xuv3Lx+7foH5z85Ozoe+mE+my/1CMBisdqbdVd29npJ337v/eNnx3v7h0zUD8OwHHYP9wG88sqN93/8wWKh/bCYzTsoPvrozuuvvfLs2cmTp8/FquqBzk/PiDklkqxAFjE0k1QzgCyCzIkmWhlrlTOqwcJFfv9OoY2naTkybvMrp/IixSuAtVsUChcs62DMacPCbCIqL26FsgaT8Ddtlsp1dFeTtcQ7UuTv9fsu1laOxjUiYOxCp3Aj7ieMpqpBP0O7MFnNilE7dRRdetnDirXwAs+rz9D2+5c+rrKScvYuPqjlYew9Hk+6/pw74GLPi/sGSpaIF4PbatxyfXUjBUgdwcXtRKEOKMFQzbGad9ku22W7bL9I7YVwpEDAlIqSGXqWAhCoIjFUhYU8WbWqZAE0paQcTk6
ABdcaFqfhYFlwKmgTv2EinYoXG1TxIOUsak5kKQEAAHwljgAAIABJREFUqQwZRF2XNA8czIO1enh13ElEI0aWQArZRKzILMRADRIRFe26SdZsTkQ590UJMtVUOWrWMBmWM6jFDaGjpGJhsJpcJFFOSYksZx9TGiQn4i5x36+46wBQYk4sopZwiO2xOed+ReBuMnHwKHWSB5uenCVUGxISKqHV8aIwtXFZLmXTmKr+biK8uZWJ5NBfKMrpEgDJSF0SVUqAElSJ0yBZSVR7k/2ZoCBBpo4EWQZJKYFJRAM8QvG6MTGfU5Kcc84pdeYSZNGqAuq4g6poJqYsA4NbNIFsrVWIlQxCInQpWfgqABUd8iCiTETJxoRErIoh9wASOogySEHJ471VLXsgeWpFKpmkiKBsnn7s+4Yi26NNZeIuZelX/ZKIp2mmRCLSwJqqIhrSQccMYSKmKHeh1aatKsqcOBEgWYacV7Aw89RlWTrKYkmkIjjaTpNIJk4M5sTQLg8KdInFpWNSAFn61HVhKXd1WAIGtc4Y0IYQqls1KnUTAFFrHqoG7LJB58QsIiJGEgYQE7PmzNxl6VMy5IuzDF3qACSdDHmlnouACa5b2p9SIrtt+7AFepIF2jO5q4gSqw6iknVIPAFA7MSjMS8T0wBQIlJNjo3aeLOEUx4xEjFUrWg2UuIsOeiYE0HPy6BuazAXhqJIiA5EKSVLsyWGDaSOVZS4dVnWcgZFB6qOZO54q0kDjyYwrMRCmjCBJMugve0UAgL1pft//xNSRh7+N38H3EHZ9D61WYICIqqEZFHhsJIOkhsF0UNKRXPi1KVOVVZ9DxVOlqgUAoFkThPbK7msY5Mq0swMXSAytvEsD6/RIaakkj0zhvrOTCnJkDklnxn1GjvkgcZK8CryLuoDlmYtpaSGMKgqq1WdVE0AqFNmGoZBSQ3YSaYqi4QLVDEWWFAhiYeuGzWrbhXGAYgYZhUgBRErMXUNekZlBqpqRB7vz95LkKUmTkOgg6Sl+rR5QnUMDZ9NgCpTjY1FVecMZGoMnoZrihMcBcKAF+ygHPaqygQeoLEEcLCtvhhi+SqabxB8x9crCtmXXvvzR14lBZ2oOmHzGkU1gm7QsniMSgDBbbQFEON9CriRYqzZBd5HYUEJbx5DvQDDxysS2Nr1UAqUFejQZJoGRnHbT4RdoAAhtZPjeQzirKqlAHp2OI1iz5sYpQBVL2nj9BZaTkTMaco09PLRRz+ZdB2A1dlqMqVXblz79h98c7I1P7x5bTLtnjw4+twXv/i9H/7gw48/BnB448rOZH58ujp5dLS1fcCT+d/62//Z42fPn915gq1zAI+eHB0dn52eH8+2592k+5WvfGo16PHJs9N+NZ1tzXZ3ATy8d9SlybW9rc/8jX+fZ9P7d+/1qzzv5lNK54nvHj8EkPZ333rzdcl4enyCbjLrJouzBW3P82J4+PBuQgYgg9CEpollNUx50vOq4zT0g+XCASAizKDE6kl3ldjNdVY0DwAnVitPp5rAbAxDMQxDlzpnC9lNDx3zMAh3CaVGYUXL3PprRmviRBEuE6Jms6AUqDGAwZFiAB2zKkRELTk2O13mlArqRMQakjERMydVJcmWPCkjA5jNp/1qtX311nCmJ8uz07Nn89lVdF3XoT/tAZyfnp8eP4fCSnHzfDKfdwNz6tJku5vNDgBgilU/fPj0ZN6lnZ0rgwDDYpFzN5nPpt10AgDDcpjNZsuz5xAkIR0yzlf96rSDgrvV2SmA5bPj+faWTueWiWgYBlXNklVBlAB0PFWYBK2eC8hZjwG4fuSkeDLYSfATMjr6tkw0Om/rrSEWcaXbYC9QkeYsx8OK3FmOo12nfuSKC0L5x8xxJdR/VHnKj7l6Mn4lsXCe+NnIBRWozISTkNeDQDZRAWjeHhTSp8/kg8Zi50iYBELmJvSSJKr0oWxyjg3pI/MxNMpVY+Ok8qZQOMoTqzDWTLliTJkvknbrbXmRJ8ZR9hqhhUGsr6NbkkPFgQfMtRNm/xSBDoqhfULdJuVSd+dXwE206ul86tZw24gbpy2OJ5n3g6hpd0pkpVCDA2hhNyaOuHmlZhI3WpJtLURVPJWZloU0+7QLtlCT9i1fxUU+edku22W7bL8A7cWlbIoxGUCtnRJUu8YCYEz115gmKqMvRNzvKte4SunMxGRsc9lxBgpKKfQFqIKToTPkxakrf6ysOhQtvci6mkEGc2tHMlIt2n/qZ0+KbH8ylTpnAiAPRERCJazSnbAYcMcf02O57weEACo5q4WUIjEngFJiVcmDpTKU+n4qC7JBDIthrKk5bfcBC4i3yMeGwZffjSuOhAlXd1t1UkNyU8CVTy2eGjaNBCIWFa8ZQUxEKSUOX4A8DCl1wbktk6PHFGvdIQYrxEJqWGSbYErbrVyWush0CkMAvbapqJJnn1SFqHbclZo5odmavxA0wtIBlOibIiDmoRfIpJsaJJdViFhyjukovgyxX9jqz3CiDgCBrQoecVJFFNXQxk4qqiJQ2ww5CwPMHQg5D+QCC+DudRVwk5DlwgBgWKF5wnm20KYWq89onC1z4/EHaBWJY5nj0VAwd+EZVOOamRjJlfO6FKFUo8n9V1yF3FDAzAUDpLhSQwFUlZrUyBGiYuTY2Dxy34E/qtpNkxcV1n3viNcbaU52DLn1eyo/xASKSi2vSTHwwPo39Kyes4j0a67j1FlKUNtXRv40HGcMaiKi+3//H75o4NZu/c5/Be5a555IWgqUAQNwxMzmmSnquobZgwVuWgr1j6I6WUMM3eBUApvrvi8qQnOc4dussBhqMwWoargrNYtb/A1QH02NJmORbnWvOtTWcDDrvH/fwItUiEioI+WdFJrDBRXKtQCHLwmWbyFHbH6bimBjc+xxzU8CrR9FdIuo8tkCY1PT18L6qjo92nUURwVGBqGiJXAVTnUaPW5TKzwCdOH5L7i+zlflPWtcqO39y9r45DWXv8QPRGM+y5Vl5vQFL2ycatb18YJ9rL2xCCRlKGPJwZ821v2pkNOi/VpXCSQOdlDpdVM26yVjjQ/Fm9fvT4m7xWJx9979LjGAp4+PZlv86qu3d/a2+iz379/b39/v0vTp8dPPvv3WX/76bwL44M5PJpi9/9P3pe9/4y/9+vL8+R/+yTtdmixX5/tXrwHYurLFrKvzw7Pjs9uv3vj4zoN+0CEvt/e2VXKaEIDtND+8fuvR4ztHT496xle+9OWnx0fPj57fe3AfHa7s7QO4df3GvY8+Ojk5/fQbry9X592kS9Pu5PTk5s3b1w8O7t1/BKDjbrXqVWg6YcklVQIVxbuwJEMbmRhmkXVqYEyQYHmby2w3PtpOW4Iz2WFXE1hGBMGWsvVTUwS9D+NIU6mt8QWm9sACgXEGXNRw1rrIojkPpW92hwXW2O/M3fn58bPnz/dnk5vXDx4+eChQWT3rkgzoAfTI127fWJycP7z/YGc6S4zp1vTK9b3jR0d73aQXBXD8/Iyv7dLxk8dZF6fnvOozMAyiw3kPmuctAMfPj0UGkGzvzU6eHEF0a2ea8yBqQL
D5IiB1HU+mq1VPuSdmFU+AbrOQ89B1U2mIVAPH1VPKQT1hS9uCjnFN+fTCk1xWBs1hwgUm/oltROpeSqZeStRi7cl34kYCOnaTNmml9p0u0vVNXRqTiuCyo5c4lXox6WxlH609q/xhZJgqh+hlxHhjH192CTVXbhShNt3VbKRx56gx/qM5tC9j0OvPqOS9GKtCbKEi1ro8APLAHcBzg6FGT1TbRohNtuND1Iw5JmowbXt4cbwp3uChJcUSFZf8y3bZLttl+4Vr/MmXXLbLdtku22X7BWk3f+fv/ll34bJdtst22S7bZbtsl+2yXbbLdtku22V7WXtJKRuFanarL4WxMXK3WPQcdJRI2fOYlEA6UqD6a42NnY2VM5wu7BXuGkPh3EKAEHFxqYOCOVF14CvBZG1YTU1oqJHDjRpbXv27GMqp+HC9eEqKHbeYviMIyN5IQAReU/X4cvNY8c9TWJxgY7SznqeUcs7mWsWeMC5T5Jd0O1tp7hEHbQ39TSDYyMhnzoNoPTLM18DKp2tEMI3u07IU0Hh/jasr/m2ICg8KrXEfsWm61OWc4UHWROZjICUi1wo2aASoeWChVssfAK2lUyL0Xks+SkucAhYW93yyoupaC1mbAVIhZcN63xtDr20wC6nW2EHV86TsDYWyqghIU0rEPAw9RIhHGzuson5SBumZO1Kyoj1QZU4gSBZmFRWFkO3W8IASzWzFRqC2pax8OVMNA2z9bjhZ/GdZHorl9sPlry7eHbXHHgUTSVr9YJEiEpsquQU4/KLFkqZRcQ+JD37cmjAf741NYhMRWWe+eH+UCXbnVVWqES5avI3Cga6amuu5iH8tlaiWU994c7fngmwQpVJO9bZtLynjKPfUae9SF96wkX3MBlH/G2F7RD77HqJT3Xs0PN+IWS3SkG3LNvvT3dU/OUxbPY2DleEqXkJBm+so1I4EqYKscAIByMOgnkjB4oPMN5MskwHicNu/7tgVAWftqMNTK/JD+ddc/NxFsqqSeMyX+WMysWLQGAN5ok1zRGAiafztSl4KBkrlMKhX41lzbKKyJ0tEs5QSB8VxJQbA9WIU19LicqUgKKlKuF40rnUj92J/gmgGNSTCkxWQrDmyB9cKf0abgcZFhsx/euzxUpyX9YKTSuWSFDyWoG0iWogIpxQzFzsDo80SZ6gc9vqCdopf4LdCo9mLi2sopK5dvdlvpTkKzcq86JXrUkb9q5CgC1evv7YRV6h55sVeaXN9fCwFh3R05tCOt/EoBqi449aZjNkZ+4GN5rnQxbJ4lX1rXvTDbMqvvXpre2sOYHd3+/j54ydHj7jjrens6OjsyeNH+weHX/zlX/nozge/93/+nt34+duf+/Rbb3zh114/Pz/9n37vf95Ku1d29h8cP3ibPw/g+q3rDx89gtLp8fK95x+K0GRnIiJ3fnr/05/59I0bhwBuXr/53e9+9969j27uXH20evreD7/3n/+n/8XR/adH955evbH/7PETANMvzm/fvn1X7x1cPRhkePfjH2zNt6/uH9y/8+D87JzQAdiaTXPO1PGqX6aUVCFWYC7m14QqBE8xVtUsJQEwTz1zL4rEkOre33HsjSrlLKpxnslkklLBzF9BSCENB1+L2X+Rh1gNXG03mQtq5ntexZRYOyVmSPVFiyACWuUBQJ+HPORJmjx7/uRssT2dTkV1NSyIpPNstDKddEuSN956rc+LJx9/OJ9OV3s7k8WKpmmKCYC93d3FauiX5/efnp4+PVfNmvOMeG9359rtw14EwJOnR4L+8NbB4vScEuWcl4vVydlpl7plzsPQA5hNp13qVqs+DzKfJ3BKnS6Xi62tHfN46KZdzjISqHxGgYgIoRIMr7FAVMJiXkJ2NjTnOlRe1K7SC1oRFHwtg82Mfeuwdvw2PQabnLCL7LEhj2jDQ+LSSO4cvG4kf4ykdFrb7eUTjVIwF6YkKMEwm1oT4eTJn5t9fuGuElF08TlNrxpi3LoWbz4zVAZEhWmtXfOiFdAis6/rP1pnsrzn5dtBL/46ljkR8kfRICPXE0OziFrWVAg4JefOMApkDytTGsSEmpkyruEid+SHIbJA9pJ+CxoV5YzFe9jGZbtsl+2y/eK1l1XWLmo/Anw05ctyO2oDLtj1KOS+FLkOOa7El10Q+YPtUv3GvNTJam5UpaXl1q5ZVZT0orh3gTQ3l1QJz6RPqjywhS0a9lWFkkAvY+wiucANJvVG6pCSh7sod5FAkznnwWbEy0cAqesU6pnU4Ao8E6uCvViylM4U1GMUsEqVsa1P80XhKJJYIaT4IjTXVFeqqMJYCfIoE+uBTbYa7bRgxKRdpNSylpE5J3EHKESsXnbOWb128Fr3W8G/vLdG4rChAmyJ3x0Rjf3BgMVeiXHvqgEyiRoISIBF/pMKae1zM3llDuyNKanmIQ8sDjyj0fvrYsRCMCWICdme+t1KZzCRkqgKVGDwpRcIgc28xKyGRm/Ib2SqCtWVmOCIoeX9i0HBqtK7cD+aU61x3USexaYINKblxiRoXbu4XUSYEjOjRI5DPYFXCStRKhDkWmtJh+sfoQ0UguJWDW6QFMTRR9Vmih5BIxmvXhMNCk8aiDqnAYWNjk2DqCPeiXYT190okl2LKnqj+vz47U7Q1LP8KTElw9dpfa+oEVWFZRTlrCIiVsrGBNoHv/sJWOSN3/4tS51AZIHk8FIo8SaD1kRz9AmGnWuMyqZBQrRvjm3pKsIe4LPT6FhVVSB7VhvyBd+FbhIwk0yB2IhAysyiVGxYFBOonqlKg5ZRqB8tgFNUyLgX5TERXaVuHAKIiRtlkxD2g7oDyrCD3pFZrQiwpJZl8IWBbFLcyHOEGp2x+mRqdAauNGo7okpo4n4bFoG0cMIxlXTepPAqrCPti5RUxQt/iZMax+sVom6rGinurc4XIyt5KsfpGdvbMLqNqOG20Wetcxrz93OoUJu0zc0t9sbG35oNj2b217U5n/hP6M/aDes3x5L6kWByzAvlleQzrr4Nde0hVMlLeWkjJqz/ZB/UjATEjx4/fvr0EYDF6fmVg+3PvPnm17/+V/+3/+N/XZydX7m+Twnny+WHH3z85uFnAWxNpo8eHwnfe/XV1/7g299+8/UvHl6/sVqeL0/18b0TAL28N51OT56eXL22/+z5k7zC0K9S6na3d58+Or51eBPAhx99cHz07Obhrf7kfHu2v4U8DMP5+fl5XuylK2997m0A33znG5/7zNvf+967T7/5jU+9+emdve3zs1U+0WGQ3Z39Z8+eARi6QaEq2SxgRKzZc9TGoeC668wm5HzW8j4zgCwDMRETlAQaOjwIZpFopRIJLqRB9C+sfaxwOQCN7BjHVoPGxfV1RbUKCi1MudbUstRRY+FXYTOkqMKy95o9SfHOD37w6rWD8+XqyelyZ2fv/Q/eA0C5m8xyvzx9+3Nvzva2h+OTVdY79+/v7O8woe97AFvb26uTo+99/90kk72rN/cPrx0/fdpNt1enerO7eXjtGoAvfPZz9x7du3P37nK5mM0nmXlvf08VzNQvVzb0ybTrJknAxJpzh
shkMulmE1XJWQC3LXacLpzahuWQ02mMc7C88Ji3Z3sDMayffybq8rORoE1y9Np7wwLcEKlWJg5pof4UMpDXxWpE2yAZ0KDt9bWNubyVNBFvqIFvRZYt+thL4MjCXVDEszrCMiKqf5aNvjY1YwmqPvLC+y4yy/ZZVJPnbKRyIz43esfmBf0kphHPKcJPZaL16aPJbrpq1SNtH3uhgdB1xja5ym+q7FHoeCiaNUmlEQuN3NJh3W8UWSICqaVd/zmY6WW7bJftsv15aS/JHclFLKCRlAXTHKsUrKWKbdBKcrewDcLcWlt3YgERlxrUpu8RSESqbqEQFSIwsyfpUCgpK2lN3VMYcPsPMHaaoZItv2RqplFaRir3aJuCpPLLivEQ3JalmiGm/5UJMgySrCIHk0guCnlWq5KcAJUsqhDNRJY5Dmq1fZhRWPt4RsfirLtGVmDlgkvJGvfeyJ3XGaZnQtSSdy/G3qhzXmfD3UKBCgOxp+xjoorBUYxIw1uh1BYGRWme0SADDK3KgFK7zIpiYDad3AV5q1rUD0pRa0HVPCeJWHKT5hBhFlateN5aCzs+MakYlGxlaEi1lBqOvpflUiV05WAAhqwIESIp+/p2MvxKVBOnkso6pY68stNYiC+CYxYCj6W3ojGhcatcX+4y/dHh8kT7yrSikWhpuHsjQtm0k0aiT0AZLDaR4/0YlWw8pbp6JlAnLV7tmUhIPJ8jHOX3ruXYLxdE2WYMVRaOg2vHcKROap2sZnQjFNL7XV2sffnUHx21nqlqH7D65g2UgPCsoWZ2tciX/gSCZPF9XNF7n6ss8vB3/9HGFSzt5u/8loHPrkyI1BeG4ldEZK1TZKClp/biSrcpVqWcCN8oHAYctfRIKGfPN07U/iib2y0ERlDa9SkZbF27Jg3WIT7Vjh4wNNbd0vIWcmDYWM2EW6fU17Ni3XEmbcA2Pz4xWq6FlbjVcpzDFcs1u4Y0EJuLtxZSvJGiqioCDbRTbv+LJI4U0GbT6ygs1eil5XGbl7+oUBf6QICoKBXzWN35RCAHaLQ8I14dl5dLjdf+DBpPoc9jJVQBuFcqAtlcH8DGp23+w7nURTHiZ2jWhdK9TS9uzrlR8fh20wqMOjDCDuyGcPkPZblxOgqyiJYZjFrcVAlS7aKur5NPiDnv52E1MF27dgDgbDpdrk6Y6KN7H3AiTqwZR0fPnjw51iEZgT09OdvbvSLDcPfB/eMHT/MS3/3ptxhg2nrt9UMA2/tXCDxN3dNHR7P5PMuKVOezWb84689Xjx49sF7t7Oycny2YWSRP0+SdP/7mydHJsOqvXjvY3t8F8NE3vrG1NQfR3u7e2ckpEmeVOdN8ez6ddbP5FMD5+VnXTVPixNM8ZEoqOZu05wxKpHDeFzVWkBjK5cZKxwwMube552JvMWYQhFybf+zkOw/RQOTr2WeNgoAU+6EYN8oWM/4fkk4RqS+IGzQMAymMvEAVIhlKpNPpBECiNAzLxWp1ZXv3/OmRiG5tzc0S+EuffgPAd9/90XI5VZEPPnz/jbffptnW2ePHR/cezGZTQf6lz78NgKfdzdu3tlL3R3/0TupXb7z9mccP7j66/+Tu4x/d+Gj6+muvADg9Pbmys3dH7hLRIHm+M7396uHB1YPj4xMi2t7aAiCS8zBMZlvL5fnQD1tb25R4GHrCwDyFVfmQ0bk1D/WA3eIncV5V7DI2xxWKKydgjcqtzV4wI1Rf9bXZXfs37guO9wJy8sl0r74/+kn1XdSI5nXUYcyzoUa3QgL3KzVVMbWKMG0vmy0aH9bN+P7fl4/Cbypb2DrQJrHdICpdbI1cOtrba0dr1LWx9hWPb40ClUPEP+1TRpS5dn79i41/jp5VKcF4lwFocotvfFRMVZmwqGE0ZoJFEkboHWX5NTgwu74QTCFOiqkbNc9k6Zrrsz83K7xsl+2yXbY/8/Zi70jX3kNxrZ55gLoiZ2qiOMuBQQqudbv6PbLE6UXFoRqNnCkzUYb7cnlYHIVSGKKhqFr9N1JolBvTDQxo7UUXYsmr/qYIfRu0xmQLe65lBYrrn/c9/lUg7GqUmIMlO09R1zlVVLvUiYjhTSjiWqC8jggQdyllZAPvVPNIkiDTWLU4w5TRICqEBAbhYIdPjfu5xmO4SkOB2GWbH/EISopqxBdROvLpCjSXK0YAc/pj7gRidVSyKqAS6kM/5JQSFJIzM6fU+cZB3TgBAMC6VKWcRmpCFfIBoECdoOzzgVqFRyUWhLzEeqjM1jsCJWayujHeAW2FCRDBa+8YVKRFM4lO1gttCCCAk5eJtzdyIgAq2RDc4isXYIpaSMboxTYzRAayEo/fBtWo7lfhRSr9Ug/5d0WsTluR8BpvSIqvDb/IvgIO9KqSMtvec2zUSmSmxKX4g2H4aDStuuXqYfQ9q1bkGOAKp7rNt/nsYIq7jFRCskH8CvEWRJQoFbmcqHZDm32kXjVo83PW3kBVCFTzP4KTiTgl4Vtjc6ZlF9npThYdX3xfiuhPAERy4s4qkgOUUjIM8BOxyFv/7W/Z8Kwn4i7qJg9rKaghynDH4ao1hXrdiPqOu1F0FUBokLYeMX+hJo6mShVccLS6h0sBA21nLOR8pho277tS1Ov5AKpkoZSms9bOWG6H8ErYrEjGxjZneAPEOMwnwJpmBwPoibwsbUHVxPWlMkFaEHlttmOoIz5XkCHXSlvEsJpmnJggMpQJt1lo+lDwyaqIxJ+j1mg+cnH8Wv8tymHVhINNlKV3DlF6VFhrmCQKrVpH5dbYbhyGstwV8g51C4Ept4rghseMlqeC+m2XL3Zh9G3ZluvK2ouw3Q0d8se0FLm56gVfBjhlaqrqyNZGQSE11iW2zAV1e/O2bn9ZmwIdcp5NusVisbO3d2V3B8DQLxXp2fMnxz88fvutt46fnRwfnXGXhkE67mzRl1l7Gr7wuc/2hG5rvhgW1w9v9f3q0bMHi3wE4HD72tmpzLa6IQ/aMyXSFfpFP9uaTXI+PLwOYOiHxWl/9OR4az7Zmm/pavjO9791ha8y0eG1G9ev3wDwxS9+/vHTo6sHB9Kvhn7optvT6YQIy/Pzu3fvzKbbALo0mU4mXdetlgMxMaeM7LygShxKzCKDAonYOGCwzsoHzF1d1/c5NfOs6mZM5ea8tZIe0JAadRpOtY4WmkjadRfl8gw3zFjXoWH2QJEiTPiVLAwiVpBlqbCbSJFhLp+JUpeOnj/d292bz7e62eTm/pWP3v9pN+0AvPVLb7z/4cfnZ4ur1w+Gs+X9xw9vXTvcubIzm08+9fqnbr1yE8C9h3eRh8d3HgrT4wf3D68e/LW/9jd+9OMfdd/k+bWrd+/eB/DP/uU//8Jnf/nw8Nbx02dXru6+/uqrIqunR8f9ql+slns7uwCm8ylIBxlsuKJZRSbdZGdnbxgEwDBkSgm5nMBGINW6javbV8OndbTJL5CHjS1OXvuNbrikobe+ri2BGN2x4Y8Lz3Sm/wLvQ21JuOqod2ZvHz2uLfC2Rr609OEC3EpNAIc2Ip1dTEpKupGUrF3TflOG5m9orrX3bHhgg6yV
I9PMwqi1Elm70o10GuPUNR5Yh21S1qino66sb6CXTcIGTLN53NhmNLpWpBSWbJ/W8KpNj1ZSS8QUwr+xAgJIzSkSZdOoD4gitM1jPjQ4yEUGetku22W7bL8A7eXB2ugmHdAoFbWILNBqXMZJXENUZi580FUsbdjSGmOvZs9QSh2pIAv+5HVfKuXiehm5hBzmXNfv11iWQiuHDqjIAM8CCIxviT4ifA5J1y+IcVJ7/ViHMBW4pC0CExu+kjg1UplXZ6bwTrJeMbMFvEQh48i6pS2H8rcV8bmJb9U1DhWj8UdZ+KYW98bCYM39zVCTKv/xAAAgAElEQVQC8Xmj8aPIoQILIbR1WNMRCYB61Kjp21zekIfB9VFie5moikjFNAFELd+A0doBY21gFF5HRTipmfiKDEEVPrW4OWf1JX6W2hVck0iUiDIZ0ur7UERVJHGSWBBU8cvdL7oOkntVD+suDr+cUkTrx5DL7BMRkB3SZXj8LxNxOIOsQ8MXdeI4Wq2wUodfUUvmECupeXBZBUYxxrpnh4oOLBAVj6sNSdt2KRGpReBqTGj0WP3y6jBCRFlygR20KhKEcChQVXOaloD1QKAL/p5l3Wx6y54pP+mmdTXES1+kR1S3qLirUgHKQ0+UGny39XPx2SZAOZJdsM2GkwIqKxk3MXPQz4JQ/0zypUiJO7Qdr5JzbHup2IgDu1LIkXWgUGsXeYsvMQQEZtZMgFQvUafoqmOXc3sic9Fo7DjFAClWEFAVcqpScEEiYsPobdZNTTNkU1TIQQTzcB5jyyUPHJX/1C7Zc6rA70tQodI1ih/2ncCJbHYcnFdwaACFqrd2Nt8jVYvk1IFy4S0KzSIMqMSh8rB2HT2ifq2uZo4NfFVdDTW1boAyqNj7RcWxU1FZehDVysfiZJaNF9PuW6hMU8P/17hQecjYltMg3gQyYrEhgUvLXuv/m9XFeIFf2LTpc0GkUeao1SkvGE3K9+sva0Y9Igzj5WjXqBjPYglrr8onY61Nv8Yse9PgSKHM7JaGgLPbmBRCmk23ujR9+OghgAd37t+4ffXs9Hx3unfj5q1e3ulznk9Yc3++WNn2TtDT86MPf/Kjr37tN55+/PjBydHWtSuJ5lemVzvZBnB6enpyutzb2l0tV5MtZp1ZTpHz56e7B3uTbgrgyeOn/aq/du1gyIN0dHjjxtf/6teh9MGHP9YJPTl9BoCBGzdv3rv38PTobL49X/bnzDzknijt7u4vFwsApLxcLc7ORFVn0zmnZKmuUUJPXPZTogrExBYis6Izs5M6VeVyjEcMUzXOArkZHM7/quQQh6Se9OCqKLtEm71SxEaidvs53USkuYn69g07gAJIFjIQXt/EZCmRV3kJoEtECcw8DJm5+6VPffqjxw+Ojp9lGR48egDg5o1bv/HVLx0fn5wv+3sP750ePdt+7fUv/6Wv9kO/Wpx990+/C+A3fu3LX/7Sr3348d17Tx790R9+4zvf/E6G3Lxx/Vd/9VeOl+ddmgD4tb/w5UdPHq/6YXt3Zz7rHj8+AuVbt3ev7F3tcTyZTAHs7Owslst+GCaTbjadTqazLJkhq9VqNtsGMAznKeTYdlvbUWp8thFgTOFF43O58dRXa0fd95v4eHlKXbW6WNVOu36rrmGHo0Xe8JJxspDmLFs/ywGtV7lJqDm3mxywX/BaHf1aWEUlvs1418LDNrdmzxtTKaKTE/ZCEiujK+ON98Q4/EEb+75B2Bp9EzeNZ2MjMYwR10cEp1t/cDz0Z5Kp2u5QI5q2i0NxqIkTu8HNVSeAAfIkkv6Q0SNtbsiUjzpJrLGN3PW6MvS6cxo52e8rUXeX7bJdtsv2C9deCEcykTbCUYM0GaQHSsWa30rPodNoYeLuUGF3tvwwQihdVyiUlUqkIlRVQGSOS1aLw0RwZhKxOMEiw9RaLwQvwrOxjRWORroMVFKrymASZ+V2Dm00irADWFWssM+aJXt4qYa0Si7fQjAMPRMzpywDjJEQaRaAutSZZ5NV7GHQMCxRWKOi0f8/AamgMpaGSYUMYZOuUS8FCvUOj3pbkJJApMaqGQJJUIkUQa0QwJRSJ+JhUtYlETEnQUOcQZRSsqiuYRig4K4rLjxNhOsY8W1GU753ldvBCxfHAKQ0CbC0mi5F1AJqHfYlElDBvnLuXTlpxNhahsO3pkK4yCYxe0Uz9M0Jhehgsc3i9WQ89NgMo1WNjUNmKyAqTKxkcgZS6mA4pnJc3842+VciouL+zKxFvbIUhBSIFIpmDihXzC0gCxeMEMIWKm4LC3QlUiJKya4hInBigubsS0Bawud9u6XE0rjaxqJxdRmKbcecVISIPUWWavbUq2LJ77gs/KYjYEOkmEkN1GPIQwHGLDs4OSUjFS3BY+2j1iTpVp4nAJwsV2zYpwOfkqIecZHR7dVi6D4aT4ZGC+1SB6WSetIgxQe/+w83DLJpt37nt5QQFWBMpYsAdxKNHLOuEEGsn9aICKU+A5BVYp+TqKVzBRE3RNGJYbi6bZLwR99R1RTV2QnsvLTwrUJUI6gvtqgoUBRWL6bTwgEIfcgGGXhbnPOGg7WZBTTGUMEmKgNTBUXCkHA8b3Fvh9lB4nABMQBZdxKpGJi5fZZYcqO7UFFpyJFDj/X4I2IDCKGONeCszfAI6NvQ6rfK4RrGRAURNmrgnsnePbpwLxpGXiyKL1d76khbOJIaXA5obYcvf9K/RostUol4fE3xTSO+bOhKAxLVRs0ktTuoQXWLCq3Gigog0JRJQj0kIyvtOgla7/ZaZ2IoShUbAMDMi+U5dFgsT5fLBYDZ1rTruM+rYSUAv/r66/fu3nn99o2T45NHT066+RTAyfFyh3a72eT7P3yvPx8Obx4O5+ezre3TfHywfx3An37/3ZSmk1dn08lMNUNZtJ/PtlZ9T+APPvgpgH7Vd5NO8tAJD4LZbH58djL0Q5cmq8VyJgxgyMNitTo/Xcxmcwg0AUSH166fzIdbN6/v7RwAeOedb89mW4l50s0kO7BHaIZZxZQEzwwZe9uhPDDYa+epkjKanCoUiWEiWw8RkRDUyG+QQY1MuyIS9fGcco7YBcWWNrjRjii13pQAwYvxIHZe+FQWk582+0mckWgxKe3u7QDY3po9f/LofHHWTbqU0rOz0+PnJ1dv3lZgb3sO4P0PfvTkybOv/6Wvbe/tD1P67Gfe6ubzrPro0f2f/vBHW1szAH/yg3d57+rW1vyf/4t/qkve2t756Ic/WSxPr736ymu3b/BZD+Dug7sHB1dE+ZWbr5ycPP/et9/tEl8/vLmi1dD3/aQDsFici+rW9g7zpO8zQVPqssjp+eliuQSgQtMpVbGz7HkdHSeTbX22gldgtOfjqBSLcnN6Cy28KKfWs7LhHLWzTs13G5MybDqYI2n0ZVdo5Otp/QCpTEkIuxtE9krIqSUfa2StfaGSrv1WDPUvaUSFVkZPEX4njVxPsTq0eTIDiPSTuPFFL+pB2+viJ6utBnCRPsY3TV82vyCkmJ8zprkofEXSQ1Fpyy4x9xnJzKToip5w4VFF1LAea3DFcvzF/LP
rhvRdHxxFtD7H+YnN84b4nst22S7bZfuFaC+EIw0PK3yyVUVdNRGlxGBCLhJBaFE5o/Ewl2ASbP9x1zVVRRUNA/3MIsaTsyooMTORZaADJ6/8mzhBiRl56IkpMYtKlixMKVRDBC8sdLsUFE2I7MIgCxB2oLRxK3Dm5tppqQ9iD6MS+UhEg+SumwDIWdgL4ILAtZotWQENKJA45TwYvEJMOWdDWifTad/3qppICUjExKQiWYSZuy4BEBksU1JgLQZ+aRKxiG9D9yyFsqqKZoq1KiPTAh/CXO0gXhRYoS52m0SoolASFXsOE2eP9rVXMVEyDixqJRlYNBO8NDBT4tQRkIfBcQcXhhScbF+lifVJRQfJIIVhW1ETkAF4bWVzaRRNhD6vQESUiFyxE9XEnbDknAHlxFDkQYhp0k0ADJKN0zNXIYGoltkBoGT1uCGqKSUHjgCmznAZu02FEzKQuOtc41YRlax9x5MYjkkpmWBQlVXIEZB6JRtYaWpSpZRI1J8yFu1Es/lWet1zkYBBdADAKTEnUpdHRCXnPEmTDJAoqs8BvIK5ZsNRSj78kH4gvdo1RITIlk0Kw8rVa18YgKD2aluPglbbky1aiwIpHHIGKEeoN4BhsGAWQ0CMZKh5PUdZb/cQsgkXHTyHIFD6W7BUE+ttd5miaCil5IETVD00lInjBAunVORKhHoUdhJStUpKLsczUTb4UqumQiHW+yKJaoKImAt3lkxMIsoE8ZKvGQQzLTiEXbxQXcqG5JCOmbKdemLNktIE+OTyNbd+5++a8sCMUGslbBEsAhF4nnVEjDVAoMQppPgKEzFYRQf0BJCHFEMGzZoLkc+SEdk5AA0qosTsMe+qHh2pqhFwraLm3OuCNhMU5l7bdQwoZBiyEpiTO5wysWVRIOI8DN2EjaUIoHlwrZGYOCFgiVDyFfDANAMJsggRc2IRIXNIIO89qnoblF8opQQgS4/ItEBAztn3t1oCXAr/yIuQVdllDgJHRKc5+YKtXrlmeDimhWW5+iGqzMaZVMEcldZVVFhKP1vdKqDVVgGKJxJBzNc9emM9gwx5KEH0I221pPCl8vA6TwUFGEVRu55dv6sb3UgK0yhYkKAQAkXNt6KuU9mUo7GsNTWknwqyU20zBa0IrTjAo6JGjlyRPFkzjR5SIEyikWxQUGNY0mcCVLNI1RIJikwh7uQsZueQnEWk6zoLj0CZbSYVlSzEZfQjpZXgKU4CLABZPlTVnIfqSy4wWcVWIueVqk5W+uoX3nr40QcAvvS1L3347o/u/PRoa3v3/HwxnXbMpJSW0P2DK8PpCsBJ1sR8drJ6dP9oaz6z6Vstnk63pq9++lUAS14+P3l+eHP/8aOHWaaaQNNJn9FN5kOvMvj27nM/DAMR59Xqzod3z0/OFqvTvSt7R4+ODw4IQD/oe9///vWrN1c0bM3nE+rOFuc0vZGfLT++8/G1awsA062OACbOlMHc95m4U0UuC61QRRYwqaeUFS1lmmwSh0K/TQwTdVpZNi1Q8EjJQqW8lOPIFOIrM7MLBkRdYoXJqSVNDJEY0/GdSyCvs6cVJy4WMpHcpZRSyiK2N7XZxiZTkVodL4CESOGJPrBanJ0cHx3cvtpBb908fLpYvf/+T7SbPTt6vjWZAJh2W5/+zO3p/q3nx88+/vEHT3a3r+7tE+jqzv61L3/l/Y8/APD46DGtni+eP91K8zOs0jz95te/Rh3+9Ac/vHX4SuomAO7eubd/9eqVK1cfPLr35puf+sxn/ua/+IM/PD4+3t/b1351dpwB9ILp9o5Atnenw9Oz87Oz2XxHhpy67ur+HoCUps+Pz1pQ3ma1I8thYbvMDKhO5QDilJhlhBxpjezw+FYPpWmCu70Omzo7aCCj9XAKx7hspzixrc4FId7VQ1iMZMFiRkS/kkgqBSpDViCEYBNEteQtgVvqSVOhZBIpnwrRSXExwQ1SJhLn+pgG8/Q3r0NhNq/qkVAcgluhhkWaCt9df5JCKdUOo5FTrRcxadS8qihP7Ty0cz/CAw1wC8HUnlZE4uiJRL11im6MqDgqtyksoXa5iJZBotvlCxnALvZ0T6PhFFbsUqvzlIY75nAaIDvcAEFMdCkdDBaNYENDmLt43P+yCcXIgqW5Dss2uZLBrJYV2hynSxWHP8ft//6v/9afdRf+vLe/+d/9X/i3b6Js1Jft3+b2ktyRQEPudcTZnN5JlkL0iSBZXNIyUgxnQo2SwA0MYs81DanyUhR4oF4BAmkgSqrIORNKBWrjz4wEZiv1C1Y1V8rCN6mG+Y1kCENFaI13jVSs9YI8ax6JxCwqiRITi2QCMyfJWVRM5I3KLYZPGEiiBfJo3kMhrihUCF4eV91CDgv0ler1J4QoTFFXSRUKCchSg8dWZ43xQGp0YBUOWlG/nQ9qVrJwVC2CFpFXsyyCm33pWp1lygtZpj7C19dXsrgNRECyp4IqSmVRcEfLpBoKG9Q9qjhZfXPBSCUerbWLEo1UYk4UUVjbpbKqVBMRk+aEEjpqwacCVRYqDmh+dUF5AiQZCwo+ftsJIUO2jTVGV1bTdOc4gJIhiuxx9cxJIapZNBdJli3+N7H2Ak9/6k4KEXUTeD2FdAqFmrtX2+uRrBnHmgM7CDu4wsC4UP+JHSQEAUPui1E3fKNMNBRDKYoM63tIQZ7SMZbE9rfGxMIyfttChs8pfO0szD9cVQLArHJfcTKuOgaaLVpOekWxR2sHAIbillfGpneaU98xJnHlVxQaplUO5xgPoA/+wSf4Rd78nd+SiC0MCungi2tTWlGdtRR8a/YXJxXMQRRBqJ7q7hbU3KElkCvG20wFYkqb87C2/RVMxJTiJKiZnIYhp64zjU5VmDvTFCeTqWpGoSqN+tc+3GigQwmwnkdpcx8IESdm7nMU2S29jP3+4vxaVqPMEl2q58olApGEjWrDPcSAuFdVrZ9q7+KqmjX+UUH0gheuUa5NrWh0F7+FHyafnCA79vXFe0q3qfkQSJnGJKyxSQraL2ubiip4OeqTE5/RL0HkbPgv6lg8pZKQej8aXbRdRHrhio4H++LLiuGqDoayDDFy/1JUVWQy8VSMGoqslWhKKRm7aApGRUjFJ/VwPOH+QrYkxNFB3/yqBGVKSvLs6KlIPj07g+F6eViuFovl4tHTxw8ePmR0p2fLxNPEkw8++gjA9sG+Al3X7fAOqXaTtDhbnJ6fXLtx7e79BwCuX7u2GpZ379wd+l5pUJmoynK5mkwmy/PlfHsLAJQ4dRNikABd6ni+Ndvd210Oy67jxTID6PthZ7a3XCylo4US0zCdTh88fKArfXa8Onr6HMD1a4eTyXS1WskgpDOKCjYUmAHi9KnUZAhhiQs2MhbxQqF3XrNxpQu5bml4sXRc3CtFNI1bKo9RVaqZK1DJPAhuiiMRKRQAQQ0t3TYITEnzIKI8odl0ev3qVQD3736wNZ8nJgh60d2dvfn/z96b9diWJedhX8TaZ87MO9+au3ogu1tik5JsQxD8ZBkwoN9icTJI+jdIsvxCtvwXDBjwiy3ABvwiA7IlyhJF0ubUTX
ZXd9WtqjvlzZvTGfZeEX6IiLXWPpl3aImkKDJXd93MPGcPa4zhi2l5cHb6Yjafnp5fAPj444+ni1v//P/8Z6vJ7NZkdrruh4Wu1xc//MEP58v5Rx98AOD88oJ0umHk1G369bsP33v02Ze/+W/+5dlw/vXbX//bf/fvAPjK1z56/uxkt9u8OHnx+PHj7/zMz37945968uwLAabT+Wa9AdB1adrx8cnL1ergzu07X+62qUs2lOXiAMDlej3pJn3fVy5oaLqoRt2zwihjVYlac0QRGpvchuEnX6YzFovcPsOohhfADYRU36OBFcYnihCOxnTpJ2l+AIN26ngMI7k1ultg0DHDHD3TZBeqTBfBsXV0YUzIVYFy78EtOcOVi9+y7Uk17Vvb8/aKe3+yNwahtbCbN/HCvV+jSw1zaNRMHV3cPrwyQcAz0O9TjSJ5XAmdQWMrG+/kvTVzAbTw2/i37vrmqWEjQYDsBqUrv5GF3bSbdtNu2l/U9mrvyLAA4hVcmVNSESiFa54HlzG3uVF0zLCuEuFwl3gFc1GvNE1ZhhQuKhbjRkReOgUG8pkpTQB3lQrPN0UbrBWSAjd2Voxtbm0fdfznCNkCAJiCQYmZVCUTcUpJRRNzKCSQLO6dYfWpRUt+F7fJqyGVrqWICBMSJZgZTgRASp0yAVYCx1TiZMZJLVZ1YibkLIAUl5Nr5AIbs89OM+7gcUWwr58HuDN+jscJKGDulI6IAgoSCCk5nnhlYct8UrBsDT0g58ystf/RZS9ttL8CUPds9UU3t6wuMYgiKrwdeNWscUWVJQKUVJRTKhZRIa21nABCytKrSPiBqSoxOHLEUCvD2GSQezpp8y7/QxsUowF0KNwlwr/JJt/t2jYIBw1VlcT8MFk0g8JnxzUoEVENV9mYK6Wx+hu6XGBXYaiuHSuOzK5RsS9dla5sqCaP2yMcOyTLkcfFOt2+GK54NA8i1JikeEPTAiozVdyUDhGIZDv+WTIzWx9DPZei78Qz9rUC+9Qnp3am6juApxIs/YQZwwGJ2F6nSsyqra6kjhialmSb4Qq2YD4xZfHxFjHaD/7bX0IxbHhvA6al+nG77Qzc9pNWBOmGzjMxkTsmUHURUHZfNL9eI+KXiMxtAeOVjV1UX13PPBSAyACDGoGc1XAspjSdzgrUTOYdCUqpU4VYttkG97RFIw9sjgyLiuJBgqgVzvDUt0ZJvKp21UOKZF/JUaMOFAKLiLejMto3aq2RjK7MtSJA945Tsd0VfIRAEZkeJinXWKt2M+LNjvc1joeVcMdvxO5VqgWLbKCT8W32tIYWoXEbsqngvVvcQlDTR9Q93iSC8HdTeNT4BrtOpX3jrF7XrlEjEb2JLtDelXuU4Cqz0jJl41QkMEmDSUG+xC5UGD8sviqNPZbZXPhFmpDSVlV+lZJ+Bay0v7lsQo3tHiOczrrz9dnq7uLLx589+vRzADvqh7OL3abfbHbD0C8Xi9PN2eXlpptNj58+n8znACZdN/TDsBuYICK5V8n57p17k+nkzt3bANbbC0aaH8yHPh8/O1VsFayqPONuYuQQUO77HRjKSlkHGR4//vLBO+8Mw7DebYQSgPVmM1/M+35Q6HazY6Tlcrntt7u1vPPuw48/+hqAZ8+Oz04v+l4m3ZTM7mwAXmw/9xxEuE1Ba1YGLceizJfWiSsrvS/hIY6T39AIC0ApC19ZQuF31g00mISO1rJu62J0da95eKY5p8PGPoacoS45gGB+6kToNQP4W//p3/7+7/1u3+9Wk+7duw9+87f+7csXLx989OHLk5O70zmA4+cvfvDHv3N+cSGLwztff/D+cvHs+VPN+cHD+wBOTk8BrFarf/Vvf+v8/PKzHz96/8MP1v3mMm/+y7/396aT7rMvH33/B39iY7p9685utzk6Onr44J0nT59eXFy88/7DfsjMaTabA+j7F1jjYLU6ef7i6PbtW7dunZ9fMnUpTZ4+f2rd18xjot3MXxHKiv2YiWpEfsNLw7a9J1JSw6uLBOjbAkA1bFBZdkL4ahZeM2bK8djRar+uNRYyFCFFy2sbZjImCn7FNRWbw3PfYcjYNBrevqpXTDYUDD/6vvc1jbqpZYOj7eiIVTe9edMEFFC1vKM5JaM++prQ/iuAa6PJW4sjRZ7F+qhrZETs7Q8XVXSUbOTKsJtrQx4oRKIVmE0MuPIaN2vrtaOGtpeXPW/faJUsqwynY2Nh3Y0q4ETmjgAQJ4VCQnS8On037abdtJv2F7690TsyfquyWQh7oWEXqc4dr6SwoisGaFXi+rjKkjXSuIV00mg1calzTahlWuQaZ2mBS6oKtSR7ELVQqQpyOJc2t58qnlZHOJQ3VGWt6l5lBFXSLGqYxz9IcXOzB1oMjl8iohBSdjEr1EwKA5h6uLSpMtZlzRBjjeohyVJuK9GjbNVOAAAJiVJSoUwiogSJYjTWYV8TV6rEJk4bRt+u9X56K9TXthtj9DPWzmZcVYTB3CXPlFZ2Q9UHxq/wqQgpceRT43JHLILDTBHhG2BhQHx2zVV5ofTevrTqH41vi6seBJOGbX5M08z+JgJAIqJQq7Fk/2NiJYmZauYysu5o80XZ6THidq6bzVEXz6CW8lx7ERNTIhIpM2euoAkE5si3JxZMD+psb4csV+WfBlepErNWudW77qsX+pS2MqKW/4HQOCT6KGOCq14eygD5mWrg0CIHM5HFBbfCKAVWF+W8m9fWss+q4CAc7k1Jozzu0a+gax7vapoTigJhL7XIcdsXjslFpFMkGWwAVd+3pbJq1aFAADNUpeoJjZCqYHs6ExF9+Y/eUErb3orxbnPVgohQ6iw3F9jQxuBv+x3gFM32bdAeG1dLFHxAXJ07zIm0OKyV6GcKrbBAef4Fpy5ZsJ6qAuKFbURACrUNnJJ7mqvokAdASQVeFLvMm6oqOFHQbnUyUnw34/zF1T4AujIVFBvsuvTz6sFZTUl7D6UcTWZz/mMzO6DHiM4hkLl2Rls2AxptESoq+Z5+UgiyLdE1Xp0Rnh2kV33f+jlu25XOlHNHVAskxK5uVblixohjUgBcbR8ej5D2+fvbCjSez1e3ff5Rfqm778oYrzba+zl+fu2L+Fy2GC575lwNFNIiIihLVs9BwQQCs+dp0ELdUKi9jYPril8dVyGR5BVV/TMVbYlriR4EAGYw4+jo8N13333xxTMAU56dXxw7sq/oUnd+dv7Nb33zy+dPv3z2+Z3VXQDnl+vUcUo0qFBHyoQp87x7/OTJB6fvAzg/Oz87vbh1eJBz7jiBMQh1XTIRyNzlZvOZ9Stf7KYz7tc9mGZ2eISX0wRg6LOKTmeTza4nogTud/18NRsuN7vdTqQHsN6sJStZRlnOJiIFPGgswog4ufEhk5KnFlI0FGi84DbR490ep0MBUqb2q2Z3lVwDBcJpnhIEf190KtykeaELGyqi5N6RhTJmKFSZyaPO1QvyZMkXlxenL58AuLXoPv3hj77xs9+YL1cvzs6Pbt85Oz/HRocea0tvMp1982/83PZys+m3W+TN+csH9+4fLlcvTl4Q4c7tO7Znz
i7OZvdn9x/cO7+8vLi4PFgcAbTbDX/zZ/7G2ckFgH/+f/+L1epgdXAI1aHP79x/N71Ll8N62/fr9eZgsQRwcLAyBHBQDEPf94NlIkIm5B7AdLLsJpOhH8rm9zVgdiuzgbCttDcOS6JCXeJH/TsYQcNwqlSlqLjmnphLoTxQINhVVhkzhbdthTpUQadCf0oRBR0iddMXd37cM+CV3yzaJMZoju6IRJvxahQuV0lf+ybbrs0t5ee1JHIsHOj+NDTDoOZDrTfgytS1koj3pjK5Pdp3TfMZQBHMrj1he9QzZpbKpJSKnDTuYBELysluoMVgfHVPVAbn81ApuNZ9d6U8osZg6eqnLo5q5bxNmiDEVNf+UzW4WBoDesVa3rSbdtNu2l/wtl+a9qbdtJt2027af9j25T94g18kgIe/9ot/Dj25aTftpt20m3bTbtpNu2k37abdtJt20/7U29vljiT3qm/tOaIej1y8JJm9pkR12ij+XcWWFbbIsU+EYmTfaqyfimL1D1MoSi46dwEjQJWLQxmgEm5Be57ujcXVrYnFnqZIxdo9cqjQGMj1jvBqxXAJqpq6zmqXdN1ErEPGxikAACAASURBVHQOQB78CCJIzqKWLjoq8Frv1NwodMgDgyglAOZuoPHenLNliiQi98EoMzTu0p7hcGRCprDANbHaV8ylbqDTYrqkdvzFnFliLbzYJLm1VuE1KMItSMtDzWwYDh0l+tACH61mCSGlVJbb/NqYCNwlqEjvNlIfCsF9BZXcS9Ym3J0Kaojx9XM1Mn9SM2siIpqh6oPzCQCDlDOxQoiUzc+NSCiZz9t+OIxPyL7V0icg7NXl4nHPKEK8S89800ZCTGLbEF5+2ifB8hX4civAiZkt6PV6d5HGxbHZCHHeI43S2FqeRRgc6S3NvzdcRtyrw7M+RpgLsYdilTeUHpagmPhOC61QVfeqa5yKbfAy7IiT1fggYkZ4rlASrw7TzHxx2GxM0GGILqeMUBIEAWq1MhSWpxXFE3Ds/mYn3fxJS4h5GSLFFNgrGCxehfmapbDeMb3ZSvTw137JV0h1NJvFPP9anw6iq4TOe2PuqEE+YxAEzUrUZMeIoEUiksgxH1PSdqehwPa3UqEr2Y4YiCklTlZGFqrMnPMAIOchWfUh0slk2vfbevxbYg7Vvod7jvuGo5Ziqf+pbTLZxoczOA3qN9VPvnUECe8Wn+bq1TAOZn7NzIeLB/k0aqXxJWUuMTMkl/PkdRt8gHW5a/Kw2p8rbIoIiuzRYQRLXmm0R0Vrzqk3tNjbCsBi3anZ5OFobO5lxtCd6RYhAiTmUksWWh+3aiMAtCTw1R3xyfPrKgkdMffml6C31zyyEVUKRb76EB259ZdmtWdUrewxlfWxjCoAvAwfLIhYUfdmJbDukBP/Xdu96ra136rfUnmMfSC9EqXz9cVisZzNZwDW5+dD7gV5MpuutxtRuX37aNgNn33y6eHy0Bn3tNv12+l0YgEN/ZAVen5xuVwtHz96CuDWrcNp152+XG83vWhGz4MM3bQbhkFVshWy6zEMwzAMiXlQoS6JyvPTk8XBiqTfDFsAKfHlbjfFBKDEnMAXp+e7vkvdDKKPHn0BQIWyDgp01E3SvB/6Oim+XuUIVC9JwP+Ismy+EZ1kUbOQLcNreGMJBm99tjRIPGLLld1lPuRMRCUhApUyF/7U8bo6tdRKpvzF8INm6TCIiFldUiLiLHjnww8BdMvF8nBFBFU5Pj5epu702cnLwzsZOpnNAdy5df9f/vN/oUN+7yvvb6XfXFzcv/Pwy6fPPvnjT2az6csHFwDeefjw3t0HF+uzR4++uDzfsKbPPvvsB7/zg/P+9KMHH339534KwDvvPnz58vTs9Oz05LRL0+fHx1/92kff+s63X+ST3W5nvZ54fe3twcHBwe07xy+eD3nIQ55MJov5EoCisxpHoyNUnLvKDo858RX2LOh1zceL8gq5F7Daj/XsRKIkGQsZdReEJK4EajIAj3JTvmIZ4/72WYWMj+61xOhOrPYcNe2f1zhyh6t/49jXJir0PYN2ropr5NUHx0euAsQt1M7xlelt5Zm3bXHPdcRTX/XFq1oj3DQftitg8zj2PXSR7qpYdOXVDTur8tsovq6JBQlKEwmVQmYoIRrO4y09V30OghnU/pelsGt4tAxxGpqRWwyi04TgamyFGcaZJW7aTbtpN+0/lvYGOLJyYyPEY3IfETK1zHTVS/YeUzmIM2pFjT4zdbGGP5vcZzVUI2sTEReWY7gnE1lBm+geWi3eUxZ6nQAK7UdFtAIGCFzzahhp1WP2AIj9piLKrCoimcEKypoZqUiYsDxgFfmKYGAP3TBeFFVQFGroW83wpZa1PfeDeugXWVY8m7+obwtVaM4KUSAxg9hKQFIM0R7mAhG1yWpidBHWFzfFtw2Dr8p7VeBJxlK7jVZEwWCkonabEln0gViVUOob8bBE+YsFIHAqMAhA+wIWAYJa+5uJrEhR7LYr46x3Br6CCKw3rVDZUhLWmYnJAyipJe0kYg/SJ1XkZgqLeBTrq2JKOpeATSqi5WiuywQgRM5GYPFaPV4/WpQq7O5PUgyO7fqLNL6UkIKbCKXxO6tk6jJTwSMtLqguLohEclvZxII1lTTKPmo5uQqwnTPlK0C/x0u1AN9ehBFHMfK6Duo/rHSVBspQ1A1t5hBV68E1g64dKWdiJPG6iFn7YHsyKF6EBHLgqUQkpURQvcNJZRyrSNeAqztSVfHlP3xDKe0Hv/oLVW7WEiYLO/u+eYpJCOXAjVqjWo2+LHgh1WdVPuDR9Or9J7DjKMShUJWuVbyw2cVhdwDUi9W4ph0WCEUbDV1ZAGnO3BSpbK+BKqfkr6CIg46txF3yNfKIW1jMvb2s1eB8Fos+V7ai1lXMrk0EXylEsGztonY2U6qAF76M9IJFd6krEhuCohNxuPfDusfLCLzqy3iW+rI6Jl6HWsn2/iwUjazdzhT6VHyko2dZpTH23V0UqbqUDelHMMhxQNxb6FKjjAvjgZRLxoLF+Nhr/WljbASRhqKN7tDyCZXno9xMIOYEy2egqioVuLfcMsaymZlIrERJ06t9Tbl5UR2O31IVbzv2XA05JW2vQgXQ7XaLjgaVs/Ozw4MDAP/md3/vg3vvnZ6dLpaLrkvz2Wx+Z/rbv/U7zGnezfvNDkA+vzw8XExSd3lxzkxJNaW0WW8PHtw6f/kSwMnT56uj1eXlNnVWrkcnabpb7/LQc6IuEYDd5nI6nS5Xy2GQ7eV2Mu3Wu/PzL86+/o1vzFbT1E0B3L57NOz67eVWiXLuCTLp0m4zHB4uDw8PDg4OADx7djLh1E2nolhvL6bdTAo1C/HAfhJ7yh40+RZ940lNoqdF5Kv593Q8yy0hiHQVWv6MzLGqo5Mbu1pldKPRtAZxqK+yLcoFwCYqiTGIyAC0nIUJzDwMOUM4KTNtXp4CmL73YHu50Tzcvn373Yfv/sEf/j7NJp89fXL37r0DngD43f/nX2+OT1erw+Mvns+OVl//6tefP3/2wz/+ZLVcisjnj74AcHrx8lvf/OnL
l2cvnjxn6oZeu9mkm0/nefn05GT3h98DcOfO3a6bfPHpF7P5nDndv3f3xbPj9cWlqk4nE8tel0UuL9fT6fTJk6dbzUdHt4fhRDsMeTi9OAMw7eaTbsGeYakh7KUYvU9fORWBw1xh2FT3ulOoCLcuB9cXkQrjUJSC1u2jgroRwjw0+spIegCLAVFRu451OccEqFjTR89z/SRS+ozf9ToaDj/+DcPaZ1xqiQSah1QZ87pP3E4b8vTbGaR+slaVhjK5+/RdUYTO17RyeNtD1JB4YzH2JflU1FeVsxkMrb7xqkBo3dVmDwZibuJ1K1WNb/Ltuz//BT8uH8cZR5Hxy1eljyF/la/C9Gd3M1QVAiTbZpZdhzlFKdCbdtNu2k37j629CY605sJdEfABQFWzVdYOLlGrA3AKya5YiPwepg6BDziYiWK3josANGmwmNlKi2QrS6LKHBUtRV35pwaWAzwpTyjPIXPAXlpeQ63SW0n+FcGgYQqFrRYRiilBDSF1DzWmZOnXXP1kt3SbJwWDiNj9OjXb0829SKEwh1PT6pkI5BWc4UiWqDCIwIV1AmHMJ0NkhEPaVQlXslZxt+kFq2tTMbQGD67Iif+pZX4a9hwOCY3aGs4+UGU17FfFS4kXz7h4j5sNAx6z3xjIMhCxOz0qDPwqUFGICvbKAN1sq/jqeheoXF2XeNTKZohxW3ZPAoiZvOAQuNlHgEIMPksEzaKiENv1AVmgyiEIdC2EkKJpUtuJpgtlB2qBEtGMDuqbCMgiogIKayoTEUmtheJ3WgozEeLURTGiq0LS3ry044BLrZGe04BYQSQu9HMgYRiwYyeAwPdDLQ0fAmDdav6+KhCP1svH3Qr7gqasYp0ramwRTJBwWCxan0LK3/bK2NJo8kaOtUw1aKz5aIzfNLuMosI3kQghqug0ehUH+FHfUWXSIjPzk3/0hjDt+7/y8+p9QwW4Y+5qzygobNWh442ugFU5uFX73FZCDrUUgNFLYMcLRmihD8HosR9SAomjclQeHd1WAJwSg8t0iOM43KVOIFb+SEV2Q9+lLjEPMiRmUQcF9vW/gNrMixCOctpDsqoocaT8JCuCJG1mJi1dDEC26DfxJUPJkhY7dSxHfR8Ou0prbBv61DEVwltwztAqCoWoG8usGVB1r+N9MlbItgZvLcyONFgwmpEUzli9R69re4B+yyngiHW1lPi+Vk1mXogbxu4ajMggfL0Tx1Ua/e/U6gZptNc9U4gWClaHNSqp1ZDIERg8Jtjs7M4VQWd1RFw97DRwQ7+78XNytmJr0Sax89Vq5ROMNGn/qMSmlHvKBummLMAOuDg/t/rbiWjohzzowdHRen0pQ3766Mmtg8OsOmy2uusBdArOw+WLs21e97I7H87mab7kw5/96//5kydfAPj8x1/OJvMznC9Wy8l8Lv1u2BGpTiYzZq+usNkOm+16cjmbJNlut9tNZkpJ6PL4dMq3zXTX512/223Wu/nBouuYMhITdnL89MXR7dXdux8AOHlxojrJvSrlxaLrd5YFmBCqO0fhqjACVWuJuhm3BSageM0Gq7R9xF/2Wl03VBTLVku0kksAahRGgaR7D3CZjCRneB5vgiVeh2dUZBZVoWArzEwMuwHAH3zyx910crleP7z/4OXJ8dnLE1L0Z5f9bPXF2RrALuvqzj2IDNthvumn09lmu3n/w3dn02lKfLm5BHDr9lE/9Fvgw69/4+TF2cnxaaY0DH2XpvPl4W6jAM5OL1XzrTu3ZZDddkskCvnyy8e3b91ery9M9jhYHZydXXDXCWm/6y8vL1VURA+Wi8OjIwDr9Xa3HViTE6nmoFmUlU+mI8ghvxFBgoIUUQT1/oieqOmZfdpztQ1TYydivmbttWYjRcMOgi6jqAavbXv0pXjpotlzOlr+K7147fMVLnpHv2oSU/iPgq/b56pSTD5XNn4rg41G8aZ+vKJvrxoPtdcEI3rlTfZ54cRUL/VnVscXbul2eVVYAQsiCZeFR+vQ3nVdCwOg98YFp+o6Yc+Lx5ZniVMOta3LIVQqmuXd/1kktDobzqxJm4vYyYo9TNinR2HoNnnYB715o960m3bTbtpfyPYGOLKCeFeoXKmYHKn9Xe5mTlZIxf0KmVAEc7Lqpl5CtLDocKryd7A6w3HDt2ZVJHRF82HusgxwlwPYs0S11FnMeQA6VYfbSuUWGJl3NqUhFBqvfDV3ukrkQ1OxR2bNiRKI8tATEyfkbI4/CoBSx2YXJotsN0BrFHtF5JAJTC2HEpSImVizh30hMbK4zufT7mW4k2vH7DoNOfBR1dJGsqYRy4crl1ZeoNof9xWhMujiKULlRyifxchtk0ZEjFLauMiL+/NMBNWRj4dDrrFeIVGoGvxYJb0ikVhwk9+hAlWxmXi9cKV71lGCu7AoK7LpdaH7WHQ8EVhkICYiFQgow6OqR/JPOAKo/1ciwUPMbWQuXxBXhqo+5eIXBSxRdOeyd1TBxbFMARLSZE6Cuc4zJ2ZOKYq5usDa2PlHc0QVQahYw0hLt/44bNXeqS5auccomMrWYACJU5+HEltX106ViYuigQDC7H1161apdG/9rzaCO7UoDPQmglQMCLFO9k6msrGpvtrH5Pu3ffY+gBy73TdqCdOzkUoF+3w3lKLPxX8nKOzjf/Amv8hf+QWEv7ZNADM3cflECnFjT5A7X6dymhBHuU5toYUAuLm3Btdb+gUUowLFG1tUswrexZoRX5L7FFFdhX63Td3EPGcNxiTmLnV9v+smk/LeSTchYgnXQoqaIORevM53iq6jTlAq2aaUkK1iOIw4eJ84hddvSQ/vqFIxQbSU08gFp+quG36YZo1oqG1Za5QpCbZj3wWGzAW4IwqN0x883t3Xb/pykPc6Ob4mEGKUBQpHDAITaRZc14p9JTy9VMN9MOpIN2+hJshg1InmNNXeqDY2wp+o1ecWdXVMIDQ0SLXBVkvFa143Lrmz/0tzUtp7yMumqFildvdeVwXBa/vCBCCnASqSrxm18Ud+eyelSgqlqcDuIkAEw3fEl/1meXB4fn6xW18AuDW7teu3t24dnp2dHhwsn58/f/H8eDFbEOndBwfbyzWA6XI5m01Ahw/v31VC3/fL+XI+m//o80++970/AnB3ee/09Dil7t333u23Wx10t+uPjg6Xyzmxmtnq9q2jy8uLLHm9uSTC0Mvl5XY6nzJ3lLrlwQLA0e3lsM67zXPJOXUdcr483wqxsgL05PFTn2PWbkq7XvotiCf+YTMJKqpQnrCbtFVUYNlLLH0Ks9dJjAJKiN+pOVDtcdP6HzWYtYZ3r8ZBMtf+uuu1HjX/E6JII/9+x0mdPDG3co4QYDRBNDKEkIoSI3UddLjc7hbTJYCH77z/oyeniflyc3m2Xi8Xi1t3bj95dPz08XMb0bAZ5gcHz45PHn7w8PD+nfOz9eHBrfXlhU+IAsCtw9uz2fx8/fnf+s7f+Kf/2//OCUpDNyXJu8l0av3u8y6ZrJM1pTSZTrebbR7y+vJit908uP8AQJcWy+Xqcr1OifthmOY8nU773WWfh+1uCyB1nNf
bdo4LSkjBLIyW2t71MlBk+VfcDh43Bsaoe+Swnlfn6MFdwxV1LyFMu94RuFTwuDA40WtlyEbq2/u8FVoq4Y8f9Zz7MEJNeeWbZPSaPUmmeXLpV8jxY3lm70INv9KYqOsG85O3K9bm11ykTWdfL9RVftRyuHgIFY2srFez2whoXSbjyI+peZ2ykS3Sb6BCBFCobaMwhbje3mZWatGM9jKXNF+52tHJRoIo9MU/ECP0okIm4BJEsjSS3k37S98++KPfe9VXj771M29/8dXr24v/3b56Y39u2k272l4DR7bE3DmGqBjXJKsGaF/GZaYHikrqUnjJUcgORR/PpjKCwSjxNVYWVWAxtg5DGq6DLABhl3cRZkN97kEMxQAv1Ucg5qqB0mQKL5qczPhJQB4yc1LNpp6oqoeQmnnVs9TZM6oxy6ClyGuIYAjO4iXgVcuMk9JMgV56daO2ApBs6FVVg0UzRADKmu09iVjdg9KyLgqljomyZA8aBsjd8KBEooa4MbOFZZuSq+EtR66Ts5iGqy1UZkhxL8yO+Wr2cuiUqEx8sEx7pWqkMiqz56qsOWyazhoVjAFIFoMzhl1PiZnZuGagcwpARJhZBe44pkKkqpJSUim4h4A0KwgpcSe2aKTcmVacy2IowZBbIiLu0GgGhFBNbPcCDuN2HQItYDaDq2TZqmpKKTTNoco3xETMsLsCKyQHvsL5SUShNf8eEJ6txdzp8AlclgBRE2ZsGhQBIDbfxsa+qyCmQQYAlBDepWBikGaRIlU3spMqZBiy4/u+g23TOAwy5C0xi4hGQliFJu68l+w6kz1MVNVL2xs4ZQ/hRBRusI6QZA3oEgAw5OwyKMX4DVIXKEqFWIOrOHEHuBO0Q4sUU6AgIk5sjnauXIlh2r7mzGS1Po1Saaw7caoyHjGKpx5oGHoiTilZDL4tU03KaXVO3VQQlhKRxImALIOqAV4MhdknAlP2/EGcWFWzas4S0CVSSTWh+vhNfpHv/Novg1kkB5aoWWXSTYw6ZRECMSVWZM1GgQYZOu44OdorIkw85CElIoDTRCSbg5UdRgAppWEYmNiAlMTEnIY8ZO0Tp6wZQOJk+9IUSNKkkkW161IE19tuZELuUrKDI1mz5MTJOjOZzEbklFmJBihPqB+2zl+IGBDNRod3gzCbti4QsRNLTJ15qYNERSUrQJQUKjLADhK0l8HxN2bHqjWriKm+6hTAQUEREDFTHBkbEhV3JwCAeBVfp2kVHgMQ+LrtrmSmI99URnLhVItBBBXDtOzEMHFX0eSCPYVSwxokhcyPk4jUDl8OoLMC4E5xzEkfoaY7O7Ca5U7YC9WAWg6SrthgAjg3zFGhHGqVN/Mjr0XpI5ISbTBeUeVGKDw5+hC91nJnfGZAXgGRCTDCyOrn0ShzNWn5zBOZR3YZoJHT2HF7SWAd82jyvoVWWjuDgJMdQKmcMeQPF3cKriFafI3VaV9gkyUpSmqyzAR7KqCtf84AZBiEiLLk6XTS73ZEBOrM85FERXMe8qRLQ5+n08lOBay62S6W8xdnZwB63XXgu+/cXsxnrDg6PPz2z/3017761ePnT0F0fn4OYDrpDlaLr37wgZKCIAqeTJ4eH7/b38u7rwI4un3vkx98Mp/NZl23SGnTb7vZFIkWq8WdWweJFcB3/tq3T06eD8Puxdlp4gTwanX09PmLT7/4YtMPffYVeO8r7y2Olv2271L38sVLpfXmcjOfz89enp6+PAWwWh6RGnNJs+lq0CFr1gwRmS/mANbrDXMC6dn52cHqMBEJkYoyKSbJq3v3mcXzMCOKEscMS+IEIOcsKpxYVboukTIkOA5RHFWoKiQzdQwFMTMrxKiT7UyzP8YCuwCxG4ayg4p0ZBZDMxUyIBbNAwWQldIkyTYHUxZOnqO9m067gzmAF8cvTtYXdye3n3zxmFL33ofvzw5v/+D7n3XdxFKLCOjlyctbR6uj5WI1mepysWBsLs7TZIIuLVYHAM5Ozyd3pt2QXpw8P316cu/2be3SLoM76pWnBACTtBDJg66VAE2bnXDH3WyqXdpmEe4ALA4WD/jep59+enl6OZ3zNG15mkDdetOrbgCkpJM0Y3AehNk4vjJRVkkTtirqdgqNaaOIoBJhT5U0NUew4r6NzDMiOSOC4GJYoeiBU8JjnYGa27oaESOsAYh8CIUYFBkjVCQ3lVX9of5ozQYtTbStVfI8KLNj0ADCG07JRPMqUjboW5ClKkJCST32CEHxm6nwGPnSGVQna+US1eHWOB9diY6Jt9XRFcy3fFOyeO8hY/vLUu4qMfFUh1TmuZkrjwEq/CvETU9hXqadWpalGW6aAiysp3l52+1iTyCp1QkoVt+0G55EQnQUpuG4ppuFjbXYLRBYUE4LRrtJzzwo3JVnBBkrjCfG6kSQYWxxFVEVUiTLOaMEBaNTZIyW6ab9FW2GDL49CPjBH/3evydi+Hq489//+Tftr0J7DRx5haVXxgeM/Gm87TOP0ZMqotPaA5sbRzbIERejK5/EUymSiDv/qcJ7kQnVhX5TZ830ipD0rQIDHHG0P0t/YJLNlTQezVQAAKdu3HdAoQyWelmoGATrjIqhL0Uts+lIXGJYKC6vgoaHeFulC8+dY0KbNPa46F51F9S2d2XGidm1HTW/JxukQwA+vYH/2C3h56Vl+n1hg9vGcArCApNvqKyw1txKQOB3zfoa9uTdDPQZHsFm4Jf6Y0YuYABRVtM5LNTdUI9qsQygisotXMC8dunqljSNkQCpc6iqkBSh8Y0Sa1vtFXvfprR1dyq7LASd/WWytWuU8nrx3nNCdLPprQcydlZdlIpFQtuIZ4CYq8YOBYg9LaZXCQK55O7iX3QvJLGYm5GIXHZRydVQ3RXQyLxjdb+O1D7KItC6wRUKEXj1HjFPE3txydZAxCJZTS8hF0tdpamqSHXTsvdPJtMCMqp6tBczK5NIptjvzaJWab6uRPH6QHHHVlVSlWyO0ZI955oqSrkr0LN//Bt4UxuGngjMliRRiTBJkyEPVecxYdWOCYSQCnmpG91cVj2dRByKkPNjWF4xy3qo4vnvlJNpdMQp2ceSBeg4KZlLT+AovqtFNPeSJ5OZVVLqUqfQrpsAGHJPNYtp3V2GCTki78YT2NF0xLC6x1QdNZuLvOuRriBqclqkqsXDpAJ99jNxGTUAVVFRRgqgyeB1u4c1lAO0W5ZwnbdKbVaWJwgVytlxRVpC12COzWh7eAhWFR7z5jWPQjep0chUVDlxq6yGZqyqIBFnmWaHsJkjIjBIijNyTKentfP9grBcVDW7oJGu/5aZaNt1k1LpnnG2q5Huo+bHuNKJwhhtlzkb9c2i5aZ9QLa8faRKX3nbdX3eI97txQ0UEmhIK+2MX0OveuveY6+7KFHX73amls7ns81my9xl6QmSMwHomC3TCDMpZ1XpGN10dnn+8vPPPqEJAfjrf/Nn33lwXzs+f3miMnSJFvODr3388dFyNl0sT09fAkiJPnh4/2g+f/Lk87PTl33f9/3u888+P3rv429+66cAPHp6LJPUzabHJy/WZ2fdpB
v6rJpvr+aPPnt+fnEK4PLsuEt06+hou1nv+mG1WJ5uthcvz/J22xFxHgBcDtt3Hrz74QcfbjabzWb7/gfvH58cQ3FxdjmdTDfrLYDdbnOwvC2qw5DXmwuapJzzJE2J02azATCbd/3Q371z9JWfeu97f/CDg8VBN6F+J5uL3XSVKAmATpOtTSG2zTpQGMb8NPnZtCgWdaigXRgTKkQCBQlhB2iJrImO6jBCgUZaWELdii4ihkVySpGGJQ+9mInaxD6QZskgTdw9ffYcwNe+8pXV4UE/yPnFZjqbHr/M9+6+89Wf+vhwdWS4/fHJy4uLi9l83k27LHnKCalbLFepS2nSvby4BDCfTD/74tHJk+ebPEznM4Eq0E0mu23PCbvNABjGKhBm5jThxGDmhw8fdl0XOVDBnDbrLUmadNOUqO93XZpSSrcOjs7PLwDcuXP39OSceVZOAkWejHAcrlqCiygh1Trzb92XKysDrj9sdPUjRD7xKqk4twia24j15DI5XXccK49vXlO5f0sqQgirFNKloZAcIlDF9l4VhmKnxszst2D2143zVazoWgH1yjWjxwVxp1c/tVx5Hc3V8Z9v4JPj26LV43pVn7mu1RNbabHWpQhv0bAIKVAFjLiDyoMoVBuzK4vVlKvdvLIb2wHuifzxpMpSQrTYlyigEX2oDRHh6E2ZahPBEerumPfctJv21u1ViOHbIImvxyLf/jk37a94ezUcqZHlzTGFkb6hqiUTyJj8Gm1VjIl0q6FHToygsqjZxGqjima0vvx7jZitBEnpVXxR0YsQd4gtEDcMg8Ji3VRo8c5Qx5+0MrTQYMkl0ZiPGJLkAcHFqeERHO5R/hhvogpKrJJR2viPnQAAIABJREFU04tA/E/O0t6D0cBDjnHoVeA6NpXZLshdXK9JFSPgKa5zvCkcQolJxXz/kaxYsfN+hLE35JSQI1ulPOrVEoU50aU5RbBSkwF1L5tcfQDGfQypwbAH8ysSZNe/YR49MMwysAdtGXKrLFvVZVSMz7Agr5bUTLhPD5HmnAOhoKJMNF0vE67Fe6iY1qlMdDOkOp/xOnKbbfUMauHEvUVz4Mae4AV6GkQ/MAmf+nhKGQ8QWf1C2qrGZwUzZx2s4HvOgyH0OefiKGR1iepwPGUTtM2e7tq5FwQPQQoltZNPIIfjra9XRKteAYa1bkMqUHer8jGn0hnX+ELUDwzStUnV4jPRdqWojABIVCQLRRJDwA+o++eBWh2kRTUoBmggr10h6sZtJnb3bHL9E9wQQCUAT98Ci3zn135Z63ilIGVq/qGBiVzVXzx2vtkPzBQ7uUx9wdxQaIJri+pUkrtEUbRKJBdB2XcnCKg4VwjMlLgLZwzz/jYi2OT2ogAYY236fiByJ7UCEziubMYNMTNUAd0Iqpwin0Nx327d39xypc3Bcl/vsQrqumZiFgcdbIWrn6BnqdPmHttHzZwHmfUjytWAMVoYY6EaTwZRuOGrh2r6XexbhgggS89H3Ci+BS11J2Mtp8X+Y6IsQoSSabY5B21eDm3nofRXYx8Y3qvqhebqeGugNjXAwTWtWRD48JuYuKuqpu6dWlH3hBlRAj/1Y/6vrQxhY2iWoDElRE9e0+nxt+2datzTd6m2g9D9OwLe2NsITR9GQ2gnY8hb6jAxd2bNgHRpmpiYu77vATBTFnFJillUBpVJmgjLrXtHP/OdvwaAKC8X3cdf+/qPP/nhZ59/tlouP3/02W/+q02/vrzcbi8vLgCk7eVvp7RczlfL5abvp/M5mD/+4MOT84vn52sAjz7/4tvf+NbR0Z1Pv3zEeaGQw9VBHvqLi9N+2K4WcwDnF+cHi/nTp0+yZlFdX15wmnz++Gk3mU9m88V8CmB3fPnD73+PmO/cuTNbLJbLxXL1XlZ5/uRku+sPZlMA203ud0POw2w630qmIRHAzP2uNydsET1/efHwwTsqXR5URHLuU+rmi2k/ZJPrEk2c1pY840FFmUrGITKneHO9RjULo4l2JQCkEQmu5kNtMohlnJAQDy0sqNnsQYK15mnwZSfyOkgKLebaEMkyzMZAJKqiWVS/852fBXBxdnrvwX2SYbZYTqa06QdNtFjNKcGKm9+6fcQdg2g39Nthly/PpyDKcrG+JKKLs3MAX/ngw9l8fvrk+P133yftPv/x55Z/btJ1CiULMncjNalqHgZh6hgXF2ci6PvMLADOLs4eP34qmSbTiZogmYVTt1otLy4uYKQf5rROJtO5LF3SGCAOBpEWYXxPUmxb5V3jT+BHsnw68k0bEcXm0Y0o24j4vmVatSK2j6+hJe9Gk30S4wfvda2MMzJ0FG4Zg4k3tBIc0TUgnoYD+h7ZCiI8vvgtgMjxtWOpeEzSWwl4/97xi1+xeNd18S3adbJxfXV9Xbj+a/tl6KuGQJYzPR5tqHvVCKal9GoLAMYtI03X/Se0ShHRaxPc6540bskmTY0UNsJrJq1y+2bnKsjdGW78Im/aqP1pIYA3SOJN+/Npr4YjQ4uHU2fXu+NXdY2zRVCco+o1PNKJd+AmBlNgzGYr148/TY1wUt4+siIgVRNoUBhTsRormN3iXOrkN379zi/+otevhpm6I2AtLpLC3oga9lPeVDvQqCBFRDLmUIVLG6aE4KsESiUkLNTMAEHKCPZhBQoHt5hICjSknZjC+MJjJKCPyh7ZAExDEMoLC7qnYUN2ASmepMhV6lLYxBvrFc1ksap12ssGEXWnNAMI4qmw1xtA6zvDoCVmrutVMfGYkcA4G21ZASViC+izqbXfSynekqHNNOBY0bqWrQYLy15EoRgUs2ScB6D8rGKj0t6HBR+kK9pl6XYVrSiOV9kVbY51am4ZbWhqfvfVaJ4QCr8GTKaBnFSU3/dJ5HclwgjBRkxcs+njR4t+ly8ootdN7ir733sksZrmy+OFeBu0MxbIiYY6LtMibT47noiwlsP2KRXmlKLUOallgIBCPR3B3hL4a7JXngrvY1sS9vGPvR6iq+1HpXt65RrLbxbKqm2G6NOzf/zdK8/db3d/+b+W5J6JhkZxSR7EHNl7A7IlIoDZzCGeQc89UKjg9wko4GXRrm3KBAQqOLC6MaedrmJbsP/lYQCpuB6vNotxopSIJWfLSiCSU+pK4G08LebPd2IJmwYRqUgW3+CpoxIGbOAu6s5UkayWnZIIXrPVkpZ6GgUUhYlg/qBBTUP3JQJRah0WotknVh+gWGeCyAEo1oiWUpR9gb3PmIi4A1E/DIkpjA31hOVhsHo7ZQs5RkxE6tiou0WEvmi/FHW6mZk6EC97H+dNRUSkSwkNIQqeUgIG40ONTCpaCCHVb2PjXcenax+aRQ8C9cb6CUbAYsYtigFR4J4cKxxJEu2f/s7x4aW9P/feNu4sFeY7vjRMaxTSEio3v0Zlpr2foyFiRIyik/XShKlChp2AiCF3795frze73ZbJ58GqxhGxEiVmqCYdZLeZd+nug4ezKQN47713v/7RB4+On33xxacJulpMibHNu3v37nzz4YO7R4cAJmn67r17Ir0M2+lkYsVbN9vt//g//c+PnxwD+
Ma3/trR4eKzLz5TxXQyVYC7dHR0KNpP03QySwC++pWPvvW1rxKgKRN4NlucnV5+8vnjP/zBDwS0ywOA7nC1nHYT7s7OLr989HR9ftGvdyL6c3/nPxM5e/HyBMCDe+/9yfd+tFjON9ueeaI7UUjOQ5qknHsA281mdXDwo08+0cSz2aQfNl03kSwgzJfT3dZyuRSmUWlWYfpRPYYQ2JiIxUMU7j3Ovle4jm0Wo6LSsvJiA0NYYkfLLp6f14mY23tggZcEAKwqOsgw6TpOHVSyDsHB9f33PwLw27/9r2fz2fp8t+uHdb99cXa5PDg4PT8Xke12AHD/3oMhD5QSFLvdjlIiUMesCmY6WK0AEGi5XO62u+MXzxeL+WK13AqGQfttv1xOBpOjMlQoWaaRQTmBJnRxdtlNZjLQkDIA5i0xb87XxDT0eRDlnFMnjx9/3u8EwPHxi/ls7hKwuLedA0PV8ZiKTFTkuxBCG6GjsPL2XDe2CR2fbmrIanDzhpjUN8setVKgVFhshI8R8BhdpGYU+5RmjwoGA2poVJWO9i4uQO2e0LIvX+5/O/6wSphXCPLVpvVwFB4yoqfjX6i5cvwr7X/whveWNdrvz5iUN5J7uaG54NpH+7+N/2b5tbrGFP2CAISVAqqWAKkC0U4SmvdVYaF00t5I409C7vBwPZCz7GaIWgXymgyNXeYpqh9b/i6bhRHDv2l/Fdvb53C8ev3bODb+mXbmpt200l4DR441h8ZCrIoCv71atq5MfKSHjT+pThXaihMAdE+muJbeKpQ9d5tAlCMrmaowdUQkKg6NmNOJyMlv/DqAF7/+60c///OuSLiapoqITY2BFYCp8GczbLEWAKSIDDSybFF1WXQtVz0q0FQ2q3UikhEYJeD5sBxSKAhKiFMcZVJbRYeJiLrwXFGqTk4m3qLiOtGxghJ6qRzHCa0YHAPopa+DQjgpWo1vGkvmLuxJCROrapnL2HChE1r5cwki9b88rF1VRcxnrqaIHm2dmHeOWYqpUBFFrbY33iORmIm89DYxSCESoZF+nQuovhxNKHf4tPibtEK4zaQWJQRF/CjizRXn3/HZGKUw3wMziltgK42XTJQYncGirpPrXXCtSiO2t6hS0FYgVoWWmsWJk1pMSmQRIrLMrEyARJK7tpJ17Txiy7SlL0oHXbK3Ll6BBwqaQ36Nlgc2ocQMiFuXzUkQyhIHkUKYrLUrCA7boHXkBKgMvuk6FAaSVm9paepxx6qViSOQQEhprEoBI3cqf30ouIZ2MVihePwP35AvEsC9/+bv24FtwoTCDVkJUK5gmRYJu85onQlf6zLiRgOqfkNwytZCtQAoD0Op4kIgSlx8JJkZxCx1IzOxunptEKrTX1UhEfOOpNb4QUbyiQBJXCpnKqDElLynYhFT3uFy4ImgTFCvSJsofBQCwB18VwUxs5aHoVUFIpCLyCreBo+rGJ+fi1RUNLtgfIKvoFaKSNsKuKFLa6oJVXugpZN0siOaRUgIxTfDBy1UOY2SRjJ7mBJpi9KEC3hSsJG7kJ3DQmHj1FzDX4PmKKCW6Dm2T5CXyI9AURY66N4rWp3rK295VRvTEPVstVpSKjuf9GSg/klLUMd0+rphBgve0+uuqHn7hMvysxk8ropwqSZmEgGVCEB/tT/z+tl5k0qZZUdkuSNASbJusmyYldgTDRcXaRXpNYtKRj/p0u3Dw451ngDg0Z987//6Z//HB++9+7WPPvjk008vLs6nTAvCrUm3fnH8o8dfAtiRPL1zS0VW8/liPk+pA6eUJv/V3/0vHj1+DuDgwXu/9Xt/oKy3Dm8NFxdE1AuyDICcX5x13SGAFy+OP11MF5PJ5fYSBELqB/2d3/+DrLxYrGxSZLftdxuezu49uPPeB+9crtfDMPR9v1rOnx8/ffjgIYB+13/w4ftEdPzixW6ni8lMhfMwzOczxWBT9tWvfnT79mFGzgN+9MkPT08vpt1cRLabtdVum0+mtnMNDnO6UnAup4JEbd61kCvag1xkIUCTm9ycCkisX0hE2Lvr6qLbqTNPRiMEXmouXmmJrXMe/MgzGU75u//fbwH4/PMvZh0I2k26W3dWjB2nWUoTkX46nQEYBumHjKzTyXQ+Xw7DzrgHUxp2/Wo+A/D8+PlHH3/8zjsP5/PlxcXlkHPOmjM6oX4YbKvmDAgoobggADSbLrLknEW2GUBKs9Vy9fLZRZrwYrFcb3aSMZ9Pc5ajw1sAVDKbX7lPNwFhn29AtUqTyjV7UVnBZVFpUJllFGmomNnqXO+f49EzjTON7PoF7IndUhzO9Yrwoi7axP3XmJ7bi4tM2fTIhZPWArL3y/4bryNPDcZJlWeNR+ryyau6R6U7e7Om9edeJ/emtr3u1UzlNa2axcohtEfXDr35Ibh6bSHAVKLn6yvUNCd7RVEkdPQsVdVIjR0C33jjlU1TM4VZlYOKUYYyiDBWXOG9Lduo7x5NUDlDFCfketZ2027an0q7cZC8aX8O7ZVwJFUOVhQQDVFLHd1Ay7RK3sOGLFZZwm9rQDuDClq6X6QHLSS5MthGhqhMQusFraRi/Xc1wbP4CIhe/savlwtOv/vdo5//+3YnkamuEVJc+kWla1rstFRi8BoO10hRDTstnURk9iDD2jzbYLXSR2cD06pBxwUmiKoDJRugz1TFKLzjpkbbk/JYSCkTZ4ht5GlWUfe+YQBd6lxEDA+u0reypC6WetQNKHEUOQ8h01OVxWzVVW8WuqiTe00ZyGUayRezagjU7BzrP6DqtThIVcTSlhfoovbYn6j1o2ZO6iqiuvj5G30MHty5J4828kZRWQR7vhhVOiyvK616YDXCWHE9iifHooTfIBoRyiT4mF5qXuJVyamsX3umvP+WglHVkgzAy4U7OGJXRNklJkZKNR36WNwjF4vryNtAJtv+8JLNKuouV9VXeDyvQAnurjuv+HnJ1UWM3lgFAF84v9xyTMrem6ruYGCeSolA9ymNFFJ1HYoGxUSRa79qp340KOzrnv8rlgs5exGkp//dW8Ro/+oveYw2l5o8TB6x67Caday4HDogVeiNTQqHBmhYgGQPdY8jahnKYkoQUzF22QonFIV0MOzPkWN2mR6lKKqajsckkjklyaIKpiSqVmyhMAubeA+yJvWSTA2h4YjczpYoU4MBUaxHEyvtaS4BAlmRcbOjuFnAjy+BkCXbVkKpNhsUyUhpgtEeCrsESGucnZpbTCFN9s/IX9BfJjmj4uncHHSFgZzBHP2QMpIXi6n6TqSFaN0h1NMIeFXmCnqWNAtBzEiyEJOWqQEBcOVKBRp7uNCYuBlRCt7UKnt3duw4/Lkq/auE+dUquStsLW/du7f0vWzvKpE4cqBRpsZkk+rG3wgBNpZrHu6buenlSAIZt/ayK/3zkRZMM4Dv2Gk0enWlNmPw41WvLi1F/aXtZq0pHx8/n6T5anWkkg0GFfHsLcSUlCcpDbvdwzt3k+426xenZy8B5N32Z7/50w8e3P/s6fN51x2sVsfHzweibjHvUvrag/sAzp8/OTk7m03S9//k08PVarVcXGy2k+ns
2YvTy20P4G+++96UKc0XBPRZc+4T6PnJ8dHhKoHOT08BnBw/Ozs7JWCQftpN7966/fLscgKWLBhyxxMAm+3uztGd7W7HxMOuPzl+wUQfvvfBYj5ZrubzxQRAYpmuVqvV0Xw2+/zzJ1mG+Wy62fSShwCH6Pvf//63v/2tb3zzG//0f/lfu24yn3WSh2GQlJKZMeyYu3yoKCXnCmAAuKGjQoluMxvxyWB/5GYl86NUkLMz+PH0O6ro0HD9/fVWVSZKzKooWYBTSikxqJS2Qs45a99N0mKxtBdMmPIwaM4qMkmTxWKx2+0Sd8KYTWcALi4vSDH0/QDMaJ6IGZCsotLvep1ObGbee/eD7//BHz68//B7L//44uX5ZLWcTLo0STkPVimPWEGQrKpZNIsSNqpQIu2HHr0C6Lq02w6TyUxJh14mk5mIbNcDKK/pEsDhagVJQAYxPNmFmybj6MThNRrTSLgjbHE8j9pwf9pPKFUJWvnMUkKXZxTM2U0ZV1FjxeiVRYoqZ5uKKOa5AKjS/eYRldxUCbiMMGTgKpYVG0ZzyT5xBMbXNPKxd7XZdLr389VNtWGeMTdWLKW8sHKlq7ePP65Zeq5eXvnEGztV+jZazdGv7UYoD9XRNzaFIZaN3k1tf1B8ABpvmSvvbdFwf36d5bJHyOXh0SaqsnrJWzoiEEFeVF02MNsrQlZp4mxAEQqOdrf/ZWl/pr51fynxtRtvxJv2H297JRzZStO0xzIUKkqJMPK7C2++6gBgIl1AVqUV2IIqU2t4Zytj+AftjRjzQjHQwYzbtbIwqyiYojqNAjj97n5Q5Ol3/8nRz/99JXV9BukKOQ/FsTFlOeUvikix5ZZ3FzUU4ahCRZM3SJdElE3RNj8ZB1l4gJgCRlekCY8obybCbHqR4Cy0LwJUnJMGTDbiqwpXPqXKCyaIG5vjxBrRpFRcGxGRnvYq8zbzMnXqHNXuCuGLvdppDg2NAmD2J1JgvLFfyJXq1qurRM4C6pl6TP+s80BXcjjXMgxFHGzVa4NNVLnRCaVMtF3KgRKAGGwOdwZwyiim3oWOsuspJImKRtS91KRpqzJ4jRItV8fiUzWfNshj2RRa5R1U7yT4YvlLQ3gMiKHKTqP0NpZajpLWbFYw2CiyGUhMbCImlaI91Gmve4zCE5Kq4l181tqlag9X0zScDawEDMpj1FU4FlRNQEUEKih4nDJX+75hF0XZbJSCgjwh1gIB6wAAMSdOAGUZtJz0mHyU5ze/V4sLs2YLogyV1fA9WFKDt8oX+fBXfrH6Zu5RUQdZWWqqLdt7TGwxuOZqHXvLZo/Q5hhSVWIiHe0+O6cauUbJaYelePChi7mdu9jMAktGy6W35rmovpnAlMCQnEWhKgaWddRpsxiFdFSqC0BB6nl+vfY3xa4j9UyL5vwLC1p3UMbKc8UDNVy0VVQhAKu7G1FLKaIDSmbMIH+NP4WJOHW2oIraxzC5OCVv1ssbe6HsduIdhE2cPKGw0Y3AlJk4gO6q4Xr96NDPA4szyktQEobbtMJ52Q6Cg61xEgNc9ZfmIQc6SWXSir2B0ewMXx7KTnkaFn5FTKA9LWvcyrfUXlfYKeoH9Yb6t5a658b0W7Ky5yQ/el585SbBKziRltPWDkrbhdY6aEXZCb7rnUZVinYdEoXSobdVIAl93xOBOC2W84fv3N7lfr3ekabtZpc601G9JWKFTibTBw+/wkmm3N0+nHYTAJivFqenpyfnP56tVrP5fNf3i+ksKR5/8fjo6OjxkAE8ffrpdrdbzudZ8vnlOivtsvS5P1wdHb/4AsDjLx5vL9ecJpdD3+ctmJPyfLGwzXR2cQHgnfv3AJrP58TzaTfd9Xm93b08PZ8tVmcX6247AJgJr08vFLJVDJKHoZ9MJw8e3j89PSWRo+UKwJ98+clm/XQymS5my44ThHIWIhHtbbVXy9WzJ8//39/9/S+ffZEHmc3SZreepsWkm4Cw2/UAEg/cVM3TqCHBBClV1tSKEVNryw2S32wIBazKsJ0Rj+RMIRjWixuBpORdbggCHMmfTKcyDFnEuY1XoiIRSV2CDmIF+phYLULBDQ2TedIdsubN5vLLx/18NWdgt92p6np9CeDi/Jw5GTO4uDibzabb7W673XXdpGM+OTmx/fDJo08w4SfHx6Q0SR1DU6LtZT9bTmu4qAJKoppIQKzAycnL995/R4XPzi4AEBIzkzKBtv12sVoIod/2s+Wk6xhASrxZb9J04ug8Y5RFfATsaEg69nckSwmiF1cFftgY9FvgcmxGax6PfdimXN2a7V2YrUW9q3lGQ5q0PRTJkIqMue+lps3i21ssbglXLgsBhyisTBUGo73H1Uf6+IvA+Lr2KhRxvy8+K1S63UC1fgTe7kk/WbOleMWDm05cM3vQoMEh8McDy/YZHeNgOlesaWgWweq5xT0GGpdcQzE3jTsIoS7KSAagvW1AZayjx9exoA0Qq6OPF2npo4yf24zjpv1Vaa+CIP9UStC0F/+lBHBv2l+cdk3E5V/KdvpP/sn1n3/3+s9v2k27aTftz6g9++/fAov81V/8c+jJTbtpN+2m3bSbdtNu2k27aTftL3f74I9+z/7/H7ojN+2mjdprK2uDGmtbLUehI0NO4ynQVpy53nzm/iNuFRrdOnb8s7bvbzH+CiCwSCZSTklVNQ9m+OxSJ4NwxIG8Cou0dvbd/+HOL/6CAqoiKhauAB4bxYqLRZi7KMxbbZCmXWNp0ibdxDOmuVekmgsNEYmoqkApJSY3qSmBUuJhkDpud5mpHiHmy8PMFA42BJQytQRY+LUX3QUY3Jrpy4CKI4BVrA5nAK9dIBh7J5L7Gg45N6sdBkBzybCwx5qRDJaYklMahgHhShOzGZPozpFeFocAj2nN1UBORAQWUTR5McXrVGhzDcTcY809DFb20osvD8Nge47U09KxO/1UY+wopIqQUgdk8byWSj5TBb7Xdlav+NTsN/fsac5HMYsKqiPWaKkarzgNd6bWOhr/+sQWH6YYQf0tAoxUYdZdN6hqOAsQQVQSJSKy0DbmJHkYhr701mYSUNGsw5B4EivddLj6MYx8qapPUhbixvHNl90N23XY1SnAMyAWk7dRJfO9Ncdg94uwcGMFzBfMtqP9kIZi1efUGfKIJPNtaIie+V6p96ghRnHc9xwhWpoXng7my2nR2lAWBZ6/BRb54Fd+IVti2ciQaLkSoSQqBGJPOFjdwuwlzExShtnOo++PCP5hOyZ2GtXd9RTmV+gD1+QeOUSgIQ8gD6lOCcwsFnYuGSBRDv+BMnwC0A89Mw8yMBERq2o3mbZbt3gnQVWgEE3Jo8gRP8idQKl6yJlvIUWtMFUJkmEHnVMqQZQi6t6aYjTOXYNTSrS31kZFiRTKXhhGJPwEyXKotq4RRjcQ7jPNBqsUwS8Kt3HVZt9Q13V9v1MVpkRRp0gVKSUVEYDEkn2UqeXifq7hL08+NcQEjdpddrX4dcKcCh1oSF2485XSRe7KESwEXuGb3fHCaI0XoyiHpWY5MW/yWKVxGznZxOvHZOtqa3yq4QxXKJiFEXx
zZWo8XTxLXTkZPtDromapOajXrp6/dTwe70w4vVj3mMmkG3Pf9giA1/KFt22KlGg6nfb9IJr7oT86unX68vPt+pKZnY4rESeCJuaswomH7ebp8ZcdD//Jd749nTOAXb85vHObJ9Mfffrpg9u3VwcHz58fzyaTg+UqTSaXux5Aj8lkPhWm1cGUQbssXTe93O5kSrScA8B00s1mKXUdJ+n7Xc7T6YIYwugH7WZzAPPVaj6f93kYdv0uI3HXzeYHt26vd8N2GF5ergEc3T6S3HepSyBOk36Xken4ycnJ+Um/G9YXGwC5F1IatsPZ5pyR5svp5WYN0G7b84QB7C7ODm4vfvobP/3BV9799Mc//vTTT2k7I+WMnkCz+QSA9IO7gButK4eRO5JMpT6YuR6LeA4bbc7DmDcb7bGIAXN1Dye/4qPXbrTG1c9/OOGzoyTFo8vrNIOYIDoMPREl7sypmwr9pA7AcrY8PT+9d+/+y9PjfsgHR4fQNJvONpv1y5cvAcxmM1Xk3HddQqLddpv7nLOABkZ6+vQpgI8PPt5st/PFfLPbrDfrjllEdrttSrNBBMgAdCACMwupSCZmooQnnz+fTrvZdMXU2YzM57Pt8Hw2naUu9UMP0izoe13evw1gMuk2tHFpNgQCIiVmlSL577tJl+lrA2Prl/XkoZ1gat0fdXRTSbrdlDbzB3jCn1GW3TjlROX9Y7c7cCSmbvvVel+2/WhFuxHJG8mSzfeqHi//Khfr5kmN71x0o2FSb99KSER50l5cyHXtT4XGvUVrGUiI1G933xW2Ux0Y45NmujyihapgXRZJW0ZnN5TcAmWTFGneY7G58O8qE2gRf5zhxFv8PZbrybi6P0sVnnTbVaFQpjT4HOuVod60v7LtJ/JnfNWVj771MwW4vEEwb9qfaXs1HAnKeYDHbBIxuRYjpgzD9CIuwSxUg0m1RmEEiQ5d33ArVWVOWdU0Cr9sjKV4+DBAJEH2A+xQVzoFysyqkvMAgJkiuYaLFZz4bfwfX/z6b9z6hZ83QdHEvugHwcsaZBCDHKZUiIROyOL6mAlCKi6f9P3WIQDymBITg0WUIydZjBWkJAoZMhNpdrAtTVgjNw0A1SF1HcFSyCElFsEwDDOeRU4csRJMWY04AAAgAElEQVTCpsAzpSwZnrnOkRorfgtAQZxIREQiJagqW7GdUksudEULMiVNpOxlanUAvIiteGGTwhQNTWBV7fstJxYR8thtVQgD/z97b9ZrW3adh31jzLnW3vt0t7/VUtWJjXopUgKlQRIYCfLqR/sfBCKLCiAmP0Kx4oik/RIgD/kDCZQGcJDEgA0bjgQplm32LLLaW7e/p9vdWmuOkYcxx1xz7XPqVpGWBAk8k8V7ztl7NbMdzTe6nK8tNkkGu1esUIeIJGXMmhiT9NbhQQYCMUWIBqY+dbYUVo0HABGnlEIIKWmghoiGYQumUsO38domqippgE+pK5SOoRiKRQQgJc3qNpN1zwYFsd1vdYRHyK38ME3FhQwrtpLBhYtSJTMHg9UEYrUsXAhmJvXAcFs+0QQiDozcGcfEHfVW9ULz5f3eJYDIgECPax8RUDJAOINQPlEC5KIdRZbOYVTEgUmkSnUHKMTPK1TgeRuzyMU5yj+LTyJq5Y8MHyUOmhA45leIEpspQYg4xJiSlUyRPFgCG2yqRmEka4KumBB4SH2eAhE7fnYsmGKZUpFEBLXqPZIPhk1yJU4angXTPBVq4Brli63MLzEHUagMook5BA79MBiAG5kdrWBRPPz9T69d89J/819t+m0IMTatAn3fA4iRNan1B1bZRkVFYowpbwBYMQSiQJBB0qyd53UjGIV0JI0k5fLcooktsaYgcIADWEzMMajPVRqGrHm6LD6k3hAe4iBIqkPeUsQAhiEpQGBitpkaIGZJUSm7OpQjqAQRg5l9qv2wCDQoAGWSgCAqzAQEaMZGbV0klSchclCI0yWklJhzASgvUaUp9TE2yct2EQIMbSSyDZw8Zp8plHSPwzDY5oSfk5wZGGDO8yMqgMDSGRARk8hQQuQJZLW7VVOSpBKYmJi95jkAECsx5VQYxn0B0SSSVIfYtABsBQNF5OwKlIakamk2yS1hcBSSRIZhSIE5xigpV1G31wWiJElBFILtUkP8WG2TZL2spBowS4Crg4RSA4ScXJYAxsLijUpLynvE7Uh5UjhPb8FuyuoTgokCNudMgQOr8bFk14cduMfsYzTS/GwIK+Vl4PvYqRsRStRb6Vv+npHljwpe1qJBSvVis5kZZ9daja1++NVTAwllUqppzC0Dz5diY5AEGRAjd116/OjZ6fn5ycnZYrZveRPyKZBEpAmSUlqerI77dYw8v76fQP0wANisV6mXELb788XDh4/O33uPCN1mdfByww2HwADu7l/fxuHZ6XGLsADNhPo+Sb/h2cysgC2aNs42qdeUhiEN6/XT5aohPpjPOEk7mwNYbTsQkcphM+tUj8+XTWg1ybBeDV1vxDOdb0PThMAMSDe03OzvHyy7Zdenbd/3MgDo+g5oIs9ns8V2u0kgDrEbhhCjGS3aqE+fPHv05H5o+Qc/eCcNMm/noWEI98OgYmJhkGouTTgMTEySKEMGMQZREU0WC90nDSb0GsfJGA0REwWFqIgQMwcW0SRinWHHycq77I5U5ccgsKNxqsAwGAMxeuu8X5JQkoGaNqzXq3bWxBCTJGJar5c37lwDsFjsr+P56eny9MnJ6196Q0U5xob58dlZExsbUdf3HEOYx36zka5PSXXoZ4uw7VZf+qXPA/gv/pO/9U//xf/79MHTJHJyfE4USUNASJq4z2JwYO6HjkMcNgORbjZ908T5LDz46OHB4X47nwMgWsRAgSmloZkFFchALbMO0m0UAKmkoDxQjEElDWlo2jYob7bbvb29YdgA6LbCFELkGEPfD8wkouBCqagcClsIsTpkhssQwZLb7zS65FeTS9z8li1xgX1d/Gotp1TFZfUpslwHxmYF52I9kawmFIx6VJFGqZFAYLBXCKz1gnyJYuQfBdbyR4+PGWUg00bGJ6G+llx2U885OyK5poiN6lihX1P1rPqCwxggXVG7fGRQKO3YNQBQLVm8y08CUDHX8pb8eOYqCWOBTbMG6ib6sRd58ngk9dV012vtbKxkgEqaS1kWQT7rvyIUiMymSYCyz5UClmAr82wzvrpuq+NQaDK1g0jRsaq9ZJsjEJs4mqC2z0MeJCcQxFNUkltBpFh5r9rPUvu3r5R9FYV91f46tOfAkSAKk/Il+d8JAKPOH0fDonty5TZxNnBwRN1HQws0crEVlc/Zyyd9PdEUAFNxiYjo5Bu7+SI/qZ1845vXvvp2MTvV5kAm8nRDPhotLkvZuFv7lmQ2kWR01zDsCEUGcHDHlRICEwjMsEyLVkRb1Kr2uvENMj4T4mVtRAavqskpmS07EEE0kdcXKh45Y44rNoySLFW5rUL2gJvKVEVx0ly5uyy3c3GF1UkwuCOPmTlLiZAKtrIlcpR19IozLYBdgCeFOHAgSkzMEDCH7EZEo7oIR+JGb8I82rK9xg
qxtn1gfs9l+9dgXA1WuX3/TQA+NhXQ2qclA+fOlS27Y7+wf7k6kYs7/XABiurHWtY0dg0zkGk1gBC7OTYG4G4E0RYJ/tkQyzE2FjrLHWuc6vqbXlfDE3ZDzlM9ZKJxGSzk5E2PZRfCzKqrAVAGON49ZYNG3Ttm01rs+dO317e6coF0LSNA0AYwsBqrJgx2VZGGPLwj774nNrq6tlVfod2DknIoumKQ1N5zNrjQgvFvO6qv3mdNzVVTmbw3FnrOmaZm11c7o/Pbd19uatnd3dAwAlFXuzg/22Mxa0Uq6gG67UAImzpjQHBwcAhitDZm6axtNra8iYwpD1HmvGFgCMMV3XkoFzXWEts3OOu9Z1LQf+CFNWRSGu66SsR9dv7D78yAPPPffMeLjy/JdeePt/9h4A+7vbe7PpxrmN97z3+37t139zfX08Itre2R6Px7u7Bx13AAaDgYgQLLvOi3hLgko8TX1+eoQuSO/i7PckF0nwW0wOzJRL3Xdunjv1PzsinxyRWJCk79cRemJBMSAfZ84qA+0KU9P7JtDPwHQSLezJg5R+j6wiVWQL0SwxA2Ym2H+dWupDTqf7WZDj59lS31F8P67d+Vo5WvkyvYSWb4wIcCbX5ld4h4bMFuVZTUxoDAWxM0Fd5Rvl1b182FB+L5T2JYLce5I58m9o+0+Lx90d97xT3+7R7fEuDpInKORfp3YX78ge383d92N45RL1J1VthHufIupFgNxb7ZrVD30wIBXREhZ5ZxAKAqmmiBL45wdnEpIsviANRtCzHuWUW0DWBK0ixk8ouTfGxCkQLUrh77KmUJDWq7qq4qqWdeqjTwl4++OffN1RA7j9s58EsPWxD6b5T65ukfNHxwtFh8OFBvAuGBQwFLJCyOM5JCqH/ep40REEQChcGJwajabrJxaXwhX8HgiWSopJrbPJDe4fFKEC/b+PgIa398WrqB9HSU35sWR7Llt1+HieTKxTe7nkUyh68dLUZk9cllFyqcgLEb6WrgR/1fxS6t+zJKRR77egr5AarjPJD2GRXle4isPJe5+tZHZZ/8r8AkryVfrHf6uh38m/KKjbsexv7znpV8nkxWOOsPbRR7T7rvUfpyc/6/PRufAUoidMJ3wiIWfXf+pe0yb4tvWxD1JV+d9d56D+xUrE0sp41CAcAhGBGEPMEHBhk9sd1G9C1BfWiqC/9RLKETDCZSe3whSOORS3NgHSIkRrRbCz+Ann/voix4f9K0xGzuBlZI0GNyHDu0APNrQWDPobK+MK2cfpfC6pbklpAACwY7/BcmArFlXSN0DSXJHuVmEWIi3VHShEpMlxgQK+SOTjbRGjMAGIsJBN9DMoaB47IgaTklfm6AUICXGZUJuOz7AhEHRdC1XVjAnO0v59xi6fyfBSQ5kbCXKOFYu5kHIWDtXLOOogWb/DPLBoSbFIVJT8RkQuebACBLD35EobTaBVyx3HWuHZ/QgMMhwwj+CqBYDiTtF9EXeASf1KriuGKNkBvRqWyJVfu0Qt4zX+2VZNTuJXX3o3Htf8LtdpUAIt+p1eFSicwtdxB/q4eInLlNZHZ1Nnxw8pcRtRIoDgAE6i9cXCq1lYpGkaCBORsbSxeWprfYPaGYAvvPjy5ioNqz17euWgmUyb6dOf+/1isLqzf/Ps+tn96zcBvHrlqgzGuweL9XMPMtF9911oZF4PB2srYwvXzVoAo/Go6RavffmVcTW4cO5sZYvbt28cTCYLkZcuXx5WAwDzzk1mzXzRnhmu3Nq+tbd3uDVa+8Kzz3YdO8cA2tZ1jglUleVhd1gNhsYWQcgg0yxaAEVZDUalY2OM2TvYWx9WTdvWZbmxtrq1dWoxnwPY2dsGUBaV65gXLYDt/T1jabg64Buyvr4OYGenEdiiqIwpyqqE2L3dye6tGxfOrFoqAGzv7DzxlresrqwL2HXd+urqwWQyW3RVNdw/7AaDMYCudewEWmcGNpzzgkovwPmlY3F+yZwTMp40GW+NDXE2EGb2zumGQr0oY4i1qHsA8iM8EaTk4Js2b2cAyqqwhR0OBk5aY+DYzdvF1tbWq5dfbVv2AHtVViCqB+X+7qRzi8Vi4To7GAyKohbuFosGwHy2EJb5YmFru38wIQBGpvPpfWdPkx0AmDTz2dxfxUVZDsrKwLzw5RevXL08m8ze9tZ3+g1tiDvXErCzvQ1D3iY1O5wYlItuCmC0OlpbXy2qiqVh5wAiWOdkNm+IqDAWQFkW3vfcFtZxx05IDMFaWxoTzHWFNdy0zCiHo+FgfTzqnnjb265eufzrv/YrDz38JgCvvvjydHv7hx//YWvNxpnNQV1MdqbD4WA+n7VdE7zIicUbeGx5nLAWmxpcSA92ol49OasnlOXEWPk7AptFOsbxmp5xJcosvdwyalZXPnOUBah8Cxyt13LHFhHJZUts4sl53zL6Gol3nIc0O9l8kJJBZe0iqeZPoOpxbuNQlwQBZX+9cUViKSEEG3lBttCrI33K4Drp1xIEoNJXevzS895oy4VQfYosf3LkFRHiTYIbEaBF+LIdpYzFe3smQFK3qiw9F+n54YslIV87d4JFnrQ31v7Kplxc6lhEJP/Kdvikfe3tjnBkLqMn/qaFcY+51svgntHlhpw+/b4nLPKpJ3vYBvU4H0nKnZFEelK1H4l2Z448ST06hoGHr4JKRV456A/AB6pr1G7m/CmqIIcOeid6Q2KYO5VAREBbH/3g9sfvNVZ0+2c/BeDUv/wgMkOoj8VQnCCzvwbJIp92db+i5UwiqdvIEUlC4nowPoiSDNTHJ+lfym2JNAaNEOHnPlyRuLIkBVTnS1cjrbMIKPsrOsiFsIUgG0mCurN9GBbapEVG0gWhb8kch0JfQliodjyfhaWo5/CcJAGGrGsicUKyK6U/0dqFpTiO3tEg5Ki6yhXHi1JLIuDxbak/S48kLB1MnbBQajecpSArpz6HeynA3r09CJULQ9HLozIh5RdH8TwKZ/mIlujM8eMkyi/MhPAof9ONN4JFXvg3H+u6zpF4IIbgK78beD/F3KdX92FwSmPuuCNAqPBuhNkeoLD1grwe/9eZzhq7BHAvTWEIawXSRulf4C0w/qW+B6RRhERWjRAsPg1oCCS36tgLo5qTH1Sw10uGKHlHQe8MriYKE5KmdZEKJRqkT9YpW1ZMAmFRSwghupjFedO7Jcn5/mlKiQF/VIUjj1CtLv3sb3W/74Lni/ScPjzXsB6cSOsVN3oYtIQMc1ELAAgoi9IvkyZw9OkeYtbOOGzEkSSkWPMJxq0sJsXXKYsR777qvbN7E6SX6JRnyS79cYXkELXSLwDBDzefmtA/TaXQb3EvgCJmHpYorJzH7CTwPepRGooZHNRTMxgwY7fS9vE3GUuRYOSLmP++nBy3b2lDtvH6xALBDOb7Ex23M2U4BirqGKRHB0L+R9Gkddn8UtTec7SC0uPyDkrsHuuBIna8u7e/d/sGgGuXv/L4o+e2xg13e0+84x1ffO5zV67c+L4f/O4zpy4cHk4uX70GoGsXFx940333P15W9ZXrl4dlefr+82RsO11ceen
agyM9J8PwwB4TEcCrwzCDCWuY1T05uc+ffyes0kCeOLP3X7t05HnkgbnG04HjccjHzT1qxSKPAuMQoXvkSa5idjlLnv91qt26QWkPzJamAcwyT6afAfjjAYzdfLg0Sx0jZdR8V8dT6/Gi6FW38idzlSjssmmaePOzjfjGiwuBDHOD2KBOG1AiPlP1Wc1CxmzBPKTfikpRmBocClCKczubtzmaG7nQM+fKiC/8WP6LNkmC2SFpL6xOJDmeeOTV1dM4+HeNW55LUDxfs+rQcgUWUxinOGQ8k4K0FeJ2ScT8Knx/DzTRtKeKE0m87xG56fc5PHa1QTlZUgrXPfBzQY+Pl7T0oBzQkQjbzBPaWJUeFxCsRmewJVI+Kg9+Z1IRiXQDKZIRjKpSBLzVlWU0dACqEXSqyhpiR4vRbnwMg1In/NaYCIB6ilQ8D0LOG1Q3iKKmkL9AQo0dmPbbbZcfWNKlSaPDUBgZp9sLug6aYOIcggb46zAiijlA68xWlR+6PI2zj53I6Um/E4fQlDx4kIiygMZzU472k3lQkudDfYIKkw7lGPUw6LSYihiDzJhTvzPY3qWrUc9vW1P1fXDQGQqW8cMxojDk9rhtcZo4ojaXVF5HLaC3ocFQVR2SqMxKdDMQnP1FTEZgVBCkiAeBS8zhabvgqChjh30Jk+IRi6WL+a8sJQ2VXTCj1MhmmBuAqIjamlUi5XDsAk0n5QC1BAaLE5Pzi7u3J4x264TGaQy1XQyf3qxvFy2ANygIONZtWHbu6GxVkgnsxnAbbcGcHxy5npnKusgtqqbyWRfZN2uD/f3lquFJQLQO73s2oO9w34Q16/btt3fvzGpaLFaeCfBYemOjg6nE3t+vlicnjfWuN5Np5Onj09MxbWtARDL4rzvWyErNw4P3bCCSNcunxw/PTw8WjkPK9QOA6tdXi4vuvXhraOubwHu2sGYgY0BYK0hImOJ1SfTFAeFMRcXFzePJgCev3tnPp9eLPpuGAbnRGTQYbVe+YNWPIw0DD2zJahzrjIVASohKXYpsvnFzZ58h1kSv3WCtxaBiXvnCGQNDLPEYBS/HlxMiUDB7hKEEsPcu8EbdRQqcM65pmkODw/n+7M3vv6GODBzM6krawEslytbVZNp/ZXf/cbs8MFf/4//o+ls7x/9yq/86j/+Rz/48Y8wTwF07VrFPXz8aD63R/sHho0Ijvb3T2azRacAmsYwm7brVYXZEJMxRkRF9Pzy4u7tuwCOjg6NgM3k9PTi+Pi4mdj1EkPvFou2H9zirANw8ui+E5lMG6CH0bphhhEdmI0xPLgeAHkuyeTESYe6qcnCOaeqvkfM3LZtbavbN55//fV/9i9+7xvW0ksvfbiqLJOZT/cArNatqawxdnDOiVus1t/6g2998KUPKXTSNH5dTZs9Q3a1XHVrJ8IxYNUPvyTjFWnEZYLeEnh2IiFJ0s9+lUk0zds++k5iN2Xw1XtfuFRPItHvJo/uLqXgW3xmowGZEtL48k7RaVMWLAll8QF/YWx8B8ZaEbAFpe5o7o6SBn5rMHUDLtxu8sb1UeMQUrLyRiOycL7BDP+Yyu7ZKD5HGQSmXY+NX0lCII2uxPc2O1J+PfipAvCJJt5rD67Ldbku1+V7qrxb7sh/ifJesMiDUYz2/3/Ljc99WgWnv/A+QMmnX/glALde+/T/a426Ltfle7c8+rl3Jy8b5e7PflZ1KwTmulyX63Jdrst1uS7X5bpcl+tyXa7Ldbku/2rL1XBk6ccS3MPH7hLZaBYV/OCAQQSc/8K7Z0Xc+8yr3v2lNFKmqqIPpCang7HlkaAg1hjWOXLLiRmtYsBnOtiU/OGJ2e1AAQ4hfcH9xEfojCyANM6+V5joFCDD0ccohmDFQx+zh0/KUeXjBY1sObeACDd+5tPEOP7C+wYlb772U2HMmEAQcSmmMjnERJ+MODZpukbWOwUwOBfdopiCQ0t5TMnI38VfyX6ReRpCvFz2FEtnZsSo4eBpU3SnWGV59Cn811u6xybH5ORVeLFEL8JyxYTaS+PsDliKkHxAUb48tjn6T4Z6YipFSo4zBMTsN1vf2GEW9p4wyAnhiqfHXruhdbrLmjxuY/YWDa9lF4DtTufrOUBy06RcWtPH/mw7O6elt1B6fTsv0Y7Xk+csxZimDWs9FETfBRZ55/OfiQ4u8YMhc533cFE2hogB2Ui1WWTCjW4WKdQ6+1YgZ67Nz6d5i7sjEY/kopCGFESAtbWPWRbnNHxh5JUQnAHiNiIfD1iOfJycctrz6MZFCwrOO0US3BhUG527NXcgfaH4khZ1eqeitOxL15Tsclv43vq+JR/x4CQdCELOwOC/qGF0Q+5DKcc0jDcROxmFN419MDbXUBhVKTxfKK1+JSVignj/+Ryc5atw/uQKil6G6VyTkFwiVBL9UHSD7oTjq5Njdaw7kMJAnPOGDg6uRMzwZ5hLXknlns9dS7QoxRWqD6SMzsCJtABQBolPduLDTCG0IxcbhezAgcAaZnEAOe8BJgIIyIKINR5MpYmeZ+ZAPqdidneFUIrXThu/2DlgoypceAUXhDeQ+MyQ48CkXVLuMSqYVxyY7CWcRzBwTBTTTiGv3PiMmuAsnRpcOsxHMSXWmPZQSmya4/eLP0IQQpGpAACYqZcBpF3XqToYc//x46PDI6f769axl+Ikp2cZ1BFYFZWx58u264bJfALg8dOnN2/eIVNVPCFbg9hWdm6m89l0uVz486P39+aLZVs19TC0lWC9Wh0c3lLC2dnpcrkCMD84aNtOKviIwr7v2Yobuq5zLGobAuAct6uOma0109nk6dPTmvjy8vzx40dE5MgA6LWv6xqMi9MLA/AdZmC2N58f7vX9EFNk+FwTRGRV0dRNJ67t1vP5fBg6AMvVen8+EdXaVrPZ7O0H9+fzIydORYyx7BRhw0Il0A6fedcYG44Gi3MJKDGFjAw+r6dqyQgJzIb9edjic+eKivMyYKT5XuL074rPVaD1pDJKahgAsyGTJEUh0P7BwfGTY2ubqp76Zd7UtSq6bn3nuTsPHz/6lX/4Px3uH73xja994gc/0Xfnq9UKwGQy7bqVqBq2l8uL3/n9r7mvfGV/Pnn46OnB0W0A84MbTT1pJlOiumoWbMwwuPls2tTNMAxN3QBwg3RdP6kmTT3Zm++fDSenJ+ez+ZyYjm7u7x/NAMwme+u2dyJd3zs3WNsMg1MIlLpuOD+/8EPa98pqZSBbc8w9AITMLhBRgJh5ubp8/oV7dWX7frG4vCQiVbNYrgDwwPuHB7aq2q6zbPb395tm3lTN6enxarke+h6AMXVjJyICJQTnV0q7VDMV0MDaIr+M+zTJdGNpI6+BsWCZypYQmB4q76RKy/cK7kNJxsJI1tuIPNCNmuIXtHxiR0m0R9Ont9vrP11WouWPqDhtvpVkt/SRTKK36r+6iRgN+IjRI7Y9n+ezweN2xmdEmWvk3r5jqp5ZNiq+4vXAmjfE6BwAUtZSSEfPmKxyDMu1SHnSizcSN9d4kCCKoAEdqx7X5bpclz/R8o//5r/9J92E3eUnfv4f/Ek3YXd
5pndkFiqywJ9KDN/b5AEEnL0HLHL/M5/ZUM/zZxHV4aCrSqnKJtk9CxcU9ZvcSvWnnwYe5aOGY5VJ3Q16B4WsH1shDEAMzoy9C81IOgTBY3bQEOwclM8kffj6ooLq9RX1mo3EIfaNIGYiUnU3X/v0+0IkARx/4ZdvvPaTURbxwrAkDHI0tur7FIHTgHfFN4kAsJrY7KIHmo6gpdDwEBscwkF3xLoVgTAYxcIlVRgoeLjmGS2kFiqujOSW/Msnq8vCSgmk5CubL2Y9tKh3LAL4ud4BRvp426htR8kyLmifxG2zIztKbkgE3zc+RNsf13HcIkIrYh45vzU29ypKAOLKhgDF9tq6mZXmVOfGR/KsjiiC5ic2Go3tO1S8nwCZop7H/+X7OP3Jlzuf/0z6Zp6YALjDB+YRiAwnQTx+MIieMbMTwsynMJxyyaaeB/Ch1DYKDQM5OUQahhTfObjeN4qZI6AfcyhoSJOV8qMrKRMLhlSV35MZss67P02IXyrqcxYSpex6iqh7K0KSOwAgjsQCcZGmySjPVh6HYRclwnPJnBAbsqkDqKebRAhpGTPuCoT0jEzOB2/7nqWv+cycIx6RQ2fjQGxOVIbU84JO5J3j7HoIKr3oF0xRT6450dSdW0MzKpuaHtGrcJ3SAAUrWtzQPiSRws5OIdf++3F0I1oolNscExwraUzO7AFfKloeoNMSQ1Bsk6PA7FKCxDDvog4AhJmNYXLdwDXHihH3TVK4QMT+jGFKM5W5a6lwFkwpZ/X07CYCe4HqBpaWXwJUVCLajXCkj6ZGj7pWwAlx48YGUFGnT7JSroNiAWwR+XwKaoF3BlFERuM6Jvl+bXDmJ/4vNmCDYRjqyljLy/XycnE5m06WbcsUoFoismwcBIzKNiQyuM6wmc73nh6fTprbAObTg5OTM5BZDKu2dzBm3Xeu7y5XSwCXyyUAJTaGz58eTw+POlkdTCfnZyeTaX3zxq2nD58AGIZ+eXnJtjp9ejKdTKb7e8cPH3uDxOq8e/6DzwN49M6pMETEkHZ9d7i/p30P0v3ZTIa+njYAVm0vqpbZGGrX68cPn964decDL31AYB4/eexBt9rMjDWuH0TVmqrtWts0cLRar6ubPmthLwpRWazXs9ns7QcPvu/DB/v7B8cny27oyFgA7ABVUjAbonDERDy4SzfmwU8BM4lzaUOmTDAEqm0lqt6m6lkDEfmUBT5Ndng2YvsKda5nNj4WXv3R88xt1z05fvLk+FG77vuum04nhnkYegAgVoWxs/V6NZ3M3vzm/cdPf++5OzcW7pKIK1sDcG6w1h4cHBwd7T988uhLv/XPh6Hf35u6zt24dQnANg/6bugH9SnBK6ahl2FwddXYxsa1r6vlutk/HAZZLVcX54uqrtWRtfYv/oW/8H/8778KoOse7R/sN7NJP7RVY0EDSKrKSu8uF2ca0rBDxAmcsaZpGgDMYpih6s0P1lgivnXrjsDdvHVwuL9f12y5uv/Og7bt3n7wAMCd524fsq0rBqjrelNXe3v7qjr0wzD0bdsDMIQOgzU28NfSFFBS0mTLhFcekh0x7bTEKTJv05I251WRDM87KPsuYp9WUylzxqSQO0HNTGvi47GWLNFedRoOjf8aiS87qgSKYUqtLKjhSJCOTBHjIdkahSzrZMFPxz3YbMVOtHTzmZGUn2q7CjUuUmcEKWeL2G+Xjaj6KGldPavProvyAMQ/tBCVcu1RjSpkokJOVuTDFLA1yf4osEKkDP8Ew/rVYv51uS7X5bp8z5ar4ciggo8xCIzk6/HzQW96TzHaP/3ZMRiR6fNVZWQGQta0Srq/3S4NWELAjrh0Yoy9jOm9NOIPiGpe1PaSxkJxLAoVVVUyR/GaHRH55PcJE0B0e4hHF0QWUjo25bSAt157BfHgmvdYTr7w9wDceO3TvibAJOA1+sQU40fZgyZdVIRc0fFk7eRik5SkcuTi6OYRHYkqVDo0XsHHN71ik0owrgkj7l4+mKSfDbkjGcJ3i4tZ/90hZo1rCphAToEZ7qmmRKDRIW0krZSI9LPFA01CYflcbMq2PFUK3bmR6XOhOcWIUdKoRtOgG5WOb1wleVGc9aJ/qRWFVEuF+E+b1eXO7hKYo9KviECXf+5fBogcC+hZ6Qh7xG9yVaiKSOFaqqLFGTCpqXl8N0R3Ta3NzxTajHdCCwkRqVAxIjTGZFQkHZNCRMzsv+2cIyD5fYkIwlHGmhqRMSbKHqzkcx1GQdvLw77XqQHRDTdUQsktKNlf/DHWo42n5RoMCz0DWiMVpyCtKITzRDXCwEkakw3tK5ISgMMjqsg55MNhnCONc5daE+tOyGd+TqGJp0TrVbAfJUSYmL13X1grydhDlMlp4YPoyRBzmpDYGi1JT1jjI9qqnq1wsAgqQfMZOOGREjNUIB634Y/64kSpwltE0YFLi8EoKAmJxlN9NHqs5haHRZS0KIH0vRN1ta0AqBCDDHMvLYM9qOz3FIOLSY2tLUBAAiRk+w/JWWM7Ke7LDEGCiAMgChGXthjnDFpxMWcAeINmFuRH8z9hq4yWDieyOVqOG6u+oLajQQt1a0FlEIWMtCITRUpfkZCvWUXjWUYGul6tD/fvQXnV9pfLVgYhh+l0f7E8D4d4UxBzVLxrpBhrXd9X08np2dne/hTAbD4/v1hM55PGTpq6AYjY7M/mqlpZO59OAXTOSd+dXz66+cGX9k9v9OvVxWJ982M/0Nj63osvAnj65Mnh4YGIXp6dT2ZTEbHGDP1QNXXren8SxXTSgMVJZ2x9cnZ6OK/hBgzDwyePj45uHFQNACLtnVMhnhtb2b7V0+OzprGXiyUTV1UFwA2u73oRJVInDlCRgUj7rlM0APbm+xeXS3A1qZuBqxv17O6du10H0vui6le8tVaFQkpcDQe7yygzZ56UtB/hHaCZkJ3XAinQcISWQCSs5JJy+M1N3qQUFqEx1mf47ocuHBgoMgxDM6mdtAA559jytJoCWC5XKqSm3987tLax1nz0B37AuTWT7s2m3dADaLtV168uFpd1xZNmevdOM23qtltBUDdTAMIG6qy1i8VqtVqJWnVCRMMwTPemYfkyEzGD2r7rh56IDRkRdb1r2/XF6QIAWX56fHoTR7ODqZIMgzPWOhlUte/7xXIBYDqZEjGI+qHTpT9VPmwcwxaAI9d1/ZMnJy88/9yjR4+++Y0366qqrB2Gru/XxhCA+f7++cX5rJlYWznFarXuuq6e14EAgAGIkIgjZfJUJey9IJtxMJiTkhBIvHkleCqolOKebkpmuvHXWGiLW3xbLCoEAy2e2BJ5twW5cuePBfQNYd3/VWR/qQAAIABJREFUe4UUSTSqlcpm6mY34uIeJ+Id38/8W9OGiDl6Y0cyHyisQzs7fGWbyxKR43GjE3vIvfEXxrwjVjHinhjPxujj8ZoUoSd/XCW7lW6Ii1c9P14pYyl1S24ePZc5VbaTB0PpH2uXrst1uS7X5V9JeUawdqFTJrVx7HNSFq/+Xvziu0MGhz/904kTp1C1cY1RGyy8XLISEDRqeBUu6cSlXbBoFZ
TAUfKPQawaq0se/kRRuU3HGMdeFdWOFJ6k4MSIvex5leAB3ybvnRnASSJSFQJ7sTj61KhAxKk/rNlfvPW5TwN4+n5OuTn5wi8BOHrtJ+M8kaYeRt7lRQsmKo/Qi3K2AlASr37muLmonxaKXfgjc8Us/PgnvM8i8kCGYwiJSt/BOKBAOuMAiJHLcQzLp0lR4DjhkoLG1zakrZKtj5YIxcEe21yLu7i6bHr4JZP7lvRJmxeKtuXVXui5mw0dteQZfo5RKqPRkzQSZ7WYufK18s0dH8gzHuNMtPxU2aMgHm0P3lWjWeiCpagcFtzjn3vfQCSA259/NVKsOPgUj3EPv304XgBOGBzHJZ/EHl4PqQ/KNZK2uozAcN1ab6Gm/N80R6J5iDxC6fxJr5RQL594wdMrzu6LUEBFnHdmyOjI5tRGyolieONo580aaTonjzvKy4gIAs6EemsKKSBZZY1xqRQwXVZCNrSn8ETA0dWPfqTU4R1RAZHEwysT0c4OiZpOPCWkgN+RkjPaQxr2W9klin6FJIjEDyCQG0HkESyOS90fJk4kMgB5oxXd1aiS7KAPOdS70FsCv8jgdibOOwcyOjtSGMY8vIVzRiYaWpjPPMEPHEyQtkf0ayyKd/7wyO8gQhWxhIPUwRBRKDiAr5E7q4az6CMPKpGd0MLoxR//H8fVt1w8dhxjFDZIV/wZOhn7yMTC4SgbPwVKgauPfK3LMsYiSwfFxKORmh4W2MZSDqMEBRuO+1xVJEJbRMTe5jc+EVfzNkRC3hHdqMGM6bS5XC6GoZnU5slZzyonp8f7z7+0bFsf19z3HdvKVj66WQhqbGWJ1dDicnH/yWMAwyCNaaytAEeq4sSSGZzsVY0Qr5dLAEJglblpPvnJH3y0f3j/wdur1dCvlt/38Q93bQvg4uwUipOTEybcvn37xQ984Mu//dumRlXVlu3Jk1Pfo6Ef2LrZ5LBte9qfkDGHh/svvfSB6XR+4/ZdAG+8+aYq2cqqOjAZtm+/+c7R3T0VI3GpazjumoigBGPYqasN3bp7+/ziKYDVqjs6nHeD/uHbbz//4gfPVxcgunnj1t5872J1MuMagArDeHLBves9aRWRko0mGlUIdnkCoge5ikRzn4KIjSGNolOaRu9OHpwjI1nr+84fwq6qxsbTuECHh0fWVE+7p1VdW2PWqzUAFRWnxtqmmUJ4tbys7XRvb3p2dkLkc+mg69eD6xbLC+eGaTNVkXXXEler1cpRD2D/cG+5WBnirmtd32NqrbWVsW3bNV3n0Vgnamx1uVgc2co5xwznBmI7dMM3v/XmzVs3AJxdns9n02Zatd1qMmtgiA25jtwghqnrHYD5rHJOCCyiIHRdZ4yp6wpQNzgAgwz+rJ66Md3QNpMJ+6Qghvams+XF2s9C13b7s71JPR3I9CJORATOiQxgsgAqUzvniFidwuR5ifu1FF4im0R0lBvJKiFRSbqSbUuZGFABUm9JOvlLI0EqLqbEkketKb4/YtIauE/xuyAvcc3FZbndhPTpq4hb8RAlHnGVeLsj5qOM9SjNeCN5oGzK5kyUT0UOUFQwEhHGwxD/oB0eookveKVg54euFpbLOd2o+l2G8d3LKN3PiLkHn2vVLB75W5xZSJKed8pcwd5J5SLyv8wzOntdrst1uS7fy+VKOJKZVCKt96pxUskCNdwkk5f/9XuI0f7sZ5JhMmrF4XDr7GgUvO62DEcx4iFIiIWSjZijMDWJaOwjl+oe83UvNJJXmURA0S1y/HB6Of3LwZ8oD0XyOhHvA+VPyUQUV6MOE51yNOJvia9r1A6TgkpEuP3aK1B68nfeByJz6j0l/9ZPAhxcfUp5IivwWmAUoOhgwsG6HfVgD/8ld8c0pMW7oR9j7TWCBCGMJlYYAAMAqm4k7ueKSjY7/lmsvPSOrzcDTc+QImj0n215jBKA6z9DuQ1lyzKukOQCQsALxoKbXyXRDaf8YJY1w1gRkNZIijWJYM3VXUr6fQwgD5WXksoVo7JDJU/9zStxs6LyZ/rg5gSO/AZjrYTRnt4h3xeIHRF9N+fVALj9+VfjctTo1+frpV1H2Xj8L6iRHkrws+CD70hZt1pdSN6kWx0J9vs4MnFDp56VpvxUaURiQwpCH4DMYI6nRkopxgLJOS5/n2KSXC7jC4tdmmT2YqtookoJZPHGh4QHUvZUo6RFgHLtIkrBXyVPMOXl7+uIoEsed68kajb++H1NIZR8NDbq/xd8XDwuKZ6EqiqYImQcqUmpD6AAwsK3KKYdLtUrZUChHKxTke6lASSVuCsj8VBVB0cSYWYfBxpXgI/NjawmNDbwq7jDNDrDalJIIo2jmMozeEmW6R/D+g6MsjDJxekE4uAqARRwPRB5j00/1KTwXfXPJi2dE8MvllfYIUro+u5DL720XK1Oj88AWDaGjeuFjNcMKeGnY0xTPXJNRP7AaSbeJsOjV0orJAr1NS3j0PMtaoOCJcXEIxuPbfCWNMvlM1q8pZqGCED2DBopgQmvSr9jQjQvHUTBJ/HS3CdRIfVOLp4LxegBwiDdtKnrxnznwf29vUMmffOtt+5+5AcnVaO68I1gZuecirC1XdcP6hpbrbv2cN5UdQPg5PjUkGnXLVQNU2MtmqmKI+K+HyZNA2BoV7NZ0/XtC8/dffOb37azyfLpw6/97u/MD/a//q03AGDdDeomTdN33eXi4sHj+8fvHFdk+3VnuRoGB0DUKUxVNcvFetGYi6m9d+vGR17+yMXpORkz3dsDcLC373RY99pUNbMlYnEOxHt7h2dnZyIAUNdNVVd914PJMK2Xq2reWGPIudWiA/D45Oz2raNBZTadHZ+fT5v517759fnkaL1aMnPf9wBUnDWVABTj90U1nL2eJilGySDu22JdUd6WBU6qqt5lWEShQ1qrYS+LJDs0s3EyePrExlprnBuE1RBfXl6K6N7BvrWm73tbVQCYbbfuhp4ePnoIYXW9MYsH91fW2MePuudevAtgPt93rjJ2f286Wy4WRBicHB3c6lox1odLW8uGiGtrZ5NJXVWWrXPS9926XXvTO1NwvRdIVdVdOyhgLVuy3/rmt0kcgNl06px7+vhpPasm04k4VafGmF46MvVsOgWwtzd/7N4ZXFdP5gTywaTOeX9G7woKZhp6qav5pJn36pwbun6oq0YH7O81AKaTuVNVRdt1MvSeYnd955xEkzqGQUTBTLYyTiIQrAoSKHl7YlAl4i1PIq6w4GaMUpPFMQl4mvfpJiVM71PBcPJO9lVfXcYw0pjVFUJXaayMpbTUb9za+kmbl2n8RBBXN1safbkLYXD02JZtPalS260ZSZCFSrazuaHayC8zyc9txVaJxDPLHaN/dpRCdiv+2mY5OevW+HM5EUC8VozuuC/+duGBkYL2fYhKUqXHXj7es582Gjb2E9VRhucwBrLFBq/Ldbku1+VPR7kSjiSi4FCATPIpaa9I+n74dfl33x0vO/jsZ6NAP+Lau2WF6FXiRZEEDY2E+6ysJOtnItnZ1BYVSh8QGA2hQUZRVeVo1s4Efsxxo7RaAjSRWQY5J
tygbEnMOolKsm+SjzxTJQ3hAuEZYh7xsEIiIeD25159X4gkgJP/yodv/1RwdBm5b6goRJQQ8tFRAD98axNz9Ops8OtQHZBx3TxKCXAcLYigufr08CP1LOp00aIddYBUW9LY/fMlW07S4QhbiBLae+HFNP5PrgRXyawbL1Nsg2bJFR5D2fX9Z9apfm0QBdsvUQKZ0mJ7dkkgRxL1toXm2PKtb5eN1LFwGpSxlKQmCmjlKhpVNgIvksOIl7ii//LoFJ5NKjAuT95/aDYCEBn6oUmJiV9QqPHudSiaCI2gO6kqxMeaxd6kDb9Ldo/TNS5bInl4NEvskayWSA9lGpZGDOqDNgHAVpZA4kE8ZsCRskLVRQJCu922NgAXH7mWJjQumLDi4LGpFFxaKBQh6HTnuPuObW1Byj7L0c9929QPIBwflgcwjQzBC+7BvSw4miW2EdgSiFjgSi+8cd93ahWR6BfNyGQo3PKQGSUNSVSdOMolLQ0FEwASCh5//janbVm4pERg0O+QbYR+vN002uryRMe5K4kq4L14x9ojhXmJj1OAV+PCJSYM4orvx/j8OAcZ2fRbBArVvute/tCHHz958vD+AwA8nTa2Xq1aNhqOmNC4r2IVkQQkoUKJmCIwmicm/DdMY5GZP3fWe89xocGFWYiKoSCmO/At9zgUOFVxJVEeSQAKlDB20PkQBZiYW5LCqAaIClBIWqgUCXrczQUNKRBfKLwhUD0+S35jayQZDD48uLFarY9m++eX6y+9/vv/+p/56HN37j5+cDKpJwCIWJwYW/nWGmMcSd92tq6MMfv7BwBm89ni/LKuJ6zk+p6CDEdMZhjcar2GDwwf+k77+XTi1H3yU5+SQZ98581/7Yd/RAYB8Bv/5P/cB/7av/fX/ouf/9uz+ez2zZtEKiL1BFYMGQHAYtgagulWbd8PdVWL4sbNW1VTt4Nbdy0AEZ3tzdALEU9n83bZ7h3Mjw5vuAE3bhz5YO3FRde2awXVtu67oZk0IFquljR0s9kUwNnl4rd+7/d/+BOfrKuqdzqIW65Wjx6fnp6e3n3x5cuLCwCrRUu2hvNulqyqzjn21uJowkwrlKKZwLPEKCvGRAeqVVM556DqRElJ2UssaSWEFcTGGGP8YhoGZ9iESgjOuWFwAFlbM5NzPROF9JHGArhYLfb3j5b90lDFzGQVzs2mMyiRlQj2DUQQ5wQkooMbbt24fXxxcXJ2/uL+EQBrq6qqoZhPZ0+7R61rIajruppUTmTSTAC0XTf0w97BFHAMbtcDw3Rdx7Zqqmbo1wDcMJBhfxqkE+26fr43u7w4rUzd1LPJZA+AYWtto9Cu65uJmc5nTFgtVyDa398DQEpt2wn1p+dP+37ouqGqqW6qvutdrz0EwGKxpMrduXGrqmtl1h6DG4a+d84NTgyZOFEqTpwTYyoN9h5VFWLiZFnL3qlAQaw9NrqbAng7YWGu80tlpBRsM5Kd7D+Q9rGokMn3VgvyvZiwKRjKEmsZ1R/W24YMu9XELU6bRerytQ2eTTGHZtKSEM2KJXPKtHi7Aflzu6WQ5M/4jFfLV575XLy5Lf2/myy/XdH7eOPqNiXeHX9TyBCxMRpZnsxo5kjl2TG/UfYOq1ijDBEaL1dm8L4u1+W6XJfv9fKM3JGe1fvzDalQggTew8ErZUwAzn/xPeSL/OxnvbhX+udkxxBXMMroe5LgKgJ8ljT/AAcXERIIRP1xnT6ZX/JnA1M4zk+T8wggSob9US2qqv6kUsDBgYgN5WZFwcUfMSNRnVCIBw6C2iQgGrysE5kEBXWao0YvQEhcBEAVTlUJGToIfSQQxaxEQftUCim/FMCNz71CwPHfeR8JJQGcfOGX/R+3/+arSBo3A0rGBJ8bhGT/4RXmqCdDE4AQWV2040UdW7zIBjCxiiNm9i1WZWZVhWjULDN4E2dDAe9UGJwQKJxVWkCDyVuVgOCG6p9AMa9xFL2UsyXFlCiSJowphUTkl8MNJnISkTjf+jhXCohzQESv4h2XpA2vRHpsSUPMenBwy2HqGeUhYxLGm5sAqGjU1b1cGHRHELnk4ZfszQmWTyfX+p7EkDQCiboSANYoCCWZJrQrOZfRaNzy6IVpGaEVSSUDwt7x95jKd4mLQYLfKqElFE9M1u8iRySAu5//LAj+uCUVJeu1Eq9TSjhEg6ESMdYwqMlLjNgpiMmkdoHgz0B1EVv1kmCCoSIAV6oTSCRkc+QYPhw6HSQSlysyOqQ+qBCaloJC/PS4gYiY2eeEdaLiJd9AjhFNCmm+CSq+fpe1pghqOc2YmmJ07gcTq8fWiP2BDyoqmsG+oqoQVckmHOww1t28A7YS8mHjKsk/DDCsGEASaJwABGKoc3n3A4jWJ5WByGSvbdHsXu6EYUL3whhQAAEBVU9qRiYleAe9iDQSSF2cKSZHAkuk7N0og82JFCwWNvcxEmpAOW51AABHTApK1GtPIe1keEs0BaM7hRpTMfEwhGSIzOQ0OMNGlNjPrYq6QtUBKEwC+1x1oqrKxIl/MbGoBIe7tC5dGgQF+9AAZTb+THcQVMUaIy7uDcCJQJVNWLQf//gnybr9g+aFFz4A4PjpSUeDWlWQAakoEzEZT/EjZ/RbkOMgG0LsWhoXIGKmzOxzUDpV9QsMPl8kQL6DWjiuRsuiX2TZsyZtXCjEccbHKWmgATCUID3EkIHUnri9t6ihhEwwkV17vqXJfbkogfyDiCKcHbcMEAg4ESkGN1hb+eyRlak4tLdXNzhxi8uzh+tu9vzzdyyW2st6uOzbbn0BgFlu3Ni/OFsPyo50zX3tjGEe+gEH9bypANimcqB23WtlieBIVQeS9uyiU6g1BoDh5vx8MZtNP/Thj7z4/O/e//YbTx+8OT2cTOb1naMJgEar/vTpl77862vtjw4mH/zwR24899W3/+g+Ozuo484AGMTNm7qemnYAmerk7HLdrS8WZ1VdLVftZGYAGGeGzglpXfO6XzqHWmvLOFmcz2d7RgnAul0SjCELRxAVQ3bVHdy50TS9ac8BXC6XD9/82l/+N//8v/EXf/Q3//nrw2DIiRXHM6vVcPvmLQD31w+c9Eqshrx91Ft1nILSMfUKophOThlKUuRGiFxP2ah3//SbNCztjGl6sSQGhUg085HzaQzCZBPZygACEsAQYZCemPqh73sHgEDr9cJyxV76kFp0GNxgDDOMsQzgYL5PLIcHe/P57Ctnxyr6R/cfsMKyhQwACIMDBucG6bmpqe+cmtVFe/RczSIQBmDIDNb1/SBKPblmWh8/fDKZTAco1eR6AOj74XDvUNarrnVDNxjWoV+og624XS+eu3cHQFNXVWWkF2NsVdfr9UJV2Fgo1us1gKZp2rab9hNS7ruWDKtS24qtLBmo9ADWbXs42QdwuVpAjTWNCve9VrZeDpfRINQ2tjFVLa5VA9JwprmKP77RqxIKm/Z1pgOqCske9EkJ8fuViXOy5XAj7Fom40HPJGdFU6tEHwKV6LXIxJG4J6AoqiTJtwKl7SRJnb5ZxZ3Y/GCp1GjfCeG+UFGuqihFRw7uTUaqxFE6
VURHdSDJ1lHuHVm1SiZB0b5SAJOxG4lsef/TfLbbiOxFQ1aQ04PJBj4NModYedXo359F9sSb4AWSIIKrSBy9JEZn4hw5QdyryYC3QYvjT5MmAsme4E/MI59zIBgaklk9DkJSHlKHY6bmLK0UslJIRx5YQhJsVBFSz3qNpvB2JCIlFQRmFCyEIjFLFfvYBYqsJ0ZjBI+IZ+SqvC7X5bpcl+/V8qyTtYN6iaQr+4shx1iyGJ//4i++62cOPvvZUM+V7jWRNyN9FRGIyQ5N/pIomAPm46EFLzqKlGBZZPJlvSjOvERQSSgm8E/6hhasLbDCwC5zI4NSVfAmRQExpuxAfsQiOy6ujDh3CtdKqnKp8BYaFG5+7hWFnryfhJK+PPn5vwt/SI4CyUVlXJL4oEUgmoc2KGWIC894MSellEziROGZpXn4i+rL3x58HJ3ll98tF8FVpYC1tjy3yp87OoskrGV5MfzDRDA2+PeE/gYMS7eiZVLxWfwjPBvdlQREWiaG8c8gm90lyazj0cprK++OYGUtAYntMSlU/PxVLbMmRk0LCLJUufvihL67/Xp71K/Y3Tt+jiaYiB7/3LuTkZ3l1n/6qpfCs3jJUegvPKT8F8W5QpBUJDExyeb03rpU3isIylUEjkYzRWnEKe+5IOiWKEVYGAIAokLsc875PSdeXC+VrvTW6BgPitr2qEHBABLx7jwdmqdHCyOFxsYWYnh8ibbXYdRqItaTohwpbR9xQacXp1FZQQAw85JMiQ783/mTxacSsKsIExi2YCKnuUdJywmvlYSQioGKPI9So30bOJpp4oVMtfOVrGkCULAxY4QqW0F8qxPRSANL21pFpFV+LBOaDIKM3I53rFgKPY+uG8UTEY8NqnByhVFVay1id5hUIQqIiIj7sz/8Qx/+/hefPnlyfHwK4OnpIzLECL7eJS3zp1r7WgybuOA0rkuvapc9RDl5ARAvVHfySj8T5aN+EKCCYDFjVSkHJJq5NrLzJvoTJz3uRSBaa3bzjfQ+7RrsZ70QGhMOzy52KRERed9CUUdkKmtV1ScknU4qN6eT9cXl/bc+9tGX6eL0uR/4mGkmi351/vTRH3z7mwBu3X3pcD4/eXLeNIfrvrW2MVAZhq7riGfrdgVgGIbGNsKubq04MdbC2bZbNfPJsO68ZNSSGNbn7969s9fc+4Hvv/fi3bvPP/c7v/Nl7danyxWAs/bxv/8f/vUXP/hSM7s5JVczi7qqshCdTJuhcwAM6Obtm9NZdX+5Xq3X6/X5wcFMhO7dfb5dfccwAejRDd3yoD58+viptfZocuSWq0dPjo2drJbtUAXZr6kaJ04xEOuylz0yxvBHP/yRN/7wXwA4O316/62Hv/47r6upjx8+fOH2i2+88YZRrpuZ6bq+nvo1k5wbS47I5QT4jRAcicP+KpCFOH3hSO60e6PwCABgNoAKA04AVRXxAcUEHxUTv6ygsN/r2gIizjCzMUa8YYk1WTAVxIHIMzNDdeWPQa+rrl8o+n5orTXCaIjWi6VGc2Df9+KcqvZdt7xcGudm+0fidLVaGtROBgCTpl6tVwSzXCzrauIGZWMBlsGpgI0BYC13XTcMrpk2IHIiw3pdVbWI9H3fDwMAYzoRZyvrhkFVrLUi0rcDvHkCcM55v2AmYmIJRmxSgXOushbAet1Op83e/ODo8Khd9yI6mTSuDx4DVVUDsMZK74a+I47ZJALZAuWpSlx/M9h2TJnCnEaulgUGlOzQqxI6Xjqbu7pIeJLsUxsfCU/uJM67BODxowU3KpoWiFakV/4LpElhyf7j3jE3WT22jaVFZxBloy0pN0pR70k0T/VQEbi2qfjEahJdHlFULR/OcsBGBWHiysmNU5KFuo1ebMoQZV0bj+4ouy8XcgTtvn7VG3HQS7kvP1RIBlFP1NFPj196uvRMhnVdrst1uS7fs+VKOFIDZBXVzsiHvDLp3WgAPfuFX3jXb+x/5jMo9LndZawVYpNxaVZPvdeIAyik7g3OCTp6IeTxi9eCuud1uagVlF9NDHPcTIqvxiNxESGDfJv8N0RiI4sIxaJDAVvIbmkjUTbZTbO6HnX6wIe5iBu7+blXiN7f0du++Fdu/sxPJTcRwqbYnfyPCkcrIoLzuTU1jl5giqoAk6GotWuuRTclgVz8HGhx/mlcaQk7iKsuFR3pD4U9dVtECRXq5l8bT1Cp88fGRvE/ewDGD2zOax40zfABBSiBoAFuAaINOLc3XEWMzw0ydSEElzJiEtZ8Q6LorRlvK6HyXINfavGliDHG8R6t/ihsRhuv/+6uURt34contoRvStJ+IdYr8OS7OqnGl9uffzUs0GyhH2sDGfYlKJg5piZUEW/b9w6AOYgyraaCDMTsP1ct5/w1StrsztthvnZEYimCTyIFAqHpcpom78aoUM2wZf5SoYyNFn6RZSh6v3J6OO+/MnQ0fq5QfRSCEtUsA6+Jyn6gIKfsfUKUiqUaoTlVjofmqKr3kRQRSxwhrA1SjEyTNhUqInIRS8v2pYJAjAhJ2BFpp8YORXfaxO00jEJ4gJjYFS42uWnFF8p/A7QV9nmYmeROB1AegaJLUc+O1aS9PGYZ5NE6MiSiecjyvBCFMPfw0bCYRrYyMIJVL6GcCMiwP3wDUJ8blJgAAalC/udf+V9+4sd/7MmTh9/69rcBEKw4IaBuqqF1YUmEMSIPoiug8QSXRLECf/UP62jKNHsiI/XOP8JQ78NfbJGsYBJy9GyeH4++lyk1wkItvV8KahGHK5eRsphW+XvLqVFWk3DkElsnApFzTsQxG3XSSl8ZU9cGwPPP3Xn5Q5/8xtdff/zk4c17d3/kr/z49Pm7v/lr/+Tx8dnDt//AyArASx+48+d+/K/+6q/95le/+o39ZsIKJ2KNNcaIyrJd+Q/1MjRTOyyIiOpmAsPdenGwf7CQc9euAVROj/YPbh0eTKZ77dnFx+7de+cP355P9y671cF0DuDmdP+f/cavf/AD/8Fv/Or/9qN/5d9aLi+tMWTIMKsKGwIwmc/btj09ezqZz42FIWKuvvaNb56fnFxeXPJiBWCAm84mT1aPX/lbP9v13f/63/33GMjBDqv1dDqtqxpA3/Zd3w6Dq+rKWntr2bXTyZO3/uBX3/rq0awG8MrP/I1XX/ncuh3+4o/8mTe+9vtf/epXfvzf+Xd/68u//eSN7zz38ofPHh+nAU9sVkRUVEgoQn6I0mD5U6EhA2/ecQp/qpbGLYtAM/1tJ5nVcJxn7zg3Wp3Be5cA9EPvhmEYnLEKkPdHdoNTVmsjOaaQ0EDEg2IGwNn5Kdjtze7ZylbWqoLqilVXbde7HsCqXTnnACYyVV1pC+ecE2dtVdd117W+C9PpFEDbrg0bsLLhfuiJjah4a4QHN+tJY6zxcSGG2VjqVuvpfM/3Yhi6qmaBEJGKMLM4USgbqioLwBqezRpjDBNbY3t1fjMSIM614ZCf4ejosO+Hvh+qqrpcLJxzTuAxTE+FxU+eKoSVCtoNRHskRmJL5pzFBiwVjOQiEF0Po4CbV0VI9aAxS0M0JiaRr3DLS4Z
GzXKyv1eQImwVKtKjXCFShXtjewql8DL/C0FD02TRIxoCbPUxAAAgAElEQVQRvOiSvyEs7vjqdlvyQG2L5leU8Fwy+cbf5f0svmRbwLiSTecJ3ZxfLaSekTXR35Qi4wfSho1sJ053GJnAcNJgxYWyyVu3unmlyBd7kZuVZOyRxBn1D4qTFvlj8J3MH05SBUJ7vZFuV07y6/L/8fLi17+68/rbH/vke3zyquffy1vlu+/65MYn3mPLy8fey633+/z7bU/55FWD9t1V+4zny4ffey/eS/O+p8q7eEcmf5gEU0QjFBHR6S988V0/sP/qq1lQuFp2p9EfY7KerNEp8YtG37OYkR8lxuhriKeCZFZBWW0JN2K4bbiQYrwKr6DAM4IPiQI6CrKKylFWarKMRIWKVHjwZH48tmURsqI+ZmJldVpIS7dee0Whx194356Sx1/8ZQC3X3sleM14sSyphUkAD1eCMmgoRykSYnQisi4dRKDYdu8Xs8WkS1kisVnK3QyiQMl94xcx+iNXppmXX/2tnXy68D1NUKdAGS6HtyA5Ants0fmAwfzlOMWpB3nRZe83/0+5Jkv9Z/x3vDReDIVTbZK9yvv+z+jWWs5Uko3jUIa2xss7pdLt4UxY2+7b47Y/62Ycqu/uyGxf7v7sZ0VCwDKllZcWU55Xb0wInXTOxeyRpOKg5IN2ysVGuBp13FxGOu5p4Tmru8fAy5YoldzyrsadtPXJlANBIz3eRStzNX4UfOxW7k5cMNE5TREHLalmScUqlDOv+ZR3x6tO47iNv0NJU4hUOjvFAezdk6CBnSBEdkfsLtBKT6LjwKa1Wy5FVVWwUsbzk4NkaDxFJjEeM/WP5j5pJCWBCCGkoxCCP3fFB6Cno35yK4oDzuNHEVKBwYS0ZhpvE1iT8pQwRspbvuAW6ZVEjoLTC6V1n0Z9BBAjqlTeeMgc3YjGrvvE/ugZH/zsaZbvdelvwsxsAkYD8KrtHjx4MLjBj6UxTExukN51UI794k2Wn9krUJpwom9PctxJyGRMSKrFaIThcD6BQKTPKQXDhsbon+H0K1VCGw+myjPzjYMX100pFiQWmZsUxvvKzR9qzasmebv4p0UDoMDGyOAIslytADx+cv/mUb24vOgWize+9Buf+NgP/tbrr188fefwzo1BUdc1gIfvvP3N13/zO1/93ZvVUd/3HbRpJu1q0XVdVR155/zDo0MIt/1KiHrnFotF27WXl5e379yxxjZ1DaCy9Ph88ej+45Pz4+O33vy/ntz/xtd/f3542A29JQLQ9pfLs+rBO29NCefLxa3VQtT5Q8ObScPGAhAP7giUCca0q7aq3DsPnpyfHFtmcgrg7NG5NfaiO3vrwbed0Dfuf/1TL39quld1nWmaSWU9uRMoW1Mx7NAPQ0WLi7M//yOf+OgPf/z3/umXAOzPpwr80Kd++PbdWx988cWvvv7li5MnbrVcdScfevnl4a4A+Kf/9+sIu9mTk0zxsvBHSYYciVgZBM/LJ/raFgKc/8cNAzH76AqikLVGfMrSJNnBe7mndRAoDhNzyC/kgTrywRZQjl7A5MlaUzcADg5uLdan58uL2zdugtB13QvP3e2ns6cnZx5GtMbaCuLUGDOdTdXU9WzaD61h45xbt2sAzg2r1aqeNASwYWN4gLAxzGRAfe/P5wEpM9hYAyIms7c/7/tuNp8Nw8A+5wDpIP1sNjPGqigZds71bTeZNt7z0bszq2jbtT7zJiIfICJjDIC27S8uLoh40kzOzs/ilIX85v8Pe28abFl2lYl9a+1zzr1vzpcvX2ZWZWWNmqpKQlKhAjQgQCAm0WCGaKy2TTg66AirSgKVosE/2w6H22EDAtOAO0wE4DCGdoPt7jYGJNESEqCxVKpSDap5yMrKOd98h3PP2Wv5xx7PfS+HGoRU4m2FKt+995w97zV8a9gOFRU7MVxURWWtNFby6d9DfspKkKQoLHV8I2kMiSEinOAETfkwfoqMEJFYdw67Iqd3fjsFQfByhouOVXFPwQBhCyM9EJmcZwrBpkhQf+tbbl72/DWaXhD/SfbKrlgRK8gmtysBThttuoNym7b7bgYbJ1EZgdImWLYjqCbNLW8sVrTLLVAv9bEDTebDiCJL/HX3KuTVUvcXd1D2arMbmZIIUfqSsv939l/OJ9I6eD+PvC/ERLvtyfvlW7xcBgF0P33TAlKX7/kr1e2rrOryQOpL7s+LrfYlrOZUJVeDHX8zl8vBkZcvG7/5G1d8ZuGDd7/k+vfLVZaVe+4CXoqn5IVf/x0AKx++65Xv037ZL5ctLxOIvLzCv1/2y37ZL/tlv+yX/bJf9st+2S/fYuXViz1dseevICL5ipSvU3++rsP8pprAqyyXDtbOblrxQZ7u+hWFAlu/deVEbwu/kGGRucHZFfIehz4JYW5/zhzW4OPRlIOjQvLDA2yrwQORkNsfU+e9R2TwE/FpgNFx2/EOgpo5TGQ+fZobsnb/EfocrVrRvyXVQMGcpRRDHIOrTrRFxghGzf7JPd2iwVK9b5QzrTuXvUP33AXChY+++PDtcDeOgzUBABy8UjTk6HY2ao72weAzEKLxvK9A1/t1D2Nwbo/2rmyK5J8XrXu0F9yUT37XAeaqkCnyIFbHK2aPTDou25N3Duq6M3a9IXKTrJJLnhbMmAp1Cc69VZsAgEMkL5LlV0MQUMyEcOkRRO8b38/OsJK1O4uOidZx9dd5UNxQmtL4J9cQH0OYTk/edr6gqnv3NHPn3d17Ov+rV77z6jLlyC9/SIJzkQ9Nyqz6u4vPphOtxu6CDO91okqGKBiWkUanWXVuaxMuleK0s+XzCUtLlYVLBcoT3bwpWsUD3ZLp/elrADOnO68pIxjdNt07mRNFlnPDz0GyuyeDO3XoVey7j7CmyAHCy1PzLcnHIZ8agk+x36Fjoc9EJO7+EO+RByiYCNmlZKFe5zgRPMR995H5qfj71ZjYHzzxTo5wAePkW8ynLHeDIufi7cYrcUJCU4FlUDjauecggJjsTdNJcs4qCLVqbNZX7upRE3Ij5wsVAqZTSX/nfrLi7pXN4wynZvqqgXvy9M4RPALUuNyRCqGWDSvEiiUGGZ7r9S+snWVmFQtAuTXcN6VhNq00AMEnb1THQwCoCmAigYjJ2mKjbnoJ8WIDBSAuLUvWSe/545J4xo1OKRVELiGE2cQek9CZ3Yyc5V7Ae8xc7g6UE95IY6bI9B7vR8ofZQE/zyJVr2raBgJDVJpC7AiAonnqgft/4Md+yPz4j/6bX/sf7njjrdctHPzd33tia+38nXfe+bkv3gdAy4JKpgKNHZdFSSBmU1XV1vZWUZTLB5YBDLd3draHRsuqnANxPa5Ho0E9GNajcWPbhgBgc3tr/eLaxQsXm/Hota+7+Uw9PnrtkbrVQ3Mz9eICgHE9uP327zx27bUHjqxcs7hw4/EbPimfbiaNJewMhwvziwCUuSKdnZ1tRVS4LHv9/lzd2NmZ+cLQ2F+cpRd2zi/NLj74xXtn+/M3HLwJxhjDIB2PRxNuAYhIr+qpgJ3LqCnWTjz/pjf/1P
t++F23Hl0C8Ed/8L9df+jAAuqFg8v1zs47v/tdzWDwpltuqcrithtvfOzEKQDWtmyMOh8iEANKnLtFxi0TVocQpbUs+an72V301xFRozzjvCJDTZ6iCFQV8QJ5L7a5ixmV3TvuUnVmlyfY5xUgpoz4OLImolXVA3DgwIEF6as2g/EIoKIoiKi11qo14nJYQ6xtmnY0Gg53BtrKxY21ojAumr5tGwDrG2uAWeot1+OGYQaDIYjKXjUZN2JVSAEsLM4NdkbNpJmZ7Vl3iR/Ahgnctk3dTADMlb3CFEHIUBaVVmzbEvou56OKQpuiMG3bqAiTMca4C6+oqKxMADCZ4XA4Go9n+jNr6+vGFMPhsF/Nksuvop5cWNt6Cu9IcXKcz5nm9Ll1tIHTeY6HPVCHPUSe7LymSKfkvBflyezRmMJoqoqcI1+ypNb36kcYpRPHg14TupNNQi4PdGXUNF9ZI922MsEi5yGaMVskJefKnEU9J0Ta/7vGRp3nu1Wmc6ZBkcscA7OY9Jyzd4SHrH6dqnLPad7ThdV58u+SzPbu6t6lK/zkbXmRlqi7EJ3EVFFQxC7pK0T+XBWX3y/75R9U+UYhmy8WLL6a591YXnj97fnDcYCvXng6lst4RypA7uZABbxCR6TQnd+6smfTwi/cRckvPsl6vkRhIAnzU6pseopTdI3/KYJXIuKkuaAsJj2TDTsOGtS0iPaEPqXAriiYOoqv6e9wSwD5YB5KomFELdSLj4CSixf0gX6B3bvqvU4WxBeH9lEESi4hDeVc378b0hXtwUhx6CN3AS8FlETwr3SwJrI8lbFLorYrk2gYCAoqorQQIQzdpRbG2SD/NyHOSlqIENS5J3uniOhdUuOjqedxCREjrFGeS8hBkQR197r6pRPJxt3ZqkkAVBUHM0U8EjGssqvuAojJJZNSlMGEoeQ5A1wFFP7ZNbT8fIRAU6RYMIpVpHnW1EwOmV1CeQ/yburdJaZ075350u7LjuXIL39Ik1AcZjR2NUEEYRY76GGY5GD6CK+72GhxuA6FSwquZlg5ApHhGEnwpc7On5rw6emN5Isky/kUklQkFYqgIb+kv9FTEe9Q9hBX0q48ptelvOEJF+CsPiFmoKGpIdewip9xDpsuH3j8nGkaUxQL7gaDsHkISRWHQQkhdSgB2KWmhA9BzrIc5Ch9yn01pfZFiNpfPub2iE+z4Mfoo7njZvF9DAqeRjZD4TwqkTJTSuoEylQfhLS7KeVkmsA8SYWL90ZolRwjJZ/Yg32UbjbMDtoRRpr6FuY+gJAxPZ3rnYhEZqchjZi4QFDFVLA2s6eDLpVeHu/tUsWByKqQkkIU0uv1q6oEm6axtm0OHToMQEW3t4el6Q+Hg6o0SWHKkxI7eCXHDcNR1YRRwnc7cHIRZeZEUX06T7jk/fA7IUaWu/pYISHPTEzP6xc7zZ0TCyK8QPAIUnjGh1iGy3fQUWfDf3brgDStU08/lVXV6Q7BFKZpGyhE1RCLWNtOALztTW85dOTgM08+/OwLz40aOdLvzX/bbbMH5kbbw7/71MdPndsC8BM/9oa3vuXWE+c2H33sFMQApmlr27aLS4uD4eDE6TGAY4ePPvTA49VcMaF2WNfz1hpQ3wFGgMu/W1o89sjX7vyOt1+ztPzvnzthqlLU3nzTLdccPVo1CuD6wysbp05+7dGHzr5w8tnFpXe96z03XHf98ydPzpoDZdEbjcYAJk1bVmVRmZmFvoqB0vra5vDU6PCh5X6v6M3PApiZKzcmW//0p37+J37qZ8ejwWf++jN//H/8yXU4DtZJYw1bANYqAFFrpRGR0vTKXvnFh+97w03Lt9xyM4D/5r/7b3/n134dprDbG0sL863qe3/wH128cOHM1lp/fna8sw4A/uD7tXQHlRLGlHai5w7uf8QUqFvaEiDxl9oFto9ArgHjb0v3XJxcdmKoRsYNpM0EVVVmFpcjgYmJEzNXwNMNZbA/46IiUk8mANq2mZ3rGzOzublprbRtS8RlryrLqihLV0drWytWIW3btHUzuzBjCur1KpLWUboDBw5sbG6DRGzbNpOyLI7deO3m+lZdT6iAIQOg7JU8qtkYgdTjuur1RKRpWrQoysLBmtZWpqgGg2E5W0HVtpZARVFMxvVFl74T2p/p9aqS4CxqVgS2FVZVpaZtAFRVRYzt7a2i9HDqYGcwLmrSUlWhFoApSlbPujp5FLsB9p0DR4mcdulOkNtShpVcGtSMp3T4nb8E2b8Tmk1LCqgSp7YCB740xUiDuPRPoWNdqyiBJInUQRh1+1EjryDvs0A+HwhkqqUpaqUKL18EbN7Th9jPaBPVKclhutvBXSGML5/N6YnYs1NZ0kZnSEAIOAf8KiVoNDNj7UXzO41lNwBknfPHNIiUUaKJ++WyCxge6hbKhxfE8u6YNRsonCDJQUrLWE0+cd5IHhv1ytdlFmO/fOuWS0XsXgqMu3qE7uqzFr6y9b+C5fKI5FSjX4/+XH1Q9lU+P4VIXrGGV0u5bLA2ednLM0ImANu/eWWoa+EXPpB4J/KMS5JJa3lDucqdqZiBI3bVMUSoz12wiSDNR20OAIE1XPuggPN4oaCFI6sK4aqHiFLk7kREpCLkriTx+kuWXk7Vg65EFDKfOc4Q7ZYAOgPw/Q+ybHhCASi5m5i9zhtbcU2F23miPk1JigrNKEA4dM9dCLHYL7bEt1buuStqzK4RAiPokl0zIak3IXfW1aEYl+XfAQ7MgJPLl6jQYmpL7BYDOrrG9Cu7nyEKo1KIatwCfldoWq+OFpMVEdupOp4BkENygxkztgOCmpDLEHFqu91zGg2AcJ2SeofYKIFr+pTU63gBR6yP8+14aUtAGPGe45wW6K9Q6PyvvCx3SACrv/RBpgCNhTmKO9L1N1sbhB/CGc/m2n8RxsDE4ZjBFMZKq1OuiZjalXuK3nvuvivNThJN06CgcKm4AmkJDmT+mfhaIIyeuiVqphFBSUOnTKfwkrUChiGKeN91NxORmxxvfgpqNHUnvTPQPXyZHd1H/G+g5eTU80zE9jAziJgy4uyr7+7BQObdTxE510AXwqklBYGJ92I2mQIQBwBP9cXDZ+S8NgGQuputQodDisikBMV1il1E2KCeprgsu6wkvkGOShzBRr9oZ/lzS+7oYud6pnDO0/bQMPZo4tKIiHvSkTgPkb/XJnFQwM2SVUt+rihUDALZSDMVAlUoF8X8/Pzi4uLOcEdJDMuBxSUARVE8d+LkcLBTlIyIuATnefIJ5uLE+2R4fkg5oYpovP8r0RvP8pyiTxofDWPUSOASy3QPhWkDpUMUAYq0qbpYJLJtkWuPsbKwFHuedNrzg++ew6qzjR3BVGutMVRUvUk9UgUbHo0GAGZmyuuPXPOm65d18Yf+7f/5p2e32tneelUUP/ETP7tw4NCjDz4I4LlzG2vnN01rSygXmIj0i2Jna+uOt9zxne/49o3NswBMK5+uPi+tsNLOzuDIoVVTlkVVWpGmaZvJBMBgsMOgreGg6hfPPfNUryyeeOzRieLmp28688zTAE6fXy83NntLBwvR5
556cn1re2cwWKgONBNlRq/XA9C2QiAozc/O1eMxgwED4qrXv+7YNY0BgO2zF+fL3h/8m9+74fU31834t3//o2+45jadTGq1RVG6RJb1pLG2bVtrTFGWRcNSzJW9yeSJ0y/8yZ/8WwD/+H3v+3//3V+++c13zs/267r+5Mf/irUYjsdrTz61tbU2rgcASJV8Asewp9SLpZq4dDxeAaByKj5NSW7IbkOi9IxbR7HRBp8RCSKXxi8LAQk0QsuiAKk1pjCFMUVb+LTU3onRm1+I4R0vFWKMy8bIbEzT1iLatCKCfjVLqMtiWBYVQiwLQYvClJVZOrBQ9M3czAwzhqN6NB4C6FXl/OxcY+vxeKeuR3PzC6997euefvbp8aiebI9RVQCGgyExV2Ul1hJzUVRELKK9qjca126gYrUeTaqqIkJZFdubg3pc93oFG2NrC2Bi29IUvV5PVUxhbCsuhaY7XFVVAqjKqmnr4Wi0OrdS1+OZ/oLLRMmGOMhITEREDhA2gcQ5opzLTtnxiy7Ukbr4r/3TaeF9hErghJTeR751kFGJQG906pW49H5HeYuGXoZipO0S6urKgv6ukkjwgMj+KH83o6s5ZBvGG2HKrG+d4syHcTjdkJCccnqqFcxdlyhZjJ1/m3wwwF7puYOIMvVVpNXMU5YrLw8H02yooiuo7InyUl5HqH9Xn7L5oawfuwbcrWhqFPnz3rKY19ZZmrBLFUk3ihWnSv1PCPsKUUXpUKv98g+iTGFPV8SqvmnLN7znr9KZvAyi+uoql7lZ21E7p0GRix3Z/I0ruzgt/uJdObgUmFGguZdsLnG5+J4XQbzjfdLg3HNEVBT+Hgqxort0V3girepuAQdAKh2mkZlXYyeCMgb4+PTIJDSGYnSitztxruQFI/LXH3QLBW0ktZmFdAZjeJjwIJJEIGZKxvH+G2lElOtTh+65C8wXfu0l4kHOWXLlnrsI0dVHuxirN7aqu4NS45heXNmDf0YM5FLPZ/AisHtX7fUuRakx0zAjE0dASjwOQaLCHL2kvIjDHkiO9Uy1kG10NzteQvBGV01eSfF8pQiiaJ4N/VH2t0xk4Lj7zy4BS3d90vwH7+DGrl8JMMmG1lWzfa/3nNldK9w5Q7G8THfI1V/6kNt21sOv1LnKKtIVr0DGrkSVPx30DjyRuWL5ulTQdTfNB6axVs3Gvsf+TD/6Xl1i/zq3gvh0110LREW83QUBqQs9z2K8Caoqoioi0W8ia50ihVGvMOdKFjmCKBo8KIOeE4ZHXlPMhHY3C1PgYNjauVUkdTjbDhnd0KwOYYYqRAXkLoEga9tgg4lie7Y3xd3G3cl04SZUWoUIMwM+UltTWxrE/rAGSJsm8zpBQPZIxBIIcD7S8fhFr5CsZGsZa6U89ted2HgFKufTEVYizmIc664tlhij1z/Cnt2jQ7tKsHa58Wa0EOoOFnlvsKjfEEXXUgJURKxYAxhj+r3+/FJvfm5ue3tnPBoCYDYzM73trSETsWHnKK4Kn5cgEDgiE4DdcEo6JCeuBbx2RuHgSXguKJ1kSK11h4lcPpcwTIHTefMXcqgg35edc9rFIilw4e4Co+tBGf+MZLrb4O4loVhtRqE8kyWx1jbtwLAhRTOpe/0KwOGDy8+fO3n89tecfubE+eefPzfaNANtgY9/+tPv+b4feu70WQArK4cYUhlStW3bclWN6/H8/NwDDz2wObl42+teA+AP//c/7PEM9wpVNJPJ3Nx8YWhUoN/vjYejpq4B6MLiwuLSYGt430Nfm5+bfd8PvOd/fezJ7Qtr1x1c2Tz7AoCS6cg1qz/8vd/X3PHW3/2D3/vyV77UtI0VCxixWo8ncKHHxGJ1Ute2aZvakuH+3Nz2YDgzM2eMApCyP1PMDJp6e31rfWOnR72yLBeOLGN9U6wGtztbFj3DIJC02lOhXjmY2HfdcccBtQDObm3Pz9KoGayurCwYA+DG19wkVs4889TRhYWVxUUArbXGWpASjN8tRKxoo1952gCBVITF77ADh4Wlu4wjAELRYpMzw2jkdrssmEY0IEoKQtta6wBXawES66+4Z2XrYjIEyrm5NvGvgo0Fzc7OnTl3fjgc7IyGOzvbm9tbS0sHAMzPzZVVhYlKwWWvNAUvLS3ecO21z549MxjsrOhB15ntne3Z+bmirNSiridbO9vD0Xi4M5zrzTYMAHVdl0WvricVc6/fa9t2PB4xGbFiDDts1JiyqvpkjBVrW6tuFKIEoeAx2jbWSgN1tKRlLgouTMnW2sZOADRta8WeOnVypt8zplSFiPb7vUndim2ZSwBta9UKMzvTuFuUKEAnmSeXXDo2hvhIpM4acKAkHLrvA89w4ocmKjbNAqYEIi8yTP0YgcxLqkCh7I0nZdS+I/5lepYGIcX1Wq3C+BwmgURBgqNkbGv3AC7ZK41/BAZxxdeCq2bijpd7Kfg3dmvo9CE8tfsRyv4IhgAAgI3y9V6DyqqI5luKpGCKBV+6gtRCGC1lD2Vv5NwhthP8WJCJ4h4azwaXTI2ZPqB79Wu/7JdXZ/k6QWnfbMkoX1S5lH/rqwIqfVHlknAkE7lYkvDF1WGRv3C3UwjYgJmgEIgqnCXTqw1OW3RXeZKX8rxfCqIm52Q2goKKqMCEm5xVFFBYFvUsjoE8H5A6zktMDGIlgVooIORrQ2jBa7XMRKLWiU9BZPRom4gSSXiSlFT8BaMe9pAAECjU6cvMJB0+ShkTcaqpEBOIxLN2J1eJKTixSJfuL0g/IkKgJCcDPmiCM36nGYNUiNqVj9wNwsVfe4nYULwhZ+XDd3FwJQBC2scQ3kiGjGEAxsBzSFXDHORpuBxJ8ACruqyFDJuYaVTlHC6TA58apHFS4uDOwOTgifiWWw5QRxAIoqrzKyT4ZI7+VvAuhk0EIhPjmuO14RkEkIMaQRDOgR/3OVhnExjS1WvSWwDEtn5mRFU1XZsLaLg6NorLGkzkQIZ3UPCJDPpQlGwcDO/Cm5Qy5T7vufj+aR4znxnqOz0mPwsEYmZVZe/X0xCxqpz/1Rd9z/tUOfzLH4peCMSmYFZR8Qno/O3GUBVYqBpTAEIBDWcyAFTEdoVPB616Z4FMXtQYpx5MzJ1Rx/mMCRC79os4We5dP8dERCTSpjr8xPmTK9K6BdLsV7fu1kcvu+hdt/XI2YSUJXqAiPVO3GwMi5Cj0goRsVAmVqiIuBtL41jCVICIpBVQgMJELMBMZACAtbBWXE5AFyprjFFVFe9A4iYCzASI3zYWQs7+kmZIoEKmYiKIuEZcDoTYI0vM0hpSJSarrYqwITbGWoFPMMdQWCsEcmsN+BByC3UnkFjhgo4DakXJZ955G4XDbmNom4IAo6Tsui1+hEJgAjEXTksVbR1gDYDZMBkXHa+qzjPftuKoP7GB15XU6dIO8TfGtK1l4ztlFRAFOcMViVhiv/qu6wDIEAQiVsUhO+S2FjFUxVoGwIZBomiVwMRQEqte3zQclWvn5eoOB2A9BYu8kkXRAoWqqg07loKZRrzDZllUIo27V/z0yRdOP//C/Er/3e98+9nTZx9++EkA
i4tzKjpb9QCayITJKDvQm1V8PWzYWzeRKCWIGGz9YYn5dN0i+6UUhy5ndJoIVpTgcBr1tDcYWIKJTkUknllVVUh2fkM31EcQOiQoYweqQpKdG/elo9UcuIOIdcda3WEhApGIuI0HOBfdMGBVAIZNuMYcPvqEoICINWxEjGEtSAuYFobGLYCL2+t2Z+cTOxfGaxe++tXHf065P78wIzP3/s1nDx2+9rGv3Avgv/inP98S19RSM5H+PIktAIXOHug/8dBjRUsA2nZEzIaLZjw5RFQwgXn+wIrU9cnnT47rIYD5mflBPZobbD/63HPf/cJlxQIAACAASURBVN3fd+Mtt/2XH/7F3/1f/tW993/+xMkzAJaPH//Zn/3Hr7/5hrObKz/+sz/31LMnRpuDQopWpRUx7PKNWlIxptzeGvV6Br1CrKo0Snxhc31+ZgGAUbWWD/DcJz/2yQtnz127fD2pTia1MSTiV8qwAYtaNzM6JqlMNRk2Jx5/YtxaAJPBC8tHr33LnW9tJvK9P/yez//Nf1wx+LsnH908+eT6zsXX3ngrgD/d+MThI3NlMdOI2LY1bNyKF6YQzaAqCvTe+KNorZ1yjxTV1ra9Xs9TiyCNeZqAFhFvIVj1FIDZNO3Ee74rioLZ8Hg8JiP1aASWsmTRVm2KYDC9goXaRhVqDJPCGUiY2W2siZimpZ4phuPBTNWfTJqLF9bG43rz4kZVVAAW5hcFlgqeDNtmMNheX189ftQUxZGj1zz58CP9ngFwyxtuP3369HhMR1YODq185Sv3cdXfPn+xItOqtbW7WZvUiAqzMdJKYbgATSa1ZS6qqmQAKI1a1CAtUU7Go7JgaFmWpqmt1RZAwaY/UwGwVtRqAVOWhSPWFjpXLgMY6VZRSL+aPXzkmlNnTo92trnsDwYjbduZmXJUqzt+ZWHaCahnIF5YcoJTOqmqlqy3JGuweBAIJNPAjV9AZwbzcnXMfQKQp70hPD+QUS+fiCWT5AENt6IXxoSzHt4LshsIKh00M+6WTgmMPqoCbKIiFvA9kKoyGdeuqLjBGPLh7FqYKJnkjVnVrNfdGcmjtDMJiohCvETsn1M+AvlS/24EA10PQ7JRdZzcUXiJ8+uxYE2NOBk1KUA+K407p00b1gjJC0OJmCBWU1ZsBNUoq9cborPRSh565m10nhSrSwLGDGfw9YIiM1HovmNVwRLMToSQuHauY9hdgiEVUYsJm5KyvkNV1GarQ5iO20lmCT9tCpre3vtlv+xd9oz//brW/4o08a0HwL3i5dWLuuLy3pGBwQDA1v985cjfxV/8YHgTKsHt3wFMrppwdwaQVH0gqPEAkHSK+Jyq7aAjSe+ICVo0WQUDqVcJbDvqz0SkpNIJRgiDlRjlpnlIBYGYWDmqSb4LmrOyOF/db7qTmeNRRC6FDqUXU23qZytPskIAYAyHESXdZhf/6YCgDoQhpYMfuUtV13/9pUNF8cabQ/fcHfo05QEWgLvsq7ABEH383GvBkQR7ZhqcnsPos+QXTp00kHoQpkqj/EWh/kyb1IgzR2ElCXlIol/suaswyqGd/kwVD5/BSw1BqnLwqJKqdCtIaxv9ffLmPWga2+pOa/L4SfCw3+ou/LYzM27YqlOzkQ1BHeiDEIkv6v3mHMQcz1VwGY7Tnbz3Xj4KCeDQL90dXfC8DiAA72VQJofnkAOC4rQ42VEiQhdLZgpIw07CegeGzA918B3cteKhT9OnMF+bTnWdTbRrB3ksW8QaZhB53RUiYt2giNl3yHfay+2qQhr3SJgUAoNjX7JwMg9p+gRcQcmi4FIBQMlyQQ7ycyYl24iH2aIzm8NSItwP9hstwmBgYmJmK00+Zd4RQAGgEcviPJA50mRViJWY3Uo93OksGZqHmhGCMSTf2XtRYEVO0EMfXZqNjPOo10rcplJ3qJhYgpzvX858GNzskleKyM9MWiW3gbQoSMTPHReiotJCVLnQcJWFRuXMDUFoKv1oGl9uf/K7z00iQ529Jn/N27Kct35Q+YKPcLR2Bf4Tl88tsAJg57JKYGaFFGVpW1vXk5WDq89Vp53CWdc1gdS2VVkFikWBW3X0WsSMAr7zScvyKmyi5JSfJspYnxu6qyuehcgFkNcYj2fM+JZNTb5zdvn65BkFkLTfLPYT7lQmrNE7a1JMO5idWO//mDsNaz5YAJBWyHBrWwBEanpmUtcAjq4eWr7pho3N8xdse+38zHhilpcmYxkeObJwy7Frnzm8CmC2KN747d/xt195qBHpq1BZKbi1DakZjux9DzwCYKZctkzCKKveuJ2A0dq2MPzE08899vRTy/MHAWxut8uLqxtb6+PNndVjx5987Ila9fW33vq5z3y2EQD4we959/2PPvbUM8++7tbb33L7mzY3tp5+9FmFNaWR1gtXZVGK6HBnOLMw01oxTGVZiDRlOXPq9JnFhTGA0XC4sLi8eHCJ2vqm196yduHioGnPPXWi6vWPHD0yGjZuTg0zlSRWiGEaHnAxe+jwX3zmMxdOPgNgZ/P8s0+f+Q9/+hdv/vY7Pv7Xf/Xs1uSrTz8+U/S2rT76yFfnDt0MoOpVxhQAibWi4lIiOisc4o4Pkaf5XghMFSETEEHVOENoPN8icZ8yG+c7KSJOxPP3pKkws2EDQERFRawQkzGFN02rwkrV417VA9BaC2AyqZlKZiMiYi1UAZaAXjRtvbE1vnjm7M729uLBpV6vGtdjY8qiMPWkBrA92G7bxnAB1eF4WM30b7319s9+7M/f/j3fT6MdZ2D7jjffceLZZ0+/cLqdm9/c3OqZ6q1vvuPc6Rfu/eyXq7KKTnDtpCnKXtu2ZVGWZQGgKCoQjYZjt3OLoipNRWBV9KqZtYtrVVkNhsNe2SuZADSTZm7+IBEzq4htJpbYWRELZlPLNoDRaMIqr33T8Ztuvub8hcNnXzg3amxRctOAURYGAIRaBpmCrA12xA6py0/VHsJxLjl3yx4Rx9FjMvNQm6rN/5q9u6f4f8Wfdg0k/HcPraIrReiuJ1SDdEu+d7upXlfeSX9FPSXmWOp+kx5L7Dn84whtlNBD/pRpFSEN1Mu9neUiT0bTC9lHZaaoeHiDukJDFqsoGkyvriIpbKmdXSWbk+D+kAk0l12+7g6J/d371TgH2abpPDEFLUZhPnsssn0/9wqfkkIvMbj9sl/+gZdXHZR5RUj3W89B8nK5IzWkBbkqLDK7RzsqWMlnLXGtqNhFSApQ7/vmHw48jSh4P2QcMb64i337PmffOOKshIAAEkkMoSaQB0iTCh1bcV+xw2OZcmubb6jDNHflXtsl8eRsgsFZLwDPLhUEtTk71py9cEyWlvAh4jCFexcvI/ipPXjPXQCIcPEl3XXjyoVf946Wq/fcHUMx1Pmh5HEgXp2ENyEqMoA1k52C0c9L/sHMmkkC1JFpfKxMwqFjoaltEAFFwGWlC+ps6FMuokyVoKB7t5v4pMbeB6V7D9E266VXYzN9vPtPEDr8NEpsNhvPbqEmyjuZcpuJda7T/qCFk5gGE2RvitNH3ayXzu8t088Tqp8EKVUV4OWnhnR
l9Zc+5Dd18DJ2u0JUOMie0aUq7H0ErMpPNZz/mBPTs1xS+Z5LNvMuGBFKIDeJwnQ1De18Crs6/B1PecdZIzwTcwPt7REBhHSWcNZxQlBpGYCKRN0ASIqI8+BwDZG/6EOcwwizCdBJrqB5/zj20cmOPDJ8O5BG2fh7qAyT05/dhdcZeQ3eywQAYpWc8wCi86OqqEjrExz4AHCl4LMCQERE1bCJOB6x8z3UeOa9AzjFNgHSuLzZ4mhMcptmPSlHacnDyrvt0dEKOPnKUNtK+Aa5v3rYiQkq9H+qSnT9y4qD/8QqWF2yBBFAwSaSoeCRoQigtB9ZAEVS//fQtzwNIDgq14YljkHS7CtgSv1WCkiuwOPDUxs+pxoga1tj3Ltk2EC0MEVZzhxevabXewwAYFSFiH3SS3WIjaPmCTUOVDnYpjQTCeCtkuFDNodp7fynTNELeZ/Tzk6vBQ6gnuTxtFkoUpJsPvM/s75EscXNVdh9USzwMbzieY3zHI8/uGlnT3azJnJpBWAyqtrr9bS1UDRNPTNXAfi+97733k9+7C//6t83UhWHVpYOzxodrx67aePEc8+ceObco48D+PNPfPym6469/Y47v/Spr87Nr9YtQcZLS4tUFTM3Hl9bvwhgYW5ppx5ubmwMpb7+4GpV9evB1sXNnRMnTy7NrQgDwKhuZkirsnd09WA9GZ5dv3ji3Ln5xcX17YFRAHhhbQNl8dSzz157zbVbW6Ol5YOTdtK0TVH24myJCIirsmxbKUoyJReFaZrJYGfYjNuttW0AZb9YXJg5vLJ8+tSpWRa1zcLc7GB7YKU1BQ8HAwBkClOYsqhIlYiHihnIM489sv70V9/9/e8GUJA5p6cndVORhUzGwPr5C//Z+//JEw995eP/4c9f/53vBlD1KhEXIGxKYoBEtCAOklZ+nNLSEuXbLNtRoiKi/oYul12dQppXh7+QMZwJF2qtJaZoKlOy4b5utVZAWphCoba1YmvXEDMXZQEhZ9cXSCRabdMCENsO6/bxR08MtrfveMe39XqlttS0rTHGbXtmUxalWKtAVVRHjqy+5U1v3jr9/PNnnu8vLi0uLgBQRlOPlpcWlWAKOrR8cPXQymh7vZVGJnVVVgBAaiFc0uzczGB7SISyKESFDPdnKtEWgKglRtPUtmfOX9ipm2ZuZqFpJiLqvOyZMRjtHMByr9cHwdrWFDMqImpn+zPzM0sAtp59/sL5CydXFt5m37q2tr2xsVXNzq5d3FxZOrB8cGFnbAGcO3OhxwVTwSVpmx/YCB1PLWjnOMcA7yQ+hqw/Ic9wOO6q+ekO37vj35EzOYqC1OX7qWfImdXUlgtmr45GkdhaYOxT2kdnXJm6EPqdj3bPqdi7eNqauUEm0oogxgTGqXFsGilkR7qaHky3ddr1oKb3s0kKjeoeNe0twIe+xWqiiTDKhFGqjxJ4eEGT6Bvg+Dh8eOE573FHL6DUpd0bZ9cceAGg03O/EaIvhGPaQRiIQlEQMmOagSg9T4kH++US5VXtRLZfrrJ86wF2ly+v9l19ae9I9ZrC1d1dc3dUKQA4WZwAULSPeR4vokHj8zekBTgq40xR/HfeD+6roJUqkqQXuWEiwpEDkVdR0+saOxje9TV01KFp9SQXH7p6SVZdrivtXXKuS0QMdj5rUWZw4EAIo814YBCzxN/T6KtLYkyuggXZJ2FZ6scVRWqADn3kbhFZ+42X5dF2voNLJlEmw4Aov1wh6tpTEFAm93v7HnOCoeEHE69TRBbIHTeMAggpfhJ2EJFMH2ybSa6UPb936ch/uQdV9i2CFVNzsSZWoC7GEohr2PkV+VIFwDa6n/n9GYRgZD3I1xJBjqWINiLKMPC25FCHephjatzJiB0Ey+DRswvfcT++Uijk4V/+oEMTJExGHJ2fsQxAiH2Mh81KG8V5j8xq6D+mgI1pkTAKqV3JOWLnoc0gF+4pDSPNVfyIqH3kpMET0+h1EP6TPeMG4klcFk7vneEk4audnjgXnWwIQU8ImLo3/WRjck1ojFQMjofis/Sm8yhOZGYFkyEjKnGGgyWeECJYKSOn6jBR5KYINzPOK0kBmILFuSixqnVBV+y8DKN7mYggJOQVFfJTEsNsNc5B3NaOCJHznnEjitdLJF5EgLobJvL1ozBdAQ0PEJJmm2vvkjTGdGjCH+Li4yICCBCDALHxXGoyMsExq+SaBf9ANItkO4FC98KBcdORaKc/8+ozpYiLKRO/IswEFolRjJHqJPLHBBgmggZMhNkQ+PyFi0sLS0VhADAbw4VtbNM0bIqgHyH53MLtGFezetYcZ9Urfv5ihE4PKO7myEf8W/62JT/zU6F4CDKGh0OJwERWAqeKRCQjz35fBJFhqmjUVsM6aZhxIlKwBsfPDkUITC/REIewhxWOFRGIiVrYtrUkLNb25svWMoAHvvi5v/rUx2bLuUef+Nr6qdHJh762Npl89VOfXG/xA0srr33nOwB87q//9m3v+f4LT52ZXVkYlLpoWylKW9eDzfVeUTaTMYC2AlntmWpcVAuLB4TMaDhe39i0jVTVjBgFUBhjm3HdTuYWF0yvd83RI+VMNbh4YXs8FjIAzm5svPfd3/OM0urC0lNnL8zMLUCNaOtohAtmbCaToih7vd5gPC6rfjOZtJNJ1Su0Rb83U4/GAFq2qMfbGxeXrzvSDkdiZDIaVjP9erve2tzZ2R4C6M3MzC8wMcAiKhVZFEYbs7hy7ft+6EcBHDp2aPNf/urpE88+d+OBrVMnf+Td3/Wf/KMf7Vf4z9//c5/49GdOnzwDYH5unslMGgEzoGIFICpL6eaOjKueLUvHR8qT05Q+MlxwGPI/xxhzJlaIhjhfYwoitWL9w0yAERFr26LoqTZFwda2bduKNACYeWZmpjc7OxrW1rbMxl3q4kJMmkkNYGNrY7Y/S0RV2S/KXlG0w/FIgbKqClMAYCZmFmv7vd7SgcXj1167dfEc9cqbrrlmsL7mbsqZtOOqKrnga48dO0b62IOPzMxUMCBoVRTuhhlrW4Wagq+95poX5HTbNEVZKmnTTvr93srKCgBpnQu/bq1v/MCPfP+pU6cef+hJNmTIuAxLk2bS6/WMMWVVlb2qqRsiEtWmba205y6cAnD0ukPXHV89e+ZU28jm5vrF8xcPXWPe8IbXPvrQY6PRCKYEUJSmNAWhbFRVrTvAUyc1uX5nPvG7i4YLzx29J/YuAYGuIjjmdS7w3qOe7IRHUWPqwd1GzylhRLN/KElBHRHiUiVSsilnziAGdZ4MLDu1N9WEo6SOrAZ6l/3W+XeqYkSRIz1G+XA6L3rj07TRPXsk61OU6qYbCmdSbadTXmIO3DQqDFn9FDuRPiS6njeftpBmQ43DSFPsHuy6k/g+XkJqmOIU+UfHOgNqiiTSw4s6HUYTXYem3Wb2y37Zo3y9cauvU/0v9opqfKsjkt9io7u0d6RCgZ2rwSI/dLd/IRTn3Bbj3bLfouLkPsRgq8QYMtgDFO4ACU/nxBoEaIjI3M3MkkCS2EvASKfGGf7r76BhgIKC7xAw6aCWHYA1hyoIUe
nnnHmVqUtZKVLyRB4VzNzKR/I/+0/D/s5aWMjQQWpuY/ow5pB2023ktD+U27mScFuBCBQCUh07y35S/pD2YCinDEHIWRi0Yb/NiOIyJs4WdVoJg5b4vBQTEWgg7mXXlaR8Az5CMwZoxcDvOEVhoikUTW73Rip/R8+UYoSQUMTWhdJGoKXZxwl+FcafUXGcRQFc6sM2y40T7KYi3emfxWyBbBBR/Q9Id5yxtAMASBqLtMfpSUNBtegOIT7FpXF0WIMHe8Vnt0jJEgDnfxAQxbgKUvAIkkIGYwchkEbo/lXwpJuJQ3e1CELFzDR52d7xQKCjC/KTlz+PsqV1s4Fop7iRJGtVWjC6pJsCcbsR+cgvIsmKY3sKVEozC4Vsdy5GlCLJRObW8oP5v1uSGWH35KPxwxTJVkvCNCCJtiiKvNAOl3vUicLpS69CEKmAhQeUwI3UKw9t4e5mkEDQWsfoyGwU7lelfMVVAoRDpjyVWaFudMG+pcSys5RhJGC/5lYsWefaJSqLEsD7nnoX1QcLNFPbbKxe+Lmf/rm/+Mr/d3B8cPfWbqGaV69dAUC3jwadjl7VpKVSdnZ8++j85v7+3gtf+Py1l1/tDjWAG4d3y5dfPN7bJcjlC1u7t++O66OdS5fqyXTn3A5VCkBhVGdQqAs73bIHUE1Udbq9ouh0u2WhAdRaV1VRFoVlO1/M2VrRavPS5vs/8IEvf+FL2+fOA3j8qae+8dWvN7PmkXc/eef6LTLm0qOXNpqd8e7eY088OZ0eALhx5fWXnv/G2saAjPzUM09zbX77s/+qmS2e/ujf7RZrLuS71y3qutFAZ9CZLWopq4p4AS4IpqkBHGJCthyUw6JHlqWndW2XnPXnAAAgAElEQVQay3I8nkIVDQOA1lrrgtk6N5UxTUFdEESsJL9SzK0Cl+WBYDn6nwJMrlzVeAowFwt7AgNC9gm4nS4sQTRYEa20OxZKRFppY6wL+ut0OmXRsYY3N86x9bmPjbH1orHWQkQRuUBpYy1ERLgsCwDGmhJkhefzRUep2bS+c+vu5s5O1e044jTGEAhCi0Wz4PrOnb1ep2ukfP3NNzv93s7ODgBrbb8/sIwbt24M3uhXg9XNzY2tjY2rr1zrdFZcLlHH2KyxYllrpUBWWRRSVsVsMjvaPwRQKDWbTh999NGq1OP58frKyvhgfHw8FuiiqACYerq5uVkbaKWKoggeMiKi2Wyyv7cHYDjol53q4PDg9Tdem8/nRDgejy6WxcX7L9y6duuVV14GILZgay2ULkqps1RNreb1Br9QIWIu4yLty5Bzk/Rd9EZEoU6tO4Hs4G5yBZ/yl+uDIrM7YT/kTD/qG4lFJzHQVl/C8MOooiA8RY1fuqbVRSa6nIgLcfG5FXNKn5Hdhv9bM3gP/ToMgELm9uTvisNpq+9JzRSve4U7Wxq+IiKl3AmSIFgl6/OkEp6NXnI1LzJ6r6yGkSMIiSDW4yREQRRU30hRYSBJVgehR8HJfXJKpf13pjIta6qtt2i9w1k7a2ftrP1YtXcKRw4/+UlWLZm8xNwpkyihnkKQ55n9BRWsaYejhZPOqh2S5BxcAc8Qkpi72IfLOHHtT0UHdh9u8U+WgHz6wBCK5ofARy1JsKCJIKZBDP/xciDau3mwS/u1lTuSF2sGRPuHvdUlECU+QCYzz3IcYmki/c+U0mX5+7ZVFO/IFYBgrpOPl2qJOEHwWEbZ2dJ0ljGIvMXvvBlIRMD2P37WzeMPPF4yb7snQM+lw92S1C1p/UhiPKgCjqTyizIVI34qQBYK638oV2YkoUdJ72i3TKGU1kDSM6Jl3lY00+hP11sB4O5v/YCrYL992/6nn3Aaugd7AuTLcQ6EmUVEyqr0SK3PJhgQVwdUkGcIufeaW4e14X33KVpgeW7DVvanwZ3vISXOIV81QhjeLSGS7nv7FgO73V/uNkXJjZGbAMKIoEV4gIcbktHhOaEEGCiSS9rfYXQhxir70PcHRcoxloBLKlclgUNaosDIHNPyWW+VD92KenSmposPAw/+HsqskmSMOR68BMnmjo7WWFNst0gWgeEBLk8ygeYTWurz+YYgX9XacSQIOVJBxbKuDsAdMQvvhLQZ/RnrLEzUA9PikwpohssLDKU1iIRZRIg0KUvOzZVhgg5KUkQeeQvWmrjk8p41CyBC4iSbg5/YRiQLSlzHcBGacdQI2EeYTgqpTVzIvEtFJcn2Szc6Vuyent41Ul5YabcVIkcKT/LLGiJ88kVx4yAoBaWIRcCAJqVIF9qKJRIfHcni8EgRhmg/wBNuhszwTbx2ORg5kI/71hW/Whq265aREkH6NQ+QOrEAYlkcxKy0FmHrcoAmkxMJgQZlaSUl9UggUKHLQJ1hNv0+J2bLKhGD70yE4wl8AcS9gscicw4TV9u9XlEUBN00DYFEkRI9nc0ADAfDqV7M6rGCaNs08+md3ZvXrlz/zl+9fsRYGQwBNLp3XE8365pt3emVU7J/+Se/8/LVK//Vc5/6s5U//fM//D0At27u0gc++tADD7znsUcee/i+p596YlD1v/3d7/z8f/wrL7z4tVvXrgD44Ec+9pUv/8nlBx9QWqlCTSbH3WYBlEfjUd00jh4KXZAilwC3rCrVzAaD1f1b1yaHh7O1VQCDtU5RYjaZv+/xJ18ve1/80hd+6SMf6pD+vRf/76P9g/e/7/0AHn74yb//H9D1g72KVVGs94bnPvT+j+qq8+abVy7cf84sLIDVtY2i1OPDo5tv3Rps9fauHFw+v1l1i12aPPP0uwE88/Qj11777oc++OFbu69fv3HbMBrbHI7HOyyTyfTO7hjAysqKgq4tg5QvtQSx7hxxJnFDra6g9Xn1aVkpEPE57Cg5mjzNaFU0xgDQWrOv2yZEqlCwLo0soAoSltl0Xtc1CRTp/b1Da7F76/ZkPB2uDgHUCwNpjg4OG2OUUlRoCBnbVFUFUqZeABCw1p1ut3/r5s39m/uD9ZUCHWNE2OpeB8D62trd27tHk4mCbK1tr2yuzBeLjY212zdvVL3+hXPbAEaLxbxho/SjTz5hSS5cvv/r332x0+/bSjeWCyIAhdYMMsYKACkMWeEFK1FUaF2WugRwfDCqqnJv/+7q6srhYbN74y07q3WpirK0tgZQVsV0NhUUatBhtk3TDPoUcZmHH74PwPueee/V69deeuWlbrewth6u9s9tbV2+eHF7Y3ttZWW0PwFc1gslAliOyrRT6Vtuj8C4AvgTszAkmerZUwSOciWwrSVEteKkNt7SICVq5C7W3lOOV6+97IA5aUoELh5ScRC8dpWpK+K5PFKyl9MbySnfUftnaybazVtSUX1p68jpg+wRQcE72SMt/9E2WE4+PI2g/bEXjcGVFvyDcPKLYuILSg69FLEpmaoYJxuiXOoe/0wO1hJB+chK8Xem4ShSCUNMc0lpKMiZShLT93jVe7XAhciRTCDdjBDvedtZO2tn7az9eLZ3BEcOn/2kK4OaLM1kCfqf3laX+EewYNsHGZyqkPHUJYYPp0FkQX9Ey1ckrhsdzlH0R4PBFaaNjlEJI
hZBxTwhtYkUR6vbG4jea/52KIaPz4pGvVeDMu1IEnyy9PYpXCgNIn/J1ncpQASn3pGuiX16BUeWwR5vmi/J/YjkLX28fGv6NX4Sl3370/707p0faH7Je7VTozJ3PvOpCLt4is3Mv7DA4W9kRn2gInexM0jFnycNR0wpAV8p1ZgnvTjtQXtsjStpxwgPIJLlJ2bfBgoGAbu/+UOJP/2ebevXP+G1eQd0BdhF4hZJLxmK/QVUKZ6RChtvCUJxf0l2Z8QVllWrE5859dQtcZjAcKiWAAGLyxcO+HN3EIBb+EiAVJcfFjXWCPvHa+JqB13zVBXQq/SBnjj1mRgSpW78QwhElAF/8WFJpw3msn95AqBIRUTYMzcKQd+0vFsDLxUWiAPNFDwkR6EuuQ4xPolhRfsnM9/DVz7WOgQQZcvrYw5CIGe8LyFesUeSsKd8tGL6Jjhd/Fn7liEU7BcRaM/Q2jsKAOBhKUi8OEgoFWtAKzjqFhH2A6D07nlzJXtCQTQHTTh/lPtABB6fVOkIQMrzn35hAeCrX8BbW9HaC0eFQ+iVhNTIIiHEL5B6YjzeJIpLl6R0jLyO4Z5LKylIVp5/sTCJChSK9DhYGURFUTKzFYTckdGDGIw/xBj8bGySCSfgxNSe0tLlrfuc1PG0EXN3RvHnoP/wqozg2gu+w7x5ZkUUw3gzjuZWxXckEgPh4jqFVGdh6gTizlwHb5eDJp0EEQBgluBS9esd9jPYmLLULledIlVqPZ7WAN66udsrjhtjyqrq71THhtcvPnDxgSdvvHXjxu1ddxi4a2jQX7OjycUhvfDCG1JPH3zooY/93E+9tb97+eH7Lr/3aQC629vcXL1x98b9jzzeXd+6ef3aBz/4yNV/8c8XH/vZZrJ/+9svAJi/6+EbL3+zV8mFzVUq0NVkG3trPJHaTmZTN9vT2XQ0GVOhy063KLvDrtq/e2c6ndWLw9s3GICxUzsZN4uDv/yrL0zH8w7sqy9+az5frK5037r+xt09AjCejC89+sjj2xdefvHb/9e/+j+I+ad/7qd2j+aLF79T0fDGwU0A6/fff/7C5Ze/++1G7vy9f+9Xjg73//j/+b2f/nc+dv+jD+3deBPA+Z2NZrr95o03ZqPDu/v7585tiyLDXBszmUzr6QyAZTa2IV0IiEhpSFM3WjnyiaxWCK4glE9L6rBIt7xuqzILs2USLYEXR+QycLSgVpGLoxQRIqWVMmbuxYFSi3rRNI0ihYJM0yxm9SEfmYarbsdx4KpT1XMzPh4prbUuQEqs1E09XBnogvaOjgBceujR21feWu1WutDT6ZhKPSy7xvLa+kbZLQHUTV3Xzd7uwXQyfuj+88P11WnTrJlmc2OLoLr9PoDjyaTo9erJ/F3PPPXNF14goc21jd3xbq8YSGNd3kcdsmOw+x1oGkulaupaa+0ivomk268ef+yx1dXV+WJ25+LG9Hi0qOf7h8fTwxpAPWkG/ZV53SilAVKKLFsXTMrMk9EIwKtvvLp9/sLFSxcOjw600isrK1B699bttfWNS5cvX11cB3B0MGIqBeB5o4oi6VNJEoVNlWR0iKtraR5BjVnW0ugkw49ifvmr4P1OaGWQlC23R1REKUizJc07aEmOb7h0LPCIWHZN0HgQHhHsmOwr8t/B+VnCW2fSPxF8zgnjYF1IrJAvqxj1jRRpGFWSk6oPpSe1Ji77CEHPllZeGi8CJXPyw09EWEVIGJFHC8OdQRURAXOuYvvV8ZXawuCSMoUoMiTpnc5L6D7jaJ2S9ydBTnntpM5I+igdxHLajRf38YPwzvdqHo0MHTvPetACkwKQj+DtujtrZ+2snbUf7fa94cgVly8SUeteMqOXG5HLexHUce9ximZO5NEhZMPpBDk4JhnjRixsEI9OSJ7XCVGzJ7j8IsitSCe7Jar9kiTZCWHAxgVVSTSkiLy9wTFIJoq68Lb+EGawGaMNqyjG+EiEHVu6a9ZN9qePk0uirD0rtHx9astYZOhwyTwXXzW6pUi1NIcIsbVHq6A4OxKJYMSewIoIwPann4sv+0M6yn2vtnTEO7adz/ij5b5FizspT4piPJf7SIDc35yuRLTc4yoFXXQZEJEUtBmphiI9uw3gyObu3xDmuNR2/ptPiiCWrfSghBs0iYtQCsFE6Y0oFPyLGl7SyYIyLVm9mQRKSZa7XVGMPvIch21AQ7MHpjivCKFk3mOCL23rC1gpCEBCcXvFltbh7bCRPNgh3OXfyweLgpJ67wIDHctDNh0O23B1UdLm8tTmqzBK0Hoz88k9lsWV5VEexfCRkqKUVikfhoTZAQhgj0wmXD49NL65s2xExBcq1YWKpJzz5bCamWkTe5OE5eQcUhIjjpOdfbBUm0X83JHKrAMhJwBC0IONSn4Ek4WzWI4w7mRCpr3sQ+a9ZUHkSMLRJIuBQMCeeJwoSYFwYcwirla4kMMzQNob6zG3o89W6PFgiADKppmMYFTLeozGX1wmR2RJ7LpXYOZoL7m3FiSOE/H/bMohyP1e0iatdJWkb90IVUqrnHFAAZi5KEvXWzys7aeJGbkAI4ToFMRPIz+IvsNMqkV+4knN1Z6Jrr4WAbJKn4XJhSd80aR89KKLhHXvFIYX5GT6lx08D0iKVPdrwsGd4UPiIgUCRoyKYG1QRFRLZHhPC2WTf4ryIf5pxlittFksiEiRbowFMBwMNlb6ly/s3N7bm8+mVdF51wMPdIbndm/vvf+DH3rt2g0AJZXDtc33PPbkq1df/o/+w384HY2fePq957rq//zc5y5fvvz+n/gggM5g8Mb1K9/4fz//4JNPK8j+res3rwxv1uNvfu1L165cqc0cwF98+Yt1PR+Pj1TTsOW6saO6KXu9qlMopQEs5rO6aSCoylK5UlpFMW8WOxsXu+VDBwf7AI6O7prFrNJ0cOv2/Hg8Obi9f5WqXrdZHF+9c6enFID7Hnj06PComUx0USoxq1VpBYZUUajDo+mimQN47bWXJrWZTif9fmcyGU/nk7vHtzqlGqyuvvKVKwD2r7/1wfc8fe3m7Vqrfrcz6JQL2yhI3dTW2lIXAMCilFaqMGyNbQgEKCLltnmKovJkxA7HSSU1IpUAzEzaE2+gN6VIQQsAZquILMNt0uiJsZaVIu/RFDHWMKMoCqWKstB1vWBrO91KIFWnAsCCsgQboxVZa7VWuihK8MbGRtXRLz//IoCf//u/9PqL3x5uD4mko4vBaq8jultV48XEBTI2zeL6W9fNvNZaXbtyc3A8Wttew2LSWdngxtw9OgJw5c03Bxvbw6q7sbFpGnu8f9AbdkejsbHcIzVzfjRm60PJNZQSEJMoFmuNIku6BCDWlKUeTQ/LSqlCPXjfw3/8R39ULxZFt+OyDcxQE2ljpsbUzFZpbY2VArC2sQ3XAuDq1esbW+f7/eHh0TEEjTGzo+Pd67cvPXh/fzDwDKUgIlWQ0sDCSpIBSLsrmQ5e+Q8ey8ynjID1SI4/ZlqH7893kZHCkgjL9y8QD3FFnDs+MWkBFPS/KEADdhl+iSmqfb8iISV6y7pphYVEHuOnQIIT
tuXFie5vSV2jrbuHORB3yi1Qfv7G0roDWXft2TgxScjMkxM+sSjTg0sNIfcWJ0Q1iJTsOSIAlCK2LKGyqJ+iJAYpzEpQbQTi0UY/KkqLBmEbtLjWWzkxFJWjqBiFp4R/XaRm/J+i0xMChjjeAEJ+HOXkTCH0Ai/jlx1py8L+7VTYs3bWztpZ+9Fu3wOOXHnOBbtl8iY3tTIpTVnoA8eEbknvzoxIRDEk8YDaMqddviOluwoKBEFEqYhJ5ILeO6n9qKLyn87nhq6zJ8FVUYzQpzts7oKrgv7ZNuHCXWE8AglRNSHlm5fKXs/NpyM9v205Untsbanf/pi+t/iJbkQKIZIS9B4VJibHWdu278nu0sd04ps0zeHuqG5sf/rZSD93f5inud++7f7WOwL7dj7z3EklIX9fXlbmCIBSZG02laG5S+785t/YW7/ztvWZfyQkirQPE0nfSFLVQoyadwm09oSjqIjBAHEnBsrIFWjyB0RDaFPqIz5VKAsmyu7yl7kyAwnSCGEs4cFADvMIEZG1sZ904RL3SSsYgGTLTNFXDQpPTqCZv8cbCRIi2/w1iXsqkEuekO8lRD4XYDIgHZ4N12ml4QO02Cuqznki4gARUvCPFnHfhQA+JC3cd8dEBeCqowTgh1IwRojhpLBo8RWzuYkdJtup1YLWn75KL+NBr5TPUGKXArjD2i4vVwxVAEAeFw5LEUSLX8GMQIlCykq/NwNhuedCkVJKsRilfcJT5gZErpCugDmFQPkgXLi66I6yFLlSSQIhV5ecAaPhKINTzhDveFAxqQFCqLXEmZDWvIa5SrYk+W4pEFeLvwT6aRklrTVCNJ8iXp5ccoHY/OWxu/xMgLCI4rAJBSKstXbfqmC/EpG33gRIQGoaTYIU86G7i+8V7kH+trgEwXMDSCj7RpEIQMpZ0L47Zl8x3pVRtmy10ieAQOTkmJMxBWihqRfk/S0qnz4iUctv5MvZE8UYFidkxSOewi2/SqJ7iIDZsHCn2yvKshFh5sFwCGC+qO8288GwM1lMzvUGu/tHe3dvTMvxT//Uh1999fW/86EPA6i6nS9//eu//Pd+4Tf+uz//yD/4T+7cePnFN759tHN+Y7By8+bt7e0LAFY71Vq/9773vPeVV75u9jasbV751vjd731ofnz00OWLOx/5EIDFeLH6oZ+shuuFVVw3RixK2tIySkxLrLUC0VoVhQbp9ZWN6uH+o489VKEQWAAznndVVVB5MDqiAuuDHkPXi6aZTG7f2e0oAnDffQ/Wh7efv/LaSn94/r4Ht1bWeVzTYr6ytnH3eFJVXQDG2MnkqFnMjg73v/KVL5SElVJfeeVb1Vqno2YA/uyLfzpq5g9duq8oi6osjkcjXWoIr/R7R4fTeV0DsGyrgqw1zOIYd6E1s4HKRYaQhMailCZAICwW6dRLot8A3gc6IQWgaaxWBWDZWioKt/AsbGvb6ZVu37IYgtIKSkOpwtramKYoSgCkUJYaADNEKZDSRdE0CyHLEGNqFrOomwZjAELzg9HuT973oatvXZuZWW/R1JMpw9Z1ffX1NwDcf9/9W1vrN6/d7Ja92tjt7XPT2fR47/ihJzZKXVy4dBnAVtXZq41t+GA6aRiNlQqqs7K2sr1Z394rigIAOwSlKEgpFoFlgQiD2XCB7koXgO51q6q6cvXq1WvX3vP0E0p0UarGSG9QGsUABk1nND5gUUVRaqWrsqyqikWMaVgcyYDZ3tm7s7raV2zms2nVKckw6U6vO5gvFtAaQNkp5zVbtppVXvF6SSl1axe5YkQcM4A5/Qg2QtAillhDYHGnPCmwtijlIjuKWkdkKt5CkAShRg8bRf7b1rky0ekiz3P+KN5zorITHlHnlhQSmp/djqEDjt20hEY0Z5z/hzJhEWTF0rScwkfv2ZZEuRuDRAUqsXGkmFBSmXgSkFO8BXDhuiyxE4G4aH2IQCnA1ZnK1POwwGEAgM+lgEgDFDxgIiJskXNpPxEAhELNxjQRbTUum6LMbxUUELckXrgSnYzOze4nR0feY3LqVUsLs3wo4qydtbN21n5s2tvBkcNPfcLHGFGI4CB4qZpbkJ5Pe0PBGQkSxHBI0RiUbxERC3J6e6iGJkKkrYvN0URElll7l5dY5pBmiwD4mrkkpFRUOUQA4WidEBFcWvEQQRI/Z3Zxi0CILwo2KolY+IPXydKFMFtxLnGndTgPnrjKZ+LCQyiZ8CCKeisBwccYlRIAraOpoQVREg269BnCcIMC7Pr2cVIZwhnSVXnDDeRzo7h1c7UswnJk0pKZfSYuAgCNPFInPtol+AvYjpezwvC1hLwOFrWtoK8rrYg0ABYr4rM9OlVg73/+a81++A7bO0Qt/3a07V//JABjrAgzKaWUtZaVaEUUK4b7fSeefJyLPaRJcP3EJWcRDaVVEZTelI5AmCnk5CEiD7EQAHGlBgA4JqAAUk7pZKUVELEBIJ4KBqCgocVX2iAiiFIA2DYgsLj9LmyN0+eV0kIhW1AGobntpigVqA6BxnARiLrw5YNDMGNEPeFPfwrg00iqeMIoKLQOz1JsrTvoq6LfxMEbQau2YlziIhFhFk7T5eZaK6WUh/BIkYK4KY3mktuG4sqqCMBkCQQJrplgBxCIlYBg/dBJwMTQpADYhkWgtSYQW+ssIWuNjgvh3kk7HG7Z0RLZmFIaQkZqF2JOHk5FsjfEmwku93yEYq0Vl/oTAfr39WpZlC5YrBCDwD5sDQSlisKlCYUoYYgVJiHy574tG0W+Rq4zTpitMaxUjP9AmEUOtqMVEUWa3K5QbmBGIMwFYEFMSosLmCIqy4LF+tdXopgkxFYqRRDtPkewSkLcBDE4WGMkAt+JMAsXKNiK0kSgxjJBlFZKKVW4xSaxwhIcby43ADvyjoH5ftEdKhroUVEM7mIWETbQhRYyLtmd1soaC9WIi3w0UELQJGyFpKg6UNwtNZEa9nqdXgGATcPUc95BEfbLR2StdWzfPVdlGKWX5wSV7HBSmgLBi2UmAhSUUkopb38GIzNABz7m2EWiOR4SCNCB8soNSQS+SkOYE4nYopsyrz5kMjfkaS07/hStZevVlXCvVlqYiYiZlVLCVilNiiwLTqZIAfxBk/h39isArSutYGsjirqdAiSj4ykAU5te3ywm47KZz0va2hq8eX1xYaW6uDr8nReev3DpMQCzyZG98epffO3zd65f+9a3v0Vse6qaHo7e/7Gffe2bL66vrQNo6np7c+P43MYvv/e/3BgOp81o2K8GxXB9ZX24OdhY2QSwWMxrw0pX3/zm87PpeKVzqR5NjgxgF01jADSLpqhKKNJEdVOXBetSTM3T43nRUUWXAPR11S3LRuwDWysC0MpwtpgXixmt9HbWH709rwG89tILM16URbF/dHztrS9/4KMfefn1140VC9Ut9IUHLgGAMY3i++7bufzQpWGns2iale7TPJtM92bDsu+m7I2vfW2jUwjUvGne89S7lC6GVXH+3PaiUXM5ArC7O3HIIkG5gCth2+kUi8Y6ex8AwMaaoqggmoVEjFNklNJaK2sNAGutKgr
H6H3JRJeJktnrukoJrBNYItZtcE1F0SsAUZ6/kCINLcY2TdOQqG6nL2ArhhRNpzMAw+GqtWwFxlpSBLYCKZReXR1U3XLOCwB1Pe/3uwXIspzb2ioJWF8ZjUb7e/uj4xmA3Ws3O8O1groLU0/seO/g8L4HLu3Vi3o66q9vdrUG8OJoQqBCFSUV/V6vWcxGh2xns8X+kdXaoYRVRxelMiykpCgKbqRpGpKm063Y0tFoCuCZ971rbXV4brG9aOb3P/BYtyoO9588OrrLpV5dXQPw6nder8pKIKQqqEqIazNnUKErbZxKD6V5c2VYltWoORYqhFFCVRuD46PDB+976M3jNwGwYdSGSBu/BBK3UcbhROADUQMASABIZ+6G4AaN3EAp7XNfB9DKOTi83ynt4cS/EiMTRCkcvF7+CrFCirTWzCzMRJo5wNtJ/Qi/kYPLxfpUxMp36syH4IgiuCBAgQr+PIdxeT0NXuLriDzmXKiFncW68VG78kp+slZit6kX73FG0Cpa79BqihCC0xPrdsJUa+2RThfdIsqpk61ydxzOigXvqI9ZJgmqE7EIW1vX1gGYYpF0ETc0luDC9NCge6z7MyWGEfF4pwDJPXYCLsysK++qRCSDpSttIAb3XOUmxM2yex8iJQIRG0bkrlEE1VirFMF50sgfofd3O6dsmv1oh7fFyVk7a2ftrP34tHvCkSvPPecSkCBaUAhmlGvRuPMSw1UXhRJf1paWrnU6ACeDwHWgyB+ZQ3a1BxkR8D/X2rELAWmEj8FvBxZKFC0IsjXEzUgC7TKsJMgsSLRZ/Aj9YUiH76W3CqxfgjD2RldQAdr2eXzjgBq0pEYuPu/VlFIxfAjRnEXAVwDv8Az2dP5PQJLgjDTJ/MCUidx37l2j9h9OR1A+cKg9PSKg052AO//42TjYH2oNnLMW2/Y//YSjCL8nnFFdKH8U2Ifchbi7UzUb/1kbsc6xvVzXzltCLz1ncdpnHE74Kr8l3EcnOpGQFShEwSUnAshjGe6GwDiWc6i2ozWzztP+DztWWn+mdxaR6M9vfxkeRjHKSvkt69wwWdnghPGxTQ/SvtgzXNR14FmklHJHjTIWG3EutxeDkg9g2aXhr3aqtKTDimmDhlgt9qekISDySY3XXDMAACAASURBVPcSp0ihBxmskrFfOBboU7Bl6JhICDRXpH38UDAsuXVv6C8sicCdrnZKunNiuaFSrHQmS7ZInF3KZsSTucoXzHWX5ohUDqM7YvLpn7IpR9gj4urxwm+oyGvjLFE2Y5l0EX+4jAMNec+R0qTQkF8BQEmAQIgcZA+BMJJhJdFyBFLqYkljDY43/+hMimmtlVLiT/ixCCmtOp1O0e0CqGeL6XgCUFEoay1bq3URJjZg+M5wFgGRMZbYSXULkEdP3TDa+w/pC/8z89n5QTp/AFkbUkc4Cg/zuySr8hX0c3TvJ4qcQiunXiwBcw8yN61gxiSTFiDeW+mvycaHNAtBgIfNCKCxtaZSKWUFs/lMF9TtVQD6/e6iPjK236l6RBVbdX5jHWR+7wufP7x55xvf+CKAm6+8Pji//mdf/NLOxbVmfPXuaHJx56HRdPT8V7+yf3v3+q1rAC6e22Jp7ty69jM/+eHSzG/vN4WU3IzRoMu6Od4HwIVezBadwYpSsjoYsLVaKaV13TS9bhfAfDbvdLq9bresOqtr64vFwljqF/ry+c3zO+f3jnYBnN9Ys7PZvF5cv329P1jho8OysfWimYMx7J7TBsDK/Zev37l1OJ8vGjOd1a++/ubxwZEqipWt7VKXruIHmBfSbG9tPP74kx2t5s28V2L31vVxs1jRCsCwU5ZVNVNVt5l3OxWLiLWT2dwY6/xrfvkC+4mLmBMCvPqYPswAFxYh9lA4M4xXFD0G7qLTfJJYY2yIoiUPdDh+y8xiXNcWrLXSWjtUpNDUNMZYW+nKMruARGtYKW1hCUppYrbCXJaltXY0nn3gvR8A0O92L1w4v7Gx3rl1S2vdNM1svKhn86PDg+HKCoB6tmiaA0DY2C4Nxsfj0Xisla7r5vD4qN/rA6iKklQxn0610qPptKyq9cEKKTKNKapKVQDQLCyVUAUpVcxni7JTFKSU6k2msyK41d987dWPfOQjlm2n352MptTvvfCtF5pm/u6feGZ8PAFw9/begw8+MK/nxjZlVQz6A2Zp2JrGmkVTz+cAOoPKWqlAN2/fKIuyKsq1tbXG8ng0Pjw+HI2OAdiGABRFwfZEQH6utLd3vrTY/8k7gLTHk4yAQ6qzA97BcImwZ9ZVArj87dHkiZ07cDRdFLORt9mfew75ZLbpjZZYnVPpw1EJhJNf74ihxR6inF1S/OVtOmoJZgEltwudvCzZHWHKklQN0SLiTDimDBsMQwqaW5gl/48Ip7cNctPfsPyo9C65JXaSeBzNtD7PTcpTpuQUAfR9NQmaCXyV2DADzh2vEEvX+UjJ0zXvoIHRCQL/UWy/8D/+zt/0EH482tlEnbV/29rpcOTqp549LWXvWTtrP6y2/elnA/YtELn7P/0oBk7+OLZzv/YJOM1JvTPj+6ydtbN21s7aWTtrZ+2snbWzdtbO2lk7a2fth9lOgSNXP/WsKwWJzMXnvVjJWeRaFnABf2BBIEyivO/HOdFSfZAYx+FDZkKIVHRcO1+axAqgEh1QeagSpV/z+B9KT3RnBKIjM1xMIZIk3Os/IBYhpYJHLndztkJ+Uroa55RDjCELcTk+oZgbgO+CoqONgBNZKNPoMwdd+zxHmADxSVHckQYK2dHTgFJ0SfJ3LjsCJeTfyiJ23oFnrR3t6aNIszgU78rkGAnliEVYwkl1ie91ImQk9UEAkdr5J8+FQ3kM4AydfOdt69d+NeUwChsgi/0Iuyn5WsOSUNxLp1CDW43cq0wZacXY23TtCcKDYxHgULmGUmBXJKtAk1msH5YCmbKIMxUTefqAQO+pl5A9IO66Ezs792z7Ply9EVoauCRWmDz+4chOcNPHfRdKuhDpcIo28LjwWgTlRxIDOvwvpJRLBCmilPiSU2nqnOffnVdiIwh56LJQVwrZMSTEMsqJzZ3CDlJ4aMy4ELl+7NINKB62SlEKrfCHe3qvPPOLbDRFB4qkWUSQC3mgabw8LKlYCzcDQkpiDJoQAy5iL0oad3I7igPXfc7zI4ONB9NSxkcRClEbLEKBalh80KT4RUtx7gwuQigWvHwLh4iDlDwRhyJxIiA++wF8eW4Q+ZgJF6UTsib4M9lI8ib05t+R/b4KsS9Bckvc3i7TWWDmblG1CENJURQAibAxlkVfvnQJwPVrbwl3SYTZFrq0xlhjC62JyLJ1lVhD/A+0ouFwpW4aawwApTSlGj5Z1tQWhWT8xkfu+1RhcGclmSUL/fDyVPk8sHEnZSoERSI42cIZQzcV7yigJMTw+PDxViQsZ8wr452tAJalMHKJSxbvcZliSYXTmVortgD5U6fT+fTi5sa59Y16OplYJs2ANaZe1PNnPvCUIQPgZ3/+Yx9+//tNZ7hej3ceuPjtN29cu3r9cHTw2re/S6pgNABm88l73/Xurf
VjreyNm2+VSpUYbO9skEjR6/d0F8CrV18bT5ubd4/KoijLsjHGsBTMnU638cmRSSldN1Zmi6rfTGcLtvXXvvLl77z4/E9+8Kcef+wRAOtrG7sNP/OeZz669guf/Zf/fL3Tf+8TT4wLuX3j9szSwdE+gAcuXLh2463D/UOmLqti987+ysqQlWqM7ZTdRb0AUACdqjoaj9nMmQptZoNB/9zO5nZjpvMFgJVSW6iersZm0e92jDEs2Ds6Mk3D1rIjEbcblRNfQoBhIZtVNfSyLJ0HoJAPkFlErDuqzyIkKbI1LmLkaJ7VB14XYuKkLAtHK/FDd9zSHe5e1AultDGsdeHCMGezuVI0n81tzWtrQ621rtR4PC6LYjI6mkxHAFaHA2Y+t7U5HPTHx+PByhAw9WTRK3q8sADqemZZExko6lQllPR6PStFY6wsake3lnkxmziNUJjruhZIp9M1XJeq9JW1q8qF7E8n826vS2AhtkRFVXZVUc9nAO7e3B0dHddiB+Vwejxp5ovR0dg085XeymK2B8DWzaDbr02jFVVlSaSNqRnSNM1sPGPTAGhM/cabbz7z9DMP3PfQfD4DS9M0pAqACl2urW8CmBzP6tpwLFMMd2ApstKoAmcb8hSN01Expf2XZGi2axHYaLij9VX4cVoAtiRmFHt1/7D4nEmI32dnDrKOgzoE/zYxdj9wend8JUpkimV04m3vqEnWcUgMTpCQu7rVEqHHP9yt7L6V1syk8M4U5rkkASVXFYO20p5HZBqHiACMrPxUUBtIheRhIkLheLakgboeItG40eYvmEfUn6LLRI06DiudycmH+L3aUufsj2mjpfJ4jdPnAvdFGaBAPnlIIgsvyt/5cp+1s3bWztqPaFuGI1efe068Ra3cgUD/hfI2p0QJG8xdz07V8mkIhZCuAzHUUlzVQjjDjUI5DB+MHq1B/xMZq06KI/kTnl58+ae5vwJeEqwTF/1OUdw5mbZsnCdrLki3/FskK9kZJW0VxQuITFxFhMArHBKz1+T6RXZxLotO6CW5rPGHa/1BMIfsqJQcOylU0dhsi+2gJYmISnP6NumU4yizYhEnlDACBTGZOmopAicGkV47DDy3z0MdiqRcbn/6Wafl+/yDoL/mat0/sm3rM78atUSJEEQ4uyOIExnJ76RaHdQ0SDzuH1Q31bo2p1u3DfMeEHnC2zZSSAeVneneZh3B1+Cf6sGAUJs+gpEiAuX5UVDqKeDzFOrjhsmJfCtpb+4NAxYmwc1B2ZeuOkdm8ASXQRx52gmJQUF8hUd/XQsOhT/l5xXNFv3788thwJwQO4AiNyUI+x2ZH/5agmxbx3jTfAZTg+ME+cnLTKDAThiBqRJRKAieBizSvi8+O/2QwNYD81lmg0HzJvhcuAQYtskQzAwYx2SDA8nNvy9dIiCfIiSsNS0B8D75QDZFiTun+fPzDAJHcyPNMSlSSom4Y2Xsewv/R7xZQiUTbwjnFYUy6k5GDTseTAJBzNDPolWsX+Pe0UGWkmia0tz4bcLefPGllhIWCZWynAKxHpeIiOjEIxhCxhpmq1R1eLwPYDwdScPKYcBaC6jT7SldMHPEbxzkLWK10r1+T6ZsrXG5z5glpPhsS7ucTUQvR3J3CCWHWyYGkyRFloQkyqYl8brM65a+8Ev7PRhWfHCrynycXgT2I1ELyckKkbJ80/74Z1Yq1S8TtFIsTIAiVRQaimfzGYDJfHLh3GNVgYP5THXKbkePZ/Otje1f/OiHm3KwoRsAq5ceI7EdY24f1UcvvDwS/cCly01hv/qnX770yMPHoz0ATV1XvdWy019dWb/Kdm3Q09xcvfLm3dH8o1R+/mt/CWBadG7uHRvRK71+v9chpcqisMzGWh0yZh6NJ7Uxuqym84WxPBYFUjsXLlx44PIff+73AXzOzB974rH/5Td+43//F//bozvn/ujPv/L1r3/15//9X7p4bnU8mkm5CeDi+S1bN+PRVFcKqlzMbdkxKxsb/W53Np3NplMAa8PBWq8/X4x7WrbXV+ZTOb+5fl//wt2r1ydFCWB7Y/XqjdvuKDSzTKbTQX9Q1/VsMZ/XtRuwaQyVHHQfwCUxJYVWWsDEIRA5diBUn2Q259VBQ8lhrHCQP7IC36y1SpMrEm8tN2YhjSitirKwjVnM6k63wyKklcM9x+MxWytMTWM3tta1UnW9EOGNldVn3vXUK6+tAhh0O42pF/VsbTi01ozHEyq0wFZVNZ7OAHQ7PRbauXRpVo83N9dXB2u60KZpJAPhjbGz2WJzY00rTUrNFov9wwNNutcfdPrd8WgKYLhaMvT+/tHq+rDqdPbu7A/6ZW+lv39rtzcsS5QACki/379z7aq19ubxtcFwqAvqVkMIrw/XAMzMaL6YaaU7nY7wHALTGFXqsqrmmJdVBYA0Tcaz8WTCbJvaWNMURK6UtlJqOFgBcHgwMo0BbKl7NqnKJ5LmherHYW9LppUHAZcx5Kg4LSmsSFe1lNus5R9JnoAllytphC2BE3+mJ8Q7iNocK+NSQbC2ydS7/eC/uGeaipPvkikOkn/fAuyWDghHjkwnvoqjk1BqLIWPtMSvk9AcktjTyTVsD8q/YzAYM+nucndm57/zDMCtYSEUWVta66jaeD/VKTLq9OYl9Pe8rjWK+GjOVOhMdwOcmsyOiYCIoAje9Z00ZnfhGSJ51s7aWftb0Fpw5Opzz/rgIKceUzI2VAgpERGIMJgyOxNAsk99nQofnxFqi/pL4ONWAPjnJI0hY8xECZAikSzHI4BYciIYXfDZiSXVzs7ENHwWGI+1sNcMFCmfu1h8hnNxSemTpecVyjj0bNSpBe01/RKua93pZUZbbuVl76KK0Rp+1h/BGWfBHKX43JZhFy71nSx942ca0ZCNhmJS1046V8k/tV3cD7kopXRb60eMIYraXxpgDpZGEMO/Uyaxs4F4ilOEnX/yXBy/G7NY/tsdRLn9a886gnQojEcygj7ocAh2pnFrz1G23zKCO7GhwnyqqJPdI7wIECjE5U9WWm6x36NJ+pGupCWNPCpYREpccvLW/X5kvtBtZvjFTIYkirxK77YLBwpsKbcSfSrwqU/9K3icy+uaEozZ3Er1j85V+GyWrVgPmTnPdjRWCSF9bQCLgBhZ4MI+fJCNuGz1vl8WcbkkhQPSSPDlbhK6km/tsCGjoo30adpkRDGdrm8c0KV4MzNcDW7JmEPafkvsIpuIyEnDT7cYrtqPYZNjzaQUgdjXwAnMM0oBIgUV+LuEHMSeSBRgmCmalomcku0GSmnAApKb473+FRFnJsQWZlw5PMAnsg/lDnIGhrjocfYRRV1siWy8My7y3jiJ0UpGwF69odou6pJZvT4hKblpbK8R4Aq/iX+xgGyDA/MgkLVsrQVJt9s9v32eKg1ge2tnf2+PRJRS9aJmsNKkdcGWtVJaaT+1BGGBYDqfGjZQEGbDkm1zlTF2SSKhxTDCwjmDmjK7UgWaFUEsBuBnjvJkYJSml+JUnzDXQqxv2KTL32ctKR8UOhTAh8u5qYsKSjgJ0O6v1X9QLU55awUwFCkOL
kwX7r21vrKzvQYx9XzQKCWMTlmt9gcE1RUSKgCUkxGVvH/r5nRC5Vrv0nAwMeUzz/zEH3Q/d3Q8qhczABc21snU51Z7d27vQmldYjbafeXKjY//zMf+7Mtf+ubzXwDw2NMfMY159KFHBHp3925FqjFGlwyWyItIQFAQaKUbMb15U5WDstOvTbN1+TyAup4zQAX+23/2m//gZ/7uL//DX/79z/4uHzXjVX1cH64UQwATYw9Go82tzaNRPV7MClbHB3ZrY+utq1ebWbO6tQFgPDre391d31r93d//g1/9L/6zW3f3//CLX9zu9X7x4x8ze3sA7hwdzjXW+j3en1RFobXudjqF1mVRdKqqaqyjHEWKUw5xMIslRs73fLUNL9OM4bT3gzcZRCKslc72JQe+7enEKZRxnlwSSWMbDV8OSxEppa1Y560oyrI3GGjtwZmy6gAYjyZsoYiqsjLGoNRQ0IW+sXudeXb/5UsA+t1qe2trf//ubDZRirYv7Uwn06oq2VI17AJY31q3FjsXNlma8fh4uDpg8HQ2G/QH1pKxBoBW2jTGMjfGMKMoaL5YdMpqsNJXZbHR6QCANucvXDy3s1WbRVkqa2drK2tVpZWsFbq8sPMggDt7N7udzvHRkTAbYx7ZeeShRx78zrdf2L1zc9BbBXDf+ft7/e7R7h0RNi6wn5mtKNJKq6ZeAFCiR6OJZSsiRVGwcdWNyVoeTSZN3QCo6waklUAR2Ux/aLPCLAd6KwBuyUkAiuGs5Eu45LI9aacUtugpWnXcv+0bgweCPNkABOXozSlqSeehe3UUNfuIvqVvg+oVvHPxM4q0eupAU9f506O0za2sUwa0/KX/QwVlPrpO064K5UUlHzUAQHmxk0LZyS/YKZzRP5WW1tR76QgESqfhljSyE9SBuC5R66Vs2KF8wPKrysmJTUoS3m66s6kOtBQVDU+CTp8Qpygq76sOxf7C/2nwQnRyjG8nw87aWTtrZ+1HuyU4cvW5T4iP43CKvq+QDc8CU4ifD+l3FwVDIBq5Eo73tW0pAICAwd5sJgBCvmJADpP43gh5acosRCX+k9/T7sXb+rl3T4LIjFEzDqUMIS2+6qaPbPEdc1BhVJvhZ/7E8NpxdmJAy8nm7JgsoMiPNRnM7WloixcRCUUcowhtZ/ikkzctW1otQRpt5HZkZ35lmuD8xlOUlaAtJfyWM32Q4m/hYlnqNB+xZIphrEIe41O9JufP2vh/3Ftsf/qT8QmZPggSufPPfmyQyo1P/yMfBRYW5oT/OZrisRKJO0MIBUVxQsJkkVf8QgCzuyfhvBLKQUedMIQR5M+T1i9IDofUTQvUxmkjD8suAekP44wgezza6cOj4EsohtCGjFT90ezAo6JqTkSh2gcgQpJUdudpcfcnACjQl7gjPBRfMir62Wfx8fmPQN4nN7535jv10T1cS0BDJO0mV/fVuVXEqcqehfn65gETZRaIP7JNWRiEd5lH/A7BupYYV+nVX8fWwB6FipXEItXkkQqSuFNrLSWsCpxlh9biBHuMAg3GgJUEHTkkK4RxBoL2iDCfxMKJSMSCiEWErTjgEtGkCKOhzB4KQ428WaJwEA40QH72EiPxcRjJ1gu7w+bwKjEARRpOsoWH+LlzPbsVjoU682kNK+LHHfYxAKUUKTC7RWQES01i7GT2Rn54BAJcYoxkZbn5d3g822TLRLSUlPYUJe5W5Wu2g0W2N7YATMdTZquILNuiLARS27rb6TIzgjBytbCVVkVRWrZFUWit67oRwxEEZrbk1qtNQrkB6aVxFMqU/s5q0LdrCrWmNE5t4n+t8MkW2YJISE4w1xMtw1DDAmSiNlKt2+Zu8KpVAaM1VPaHG6JT0YO5fjKIBGQtK3fAFwJgbTi4dffmeDJdTBera1vCKFShNSbT0YVzVb/oACBlVgcrW48/VpSrf/IXX9zf2z2e8Yt//NZofLw2GCxmcwDf+NYr53bu71ZSoZ5OJrLRbaz9wFNPFN3BqOGytwrg7sHhztZ5Zjm3tbF39wBESqmt9bXDg6P5Yg6gUKrXqUaAImit2VrR6A5XpeFmPLWNAbDRHxoqH3n3Ex//Ox//2je/9dP3/+LPfPwXrr300pOrT1TdzenRPoCiO2ZramOsbZ566l3dsrq+e3NR171eb62/pjoFgN1bt3hm1rc2nn/pO29cfev+CxdXd7Z+919+9vqt3fF4BMDUs2PCalEs6noyn4uuzm1ordR0NqubhkgDMMaUmkUpt3CKSClFOhJvWiUnUIjA7IvSwHNjAqAc58lJrs0YSanAwFv7mTzLcvWjVafqMFhIIKw0lZViNpYZrvgTACEWqqqi1xuoQkNBGGVVaIXpbHQ0UgDI1gAPVlbqqzd6vc7DDz145cob5y/ed2dvrwABWN1cuX3tZm94vqNXRuPR1sb6wdG+iFi2lk1RKgCLpu71e8y8qGsAlqVQighK08rKYPfWHoDeSrW+NhyPdw/3j37+4x87Otr78hf+f/be9NeWJLkP+0VmVZ3tnru/93pfpofD4QyH25ASQdtaxqQXiF4+SIRtUAIMC4ZN0J8IA/4rrC/+oi8W4A2CDYsmLdkSBcriJlFchhqS07P09Exvb73v7merqswIf8iMzKxzbzcpwAY4wE10v3vOqaqszMjI2CPyyz/8w993uL//zXe+NZvvADD1g7qpx+PxarWaz2e7u/Pje4efevMtAZ+enQLYP9zzwm3firBzvbFkrGm7rqoxmY5ADkBVj0xF1tr5zs5pexGIkjFRqGz7HsBoNBYGe+nZJ4PWjf1aSOelr75wzKY1k/xPdNJo9RIo+UZiCvHWgfiu9BTII8kSlEqpIgZEhqR0MN4QrkuEDG/XL4P4xWLCRAPN5E8iZDdbyYpUeslzzvds961MPQt0CrqBTEi3rk96l8oXkv2IBbEtXqXd3d6ZiAhYF6eIfUzeo/h44n83hyWDG2TrQgaUMYSSbJQW1m2BZXuYcQ7Dd0vM65EEThEvIFKJosDDxLwUtwoRGjeF7bt21+7aXfvuadEcuftzPy+aQkTR8ihEIhy1ABFAOKdGZ2tcpvyR3SJ5vDJ3LcyayltLylm6ejTiJWrIN47USboWCgKfTk5M/0dVR8ouM4tKc0pSSVRk1X2ZBY/A8UNwYHyuCPXXYJl4Tkih+GzxBlEuQlllvcHDktZVDDOxL+d8UDB1zBJihLY0H9E1oVweJY8kBtAkRSf/PtDWBtBOAyrWYvB0cBiKGkKiOYl0melGv6LsvcAKzn5UCdFYBADGkKilhYUH9WBSmEo0JN0ce/7n+Bd+jpLsE1aHAD2iNj1w+t/+/2i1vPdf/3zCISnWoFSRJUJFYoyelJgk6WFArW+i4VUFkpu8NVTBL1Fd35UaM1MsRSSkeZ5k0uHCtNUfFMe3kWhbVt9uweKTpiF5emnb6H06Qdp6Z+4+BjeExNnCUAMAhiyLJjtT3rGFYJ+ozg1xPEFcZUATp6ZH0he5Qhr6oK6NiKrxrETtJOqoYcaBTqbph5kZE6ej0cSQGMEn4WRtEIkaYMNLCCBDIXEbSrCi
OTPexGloGqEZgEPRlBTxLYaXqcGwdK2EVzHRcIMl8prtx0OdjJISFxM8JZKIUinUsaZAEbWe5viioXpA4SxnCmlMMTzWaGxpeHyA9LqXQtXCxJ5ukN2ETgDAIjaEh4qG0ITQvHA4NQnYiGqcMeuZ2OgyRZAz0vrqpgcKoqXaT/g1wmUIRM1VCAHRAXko4wwVbCL8aAw0Yg8ASDRclkjA+Rx1kSJ7EJBQbYMBWGtZeLlYnpye3HvhAYCLq8u+72trWbipR46dMWCRtt3UVZ0WEoAxZAyNp2PXu77vQWIsAQQuViJ5FUlS1A8XcwiWHCKwUiNRdTOuXUJvFJxEcu5zSS3VxJw29GDpRStblIzuZhMIS6CQCUXjaCJBiGQtmbOpzMJT+GSULpe4GCx5LzAkzMFVwczeeQCL1UoO5/PJbGKa6XTSd96QeO8c98v1av9gB8Dvffl3pocv/Mf/1l/6J//sN3/7N/+xmc8n0/vSudHe/Gq5Ms0OACwuTi6vDw7mG9eNq0rE9NXsB370z/+DX/31T73+4vHuDMDZ+cWjk9PZ7tHp+dV4NGqaprLEzOL9tBkBkN71fQeIAbmu7bpuQ3a0du+/+979l197/4/fAdBuFj/78//FH7dXVyfnJ+cn//N/97d/5j/5q6v2vL0+rardurYATO9n08npxRNrqpdfejDfO1zDP/rO+9W4WW+6EaYA9o+PF88ulpvuc2+89btf/Vr3eaZ+85f/4k+cnF/M9ucA0LeV76+vrjdtF+zjXdc579uuh0jf91A+EpKUWcTYaDAkEAtrqgGMBIMyJJxsD7LWCpiZjSVEOhMj6iPqkmZ2Kk4HVFCslFCnwBhikVhl1cZ6HBDx7KuajFDv2Hu2tmrbDQBjyVpDBs71btU2o6a2xN513eZwb7pcLgGQ3xzsznvnHz99snc4Pzo6eP+9b3/qzdeb8fjJw48AHB0cXD47m0+mZ8/Ou/WGiKfT8aabHO0fdn0bysKOqubRkxPnHBkzn+8aa3b3D4g9s2dhawjA5fPrh9NH11frzfXmnW9+0/uuXW2ePTudTifS+0cffgBgNh8/P3u+M5+Tte1q8ZW3/7BrNy+89HJV4+0P3wEw25ldLheVrUbNuKlba1DX1ktV1/Xu3sy7HQDX1xtTm5PnT3d2pgTqut733XhGVVURjOt7BGIrSUhXQX0g6EEEhpTPR6qq9E5L8JabO1AN1si0IWvYNvBkxaCgDrqjo7ig6sLNDmQgfwx6jKPPklYgWirWlORJikdvGujUQnVDcRretPVtmyhJqfFsN0rUMheKGohSN4EWy0uh0IOI1BEVgU43gaOdJY1nICWqHC1aVSzpRol/lv2krklCIh2gA9O4SFE2Sulp7WNAqItF+ARp97ZWVteBCmjCOuJcmtYEZmOIj7h8SQAAIABJREFU4sQlaTmc+Y8KteVYPmHd79pdu2t37c9qq/b+q59XNZqi8qfWMo1LQwycQA5KI0PsWXXW0NQYUcT/SXREqS0AoSygugyhagBzIq25M71oaMv8GIYUtduhQqajiap9jHpPHMrEoCPVVin5w0hYDIXaXKK3h4mCDImPeXkIzLjwv0rgJcnQotpz5joyYF3CUIvPUDVKf24y8wzjENZPga0FKSV3H+cZmG0q7aezT0q32kfi4IgMNLgj6Iy5s4HiHD8U/Dn9bMhA64AWEyDK4WalwXlL4qMcPgaiWK2asmQRpgnOkhZuZAWXZXsG0AuoFmw2EaOTDmtu5CMf/8LPhaXznqM9BsVaRssZGzJEYNba0kHPManOWTCjZdm38HoP3xfybUFkQsopRKvmS1J61YCqphzKGBqt4dnaWKrA8R8N1wj5rcVYsumDU/oniDR1lIoFTmFl5chv+Tb8RedLW79SVuiLwWaJeEvCSxavrHbEhwbRDlBjAYwxYVNyOp8lE4cb40lmOFLpLy14NFyVZRZCvFuuTZX/VSAFiZJjTnX4zWi4KwKp4bgwKYRXTX8qYStQoKQwJL8KJJx1E42HxhifjDmUwiNFYwy3VyObfmmAIEoFCQAzG8U+SmBVyCTCEftJ5vMtupUogzGs/AMKbCVFRQwJUTB/E1EuNagD5ljTVESEDSjWDI2DZIEIGxMLWYgki2SCYSR5Muy2/JCaWoqZORwgE+JaAIANlVs5dy5IBX0JZstyW/ip9HZJwIb3TDCgFOkqAMQLIGRNuRmS4pdfTsgbQwQCWxukHC+U3FxxWIJRBkiRxQJTVxTOjRExxhoYX/umbs4vzgBYa21ljbG1qfu+37Tro3uHm81mXZPd2a2qGoC1hoiExTk3H4/W6/VisQCoqRqjSQ7GmrLwRkkIda2ERK2PwajKQoYoMVm9NeBjPEYmmAGLI6iEEomRJHdEaAzIrwz2xcfrlUpBEUdGlEI+BawMiYqiGFGh1TUYtFwzQAqXAASAMcbaOopithfxlbUANl13edkd7E5HDYF87zerdrVYjQV82XTHrgNwtjz76//Rz3z5yfn/8nd/cXQ0Qut2j+Z/4fNfoHr0wYePe+cATOqld84JumYiqM8v2+n+4cMPPvrd3/sNY2Q0uQdgdz6fjsfnF1fzXdNU9WK56pxbrJarzbp3HYDrxfV958C8aTezvbkhuHa1GuGFey99+gs/+tprrwL43/7O314/PH264Ie/8g//3Jd+8rf+11/8v/7+P/wP/t2ffr5ZjEbeuTGAnb29N1579fHZGbdiiJvaGoJfb8x0PNmZdvAA+q67Xq4OX3tBZP2Fz32e/eof/b1f/mv/2V8fnZyjqgG01nZn57sv79FqnSUawDmXoMrMHLdcgT9bfEZQ1EkGM4fU2mCJRqhTaAx7T1UVFpyzITqSaO8dqbwQrkRXBZElCoXfLFklAWSMAcha6z039SjbPY2MJsbAOufb9Xo8GX3fZz//9ttfIUhlydZjAEf7k9rWvXPT8eTZxdPxZHRwuH9y8tH5+YU4D+Dpk6f9qvvOe995+sGTF156sO7WO7NZVVXz2d5ydX3v6AjA9WK5v9dfXJ3vzvdmsx1r7f7ewWZ1XVVV13e2sgAOD+eu89zzsjt/9HA0qqr53ujdb32wN59y75+8/wjAg1ePz5tRx+x8V4F67zzzetPKptud7wFoRqPJZN6uNlXVWFv1vu/71jkGYHam1+sVgK7r9+f7B/sHXnpjTF3X7Pq23YzHMxFxLuBDX1cNM1uTTiAr92n6m8W25KqVIbUvXFbhK4t6f/O+VdfSxxIHyfciU53sNSGV0FR01KwU2uqhGBYlk3e+hUpqpZ9uH1QASxEycVvblrKkuKBSUWE5u/GGAkY3ZNrcC7GwemhKrQIo14KAVMOXknRedKY8Oy9XgkByc253LEN5NLLA+EWy87WYvmpGYfsm4SaB+taVKjv4JIBvNxWBwpmvxSijwAkGGxiQCd+iZzoMhqNwlgFUyjZ/+kHctbt21+7an5lWBelNSGAMSCjkEgVzlieKZ00jVdlS6xKFqHUkAyVF1YeIPbtwyYZyURpIIiIQIyCQZLcPhyOtNYcJIENGwuloIEPsOBa3Kwx
8gePEMSAerOZ9qk0usKQDj0cwhFGLcFCsGQCDTMyjMYZgCF5YhEiSaMoiRoglhMZEY2BWE4k1LDL6E1PqaKnrSEyCI4h41rOng9Ja5PFJTk/LBpIkExg7zP+K/yvbRBauAmShzw4iboqorMTSdI01ILF4RRLxATUrxsMFCt0rJ6BBs+8VF0Dee+bMrcvPkOy5Jk0aZYgBIQYWpHuFQJWxqV7hthxIYNZKOtGgoFZBRK1bbshNXE4ym7LTytpgx7TW6kmUHgIyJlUuCybIIFYw9wDiYag5VEaIIA6FWx+6+sG8aAXCXsiIMbHomzDIWIazVInAM5ORYOsyxiaBS4LJNGwlsISDdYfCpupfQDyYRi3yyetAhNCRiYHRRBQsSGFZE8QTCAUwlVWZL16Oe1SITcoGJZ1NRo80rOCZEBYRJlCw54S69XF4AvHsvJ4VFU2N4RMBwqxHbcWTS+LBp545TM1YzXkmgQk2jIwC0Zocd1q0egZRT3WYiF4sIUjaJINGLJlnDJOXtKdAhBCsKUSiWeYUSpAbS0G4FBEzRMYYvRMOmAlII4nQIoREimEyFI+3CUtswMSolJxEQqugDktPIMQYM2h0CRGEfVzpFEuouAIDTywaoRh2vncsEuqfpZeFZ9mzt9ZS1Ns0nZHDWpF4CfYoQjw5WHcnJNT0EARLOYkndWAk6hZGa5XWUOAMiRqJj1hFYInJyIrAWa0xsMpsoFCBMcZ7n3RUSoFtRAAxizFWj3LiENlEYgAiCyEWhANtAgMjiI2bxXgTLB/hZgNj4Dkb8+OQQ84XM5FNGlShIgmILIfQSwbYsycyZA2IxPu4lUK8nmrNauVWdhuRj9QdUUT0FqqgSHi0NwRm8c5Za41BU1f3798DcPL8GQD2QpB6VDu040n15NnjZ89oOp5ePT8FgNozt8bWHdxqdV3Z2lINCIw4dhIZXSWGgudGrdBxLDZxg8hBQBr66lkQqiMWNl4WIjaxvErQ8cv8u1jfQAaxkOoqo4xLFswm2aWGaENEwesQDVeR9ZEwBw4ApaIMlpCKHkFqBCEYOVlHVCZRgIfFS9wg+ASJyIu41hnxtrIQD2uMrQGwNBvfLTZUE3Hnjvb2Ose+7+txNfZ03l8DeOneG994+HR5tvmLP/mldx6f9uu2bqaXl4sf+MEfw/SbT999D4C8PuHGjurGiHXiqtncXz3/R1/92o/92E89fvL+1cVzAC+++Okv/ugXP/r2158u++Xp2Xg+u3K+cm42bp6fnwIQI+JcK35Gtm03m7YF83Q8bp374Gu/8xNf/PMAfuxf+0uPF1f39verwx+cTsd/47/5hd/77d/71qP3Rra+uK7vVQDw6OmH89HO3nTn/Ufvfvj++x8+evIHv/MvjvZeZM9OuqoZAdiserH1fDx779vvfP0rX7n32oPTxeq3fu3XP/M9b+1eTwF0znLdbDZszYiwmTbjTtBUE49mvbr2GAOgyhpL7BgitTVkTNgTzEJkghfQWmL2QXZIpmTPTsBeGJUBMJo0o2okPXd9750PO4sQMzcAGEvMDBj2nmItXCEyMGA9Oqz3vSGylYERZgMQM4+aUabXQD0aAdS13WQ+sS2Jl4ODmTFUjUZ2ND0+3gdwcX62XK2n83qzuqom1cmTRxeXZ5vWbdpVsGkuFhf7L+yOxpOX3nz1lZdfIvi+833vvXcg6pwAcE7avrfGeseeN+1G9o+OjLGmqmxl9o53Aezu7ldV9QD+Nfeg73zXd7u0t79aV7WpqyqEaj548aXZbPrk5JkRIuGa6xdffG292hgay354ETXWuq713rW9a7vOWppWk+fPzwQuyDzr3vHZ5f7B/u7ODtAKfOf83s6OcyyQ6WQHgO8XrhdjTLTxlVaoJF1I5JhJZ5BIEgf3J+IZNmc49QiiDD5ZhiBGBXJSI182immfJa8EYGACSkT6bC0g7Dm4zYwx6r2KVI5FjCHvk1pEwUWtgogp/EvINC0SD2VrOr6Aj2QKfYEA5GPSCjtaJk+hE5XQaOvSwDKZGFVga8kPXIwsKJUUSjomQaSYRFYZlOlqKgDfukwQIWOCjdSozzI8xETRBVuom5K4TAjbtwECEoV+ImaHQjlSiQa6xLEyRzGd6JzWUVOSBgcVXAqAq3Y0mFDAGAWXz2PQx6K6F4pLBKmGosMs+hCtqpnBk6naNUPVqLt21+7aXftuaxWi2yUbHUKLKirdQiiTM6rkkIisXRCSUxiJIWQ6T5ralh+O5hTm0plH6lFM7B4Diq6PJgEuhS9geG/WtbfZZTYYJeuDqI2Jilug2oIqNDqgOCMz6LN8RQpvK6/TQBLIClSEUp6a6imREeITW3pPycZFp0TF0gGgwbF7g563+pHBIzcmeOswRKWPdMY53bJ+sv2Y/lIw/EFOvlpYb7xvyOf1/yGS/UntBnR1lROfLxT8JIMMIisD5piyi2g1L60DtAXYkGEYhUQSFXkQtXTmWPePiGAEzKIRsSgQNfV/qzM7Sc+lkWLoG9YHZfBUAZiA8JrgkmOdiycKA0e0RaoxWFDYID9mfKKRjhSMhib6Ppj55uroE1Fiix2n+Nqb7yLREZUFJ/LEkxRKlMrVFlsRuvWhtj6lY8QMUyxvWXMiZbonEZXSDrkdLUV9MludhH44mMZMiGKWUvVIMQN5UbNdS0sdFCYRiVg1XMKtWIhkplFglARaQparQi8sGMOHb+FuCdUVFH5hsGrFCXhk4q/QEVLcNQOlL6k+cQykqJi1GxlQrQIMEqeiVDTqT9A025zBDpTzM6T2dCmBJCIw+QVpWwfjvh5/FFSx9EQZn7MFx2KbKYzTViaOHE2pTficlEmK743MTQDKYc5bM4oEXCMlE7CUpJrIiA0hBPYy985dXV0B6LvOs6doy6aqrkXw1a//0Wa1cs6H8VaVBZE1hgVNM+rYB8gFh0EmLwq6kncPSVGE2XY88oAtR8IvpT6bVbxPbsmVWXJ5lSG2e4gjMMZw9InoYihPTVsvDSz0wuzzG4eTJGMCb1YuH83DhoAQQxe8IkGUsvEULO9FGGvfTyaTnZ3ZS/eO1hvn245kMWpHAI5ffu0P/tlvP1/1ewe7n379jd//46++94df+ao3/95P/5XPvHj/g6/8SwCf+/7ve//Jo/refXE9E8jL6bI/Pjr6kU9/6n/69jfGkz0ALzy4/953vvl//t9//y/+G18aj8frtq8MzSyvetRVBaCp68oaAjH7pqoIRKjGo5kX/0df+8b+/ADAn/+RH/naN7++uzPj3v/i//i//7Wf/Wtvvv7Kk8cfTmZTMfYj6QFspFpebvx6/dLrr51dXjx/fvbC8Us9U7fudvem891dAH66A5LFZmWns6dXy+9/4eW/+Tf/xq/+xm+ePz+v7ATAbDJ+vuQNu7qpd3bnVNeT6XS5aee7e8bUH55cRdin1ABhYVJiHooCAACXNJKFiBEPiyeAQj0cMsYa6oRjNn2U8LYzTDPD1X+DCTtjFQuLmMCUCt5jCi83Eaq62qxXIe/k0ZOHgCyWi8VyenS0B+DJyTMwi7Wjpjm8d393d+/7v+
/zz8+vducvhyBWEXGOZ9Od6Wwi4maT6aZtnXNd3zHzaDQG0NSjum6869uuA0zdVF3XtZv1eDy2jQ1mzc1mdXh4NJvueu+ZqW27uq6NSO/ayWRcmQqAZ39xfe6dA+xycT3fPZjNdiaj2dnp8yQ8nF+cNU1jrdmdz3dmO6vrpUDqutps2qqpAlzW69Xl5XlT22DgqqraWCt9b41t6rBHBQJjre+ZEtXL27WUvXV1UHhSb67UJ8Y+3mgFMQ9fAYTXDGX9LBOUL40ce0sSg8qImnihguegtkZkZLl/JERRCCtriZYrZbJZtCMtM0OFd64U7v/0kBjAXXEYKH1G4ZsMnvmYRp98k24LKSNWwusSaxW6jfyHXUoszD5l6sfSOVvScvCShp+stTe6UmNklomGanD8dJuWs410fxooqx6VhAZVZJGWOb6wlOxUMLtrd+2u3bXvtlZJ0HSgIlq6MlC4gKSYBx5XxkUNexTROAMk00lRcDmw2SHnYGWhwx5jMd9bjSyIcqApLEb6e+Kw6adBtrd+0ViIfL30lqopIYZfxqNmJWqk2YeoZjcCBqwKQmIGmk/Bq24Y1oppRXFEn5A/BYMpJpR6ARCjtIZ1m7PJJAhj5at1oJ7V0JAAAQo51LeNOOmKSVNGZKcfm+5RCmgfcy1Zc2NXuL0nvSU/dgsW3/6ifLm8P4qtihSqVGfF3pTLpwZjSHnekeSr8eLw7SomZ5V8KxdaND4oiJtUmCEMiBF0XaOhoGnIqePyZXHFTU4wVNmtiAtLmpg+VEhacXVJIGknq+gm+SZEW1GxzZJt0mj3EbGTGFnq9gG8kXaEXPEczJRAmpduW57MH0spTaCjGTYirfamVjWSWOlQp0/R5haiKoP6SoXwDc3IVq0hftKCuwopUdBKtFyJ/smTS2NOI83aVqS8EnvTMOJ4r4leE+YAWSp6jcOg4YsGpkcJhjkQCmNVxt0CXoNONb0qEgAJ9Qq0EwPDkBzxXgjPWVODj8OL6JvALglFeagiRVzdkvMlh/qWO1qprxoKS4dVCLIwwyNmFMaxz3B7HJik92vURCL7cYeFaJSS3EYLKxeeuvCg0ksKkbwF5ylJHtHgLUDyhxWzl5gsGs8VFZ1DQNISf4jV1ZGnTClWOsI+FPoUFnHOnZ2fA2i7zlpjicgQswdksVzCymQ86fquqWoATTU+PTtzvjdk+65ru957rqrKEKXj0NljMPgCqYf4KbmMabyPCqIqYT8i6tuZUZUYUXDQ9HtiqUgfSvKvSD/ENlH3QLSZB8TPd6NgVCVct00QkaAlsjkg0RqSm1dF6WM0hIUKhKGLUdMYa+c708r2665dtv1rzRjA2Pt//PbXdo4O/9lv/4t//6/+zGx+OLlcfOfr3/h7v/TLqOzarQH4upnU1dSYSd3Uo0a8Zzu6f3z0d//+P3jltRf3d48BVIYfX69efPnNvpqs2+uqrq5ZznvbLa+99wCEebNpScT1zvW9IYKn56eX8/35S/dfevf9DwF86rU3OpGnjx//+J/78U23+to3vvXFH/n++YsP2qcnk9H4tF0DqMZzU1dG/EtvvMVcz0bj1ar7qZ/40u999Q8fvf/h+x9+AOCVl15p6qZnfuHV13dm41/95V/5uf/yP/1pY9/+9rtr5wDIZiNdP5nxpvXLq+sadmGvxXnLXMUqqjDWEBEMDCBGvSVEIF+YiggIaQZkDAl5E53iBjDhuCpDhsgwJ5yNHpGB7Drk9GFfh5h3Dd0jFiaOh5GF/CNhgRHnXDhxyDMbMvWoBtB3vYg/Oz9l4b7vjKGT5ycAzi4u9ufz+c58b3f+yquvV1VzfX01m4zrqgpmxNpWV4tFZTCu63o0M8ShvKb3vN6sP3r4EMB7H77vnKxWi7Zru75vKtv3/dX1gr3fGc9DfrRz/uzsjHl3Ptu5vDwjQ00zOTk5E7Bgf3/vAMD11WLUjIio3bTsZbNZt2379OnT9XLJ3gEwtkJtfeeury9NNer7vu+6Zjx98MJ95/vJbAIAXHnXz3YmLNy7nmDG40nYAsZEUm2M9fDhrI8M7rS7SuETBI2rS6m5t4i7ooufRTHc2pI0m3i0cu1BzypcD6h0UUgGIpLVjcC1s7KkBF4ACr7Yj5GUVajbkq81zD66ZimNjQY3ECVzXJrEzZfcFJkG0EuuvuJPKYbrGG5/umwyWI1ChYhQSmUwUo8qRRT766Z6oK42lWoEZGMsKoTFWoMyzLOUDm6MbXvasfdt5jXEhWE4aOoAnwBaSffE6d1+QyEUl7VZ7tpdu2t37bu26cnaSWeVISnWv0OhXwpiPFB2lYPEuI2SOQEAKJa2JyRJXwCAB5V7whPKb8wNRqNBV/n3VCOOhq/cGqYMtJBCDKHMO5PBBUm3lNIOt6X2bvPbbFEYPFWoPDko4jYZNsoZ5bDlVjNgAY7tjzljIFlPhuPTOweyThr48Ikt29tQZCmE+azoKY+OXuMt8eIWqeHW2VB5NcAi53vkTtI6FAtBsRBmunELfjT8VFi4FAapMmeUDDiV0qOM1lsyQK4eFyzKOfAh9JBducWqCqwlAMwh6EryfAjWhtxepDgpY6whKzH05oZgUyzhYL40GHB2rcctRsaa6B0eiDulkK6KNXIibfEENFYuzCUeCBCPWaEC4gOUjzoDBNbavAqq8BERGRtiU7e1vG3xS+2+A+2yxPYoSCZsVAqShNCQ0Yzy+UT0iChZagkgY2KcX5pQRjbKnQbsSiRBgk6T88KEShqg6k54PoY3hOIFpWgaOktvFqMumWKmVJJcQXGKukquyXUh5a1bQM1gi9ZuRZniET0jIiD8NivImzmaczLZBIyxcSdAjerhITus0UgYoo2qU8XPVEA7D1+/3FZEU/+JPSGhc8R5PX0z4kFhgg7FNtI+2QadDMaRwlpQTieCECBEG2YAUDI2RmXaK7WIQfJh9fQY5rAwmbFovwXwBoQ8Tyx5uNTmHGzx+qgIkWGJRM9aW9W163vv+4CVXdeZSqw1IOzt7gGYTWZXi6v1sq3rpuu6vu89O4tKIvIGuHmiCoMRDccuQA4rHbKZbNZWS26iwwWtyxuBInwGpL5AGVJIDWhTscWKFvZAttKXL1CU10VV4UBymPVtw9NVG/LGUImYYglVY4TiHV3vQZUxFbFft93ZxeXjk7O92a6tquvFmfgOwNcfn/3Vn/3Pd+4dPvtbf+vDhx999lNvXF9e0rgeTaardn1weABg7XzTNJ33nn1lqPf+3vG9Vx4cvjurZbN+7C4BrLv+3fffc4szbheOmUHjqmoqSF2H0VtrWYSZXd9dXS/W640xpm7Gi8Vms3nStS2Azstbr726WHz4S7/0fxxMmvfe+84bb7y4v793WY/NfPd4vgNgOpl+8zsfMKRqmsePTmbz2XLdfeVrX3nr02/1fbtZbgC4jsXa6WQu5I4fvHBy+uxffP2brx3tPTi+3zoG4Dftcrne3xXXu4ur603b77Zt3/dnZ2er1boyDcpTzk22HFKmkUpdQ7Q0he0YbxWWFDfJXhBCIxNx0xXcElGTx0rlFkDL1Gj5aiEYF
nbOOeeYvTXWEFkNhjXWuq4bjRrxvqrNZDQ5evOwbkxdV5fXlwBeuHf/+dnp2eU5i3j28H3btfPd/U27ns92AIxGo/3d/dVm/fDJo6qyIs45dp5FZLlejUdXAK6ur7vWWUvrzXqxWMwmVHf9erVaLVed68M067qpm3q5XBzs7f3QF7747nfe+da3v/nWq5+uR3a5WSHeU+3Nd99/7/227bhzu3v773/43uOHj3d35vOdHQBkrTW03LRtu5nWI2uMsYZI6qoi4q5bAwAbEXhfeYu6bgyqtuvjXktOwOD3YgmlMIpNOyCDpbMt/XOrR5oywY4yzJBkIuGGpA1cujFusszI6oauxvhPHEyWM/W6viYRLQ3vVGQt2ZaKYJQ/52txDAPZohxk8lBpjzJ09JcZyuVTH+PPT0RW/ySJK4Xt3+ysvENhMLwxMc1ygvGCAjE+G+KajUIqAZA01jjH2Yh4jreIsKV62HEpJtww8A0ES8q/DjxsAzasj22Xzt+a5k07asFh8tpSBnW0WSe+o7l9t0P7rt21u3bXviuaqgdD7T20zDMLeqr0L/DlglmUAnvIbDImsVhNctTqUCIxJiT8ikH0UjQLqloea/FJymOT8s7BeAt9OJFnVV0zz0sqYqmdqHZ4C9cV1aOTSJS4oRCbLNJmsMW/yQAlykwKaA1eFPVANekMcnu0zz+J29yUGKIUpyF6iIrS7VwwtejMD+y5MGduRfkMX1cIF7llMaOQFosXR95+g/NLAc4b3ak8NfgdKmQpan7s9Gj7W8aLsj9Nw48yVqpeWib3qLwUPmusbRg7JSU22mGihS5tnNRRFrOLOIsQvUGFf184FFUrXxwNKNnc+KeoGyNp52ZMTWhK5R4ZLEC2AMWwNSmWNEpzUaKHWv2FBsepDGCcgUCgwmad92Qp0Q4OLlKrKhSEyVCaJN0wA9GAMQIkpz6VA7m12kLG7Ghlj0GQAoEwKJSiAJCtzBkjI/al3Q+JPcY6vAZ5zPmmrLhKfFxXl+Jxi8bYaLEkVkoqRVWpRJIph1pRObdSQ7th3y3vTUlCg/0awLmlGhQHWFGI5JJCByCjdtuYZD60ZbL3eVyl36QU8iUynO2UuijnSznSrWUsV2DwF1HB3aY0ceAigsoayrshqTMRIQcKZ2KGXIBte6wFFS/HEkehRKJQWRFLamjBXpDk5PrMaZSwhK9ikF028Sd9GYeDYij/EsAbX0Vp4AKQtZaInHcAvHe1HTH3nr2tLXs2ZPu2E5HKmnj4RmVmk8l6uRHmuqrB8DVbipw/xuF8nEZbDCfbyEvgKWIjrUHpvhRlIUXcLQ28m3TLR0oEL5IPibFIUtyRRqN7CvEcmyLIKAIt40MQhEj5I2n/GQ+yOKWSiDIKQgx2jcgd92nbu/MrZ4FNuyKz2mzahyfn3tvxpIGZXZ63AEajMXv3P/z3f+dHfuiHT59ezA5fG1Xf+uKP/OgLb3zf1cXzL//+7wCYCMxksgEfHhw9e/rQNVac//2v/NFv/P7v9j1+8q/8hwCapvG9MyzWVqauJ6Px86vF3sGeFTq7DrnPsj+fd873IrPJpOv9hbs0lW1MDXahsqFn8+HT06bZ6ae+de14vvPe+x+9xTTanRvnevEAxrv1xfXlzv6u793hdLL30otXV6uvf+Ptz/zgDzfT2dmzCwDsxNTVbrv3/Oz/u6oSAAAgAElEQVTZ+fnFp9967e0vf/mNf+endo+OLk/PAJi6qesxmvG8Gb1e1xeLZTUaLdtWKuuAVdeGZQpGRIPodSOheEhtLCweGUy0ygffiJGQMs/CEkrHtoHQMcLZZBKC3YVCFVwU+26IdMYM2WBAHkPiROmNEFFVV6NxA0CErDGeLRGNiYTk4aNHn/3M90DM9eI61hA3eOHePRizWC3ryrbO7e/teeHlanG1uAZwfHB0cfXhyfPnEPLeV43dm+9dXy+6rpvvzOc7ewAqW/XkN13ftl276WvbO+e6ru87t2m76XwKYLlYLRbXh8d7//Q3f+UL3/vFT3/q05Px5Ct/+JWf+Ikft3Xl2AOoqnp/7wCCbtlZa3rXn56frdebo4PDru0A2KZqqLHG2Kpi8X3fi4C9d76v6/pqsQRgbW1gnHN1ZW1VU2WNqXrfOe8QD3CLp7lZa53zg9o4W60kNVlcvY1LbAuEypxUCL/lmSxn5L/KigJZ0DIgABARquypVAUESpaj8BgJwRYlGtgjM/0YjitNkZDpz3DgCE4vQ2keH28sCxMbTvRjmgi2Dpcs0ieG4LqxMOGjFPcMhJO4xQhabjJSXoVEKH0hScwLMzdI1jnJJ+PFcr3heyxOL3G0adiFcBzkmrwOSdTLDHiocd6cV5o+bV/LPW41GbLKwjMewaSzoJSK8Qna3F27a3ftrn1XtI/n6Hftrt21u3bX7tpdu2t37a7dtbt21+7aXbtrd+2u3bW79v9pq1JQSg7rCC6z0o8jN7w6N8qfhZ+Da09YQjABmeSPijEZHKOoZCvekKU4y4ZCDBKBilJrSG4tvSmOGsjhSGXwQxlokSJCKHnCQvRWdHCRnu8MgEjyOdGkfZVlLItYnRiCk8NRygicwkEavKAaSpEAesOnpSm8ZTfRQfYx/sthy9lfKRSHNAwkh2OkcL8cVxMdtOltyaM3TIAbDDV5/4aJk0WU2Nbt+u5P9uVpUkIZznVbgMt2024/2VEo2zfQzUKjEiNTYsI1I0bDkkF2dIc10extuTHIBFpwquIziEqLATcZMoCeJhF6Y5aQ1ZXOWJRwgLA6tzV5qIgcug0CESzhIOP0WuirNQMp5GyGcYYZm+RFL4LTRCN4dD9FsEEjmNIB3TFwKaWGJijrQumoc9FEApExzCwxEIo16ebGqib/dvZVl7BNTv+ifoNQzkWKGWAlNLabbB2pcSPegNIJIUW0Q9ytKRiwJGuD4Zdvym/UQIk0izj3kCOsC6iHCpUbJQBclEyZcN6w6LU0unBEQzmCjHLD4QwHngI50uh0ugKJ6Y1FQnYcP0d2IgNqot7+AqZKF8Vs0Z1wlTRmchAPOICkFDAsoKDUO92i+65M+BYZQP0W8qnv0spcQqJoLwBI8/fDVk1xypQour4pMYwITIlEJWJ9nnPyFRbhFRiUDQj7K71Lwt67Cb3A3SMDLSBRBHqTCMdwbBMTsYtgRRYIkVhLznsyxhiqrZ1MJ1UdEix4NB4TQdivlktrKpvyyxKyFus0IHnI+z+DXVEr4HquDJe2NecblanlWaWw6aIlJg9opH8ajklUfwCyNFoNYVNGri/Ogk0cnZTkM9KGciga+ZKvplcAMMYIQhSQCItKDHCO216uVhvXd9PpmAXj0YgFzDJt+nVdA3jzzU99dPpssbpeonl4cVm99/bh4T2H7tGjxwd705dfeBmA9N31crG/v7tYraq6qSbNk8dPPzh5ZpvagGxdA3h2ej6b7l5v2tHOwT5MXVXV/fvTwxfHi4ujdg3g0eOnsLZpRsT+xQf3D/YP1+v28cllVTUs/Pz8EoA7PRuPxxWs77p6b1bb8dn55Ysvv1J7b/xmsdkA6MVYcD0b
H+zsfOOjR5PDvf39vZ1n08dPnown01Cn0ta1kCyvr10nH3704cn773bXz9585dWjFx+EeiUvvfTyeDI575xvW4CO9vdRVaO6GY/H4uV0eQkAxsQDlQyFs5VE8iFhWY4rklkEhgPzCmKbMAD2nlnIhsUyKX1HT03BFvoEdJFQAFTLJlMULwkgZq7rRoThxBARoaqqiNCCytreuaZpjJHl9fXF1cXx4b3D/f1wUs3udPrg+Pjx2Wnf9Zt2s+66cTP23j8/PfvMW98DYLFcfuvdd4+P7+3Od/tNd7W6FgZ76Xu/Wq+cc2EwLOJ6B6Kqaqyx3vu+d2Sots356RWA/YP96XQ2GU1/+Pv/3Hw2v7o8/9Ef+OF/cvZrZxdnR0dHLpy9BvHOgQUiTT3q+94YYw26tu3WPYCdg9liIUSmqWuAQky1d/1icVXVdSjNORmPA4hYuG1b7zCdjruehcV5JxxOEyJDEGGyZuvA4q0miW0lGX5rf99saS8mZi+RV2RGUZDiLbm4uC1L0FCakLpNwfWJeiFJncpKsxyrnySNLgidSt5ZbqOrApDWnhEkCqk5JIWWMkxfocTp01X9SrdErgwZ9C2wlYLZBLq4vWIZqHGc21UgdXsKAGONsKRjbOI/RDCI8cupKHoROchx4JQkjLA8fNuIKQlvia2kEZa34ZZfb40wvU1y3e7mtqcSkyluLIRkKLvMGmqKqP24SNe7dtfu2l37M9wq5QWcmUtmb5IMYSpTJ7lbWdeQ9JFScclqjwgh1X4iEaMJq1EoCyWzJJVtziZCFCpCFhyjjCEIdk+rJhPVeuN0ikFlnbOsP4gihSr2zDH/qsx2C0Q+pQSQKqcKhig16DMS3xfzUAGYxGopHXwzhPOwlbaCAIbwDv9xwpSqardkw0mh/5T6Uil5If+uwhQVHUTQiK5keZVUX4/J9+WrSxW66EiXYyDWDVAvsNWBlFT8GbD37XeWX0qR8ePY/uD5BJTI7YPWYgAkwVcT7ARZtopv2ErcKzqPQmChaANaaI+K+6VMh017R223KmeQCJvUV7TRxBQVQ1oksJBToAZGFi6xtJiPGKMnDEserQgL9IRBRQ99m5Apqs1FekARIwxpLnEcMg82R6wCkKZ5q4QasC3dd1spgFiNK16n4lEiMoY4+D44uRGGBCvCh7KpNUvv6Z4ovybTpZqOUvK+h565q0irhudUMyieJyWi7pUEClJ5PwuSkhBQFRzFSy+gkLsdj3uVOFWv81HawuGoZBPmXmpNiZyn1N0BeRRBsGGWuycrSGJSwjtFqzYhZKaFs0WESNPUYu6xKNwo5fen6cX0WCEyatCNIPEi5QigvpIgdt/CqVIrk5eLZdcbs3ITeuM0fYWE7sVgMggwMjpkihiuiXVq/y0Zib47Y5REN52UTEnNszp5BsjobguD0eN7i9XLtCACI/LZxJB0pxaemYLCUQGMxJsk78sAk4BewjybjgGsJhMfzuqFAAISW9F0tNP2nfcuGEfGozHAru8sVdfXZzvzubAqs2QQs+LEkPohkW10RJB0DLWOPDGaaOkxubJXoL/CiO4VhQ+VD6fl1h+ULmql0LjXJBWXwHDvlYq6MIeqjpGqCQboGSmjFJQknmuUxpU+iBqmFDYD3kaGwCACB3yRWFml7d1xMx3VtSWeTsZ1XU3H46au294bxrLrAazOnv3Wr//Tr3/t7fc/OPvC93/h+fOL3d3DsW3ccnUwmxz+wBcAfPntr0+a5mgytgSqqgoCMt7Uy74m2MdnKwCG/eL8/N23v/pv/9S/eSE7m87PxjSl1aKuj4+PADw/u6jrZjwZ+65loBmNX3npxa/80TcMuXo2Yi8ADJl2027YjG21OFu4y8tXP/Xq2FSNMO9MX6gMgP3jB5v26uL88uUXHnz9nXe+/Lu/+6//hZ96fnL69NFHk/FsOp0AWLc9A+enFwza3z2sdo3dnTqG7X3rPYAnz0+461BVF1fXXd/dPzxikVFTX19dtZsuANdYI0wU+RKl+hqR60FPK0KkXdllotUkbWBrhojEh4zRRNtFjKTy5gVBSlsyuDV1qQnGiwuORkhALRFhZlhrq8oGjPLOiwg7ZuOFZTKdPHz48PnJWedeOjzYA1A3zd7e3sPnz5x3m65tu55Axpg3X389HEHzR3/8xz/4Az94tL//zrfetbYOZ/IQGed823Zd3wHoXWcMERnvvTHWVrUxtjL1ZDIRkD0IFkBUVTUajb/npe/5oz/+w0cPH588Px+NRycnz46Pj8NcWWQ8nhKZq8tLa6rdo93JdPrCvXuXF1febQAc7O9PdnZOHp5YYzzY2so7Zxvb913f9+PxBMBms3G9H49HTVMzy3K1avu1NVZE2q4Tl+kbe7bUFAKVknUFP6XaE2lpVaZLazeg2LTNS6LglZY10wIkBpSdQEWhBqX52TA2YHqBWAvnpxO2FLQn3pulG5JC1kH0nWnR5aL/wsmRPDtZlI4sn3TKUlwr4JMkQRQ9b0Fna+yFLQzFYyTC+XXRja12tkAGVZ7M06Wik+G3vKXSrFS8NPkOiYQ68fyww4LukvzU28WyEgfPHDby6CihJZfqJ0gfW1BKGFhOCCVst58o0CxOT6MdoKuOAtjpBZzedNfu2l27a9+drQpWkRTvJxrmNFBiJdthhELhkW1Fr2gFe8o8I16K4Y6FKVNIOPqXY59yQ7mQktVR7lIAI1lNA6IKxPzxIXiSu8nK2pCXBucYglIfrhujPs5o1qMCCrnr/IYi6kpBlYwwaiwaXkUQeyT9Fvnnn8j29NWleVVUEkmsqxgMqZuwfDy/RY0kal+TzP3Tjzehu8UJkzSWLko5jHItb20DAxQVq5TkLZRiUVYz9fFSGP2T4jFT5/pXIxNLkTVOQST1GUembmOJigol5zeRSsahhDZrJbhCRSWVhcIsMhIBhf0LzEwEMhrkE7A+qNIMAVOIHPC3SCQU9zWpXz27GRL44tZUHwCYC5CKmumjsCcCY+xApNMqbERIpVpVhpLkHtiuIqcjSEUGg1ua2UdFMISwbIm/6sHOzgBKxCLuSiIjBAhHS+INvC1WVpE8lXArsVMAPZyHQlB3gIKg7G24gQJ25GBdYgKFY7xAgBRrVDgJsjSOArOzvYWUNGtIZ/gu4nSkYW2UAEvsuNyHmfJKEeqYRi6DD9Filq+K0HCeujQmnHwCRjgWKUyamYVNiG4lAAbxRHhSbZ7UCIlyujFGJtwai7vpXithVA6uxBCJWJNvoshVRLESWga1pGwJphA1XVCyzitWsJiAmFkxCquQLVC6LWJxp61S9pmebO95SZsmwziOJi+TUpQ03jCP4RJmPlJQmjJEUZc+7Hv20Dpf0eImEM+82iwB9K5t1xsCjDFEMAbBbGqtMYYWy+swievFtfeubmpjTFPVzOJ8qEKMaPcxlJTTwoqIgbgRYK87a6gk6j2RMlOOqUQUGfLuHvo/cufFuUUIRaHzlh9yiVL1zJYJEoIpBY4bMTbxqyZYRJKZhKLI5kOvAyyNo6I0eUCiFaJ3brHa7E1Gbe9t25HgerUmVK13/WY
zb08A/PrTk+W1uVzg+HD+7W++e/ji/Qf371+su41feyvB4vOZN189vzyvRqNZVbcs+1V9sL//ncuFhx3P9nuxAE6fPDp5+Oiikxajxepif3ferbmlkan6WmoA1lrHvut7EBljx6NpzzIb77AYIWsbAmCYRVjquvUy3t1ZnHHXtnY0GY+rmXXdaAJgMpkYYHW9OF8sv++z3zuDPH5+0jS1F396csK9B2BtY60hy/V03F0tZdFhZzJtxty5tXgAlp1r10fV7Jzo6PjYCZiMB1DVnvrRdAZA5Bwc+bkIM4shEjWHKK0TaCFJZmYIiYlbS6Ip0RpiwHEUg0vnZLL93NrUKxMXXUTEx2rEm3bjnXOut9Y0dU1ahNJ7TxBLYMdtt96Zz0ajUbfpDvYPnQsFMWm1XonIaDTqnTNEm7ZtmlqEHj99DODg4OD48Ojho4dn5+evvfLGZDpbLBfe88HBvKqorusw+KqyBJpMpnUzMsZUVe29X6/W052dxkwAuN6NRuP3v/P+ZDQ+Pr6/u7tHhE+9/uYHj967vL4MVTMrW4VhV1SNJ5OqqmY789XyisiMxuMw7dVqKeE8HUMEsOdpMzFV5Z1v2xaASF3XzWg0ISLnnOsdi4yacdd3zjliC8B5F5aPmTVeL0qVOei83IiBpgV5LS1RKdVRqh06MBB+rLg4lDI13LoMmB5QnZIuE8FQtE5n9SDqQxT4KBgQ9Qkr4aOQoKMPqKoW5RbSEYTelIrnjJNi7KRCKw9kt+G0lF9/IhwG/SofF2jp9IKv3xBIM+wLNpg+U3lHGg8AwHtOW0nFOaPzSl0NvMsIfMfHPWiMMYaiF6A8tEfScHQ0GlyQjNcpl6EA258KSDcl8lsfKAWRyJUo5lNJenOYtWa2SSFZ5G8333fX7tpdu2t/5lslIsYaAokXgqEBifYxJ8pQiJkIqiZzCBb00ZLJKYxfEI8K5ZCzJhyZYoz7MMICkISAkBArEew7FZloxWA1AmrCrIeJt0UVJbIjL2KsEZFg5Qk8UZ2IVKkAGDPgiABhFmutjT5w9uJhENRLmFz/PpTnHzR25U8pd0MIQQlXoUg5zZAzGCLYKvMNq2IsxXg2EQ68EtsKVcH0knxV/IPSJhKfy3IJSTSOqUKEdCYHscRHFXSpI/bBfBLPNNeYHoHJ52aoiSzO11izJdOwGjHjOgQ7F0eTdhTNc5Fp7VEEgK1s9vomZV4kHDYacUMbVPQxxuq8kzklfrVIdg+ISKgMQIYEEj4CICMQsAfCoabWQIQD/lvKUkleWZ1xzNEkgk1IEE+e0YwuFkFxTlJ4MvzHYUKGLNnBjJDCo0IslyEKEcUSIlAizqmgQoAh48VrIJ9qP6qEJ7gzAyLGUDCgSMwLDGhLFDCNjLXpyGYRLicc9qmP5jAV8dVbEXuKlpwokkYti2yAsoSUYgoIKcJdF2QsYw0RWWM56otwPZs8LADE4GATSBIYimjE+G7nA86kGCgiQ3GH6yqwBDKkFmON0Ur6pUqChCpNsXS8swA23J73nW5zHS7pCHWzUNQ1yl0cd4nnKE4XZlNFCY031IUlMiAS5kjoDBkKZyawxDN6obgXvyGehqSam0gRMKBztrBxSVOURUBDY5J5PoTGsndkyFrLkdATIMGOEPYpETFDxBPIe2eshcDYOHVbNSLsQ+hZQiVVAJSQJtEbgBhU0aqAcuSk7i9miAhH/FLoE2faKdlsRa53pLtAiyRIiCE2kBCGQ2Ebp22NQXgUSKkYohWeEL1YIUxTIIDz/QDOBORjGHxPZIlIkIP9GJ7ZE4hZglleiE2IzoIwhUTl4DkI+iUT5RmL8srCeybGGPaeJZj+kWZhjBUigTjxtTHE5L2n2lSVrccWgG9bdL6a1J48GZqOZyTkuIchW9Vh9mfPnp2ene0f7+8dHn3tX35z3VZ1NbKwAhGKB2oLW/IcnAQS4/8i9bBk01JH75cJtDHssuy/CBE3ZIwlYsfhFGpoOGFimp45k1pISv2Ix4DE08kJwUgb2L3XqF6OoAvMkRmG4okGQc0WiIR4ThE9lo9A4LAKkRpk64ZyO0ViEUNExohmPBiE+DpiZmZxxFbEQaramvEIQG3Marm+nph1t/Fu7SY7ns31pptU1cVm09EYAFG1JvJU+9HUEi0vFnB8MKlXdXN+uXprNgHQHMwuz5913m/ccmrNsq0YfkrStVxP7OOHTwB88K1v7+zswBAq2ZntdJu2rqvK+bOe18s1ANduvvXet7jjZjLbbyaznemHTbVsL3bn972tfO/C1u5cL64nQ9wSNZPNxvV9f+1X88M90zoAi76djqbr1aJfLdxqvbN/uLe3u/Tm5KNTMcbsVAAqZ9rWV7UFQSYjiwrSLdmPye/aBoDpGXbEzfiF+4fTquqcnzTjb8C88epLT06u3n3vIQAjHsaAyAkbqqwViBiIJ1RV1bseAAsba4REQB7iOx5PGue6qrIC6foeANlKpKoqE6m9IQrZ9QKOogUBBUoCxlZE5D0739uqCtjQd27TbkaTkQBg8p6ZZTxubFW5vg+E2DNDiGHg+slk5lgqj6vTs7btwnE3rned43EzZRhL5IxpN5umqSfj0c7ODMDRwb2vfeOdd7/17quvvnx2cXp1fX3v3vH+3u79B/efPX022dkHMGomTvxozqPRaDKZHOztTifT8/nk0cPu/OLpeDoGMNsZC0ndTP75P//9z33+M227efzR03vHD159/c3L62vxPQAyWPfryd7s3ssvADKfznZnO8+fPr08uzo8OgDQbTbGynR3tNosm6oaj6rRdOyEjKnIkKsJwMHxsQU2q/WoqsCubkhQ9a0TRrfpwjEmrndVVVljvZNw0HnYYVHOpZTkkaLLQEa5XjgbPdJoijEHDKTo/BTLj8hFwnImk1pMFE/+I4MgEkeJ2hiK3FtsqLoQqH32WYpn1nOwCv8jIWATkk015ZNAAHimWIVDKQlJyB0p4gpEpUYNpQua2MDWSAotMUlCJoWHUkitr5X8Z6pcRVMnpdpMEYQpeQAGKMRzqHtbbZ8kCj8ARCZWN9GhCwsgJDY5rdmzQBJnRti9IiIIZ9ALC3uhSEgli74cyKogZkCQjQvC2U8Ug6MVyhGSAoGJiFSuk4r8CoSgrBXYgMJdHcQzSibjBPxigVSv0QGU2lP2fyVBTS8AYO/i54zOEdeyFnfX7tpdu2vfVa2CgJlhVXkNzFw5cPLelwqrqLs3fCkIZqTqJuRjF/FBOTIuqbiIxo+Q5BTLVgVuN8x7ij8imxGit5NQCAeE4n6RMnIrX44cIDEJnQ6KqaPoaDj7gilImlEKUCl9azqYm4whQlABIIMxqz+uePqGqyvydb184wVpxsUEJHlU8+/bgVFhBAUrG3BLoFi2+Ibkkxz0QpEHR18pFXEBIFYOnUIFArZFQSxVZcvWHZ1LsMKI8nFBFA4VwxQZI9bGc6oLLq92UJ29ziGBUWWKJFsMgZtwTstuDgNtcgzFNuQjag1RIYmwKMctgztwS5NotMlCVPobYHZDGJH0bwRqkgfVDgugCpYmpM0xmIXI0AqtRq
wMv3KPAXpadbw5iUiSKYb+a8iQkZCrpu9XDIrvr4wNtkgFUo4NSAfcipZ1SzPVNw1jZmUAsyTAh88mmJEsSAOv4oW4iIWduLAVpukneV0BkfC3EEELyV/yXBBlYaJiUOUiKCXIVpkCkllulcIks4UHJV5E6i03bkp7pXyvohfK0k8KEKEYYSY6h7TF0qMDSTpSHdWLAMT4BkgmmzdausAphFYdKQV8GDEQL+iIolH9UHUg+ySgNv4hU0nyPZGBqIJZbGvKVvkMjDjTGG2R2Wjw/YQ5FpV6CarvARhklWXIgEJZDy2cCuUGge+mosQDPEAcQsk34xYlpHgSCRuxMOTCmEjNA0E1JsSO+WAltMa2boNexjvjqrLCYquKO2eNqaxdLhYALi8vvXevvvzKp976zNXp1ZNHJwd7o6CdkRaEISPipVgLRYXkncq1VjSQdcCRdKFI90kyvyKt8taWwDZG6aptrV6CFQ1JRoE+RVfFX2OIOMs1cUApKXLr1Tq2pLKW7IkMGWMFDIg1JpjBO+8BbNputj8iY+u6GVmZjMejZmRt5Zgn452qmQFYbDoWY2XkPExll+uVc77r+gdHR4eHx+v1FQBrrbBMxuO+d947Iu+96x0T2a51JycnAMbzo056MdXDp89mxrDvlhtPZvTs9PxicQVgtV5eLdeu96986q0W/oNvf/Pp6Zlz3LZdTy6E57vOkaGQYu8dO+8BO5tONpuOyDJ6APPxmGzlQaOm9t53F1d/8NW37738Ru+6prHeWQDdpiMYY6z3fjSqG2r6zi+W7b29eUg33p2NO7epq/q9jz5yfffg3j0RappmsV4/evo4VEh03ofFEQofAl6JIePZc1wLIyD2PkgrtjLMzljDwiIStm/vOiITmVvmunmlw7KGkGADaBSaBD4UaZcT9t451/c9B/NzEbfJymBFA2OttdbY681yYs3+weF4PHHcA+idc8zWVlVVha1UWTsZ7zrXf+e99wF8/rOfMwaf/ez3Huwff/jRB03TgKgZjSeT6Ww6nU2nAKq6Ont+/uLLL1fWGKK266qqGjXjyXTy4MUHvhcAXb/2zu0f7H3mM2+dnj/5oR/8XNXg//m1X/kLX/rSar0xYADOdYvl8sH9F86efO3k2bNXX33lnXffefjh46YerVYrAMf3Dq8WZ5PpyBpDxoDgnbeV9c4bQdM0AJjZe2et7fqOjGnblhmzyQ6Rs9YG2Fa28p6FxaCKjpgYxVDIDTmWDJoecJs0FffrgEXdQheiaU57jkQHSFqJdhSZpSJYIQqoDIEbnQ/JQ5EEpOJzuhrfn2IhkzChuKd+eg1KzL0O6RnFOenUB2IFKKlFWyApqkyplB1oOBX1C5PQp/dRrquaSGTWlMgQkqwVgZMVFR2ZvifyLZ1HOUZKnYYAzSjnxp8qk0VC5fxq5Rz2E+WQEj7lSwq2kfSN8sctVpOFj4QyyUsvSjqKqf8rtW2F4l/1+bt21+7aXfuz1aoQrBQTzaLaGyNIBrpS4ulRE+XA/cs0w61GEAS7ZOZImXJLyXmQPIPRwFFoDUl2L2m2aO3IGEuUVUTllEliLHkt1Nilbw0vScEMH8sUCulAkNhkcUmhl/8OlA1VZFm14Mges11DzcBBlykzA5JkY3IUql67oavdWIvE29OFG2oYRVuILrEChYZ90G0Lnblink5cxzTpBJEoxaV1ChacyOcLQbB0OSaXbH40c3ddAgKVwyvnqkkcCYo6umiTS7U9cwGW4lgjoDTCINqPtnVenXU5xjQhGayUmsIVriQheSTfrziZZy3FvAorkqjMGoclYO+3ljdDQiKSDUXNfI/KSOU6ULFTItar3akUwQbTFo0CE0k7N0LGEIVohlJUN0SktpfQnwY4Z3hoD+GNIe8dIdwxDwzMEpPCY5zzQEaMLpFcojZejDsskrR4IEsK1EornkBTlpSClPtqYMfYxkYhTQSwk04AACAASURBVCcqJHDF+2K1cqkpVUtCZ9CRZMTUJxQyHI2RavwONieJukQB8vgigsrYOtdsISvAI/lzCIxIRDAMQDi+VMdf0vRC/9FB0VBf0s1b7tMbRvWSdhUbAwWwYjUD5TXq0YijsVQsm0RzYAyCKHqX9ArJtmFJx54UZARK4cqpFB1lzUwQcvQKQAJqkYwGLCm0GsUMIkMwxrKwCCprGSTChoyXfIyLLlrcEqLx6tnaqIsggPr4JN2gdFVAQuAQU0gh/Au+shZA3dSenQGxeM8yHo2dc5U109nU9f1iuQDQuW5vf288HvWu+/znvvf87MJ5Px7X3PbsBeIAePE1mkgsEtYqlmacSWqcuj3LnZzxLPiGKHKq6N/R9YXaKgZoE7AjySIDFyey2yyJDlAhqMSzwUJDB5chq1heYOiWMUTnmFhY+GZAnj2BjLGWSEhCpC4Aa83+7u5saoXGY4vReDYejRl0db0YV6O6HgPAhp2nykw8G6CGqZ+fX7704vHOpOr6/uT5GYDZznQ2GXv2dTOylakrGUlV1XVlG2bqQs5sPSYma6rT5+cbK4L+crlump3dpnm23gDo2r4WLJerzXr95PT5+fnlpu0m9RRCIrGMove+MhUBzPC+r6vJctVt+m7dtc5x2zkA55cXZ9crT3bVtZ3rDw4Pq8ne8+uF53696cQBwHq5GY0mla2cOBFxjkUMi3Fseh+IlSFj1puNMC9X6/+XvTfrtS1HzsS+ILmGvfeZ7pQ358qSalKppJbs7vbQDTXaaMB+80sD/h1+8IN/jgEP8C8wYHQ3ZHfDLsilsUolZQ1ZOdy8eadzz7SntRYZ4QcySK59bqpbAgyrgMNEnnvO3mtxCJIRXwwMvry8Ol4xjNkOe5JksjJkYIwICadgfDAL2DaNQFKqWyLmEHOexLkZp7FpHTMbQ/GGGe89EceEt3HnpNpgTIqwTqs2ZRuIyU3j5BqTTSnGGBGZxinyJGNJ1LOWFoghMkZEGuckTPtpdG1DRM+fPfniq6dvPXwQn3IuHu4mImPIBB49Ty/OX/yT/+QfAxhH3/ftcrEKMgqF5XIpkOCnwOHq+mq72wKIiStfvXr5wbsf7HbbYdgRqO97Y+z6Zj0OI4Cub5y1EHn56qX3/qd//fEPvvu7D04fvHz1ol8cxV77INPkBczgt955/KvPP3v14qUl61q33eygqV/6btF1HZGxxgjYGDMOoyMySEkAyNA0jJP3Xdc5Z0MQMnDOdn03Xo+RMvDxdIulHGEeGanuMgZs9v5kYJX8G8oOVb4WNBB3PWW2nbmzFD9TDe3jLpeUEX6OR8EVO1KZWIT3DJgUIZ4i9+SAXeRKKsGvNRQwUP9WOpKRaHlqjvCg2C4OlWZEKEWxZh4GEuujJNJFDWx5SBneVM0VigAxraSqO4nIlMBlnoxI5/S/ZO0tw0WKGV118pObjwoAAKxkvan0ixSnJlClDP4g59OMsPGP2eU/6bKxekKJauJlraOAMp2ZN83yf2CpeEVF3XmTd+Wu3JW78mtVHInq2ilILXE4qtlcFhBRLsfQj3juWl08eh4i2iwq9YqQ3xS1p8TnCiSvW
HgOGBN1UMXL1Ko+p1NUhhCvU8uB9VIz6qKJACrK6UDXIEMgRoAeK0g9LZ1J9cbb3CS7Egt4JI2yqULIkIQW5RFlF2us3Biagw5SN+DMuZj/KVbC3OgtfbQuhyJVld/YsVuRG+WD6uMDJJEgykz9rqyP+Zn6y+SxFmGY+cSolE5wScGjfhPP9NeN5K7MJXH+jVMisPqV2gQg6WQukI7cUKZ1OYw+D7GrftbEnqm+uhTSYZp0dEhj60Sz02V6HIK86qRztbWURnpaZPZK1R1dJwTEQ6pU8gzUcCc2Y5I5W4yuhBJeVN1QbSrrwLxTQNxsiUyk0z1b9ukbSdANIgWtZzhOmeeA59M8J0y2bxUTVTpjY7L1XCNs0wqNywews7UY64x7XOr2qlaZ41nLQ6oTQGQFSJkriJCTFlTHSPWksRKlLEaq/ybkI/zVxIpusKzV3NrTSWmhsiqytTdj8Wi6IlW3Ks3hMLaB8jxX1kHJLc2YfyaEcr959yrzEQqcp5SNKzG9lPlBzWOkCknuIc0cW1L0xBIokrcR8msQ6I1GxligmCApXmhLABBSykolTDKi5hPTuooL+5A8GZE78a3puLX238CZsvibb3yNFyYAxBySSntYSaSPVcXKCiTEY3tFphLFo8VpPQqoxIvrM2kK8lUakncqdKkR0r3eEkxMr0WhcXY37AEcrVYX5lXfd23XTNMIMEtY9QuCXFy+3m5uADhn333nbde47fbm7bcfP3r88NmXL1erZSArRDFrKgcpii5l8U+pU6k39W3naQBpAelqjy9V/qqCN5AnMCmdukj0u5nYk9kv2VxdAp6TcSkHcuZK0mSZIjDSD9KNaUztckB5EwCBBSQx+Uk1xnwfgYDBgZkoIZ+2aY6PjqzzQcLoxzBM22Hqum4MAoR4DNHaVmAat4BxTKbtFvtxun969ur1Cy84v7wCsFotFn3bNG4KQSTsht1uxOTjrUNkYAGEEFrjdpMf95OzsuzJAp1zU9NvJwawGdkZ1x2dLvqV6VZTuw28HUdPfvTWttYCEEYIjCCAtH1n5fT15foXn33x1v3l9Wb/1fk5gLftI2ubxWLZdu12v5+Cefjg4adf/gVL8CP7XQAwDn6xtCIEIWsNmI1xVzcb//ajYYyHrMWzn8IExrLv27aDNUzkXGP1xCwX+2CKAAaREXg/xmRyca+xSGTNzKFpmhiPn6yIJACMNcwBgCCZuzILTUwsIQY99KvpNQIHsrrOjBhD7MN+u3fOuc4SGWNFIEE4rSsyZGjcjyxiIcMwwpmu76xpRHNDkLFt01m7Q/JmgAPvdtsP3//GfjsA+Ff/+g8fP377vfcW+2G7WPaW7P0Hj9bXV9EWef76FYDNZv3gwYP1ZnP/7J5IuL7ZiKBtGma+ubw5Ol3ErT0MY9M03nsifPXF009+9vm//Jf/9S6Mnz95StYC8H4y1nTdgllOTo+Y/fe+/71Xr87H3dQtHABDtNvt4k6M+T2ii4UIOdz0aHXkrLm8uFivb46Oj87O7q1v1iLctu1ysby8vADATBSDtwMbqmV8BVGyLIqTU1wwNYzL+CQhc1U7DlADdIJ1Z4JqOISKydefSc7bUNd4+FgpGZyk57LykPFSltU5hE9Hl2qoVY7S2/xtrje+mY2olRqGCohWkij+W3LkZtlMObpR/ekzZx2hEpxxQqgip6AoIlKNQkUu8mqp81xKNYTYByIYY4IPWmcGeOWlLHMiE4ifUTFqqgV27g6e8e75+sojVbmrvUstZ4W5eqvgHGj3bwUI/J3KXFrelbtyV+7Kr2VxZEg4hrikjEopaoAMGZs0RohKwSJC9JJoZemmqNLRy5dkBhURETXApC3OXEhC2Q4ZzTBR1UoaOGl2kgT64z9M8cApSi+kkgtqbkrmomwMAGImOyIyMWUVBCmgSjXo7MMCAKJDqVMdy6TiNqyhgMz+QoE5Igdf5cMfWW5LDTik1mWLpJyhjsM2qsrVuFopbzKDANG/a4puWldZ2TCRrU+ZNgd9iJipss4cjD9ZSepTEDNgMRsiVZ9LshFl44AIymXX5aUMUG6Z/eadqMijs6p4VXSYkjRR1ZhnvmdVkClXRxJxxgHeVFII0hGuw+/VkvbvQxFzDV0BVT3q2ZwdjDn+QyYZB/MGzr0nCenzA6NgxlRUg3A1YMmttZjUAA6lNrWBxp+cMeRBXp3aWi257soHoCCVcrtSpgt5mnRA1V5V612xZ2U6zUil60t1GQDpUuU8XICgN8C8acrkzZ8frLvExQpZEkOJdo836SxJI0h9rEM6pHLwKNSn+Zs56hzarkASc50/nTZftpKjvKNYuigunOfj1mDjgAr3gZq980pWC+ZsfOltuf1RmfSKgSozQWmjsEJKI0+NMqFkdkj8XlKgnDpDpLSaw1/ikctCjqS+HvL29FKmV2FAaheuXqk1SpAG+3Lt+IivW2OtMXFHGGMSCzHEQS+GQtYD05gp87A3re3YbrTxo6wKMYjZKBMZjSFjTb/od7sNAIyeiM9Oj5uFG4bRWetad+/49Or6ksCLRQ+g7bvV0dI1xhgJHN59760XL15Mk7euEWFyBkAQl8VanLG8EuYiQPlKJJ3OTha4+aHohNJVQOZNIScqtCp3QJ6vROq8e27pxuWPLEEz40kPFKAwW8uzCJpDdqsMKjcdK2Rh55wPPoSJQHBU6hFhDhx801rANG2z6LvTk1PveRrHmAY0CAswTtMYvGudI3RttxvGzWYnlOyenXPPLs9Pj0+u1ptp8lc3m/Ob8Wa7NeSC8h4XJkvw3p+cnL11sgy8G4WMdf3J0c31Og5kCNIuet7vT5xr3/tgt94Psm+4EWs55cIWP3kBG9Au7NhPfdN99fL1ojMdtvEK+d1+bNv27fv3w+T7vjfe+hDapul7vLx8bYID0HXddr31IRyfHQHUts00huv1Zrcfo4vhZrcVCU1jCTT5cL/rnGuYuWvbk6OT1+dPZ1NMIkjXl8OQeEGWygY2JXuGCEKM6R3Hpm0ABBYAfd+P4+gnH99PHs0SegUIxyvw8gltlnQAm8oyABkDgR+9McY1PUQEbJ0FhI0BYK01bM1+MsYQpO06L16Ao9VJ27ZxNJMPIjDGGWN9YFhLxrm2M2T/6Ec/ArDoFuM4Xly8HnnqF50P4fFbbw+73dXNlQiury+1y0wgYyiEACEOvOe9MXR0ejKNewBta5xt1jdrY2nYj++893gch5998rPf+Qf/0cXV+mixALDdbY6PTzfrdds0xpppYmfdhx98OE3+s09+BcAH72wzjKMbhkXfx8PXLGKMsc4aawEcrY6EQ9cv1pt113bMfHH+uut752zbNjHg0Usw5IwxgdXRhSptdJYzc0WgSLS0OSuJQiCuWf+hPU7FZP5SIMkcXfsbqMClDIqSjJNKptd+wcMiMFb9E/n5DDBzMtxSKLNxbbQIZCFV3Yo5Hcr+i32zIpI+UcOLMoz8gI4pQ7Mk72bQR82mM2RygHTTtTLKUUWz+lRcce4IJxBRyPcEZOpqZqbqFj9A8rkrINkAM4SJlWcQWmBF4fIzAryh
lE4mJ5m59UTpdmxD5+VASh3+/Xctb8BSd+Wu3JW78mtUHNQyoKxbkkBKrjOgUhBUkyMBjLGiyYdjXUmSqvqbOHX6StXgA86r1ryIK/T5qKxEMxkZW1Lcp2aSLUGoSMl4rXHVaBSYpD2Pn1dSSuMpo0CIOmf54rBknS9jkFzIIN0NnkRZ0XMy+in6LJHowU1VIJFqLNGp+i1USCtRlMwHXXyTQBMlVVXzQUlggzK8yNWoJiaKoBJhbiG9GYojepNCqJYnraPADK7thhnWAZgbXGobmhrOtepZNFEZUcZGmWbluWItyKmCKD8jRhDveteb09OKqhGERpjm8IfKJFQGUzlI8yMVdswzUuOICjZrYG5FwAOsESmTUVaGfLWeHN+KOdaZNUA300SntV73kq1IhbCSrXAZa6v/P9M+Tlu9AZOGW2n8utIif5Fy+0d2WgNqrUm0NtXBqzxAjjelmGKWFp2a3FL9p1JP9J6sOMoKP4OMSfY5AaCBe0REZIoZVflYXl16fP1wd5We6GyWTmhWo/lmESBpqm/4Yl4q7whpPnggXg5WaFfMillP0X0dN2HG6IJ6YVQbo9pA+mEeR/mr3kXVqNO+STFEhuIt2VXkiOiuLz1OuuV83srGZtZ0Fln9QlwHEg+6qmYZ1QtVWJFTR8Ro7djRmdwqCoiqJzOLeTGFKwdTMmGW7DSxl7ILBaAcmJzb0/nLLow8OZmAAmahlH9SmIKIiHAInspiFJ3Q1Ju4TwV66SwBSJHpmpgXUU8tQbsSU7WIJVubiZd9b2EBPH/9RCi41oDEOWuI2qbbD7v9ftdY0zY9gEeP31quFk1jAN7tdw/unz1+++HV+a5vmsAsngEIE1lGPiUn+YB+1njL+oNK78iKKsGTVplofliGRgzrXWFSNHHoaihtHErO2ewdcHHdpZynJm3/8naGSnqzFhFxHWT0pjZTWKVmAaAUjsveTyxCMUoPEHCaLEMcvFBomxbkln3fNa5tHAHb/R7eAthPsM7Bps5M3k8+vHx9FQJ7Dqu+AzB5f3Wzvj7ZnJ6eQWgY/DBOApA1YLImXl4scLZ1zbINbrr4q49/fH7xahjC42/9x9P6BsDxyenldjvu5S9+9SevXjz/z//ZPzs7OXG2aZqWrYuhyETEEsiAgoxeDFkhc7PeP/nqvHt3ce/BMQBj3bFxbO1uNy5WSx/8q4vzx2+9Nb64aMzNNMWBizE07Idu7HrXDdNIoODlq1evH91bAQhChsx6sx+8Z8H55RUzJISb9fpqfeNjMCNznCpdFwZGRNg2xByimccYAwLzJAyyhCDOWefscrkK7H2YEBNKDl5ZSIi5hgkkkvJCWogCg8w8iSDOWUHIS50ExhAJiMHMxhmCIWNiBqS8IPq+FwGPQwjcLlqIXN9cbTbb05PT+BAzvA8+MDMLmXGapok//eLzeNfN0XIZmMdpeO+DD7fjbnN1Y6yd/GRaG4K/vLoEYK2d/DRNkyES4ca5tu3W26txGDfrYbnoAey3I0EWywWztyZst/vddvPo7Qeb7cY623YtgOv1pTF2GAcW6frOOgNDIuysWZ4sI3nXm83jt99xjWubtm065jAMgwhLOpSAzeZmP+yda9977wM/Dev1Tds4k+7Tk+XqCMA4XnNgsmSMZT83HFbeVCp5ZkiBCKCxekk2mWIKUliSYf4MP6m0KlWS5LeMYtPK1BZXRVwBOf7wlonuTSX3IVvJyvgK/kIFGiv7XX6FEDUdzq+qMM+ctBLThCwTJTnHsglWKkFRGknkiMJ9DuHzj8o3XJjsfOhZOiNBwkhSqxErkf1nJk4Z30T2kg62c4R1ZCV1vZo+AeIldap/lpGKiJ0P7ADxYD6OAhXzsapDySWKiPLR9gqkJJrX61KVLjokzN+uSP1bFdZzV+7KXbkrv07FQXWbmVcsyVK9tYOTDpEuaCXVaUVlJ6I2pC6zxGYp11S8fEVAahhSvhyvSDLV9AACTLpyLQsaUk9WVoIJSPbALDtMOicKVX4SIhAuopYrURTvts6xinMyJJ1WUB3qLqUIatWvVRTVOlDSo5PQlCSN65C5FLAk8QaAHBxYxGTd2yLvv84WCajcPNSGBFlH1a+qkDBVZeNnWfVHrYhnLIGM5/IoDrqhOpwOTmEHkKFO/fyhYS/BHop3tQulW9EPFMgk3AUFgNXfZxs5pUWbdAa9cDg+pLiSBDCWSCgbamdUujXQv1n8C1Bufk/L2BQEDM7WVgXLWqvkCiqoVC3ONBQiQiJMvIU2GzyiNY2QTwK9EfaotzlbIaNKBbxpjaWo5Zq65ddse0rUQglpjC1Zq2BdANFJIcrLpCDvsu4lmVpq2iu6+zoN4o2YMs6rLlxJJ8VTAsF5B/LAU7apym6KkmW0osBtyqYacuRxBussfOvhTIqq8/ONnbmWII89/sl6o2j2LKlJrOTaz94m6D2XCouV4GV9UD3WPGawcIkvFIneoBRsYZIGWHd4vpqL7jcjYGbjeW2/yRlUXs6ukTn1ABjSy0xFgKImVoSUvMJjpcKiFyYfMn1mkB58rqzpqlJSeSmvPW0rGf2QghCNLu7cu9mmSWu/qCRl4xoySTFUbc6Y6jlRrpVEryrlyAcXDsIyU7WU0qykxlgkmZAMkRgCfPAGwRj77uOHAPbXV4uuiXbyEEIQ9H13dXM5TWPwvu07APdOT6yzrnEcwnbYttZ99OEHP7762PMAWC7WwXLKsKCDYjWfTWj5ZyZKK7ZAaYz1l9VYi+DJf9HhtzNLcvUdEhMiqSUgpxYzP8vYoGwarVpbK+tKTeSiwlcnh6whCLOEeJO7iTiE460dAMiYRdcwT34ad5sNTLvbD/1i3A3DOI2OFgAItnVt6zoiM4zjwtJmt3/IaF2z3+42my2Ak+Plw9MTEen7fugXp0csZtiEqwDvmayJVz87ZgkIo/eLew++8c3v/87vHTfd8Y8//rmJKRSNgWvOHj341rvvjMD51RXDsHDwPgiIU6BtHAeYJz+1rhGSEMzri/Xx4uLk3gmARdP99Mc//vQXf/0v/sV/NUIc0fFquTh+8OWrq5CO4sJZS4bYsx98aBv27Jwbp+HFy4u2cQAWfQeytul2fuqsDd4vu8XQLaZpGoO3TQMgBG/ERflLRMYmW68l8n6Ky4gDC8DM3gfrLBjDNEIwDMN+v4uLrF/0fhr7fsVgNfbYOLGRpafoyry4o4g3BqAQfPzQGMSD4T4EMwU7Ta1pjLXGUGDilPxYJg4Q2d5sKBobLRkiEunbbr8fAOza3c1ms9ntmLltOrhmQ7vG9Y3zF6+uAYTRL5Z9u+jXm60YartuP+zXm/XCrM5Ozz765rcAvHz+/LMnnzeuA9j7aRr59MQ2TdN0rdlOxhGArm1FeD/srTVt2/tpGvdT3y1EwCG0TQOACNaa7X5PsE3XusYdrY6265ur6+uuawHs9tvjo6OTk1MxTIaMMUQUL67xPljOfID2w+7B/QcvXjy9WV/zxMd9xyGI8OnJCYDrq40PIUmCOvFSErD1Jqx4Svy4AkBVwsfKg04gAWvtKhpnTnNJ2VEPZNA
hD8m3Kivim6krb3gh1w7VdniG2026buzwaSpGWB1eBmAxB4OgeFxSb2rWlHWpWfejZCJNtCuCN8X/pf80NpDmFMm1JuUyOcT0KUZM4UpEmlRFksqW6K2qmrqW0yOUYS1yeiXSMIWC2CuLqBqPJcmfSGChGQwjlJdUnNDB10iXh8c3y/DKoHM1Ur7LXYTivPgLvUH0/G3LHDSAykd35a7clbvya1VuCZm7clfuyl25K3flrtyVu3JX7spduSt35a7clbtyV+7KXfn/prjoO4r321IKWlInvlA6sqrHOXO8QSrxBmTNAUWmpGdLcSiUHcX6OYqjqRw0kMoTmANS1KWZjphqDhb1jMVQxRyVEN1iJRQCs/pynyhe+IHo44o+bZAAzGJtcgrW7i49UJqSRGqQRQl2knJOO/kn1Uml0Q9UDy57+9Kzt+OsqPJXUs59WT+R6//3ldSD+vKaHHZaYjlLj+qIKamP6CoNq+N1dbSTjqVk3dRPc9Dl3BNYDlEWN606QAkcpHLzST5hAYg1FBedhucmV6sIrMkXHpFQOrRIevRWCBqnVEVkIZ94TAsqZzokY6I7l1Ok6sxDXs9rdFAbQyHUkTDzWJnkFs43fcTQqRQDq8cXKSUhyEtoHiSEW+sgOVyRzwsi6L3Us0f1MEwkS4n5KsMgQbojmYAcv1DFhYlkd/+8c7kd0cUV49QkHZUlU3YpGWcSKVk3dTlglbuaQstSjTGEtcTx6nE4IqDyb9c7n0hzqNfBx1D2o+EKVUA4EQJ7Y2wKm5XUx7iGbArrzZyxREWVaL75dqzarYIiMrHo4BOqdsIbasPBxxWnYYaNNRlo7jOe7TiquxCj9+oLyOvllcJF5v1PPyUUJgwpzEIgRq+np7o2zd5ASImsUjB4iS01qJPs505SCr3M7acwEIkXRs1ZjMojsgYxVRvHSCUTb4RQaqVQfCmBhDm6eDYpyt3SpqGKe5n00WymKD2eKaZRqen1RIR55GgpaSUnVlbdXpWWmoAIRiNqCazyKzUZo1NT6DWMMVUoSF7o0N7ECFZCdfJQRJiZQM64+GfwnjmIcNs2AIwzx83KWtqO434Yl4vFfhyNNcYYa83J0SpWf7NZN82psSYMoxj36OHjfvHJdj06WsZtShZFZNQEe2OEoigBivypoYIIJLLo+Stp8ET11UjptzrVWp65+ZKd79X61jcCsvO2OrqRwpBVsGfRWdayrirOCzWQuDiU9HyO5mmahllYvOgUBw4A/DRttptxWpud2W+3bbtkjvfvwcDEC+K9Dzfr9X7YH5Nxzn703rurRXuz3Zz0dtn3lxcTgI8//fx7H7177/TYWnvv+OSoWdmrmy+vb1jYaE5SZmucdMbdWzbf/+j973z4LnX95Qb/2//+h5fnVwBOmkXwfO/swQ++8aE03daHF+fnrekYEoJ3JWoJIBLmrulhggB+kuPj5fOXL03jAPz2d7/lp3F7s26M3Y8DwPdOT59cXl9cXu7HXW9WAMZxbPt2GsdxHG3XLLqOWdqmv7m+uVytAYzj7sN33v7Bt39w7/SEgv+//uzPGuNu1jevLy7un92zdgRgnry0xsDGrRFFGyCy304SePIBwDROEe9M3htLgUfv/XKxANE4jX3fxglr2gaEeGWYyWdpJAWxCglSFCNEbxXL6yAGTlpjggkkBAtmGcfRWHIEYnAIIQQAgXmcJpqiMKWjkyO3aHm/d2g4hJj5YZym3X7vQ6AcDC/iGhN4Wh2tAEzbCQI/eWNpM2xW7erpV08AnJ2enS1O3n/3fQDjbjDmydHyyHvfNo0fB4Jh5s1645zbbTcA+r7jEJwjlsAixsG1Zhz9NE37cX9xdQ5AwLthD0HTdeO4H4ex79r9NFxeXbzzzjsAiGGttcZup4GFvZ9AMMa0bRuG0RoDYBgHMtT1CxEZx2EcxrbpoNtrsVgi3bMX5S8n3FKl3CgZEqHx/qQ8OO/Q8vQtnlOYQuIFqp3kMPm0quvQ7ipnb+EAVISBdq8KDPz6ko905V6Wr7SNLKNw+zGav1M9IPnBgj4k4b4KaApgc6oWKTQTgpGymiEApXvlxSj7qhuMjDhzz8R7Kc8IKIM3SjI1XmDJzPkqc1IYGZNzCDMzG2PK2TBVjjhEWSGZmebrT4XD7QMJUnB/rKZm1tVXtycsYaREO6peT0ChEjFZ6OZJovoKHUoZZb8m5fh/aMmvKlj5u1d1V+7KXbkr/38VFyPimT2IVVzzsgAAIABJREFUJKWdsiAImEWcpGtshTmmtolsPgs30sOIJmUxg7EIwnruOSqA+aggIXgogE4qvYlc2yUZwOAEI4RIYEBsovYBrQMo6c5E1cf4eTZJBS5H7YhgTFRmYZPyDgCWDKsy4awtCYlnJw2ShmPIQC2YpBYW4SCQeB2QJQeAhWO1RR7GkVbCL97PqLI5thIhrDAH0lGqop5kIkOBzlw/M5qQJIrqZJJgYWaYW5lq9LxhKCK/ZHMsFEuU1HPxESmIySgu/6LYwlYtpH5LnhZVkfO8xac4MBmyxiRwoEcXhYJklWYmYWkK3loLAYMNNLEgiKKBK975kMyJRvRwdzqAE3FWMsGIgMkCOeOYERZhBhFZSywMVstlZXpgEWNsOVajhg0fMjQpx3OKmZeTNSXruslEkbRktfUVtCwQGM10qFhJKmADZ53YvLSifYgknwnPxkMiUouDwlDKCy9ODodAJp5ji1+L0iyPOyVeEtIzaSV9vC5YET2cErcFCEZEPHsyBoBzVojTikKC9fEvk6E+E4gMTHqK4WzLHEI68AlDFLcbcyBDVU2UQ71FxJLVXxO0jpYnKeeE4w3CepqGYOF0CsRoW9H0MJvLetTFJBGt0ibNttQPUNx1IhK5oTCMsZSyKAIAGRIBc2CvViyCtTZyF01dJwm368RFUjpL+cCqIKRLFAyAvCeFRaKRrhh5rI0H4EXPrud9GndIsb+ndQRrbNZrQGRgM6SOwzTG5KNbzCFSOyqQBOIQAHAod1Alb5CJt9myWnEYYgSs2ZesAIxAIIIV3b2ce6vdIQERGWtFr9gWSKisxgYAmai3swhEbLwFOOlFIim3XOwVRZaqp+gFAIsXEbDaIfRnXGQcirJqjBI3yg5jIjWUnlmeUVzY0Z9BZNK2JRhLyn6E4kHqyEcE8eIZESJG5Cmi6wzxotqKOMrAAYoHgZO11jUtgCkMwzT2rodICGytJUOOmhCEwWG3B2CscZ1rmiZsdrvt1hC6tgkheGHb92cP7sdnFmT22+1itWwXjnkcA7//3ju//PkXzpHaUi1BGMEYyyzCbBOZVKrmo+tpD0Uq2TjwJDIkROZlrWqA6iXI0jZekRA4ul4IJp6Bjfs/mxKLewX5kCaXw3QQIKhMTEyAWe2apP4yyacNKbKXeHeJGHJqPRARiRecgISMsAQWY8gYY2NrIXBkw5CJhawz3ocwhcbafnkEAKOfBJ0zzrTvfvCQTX9yfNGaFrwn48I0ADDBUEDbddaPCPTk6Yt/8Fu/9Y3337s4fxYCn57dB/Bv/s2/avujdx5/c7vbiKVxzwMtLp
4+HzZb058SBQCOQrA8+Ol3vv+D3//B+6+fXzy5CC8vnn7685+NtgVgr9dH90958NvF6kg8vDNi4z3JYI7nl6c9LY579jx47pzwPpgGtOI1c4vmxVdfAXjw9iPjGvI0kYVHYNmM+83Fa++57drJTwCEicbgrAsi64sbOl21vfU8hOCfPP0SwFuPT8/2y6+unj159tVJv3rxxZOr7rlrln/5wz/6ze9+++zRewCEedjthEwQYgazbG+2beOMswyOYMwYCzHG2kXfARIm1y+C96O1bYNmChOAo8XSNQ0JBSZhGBgiI0EkhMhJWMTEhEbMKOBEQgiNdZF3eQQPMU3TGuvDyMEYsiI87EcRJE+5kJMmyBim0B71ez8OL27un50O7MN2ojMDQNgY605O75GxgWXRNtMw7PfDyb1TZ3sAWxoXR41FExgOjtlb03iW1jrbuZPjJYDzq/PAfLW9evbi5X7vWQQm7PdbErD3XbsEMAyTcxZk/TTYKCy9I+Dq8nK/3csUABi4p0++tOSIp76/t1gsfQhd169Wx3HrW9c45qv15dHqmIhu/JaFFq5pXfPi/HrcM4AH9+9fra+apvXLhfdT3y2m0Zt4xVPc7ZGFGAIZz6ExjYgYskgqhjJYgh7DjXtPkoCIfBnZJ6KQI4r5wFFQm8wBEqKLV90QAemYv3JXAlIsgmaj0NgLyWKOjE1OkyR2og8wR25AjclR6Sn3/tV+k/xnBBukqkMFrhNGKX7pKDlVCGQIGcljKOoaihLVwGo0k0caY+Z/EQhxnabawJCNh+59mHluoKbT2MEqaiGpAMm5FzMWCURCvkMA5TLuKA4o34XIYIg1lggiQWoBB5IAY21Ec0XiJX0m66xUNLuiyUSxEmuhiGOgaDOPFyroNZmvgu6oBXLIlQiztTYijagnGqukhxqr1VMRPKseNW8svwBEtajycNW3NeQIh+jn5YNX78pduSt35delONFrOZLTSHVLwa1It6zXRyaPFCGXv01PZGFaFIQs4bIfqYioZGvLAiDHohk1aCXbT9UVlXAHPaz5MFV6RQQD2dv5NW/Eh+VNldaGiBzfVlmGRIHC3DP2dYUMzcI+onoaQyeSBM19L//+Db5VFeDRmizpWTPrffVwSpZfdfUWGQ5tUYePUP1sSdB8q0t/Y3+hmh50yd2ufua3VNOHRhUmFJUMkWpNTsbZmUEt2QC0BqlJm2NqUL2vZj6qs6qKhp9pJfojhTe9YYLiqjM0X6zJRE+gGanTC3n880mQN5AnzW+GZoS0O9RWmKqhitBxCZTuaMyrVHFryZpsiiWvRAvkHVVRO34a96l1JscpGyJRrRvQREwCYSTjA+VKtGeitrGE7jjHHECnK/GYwn8EoGrCK4d0ZV44AH41AqzGdrhlJKbQnfOreaHqgqb4f+EYkqyV8V313c9VjmQ8y+bcssbSHoHqKtkcKSlJaMmxmVssxGZOugRSFmAUkP6m8vXf6BLIT9TLp/qt9rtIXFLl4hTVWHQ3oehCKeyRABjV6/QVwOSx5+qlCIBMjyp98XyrUDWnSuDDmZT8Zf7rTRSChngA0GT0tVaYNZ4clUhARRSli/5OZJSFAkhJ6FT5IF2N8+6kRKZ1oGgmaYrZuKXdwFo3jgPA1liytNuuAbz1ztun905/8fHPicjZFtE6I4ghKtM0AXDOLbo+huk1TROrvnd6CoIxpus6JJsyQgiNsSwcOJCj1XLZtM57b20DwDOnYaYpEo53jHCxp6Mwd5FqSkhTClc7hioepGPXqWTlRjP6RXLOPHQioCoL3WxJFMVVWV79R+6OiMRLz+OkkDEuWgzmWS1zFyjeJ0uxm5RHRVH5Zooe3LZtnTXDjgFcrzf73fF2vCBrL24uHr393vnFefvoCKbv2snbHkAQ2a/30+DZun61uLp48ec/+dF/85v/5YXADGNMStg39rgzF+dPF/1Hw82O+vYXP/vJ06dfte6Y/a5b9ABCMCRimgbDfiWyW622L1//5M/+QmDu3X8PAI2+sS0R9d4/u3rl0O7GYXV67H0IuyGaI8lykMDgxdECzM41bWOOT85gjeMQ9tcAds9fGibXNVf7rWldZ9vdfnh9/pK9n8LoTAcgBJmCd40jFjIyTcFP3hjpFwvrACB4evbiYtE+2Qzb5em9P/jnf7Ad9puB6Ysv0S+oaQC8fPFsuTgxbbPZ7Pvlksi4xvSrbpymfKE9hzi5wSLa4jyJhMCuhSHTuCYucg7cuEZEAsIw7hvbGOOMsxLyDJKBejgBQJhh4m2HkRNaRO8LE8jAT36/2zeds9Z4DiF4AESOWYy1Tdc2bds0jbONs651PRkboyMJ2O73y6V1rnHOMXPjmnHvZQgXry4BLE/63XZPMrZdQ8SWKDtrd7vterMBYI1dLZeT92RM27U+BBH03bLru2E3pRHElKagEFiT/EFAnoPoJh3GPTmw8Og9i5ysVp9/8dnjR4+Pj4+89wCOjo6vLi+WJ6tTe+pDiBZGZp6maXOzoSMBcHV9aRtrnR39SISmbRrXRIgwTdPNsAYQfDDGsnDCAmkL665MuCw5GfU2kQqtlQ1eOHC5I0bmsjE50hSlUY6Jq96mLHJnLFryOSF1sksSFFl7kDmPpnmlpaaqyizH6IBTzZ9P+ekLebLM1n9Fe03KetTeqQSgilCqVhQRk524Ffh7AwpJISOJjjrk9EYM5C9dE5Dei0MVSRTnACLxXiMWjpABaj9lFjK1F5gUBqQxpf4VkPn1hYAS+TKfCV1HWayo7E4lXbTD1U19ZSUo9aSqqp6SGrod9E/P9UFxwZuV1Lq6u3JX7spd+XUrLimpqgcDRYObyd2kQyTHlwg0Ci37oKA/qUoojKSbFy02hQFmE0cstT0rfRY9nfGYpoigxnj5ySxdDs9Fa7bhrBsrPHmzyp31OGTFsvoq/5u6Xfv7dAzp1SoUH7MXM46Lw0pX7cysOiLxsgI1Y2hf0n2QCmYOZJYU80ciRFFSv0b0CsTMMykXqV/VT3+jgeJvVYqshZotMI/1muEfijbT/Gp+QO0AishUyAszrEngUofDavNNQY+pAvX3Qq9jr1Zhaj9p/JSnlatezPdEeZMUK5TvdQske9Vsmc9KHtyM3oqkZtMoeZ4izss3ZxTwNTMRpcsdVV9G6keBgiJGbzMEkaVsvOF5Z9VeJAAkBp8W0wdpvRBDhiERfdfLLO5jEWioU7mZMl8PnYZChRaczo+bAgf1BqKUXCJWLvO5qYBthvAiyPiRElBXA1aZsNkUyeyfN5dEccX0UgV5AdECK9WUJdtm7FoOoIvkKubKpL9Uy5+oXg+SR1umWrUAnRIWjo71mv3GFvSS2RnRDhSK6i8RjWLXJ5WPCgrhRQWEfi5zfpsCOAq158yVkksiEik3JEXJmDkC8i91Hcry063tlU8qaTdQW6QSq1Jmc0eVw1NmKDMCVVuHoBXqBizpGARIdwypDMgxFypaCYZsjoiUeWf0whNUFMVsb+Og5J0xe0AFDLH6RVikXyxil3a7zWKxHMeBNL6ZiJjFhxACA1j2y5Pjo91+t1qGv
uu7tu1aN3lvrTXGxOOlPvjFog8cJj+xoLE23p/e9d14NRBCpLixljSAUySkiaAc3xk7rUSVHPhxoICR+icpuzFQ9jIym4FQMnYmygrdSjBS+EYRQHlVJYJHbpaNL0ibqtBYBBAmIiEymdHnVV44FBBvpcjxWbrMiACY0U9EjkMAxAjGaYprZrnsO9n+6c9+fPrgbHO9u3//6PnzL3/jm99e7c6effkF9R2AgTFA3HIh1uy2Oz9M09I+O784Pn74l5/9+VvvPQCw3kzW2J/+8le/9Tv/8O17+B//1//lLz79rHX37HLJRH7yAExHC2Bg5s5Ja9qAZy8//fjTj9uTk7h8Vw9Op3HYrTcf//KT3frq5Ozhs5fnwuCJSfTaIiEEIUHbmXHwEniasN9trXPGkPgAoDs6+eCjb//0pz/hYdcvF/t9uL7ZoXH9sl9v1o2zABpnREhYQpico2G3nybfNObRW/difB8RC/sXry/+i3/8exuYR6cfPX/9cj3Ihx989OrV+dMvvwLgyLH3xpq2sSLBB2+sudlsWteokTDOKLMQSxBh1xlAQDbuhtXREsllTgIxllhABB+8gwFgrI0rN6EPFogQ0vHwuFCzwdqQEeEYOiUBw240ltq2FxCzB0AGMYzZBxbC4mhlrIXg+Oyk6Rfb/Q6ANO09QASBw+Q9OTtO44kzLObk7ATA6PfLxXIa/PHxycXrixDG1XIpwsvFcvLeBwbQtu1iuTTDsFou+64fx3G9WffdAiARxHh2gSAE40zwAUasMcaYy6urxcmRcda6mCjA98t+t95z4Jvr6zD5tmkIWC1Xl5cXALbbbQRIgRkEZx0ZEzgsmv7+o/ub9RrAMO1PV/eIMPlxnKau6VJwO2Q/DBeXFwB8CM60PnhDVqOndctm0cgi8WQPoZyYV36Rd3b22WrWHJA5FHuVl2fOEhSrRCxZjI+pGf06razqPr/qgG/tS1EIUbVdNVY4hXYhu2Jrz5Y2nnpmjROkkMx0nCDF28PEgL2CvilLWLVLVjipBma5MVIppUu8Jljpe3b25zcL6uTC/VIHDBGYfUbdin/imERleomkiNNrrBEKkQmLaKIhJo1krOXknGAziFD1UT+sImoyxsmjyJTSh4wQIKbcx6AgV4kIVfOUMpUGXFGO6rfrulC8cllglaWV7vL7G7HqXbkrd+Wu/D0tLrOzLB2zzYG+jrWJComiICLbMKJ4inIqs9pshjHxfKWeFI4/o62x6BWigk3PHVesX/+h0sX6q/JAblr7pFpfqUSqhzMJbtVG2sdK7S1SYoZX9IHbZEuYJJ4Vo+IvU8mTwqOE0m2qyVSJItCT9NLuJksUZG5ZSHqQdoAqL+FBb4o9Y2aLrElzKHnfVJS0ucX8r2I9qeataNIJcCQwMtcHUdnCJS+qtJ6Mfl+FagkkwZWE/MsrmQzZlKOBNbfGUP2dVoqucMkDLIdSshU697xeYNniWu2Wg2VS0+WQxGVZVHYfmoGp/GC1LBHDHxSxanejpcsayga9VLli67jqBExCbIwBibCwkJ3vuEJq9ZBrh2LyWUE64k71FJaxUdLoVA2PtneKCTprGCwpzaVU+153jRRomL+Mth+piJEoN0P4EBGNc8yWhbITZoaz/Mp8Zc6mpzxa2cvy1JfBx+y0JNpjg3hQymj6sXSMS/OOzWrPy5hy/gPtnraUQXNh3OlPooOdK8myWF5I2YuyobPm26UL1VKM/xfwDHUBZFWoIuAhsbIGkh+aGWRzfVkq6dkxqm3Ph8PUkRHRPHVqFkllH+ZuHRBGH4lrSmaKQdWQLh9dL5lPZ/FVNN1oICNKwXo6WWXpFasukqWYWYVgbjqzlWIizSKLSI84x/4UWVk4bfrBItYaYZmm0Tp3//5bAN5///2mbzZXf8XBsx4bJ0S7odkPIwAyhgXDOE2TD8yNYxHs9ntmMZYiw2EO8aZjInKGALHGuMY554wdG9MCGMYxBpdbYyHCxDGG2pgqzLNIOEmHCMpk5dWVddx6m6FeVpIsR3k+ROVCvTCj1ZnSue25JK95CdKLkte9qEk756+MTxBLSEapMn9x/VGaGVIHwQxQKP8i17ggMYOjZRbXWACg0J+9+/u/909paU8F3epea/pp4G9/4xvPn7+gAACvnj3bT7tgzPbqCp6FOJA8/fLZf/oP/9HPGtpengMIgg/f+/BsdfKLX/3q5YtnP/yzv3xw/11qjnwIPuwX5ADs9ntgHEMgu+je/Y4/fykBtu1g9sYEAMYS78Zhs342ja9fPF+dradA7CcgLLpuv9sD8MFzsBBs9yN7lilYouHFVbfsjvrWCQBc79b/5Nv/2cNHp3/1kz9trEXTe+OWx2e7nV92S+8FgLHWkGGWpnXjMDatE1g/jaMfdnsCYAz1fXN5dfX8yVePf+M3xt2wX+9+9/u/e/Hq4kd//Cd/+id/DGDVv2OdncK0WPQTC8gwE2BDiI4uADA2mjJE2DDL8qgP3lvnmINr6Pj4CMDFxWXb9jGZuXOWAxuywTMJRe83kSOwCEdTiwGILCR6Q9NcM3PNlw0ZCNgLhJqmTSklyIDMsJ/GcWKRcRp9CCQYxolDiqidpgmCafLWWhZu265t2sbhZjPs1jcA+uMWILI0TkPc1qvligjW2curq+A9AOecMO93+/X6epoGAp4/e/bNb/5GjN9Ma5iZhU2wIQRYEAiGpimEELquf/+dtwE8+fLJ8dHRfjsKUarZ2v1+f3p8smk2kT+4xsXKlovlrhl88BYmcOAQjCUADx48dE2z2+/7vo8BxNYaMcQik598CIjRcJriI6cfURRTb24pjC/Lo0PBVn2V2ck8rpwOqrwFwEhVmZm/RCo2Fr8UzShxK65NTwXdDpOY44CE9lG6SDCgkAc1k20RNRSEl94xUJd5drcnmCvVu/NmS28kWx6JFD2KdiQj1vkYInEIiEdM9PAdlJKVHyrj1QrwJuSjQIMZJiU8SsfpJCZrpeRnztbp2TyRMvAy16QBr9Wg6XB5VFRV23cNK8r5wVqG1ylZIuTM+lmuKsuvrAnWiGjWC8oyr9Az+63rhwmI4Pb2IO7KXbkrd+Xvf3EAEDnzzP0V83bUUXdAZNh1sIiieUCzacTCFdumWS1FJpAaE9LrGqgvGTYka0LW3aiySmRWfiiMVeDUwZkyC6O4dXtG9Wo2KFQDji+hNi9lsUspGeIsYoW01boVomKTLdIvu8pU7oJUAFX6/m0Jo1FCWaFPIIGSAl8ZDAplcuuaFi1XVxTYggQOCr2pIwXO1R9pw4fWx9t1GtR0k7qCHMdSvIkRfIDKeb6Keknfz4qe6In42FDKJEjppRQdaIptoIwu/sIciKgsFaL630KNPF+3CVNGlntffzXLB6OI7PYxS8l7BAW8HjSVP05tZZNVCstUSlJGgVS79BWaKuRRKFWh1Gg6AViTKcR8hPVCjV03IGGheGoN0PiEgn7zTqQ0I0QpG/psFjJtK6yXOyJS1rnag6Pv/E2wWk16ebPMW7pV5OD3an/lqqsdIjncNm/KGJCVQp6gRtFsMp8rGjo6MvNMr+lJ
HVNgPYmf29fFXqNZyo8gXUkkVaspxECSFamKECk/b5m8dY1VpCm7RmTel0N9S3I+o0oB0u9ygo6Ka8ekS8pHdVBU74LCKYo8qf+UW0PRTIF5mLe0Pz3hRmUg2tXZixUVDqxhs6WXVFvSKRSkulO3SA+npzzCmTpxzaR2y+hyAoZ8pl1FTeZMpRuV0NTuACAYa62zgcM0TZ89+QzA6nh5Zs52+40ALGySGQKNayCIt3xYZ/b7YRjG/TDshyFK5WEaBeKcXa2WAHwI1rrOmMa5cZomP1FDIOkX/fXVzoeYB5CLvkUQ4Sq5QpmI2mCtCVYqyJHoWMBK3pSiU5JNf4mIEiMnIcgnLpFcfvlY3iHSQbV4UjPqYtFNDQglSxPl1qKzNYVkpprzUGJfOae51E9I8sY0gYOxASRCtmu7ELYAPn3yq29848Hv/OZ3uDcnPO6Nef/R2wtnH58uPnzvvT/7yScArs+vrBAmdq3b7UfqeGHNn/7RDy/Pnz585+RXP/trAN//3jvf/PZ3zvrVf//f/be87Bb3Tq8Y7TCKswvTTs4COF4sHj5+ePNs8cN/92/vNfz85euPf/nU2lWDm8fvvgPgi08+M9bsmB/cf9g/ficAdsDpvZPA3lLrlhbAsJuIwAznDFvihg2HcRyn3bj1o0x7AD/+i58eLU6/+93f2g77h289FLGfPT9/9OFHw/rjT64+OV48AMAchnG36JfiQ9v3fhpZQIZ2m93m5gbAYtEvF29Z2/zwL3/6j87OXjz98q37Dxpr/+qXP99P/gff/10An37+jCGLZW+sG7dD0/ZCZpoChQCNEEsWjbRMMI3DNAZjzTDsz5Yn3qcbZkKYrGuF2bgWGLuu3csISYGlNru/CIARUMwnxyFQY2PynCr5OOAZIDI0jX4Yxm7R23gEnWJ4+Oga65wbhgGC1rUCGiZPPnV0mMZusTo9OVsulsaYtuv6fhG400zZBIJzWHTN+auXFmSt7bsuhLAfh81mA8DZxlrLgcdxaly7lT0ATulRUiJDBjiwEAXPEoRNALBZbybvQdK1HQAi0/f9fr9zzrZNa4kmZu+n/X4fCRLviHOuEWDyU4T26Z5Ma/plD+DRo7c22/XoPZFZrVb77R4gZ7sgo4gs+gWArR+HYbS2NUQhRGkTXdoEPXtiKOdeiJ9nBkBVGEQND+pNXwf4aTKFr8EJKGK4figycE0cVCCqHMRDJDwys1zeYkJ1z6rzLbnk5tVfl4ZAEeCrNqVjUuUpAzeikomwEp8FC6o0o/RXhIVSjy2a3CtwVPocjzOLgIw638o0mQRHk82ORUASkz3EmIMEgJS1UvS3MZiIIoXJQkQ4BONI9SmUtMMJtFQIWOou1kgzBchmWh/OR+7rAXIQgR4jogrF1gApyp2qSZ2023qowpbZ1M7hZl1Ij0cV0Pf1a/Wu3JW7clf+PheX9XIVNUAC64dqTZEnUT4JpSsWzJwFksyDGpDRfjbqaSCTBqHEOipZLDG4IfPYpI9mdp80lnlQ36HimIXPLLHb3HREGh1V1I8qgLEyWmQrKVWvqIWl9rPFHD2V2Ui0ldLNaBJNZpJK51RBn/V7SuaF2ctKVlWHNdxJ9S9GjniaKVS3xKjKRpQBZOUXqsNWBoSvA0sy/63IXilC/mCNZMxXuySjxTauBtVJ5+BtpiSy5APdKdwgBwohEzbBLw+NkCQg3bqRMkYV93q+xVyy31Ly2s0jSsaCMqjinz5cWoUuGiAzQyBpBVfwaO6nTtGeM8Nu/ryQBAQSTS0o6WRigmvlJVRRtGo/y11MSnV1zIQyrE9vq3Etmn1nFcUuENKRas3hI5hdfR0HqyZeUhLVPo16yoseD1U79AldmAKhPEOiPv95xyKdJNt3RGarldLgqs9mkFCzjus60ZjQGa+JuzneL6LvRQ6JBFK19QKBkwmiVKIRDUh0k3iBl0JNXUORJHFVa6xBsbrpFqh3ReS66sSRpNYccM7MbKKyoTNwwCxnlKtsQqwmnop0hYRVDVLvt+IwUCN/Dl3VyGQwSYpBg9LgTSXpRWmiKcdxpH0ys18V5l73UDluzY4rcZO3bTaH315JNTegLNqKyKmmj6A8W5Mmx4svcuguNFy1FiO5kD4zo0GhhXJ+Peyc+hZCYJLG2cA2jHsAVzfXxpnddtd2rYgEYYCtcSK02W5tcwzAmJgOy6RVTYbIBOa2bVfLZdt0AMiMRNRYyxxIwCFMNDZN9+DBg1evrobtBKCxDUTiQVSmEFk1EQh6101FeZVypEcS84yJVHpmpRvWRvm8HSgFU+dNzEXApikr2+pWIRWTpFXNDQIEChwIpNd7xV4IEUp2j1gBVa4ngUBS6pW4fSBxkVprgnhDQsA0bE3TNA4A1jc3HK5OVn1/uux79/Ji7Y7Ogm1H0/zx//PHv3xyAcC2x13bTuO0Dd44hN3V6aNvffu9d15dvjzfbKIx7N6je9/78IMvfvH5SO4b3/ytmx3QHw37zcXz173rPvrGhwB26xugDIsoAAAgAElEQVQZ/Vnf/vDf/h//7g//9cuL7btvPfzN3/3911fX8czyt3/7e5//8hMRunh1wca2fec9ucYECHNoOwfAGBNYhOEsj6NYWBIcn903jhYLJ+MOQOPk5z//+PHD1W9/77tvPXrQGHez3d1fLCa/v3d0KilrDXV2EW/iGocx+Gkcp77rxv20n3YAxmGyzgp7gn/+4uWXXz3f74b79+9fXl7+9ne+9+mTzwD8xnc/5ID19e6rL1+5ZsHjtJvGpu/IJgNDXEoxAx0ZMgYhxPx01LTNarWKSVTbtg1hCmzargOLcWa33zrbeM8xx04I3hhjyMSgSAIEwmCQGJMufQweAIykywYFBDLT6GNiDePUHInAPrRNa4iMoWnywfvV0fL8/Pzt5VsAmqbhwMxhu9+dnp2QMQKIkaOTo37VAbi6uF6dHHk/nb+6ePblUx/C24/f2uy2xpAhe/76HMBXz58Bst7svnrxfAq83+3bthuGwRoigrXJgKqqAPkQQoAhsHc3V1dkzdXlJYCbq+vwtmf2fhy9byELZ912s/U+xVwfrzqBBA5EGMbBWdc2LnBg4bZrh2kHILAfpsn7ME5T3/XXl9eda401BBjr+s4B2G0mYZk5lyu8oPca1qa7jJGhsnHOLuTg8yzdlZmIJOlTc46aRWQ+UbB5sVKSZoq4Lbhmx15ulUoB0YcOalBGmMF6yRySeQ40e4AyzNRpmrWBSpglTKQMMz9BpSf5KdHBA6g4dHz+UF5pq9oQh+qycXW9RqyrQ8+QL0nTpDvCVMijQM/kFWIRMNQwnYdWkFYW5crbE33zwZTbrq7Sa11sOvJ8UAdl9mtcnuIBNDS+dKKmlswxaEz4paBenbN1mWOq+L+BvGGR3JW7clfuyq9JidGRUfNULV9LdYAPqrjHYCaF5bc0H/2+guyEYk+jmMRvLpoEAIUQ1DRDhJhaERpAOWexRe2aaQbzQlWiLhV6M2mYR11eVV9cETB1tVFIpfumM4aIVSRziWg72rOq6vkQVBIWwQWVUVJ/Airdl2LNnZE
BalZKAZiVZHyjbFJZr7/XMYDzzlIZBRXMNatKwzpVPXxTmwXU1GPLbyHrZVk2a44blDaTTs2Vh7kONKMcyhiVwpx3MoIWTom9CKQXXiZzxwyr6DwWmpdOxe+FJSWKSmSuIM0hbW7/TSi3t0NhiCq8lOmcSZGXVU20200p8AVRTMtYG8c1CSv0ikmiEtCXHgsp/XYO00EdrJfJSnlTsuSDPzr3qSpDRkjykXm1iab5MtXwNWIpqoSSF2OpEwDAotsgkZwTRUw6a1n2Ytl3kk2ryngSqJtNN71p3moqp6UwT4dAs7mIBzMlES7DeYLeus6sJuccXBCXgUTcnKdJkvU5azbq4kfZY9WmqKyQOkao+TT1jzkQWaVYvFnaELMhg6jHHOoMFdbOLCW2J2IMVRsOOa6BAElmmATr4/fRIk3G6E6V8g4AwFakFJF4m03a45m3x9UoRa+YsRg1b2aqZ8Uz/ZZZjO7jyEZIQy3KVBfrO8/1lMIApFrxUgnNGePMtt6s1ZRzdqkLsQdMOgfJov71qzFrOlRmXTdFZs0EjUbJqzBPB4GMNUFoGkdhNsYuFisAgXmz3ZAx8ZL3fFbU+2ma/DiNAESMdW2OMAyBR++JqGnc8dFRCgkJoHSbjbeG/DROwa+W/XKxAGDTgW4ma/XmPKKkSJMhSkfUD0R7yqyVA4wy+ZQClWo4C26JkxMPB4q+g8Rss5TN5MsK6KHj5xbAyD/yxst2cw36VjuIKcp04iIpHJlK6OvcqwMiYwJA4tkQrGWisW8tgAdnZ9sdX1wNP3jn8fOry6Wgs3j9xa/+h//pf97u/NHJ2wB2wXlnpHcT+Ki1FxdXj5ZHyxYfrN7rl4snbAHc7K7X6/P/8yc/uvfOw6N7j6fty/VwPU7T6fGKjXn65AsALH7cbk+WZJwNHLq+2/F0Pu3D61c/+vxTAP/0D/4Zdus988hi2e66vmsXfgwShKhpOgNAAiMIBGMYvbfMgATbWCLjOO1KptDwtN9tfvCd75nGXb568fajex998P6zb300TINpegBk2surGyOmtSvvvYhM02jIcAgxa2Hg8erycpj2D+6dPTx7ePXi/NXl9Y/+8se//Nkvv/XRb/7+D34A4Ga/ubpa//JXn997dORsd3G57q1tOjPsRsCqjUOUA4NAzBCRaZqOThYhhCkGPzo7jpP3ZtEvAntrzHq7Xi6PQuBolPcyglSAxn1p0nleAZM6jS1ZjmlcIx2EmdmP2G32rmsAGGsDs4w+TF6YXdtFu71rut32epwmACfLVdctiMwwDPFe7812/dA+YIQYqslC6+t1GMc/+b//3Bhuj9vNdmOIPvn0k8Ytnn71FMDry8t79842V5vPp8/adjEMExkzToPrWtlsOTDSPYEgkLU2iEA4Xj18fXVFxjx9+lXcfWHy4r1zxhknwpaMMeb05HS73QKY/GSMWS5Xq+XRZrsJwe/3e9c6Ihhjjo9PAACm7xfjGLq23wcOgW1nyRiy1lobJaUPbK0VEuGQbm3UTUcEiQQmcBCpcwZmliEzzlGg8Jx3AJUr7hZIqD6YfyUqgGpvRJYKyp9yjGQFutUOVoxet6F4jW8ksyqFZpCZ9Q5xXVUoUMV5bLgKyT9oMAs0QTq9pEpRImIhahlCebXyCQERo9UOPqGswzAzGTJUQffbSDrXSIhpqQwl3ZDVJUgG1hCLAhVWAVMGyPm3Sl2YazPqG3tDqYUMKks3ZTigMI1lTp446ykIuHxTgMFMwtxutwYb9W9SZqsMSqCp8O/KXbkrd+XXsLjZX1lXO4iKmemtiSumQyc5lAY5g1iKqxJFCRUfTUp6lI/qNUz3DkYOa4jIEokBZ/ubCqWZ9l3rgdUHOPw4aWozi1sCBG8iyNcLiFvKCQ5Qi0iUFDyLUMqvFn30trsr1lmHM2aaFUea6NUs8z7ODHEgokzkLHELrfT/8uPWmCr9u/TsDb2trQKY3RHyhlIZP2rQchh/FQvPIs1SqF32ZlMyWIiAqJwEjTiFiai2isY6rLV5xMLpLIfkM3eFOgo0UA7XgGD0VPnMhJPxUAEmt0Y9p+kcwFGuo7KmHMLQyi5TDP16yDEZsVLVirGiN37uXRfMpj6r4WpAoVQBkoFJiCgem839UHgVF3nMgVU6IDoHROmAsFKA8toXFrJW6agdK0b26hVChmwxX1BeXswCMAwZomjoM3kqtKXYkfRZjpnU+fp/2XuzZsuS6zzsWytz732Ge+5QU3d1VVdPaHQDxAwCnEmBpMEIiJRMSrIVDEu2Xqxw+E2/wmHLT35wOBzWgywyLNFkiLZMDRbJoEgRxECgKXajp+qpuuZbdzzTHjJz+SGHnfvcatCO8AMRcbOBuvees3cOKzPX8K2VK9Pao5y8j935AenJvRZnn6L0jF+2KfYgMzIcwWfCjPME8gc9XewwMiMmLD8KQEya7GiTpEXjR5UD3MEk8lBOQOElAF6RuuRDac4MdHNcm79HJ0CkazKHwhn+DakRe+RSFd7tn+xECdp9VPOjoJGAXgoiNClx7jjvVc7dBoFy6DdQtP2yv9MmjGZE2PWbYKcn5ZDPiSegRBwxY8dxYQwdN72TjwbxJQKECGTiQcYsnKWhSH9aIXsk28tpMyeen0tsX3lnOq0UlwURKaWX81MAihSzVqycs0VRasXGGW/qjceTyBR5VI6cc03TAFRorZjBbKzlyCLEOdN1rFgpdsaUZbmqa2ttWU7iVcAgIeMsiMSxDw0DHINdivbNhCTCNKVT9lnocxzV8Ofwy+DVJHI9ih0YSmDq/cb76Fp6bpy9lHMA0Ur7KxlcjDsWr8OkRL0SOpPmPVw5IHHR+2VGTMTGrIpq0rWmKNXlS5fGldK2A9Ct7I0nr2ulD+49ULs7Y1V0tv2t3/k3RTGeXXrSdhrAbDxarFfOmCtPXBRxX/nMV//Lv/ef/ekrX3dOPv3si995/10AH7z+rf/mH/53R8sT50ZvvPYql7oxTSVl29XL00e6mgIolSMIlbvjYmJZX39id7w3u3J576Wf/anbp3MA89Xy6tOXuapkbQyx7Zqmk+P5KYQYI+MMgOVqJYSuaU2zGo93STEBy9XCsVuwUyAAW9s8Ne7O4eI7f/btH//SF7ZG9vqlyUjWt+7eevDw7ni6A0AVY+uEuFg1KzgBSTHSYi0rtbW1DUBpqcrSWbdart699cGirhtxtbU7O9v3Hj54cHwA4NLlK4VSpNwXP//Johi98uobq3UHUutlQ3AxeaQHMgCBcxAnprNtV1+4tLNYLMtyBECcBQsxdaZjos50Sqm2aZiVR9ZVocTBGXHWEYiZWREr7WznxLEDAB88KRCxopjBcNb5JMJt23kAqSipa7uSuW1bEjFdp1iLk6P9g3IcAwiYnbjVek1MIHbOGWPmi0akIy4AdM1qMi2IwKx29yZrqZ3Ik08+uV4vWamLFy4BOD45dc5dv3Gj0FgsV1oVDiiKsl4/YibTeFgT4qOjmQAFYa1ZrFvOF6PR+NGjRwDG1bhrO2ft6fHxpSuX6rpeLBZ/5ae+UhbV7/67fwtgazqt67W19u
T0pDWt5sI6VxI1bWeNjCY7AO49vLe3e1EpbZ2rm3praybOEbFiVsxt0wIwxhS6dM6II5J4T5jfqr1Xvt+rg21NZ5jrkBXk/CDwUgSFU7I60s8hRx804SRnvwhWEIF8OPwADc2riF2OqsxAW+y9KuEZyfoQvu+PC4EVJY6XxJAXp6w4KkMbars/5UFIJxVihZR0dYr6S9Tkg7Y+GEgiaOpBPMySvDAS9LSwHSJIlwcSCpJ7liBQmsV5/27INuMzbjgvIJ2IOO/vIULi/LI5eR9h+cXJk0jZqG6F4Q/mJijMwevkn7Vi0fcd8OGZPERtBSErerZ2KNMkzvanX5SJqI/pukh2KPGxwzsv5+W8nJe/zEV75upgQURO4DKb1h87jilU/DWDcCJEithaG4zocGsbkjTxFmUwAhmg7EoK7hJIGc1sAjFHxm+dI4hSxOEoX4ScQsKTIAeD4pHBGJSiNpGsZHhzIP0H359YqA9uAAAHm6JLYgyF+NTjILBKIUUuaQbOOvL1uP4WESIScQOEytviqVXkQs93KKgHg74GIzNJrgCyBIMnhrFFHCSBBsxKnIhxrmCVnNuJNP4aDaigIiWQQbLogEzd8CQI73L0OMcAgBD65GJIAEetMBiyRM44Jg7XdwRHZQSXhMMCSnqRRxxYJRwm6C8B3yWl/GiDRpWULwGc7aincxwLASlhk++cD7wlhsSLtdM09Sga4iHBUHmcNAjQGcPMfro5UEScE5Xdih5AC+mVtrilstYAEmhmMIl1zjpOaIITZvawfjTL45AgrEO0kXPOrzev1Xnr28P5/Qpi8sdboCVOtA8ezsNzlECYWOCUUqzYOQtkAJOQc9Lr7Yo4thtInAxrOCcOMWxU0mIHHByLCVAbkVKKiUU8apZmIOKTseWY18EHJoNZ+3ASayyzgvdwEIJ6mrCYPkEbBZ9Jf6suISrVEb/xmTTD6klKYaYJ+xUTAGxJ8adCgS+JAOGwGJEwOUd5Nb5mEuctBkHCyRJ9KLIw/5vywYwQSIyqiQQJ3K1nddGsiLCJJ7sQUwEHxUwcIjxFhEmRsOeuDlbECkTAHFBfQDGyYE8SYhDYB/r0Z7I8+B0PHof/IHBigYiJMwkcyAsAjtp82tqEeJYtkMGBCIq0M9E75SWDFc/0YhCcIGOoHCe7lwZM0V4gv/DiGgREnLMhEpOIYkrfwMOcgISJvUsnLsawHP0KcTbGd/WLNtE/MyoyEWUhyDY+BeAtbRQAIT9JZrpk9VGUVUQgEptWdi/k4ol0t2HWSOSiSliCyJbOtOPxCEBdr9puTYUUqnLOCROBW9vqSgkp17YAqq2tzsiq7rrWai6mk615vSi0NsYenRxbF26umIyrrmtJq6IoCqWdodP5YlQKWQMLAKS1ac2kKi9e2D08OK6N1YWCOApXVyeikTfafCoYJu3Eigsn76I/iJWiujMhOJk8N2bP550YDRYRf0uyI/Rp1VzcPsFl4oQAByIFZ+CtS/JeBRHH4hclExN7yenpztH1Z60lv7QF3mEIISiEXZU4ZhCO4R4256wP1vcCkZkVkzFmVo2Om7kueFJB2dWTV64/efkCgLu3R21rGPOlGdmjbq2n+8dtdfnChQvPHDZkbQ2ArGilx7uzZrFann7wztGrv//HL3/1y587vPvO4uT2ow9eB/AvfvcPm7VMKzqar7wYVqPywmX1sU998cqFL5eTKYAvf/z5m7cffPv17/745z/3pR/9yYnrpH1EtPONV165sJoBUEHYqM5K3RgiVqPKNG42nW7PtqgoAXSuGRWqXmlbSn1y8m//799bmObTP/RDV65cebR/Mq9XAA7v3zk+vlMs5Dd/8zdPHtz87IvXjpar11577fVvf5OZVstjAFxtNZ1hp8fV1nG3IJGLuxdJNDN3TQ2gWTpTdda5rm3fePXNolBlqY/uPoC4renYLA2Ar//5n37tl//WL37xa7fef/eJq8+O9y4t6/qDt947PZ4zq2btAJjGjLZKMHWdgK0ztij0aGtSr1ejarxcLgBcuLQtroKVrmmJSDEDojVb66xpPQ+xhhi6HBXwccRdOx5NSBlrbaE1ANeaul0KibHihH3aOWJmZiu2W7cAGCgUO+esk66VUaHaprGNMda6RYfaASClLuztPXhwz4mbr1ZMVOrCGNM1XTXWAHRBmhVV6FxbmzXZdm86W9X1uBxXk9Gj/XsAFg+PD97fv3T9SlWNy3HFIvPT1Ww204Tloh5NpwCcc0pL1zZKa3JCzF1jUfBoXBJjuawBzE/Xexd2J+Px/unJiyPdSucUnrz61O1b73upXZbl1tbs0sXLbdfISiRARapQJcEsF0sApmthcGFvb6zL28cny8WyLEpgYjrpbF0UJQCl2DkAlXBLxhEgJumGkUUIMWXppSUpKeHIhOcv4UqckIw8onC9qh3FiecgQMTget7eb2+OioeLN6ts+E5CTUGg5K4lXxuFnyzZc4HLAAjZdIKG4JDuHMtUL0nx+JL80qHyPm1PcLKLDc6a+JFLIsxrx1F+AuIdwKKYo3wVCIJLBSJO2Nt4nnyZmiLRZEO0PigTWgKCC1f4RWp6sqcEJkgCmiDMZI1zzrEi1srnXHYiSmkCSWcITP7Yh4NAbLy5m3lzLlKn0jRkQQbow2jSk0AIORDBwKkc+y0hADPYVlH5dtZFqsQaIUHoB0kSrKdAslijiDhrFCvy2ZwJxOAoxjiaA4Gy+byfl/NyXs7LD2bRFKCyhF4kOyszn5CZv15cxlCqABUAlAAAhySeEfTwFJye1dqLhGD8UgYTDE2qaJQlZSAag3k4TmhhgPYkARpzSNFAFdiIU6ReCUmCqh9g1ums+1lPo0yRM0/hcdoJsibyUUTHWf/lZtBRUCKQbNxQuT+d4T/yalaGvsRTZHEiEiorw8w5G133Ei85NKU/jBn7ioiKIhI0aDYUAQeKAU4SVlrC1dKB+h5IQFR48p6kkfShZ1H6praGlPTf9wBN6jD6qJWkdFKiKKWFm++ITMUCovGQyBeyw0nA7sOC6ZtNamTSF/0TwcWaLG/vDu8hsKiopSWQ+u3VtfRkdL+Ky7wAYRYoUB/9Ih6oPmdK34e0lii2grgC+qf70MNhFEGiVbZ9mIiJAyRACOaCRydj1sABzJP1KqLTQYckpoBlh/Rzw22KpDUDIad7mhjKWRylxZMFLGQRz4E7JY6TQYGBNMnRnVE6MZawOPsN5CnW67hDwoevvdYZl3Sc+cezEMTuIB7WphghKmHlifQbKHXMWhcRFeXnWwQ2oPbD/RfazTMoZF9SJHHke2mkXmVPE5fP0JlqBmSgjb83v9/8OO5YyhdbIkkikM9kgGz6ZMBZKXB7GR78zWrIHhsUojRLgzGk1ZWnG5G0o4MA+2gTghDTx0q8GGqDFGcW0GO2TkhdkQulFC8TTF8LIWs646xhVprZmO709KjSBEBOqLWNMcY0LURG48JaCzjFTMz+oGihlWJ2zES0bupK6UJrNtZaw6ycdAAgjpmLolKskLY0DRfbGfI65xKQHN8CibMuXj0SyCiJ53Iyfn0yU
crq7Q9+pLiitGwz2KLnFYhwu8/NJTG8R9AvFz9FwdD0aRiIKaRFDU/6ANdgfXMImYlyWMRaa6yZPn21edBdferq5aeuzPfvvvXqq4snrwD49Jc+v3/3zm/82z9QXb1ezZeLroOqqq37h6daCNYCOKnXexcugkxdn1Sj4s7B+nf+yf/4wvRX//zDD5cHh2++fRvA5z7x4tbOpVJj+8KT00l5sH/w8osvfer5S6O9nUlRdlYBmIh5+tKekqNvfuuPf+bLn9tm0uOqbczLNy7fuf8QwKiqyqrQuljVZrFqmFXbNSdmXXVru1y1QgDGlWprza2Mqdi7MP1rv/zVCUwxe/LydHy0XnVrA2BNP4R2Qavlk5e2Lz91eWs8FWNWRj/59LPz+ereogZweDJ34pqVEdMYfqpum/dffa2qxp3gZKUAFGDpGlFKWcesOqU6VRRlYcUdn6zrrgHw8U98+sHB0Rv//LdvvvW9yXjn5U99drR7oSu0UuycKE0AiL0PwgEOIsx8fHzywkvPOLFtY/zN2tZYa4wqy6IomqYZjUbGnE6n07ZpjTUAdrZ3VqtaHEHQdR0zK1brdc1KyqIKPldCWY1YlqQIgOPAoD2Q7bev6QxToZRiJmNNoadwbj1f+3V1fHwKYLa3czpfAHDitObVauXESleTdeIsAAWxbefEVoWCMbV01569Uejq0dHh/sH+vf0DACenS+fo+OQUBYpq3DT1008/c2FvT4CiKPy+s84SsS4KpZX13vdSW9MRVUrpotQATGuOjo+n40nF1dZ0tlqupuPp4dHBzfdvaq0BlGXZNE1ZFm239rgeM0PQNK3S4bz8ycncWsdKja9MJ+Np15rVet2ZB6ZzbbueTnYRrtuyJAzhAfgi2UlZ+b55L86UpOcMSuKmfXDboLnAQiIIF3UAJLawKcVSLRkw5WuN3i5JIihTwzIFlMirGyEndWIhvfIchVfkip5j+vd8FZ57+cS1A0XwMbbIkB5JrCO2FPuUCb/HkWhopA2so2xwvdYwnFavYbr4neecvYMQEHEAOSdEzjNXyevrW4hdyr/ujbWsF8nRHnqQ90VCVAoSCPj9lJl+1P3ZMOlrTFcCDZdfomgcpf8w6eIR5Y2CJWvnvJyX83JeflAL/8WPnJfzcl7Oy3k5L+flvJyX83Jezst5OS/n5bycl/NyXs7L/x9F94FcFHxFKZoAKVV/728MQWUyiCMDUiSRiPf4Z2mBQ+hNeJgZuWvLn/hLV0WEh5KrahCNl7qZmktRe6kj0QkYG0h9kOgEHdaWe5eygKnMwZoipGL8TSRR8rllHc4+y2hzps3NkwBnOpSHWKZfvF81UI9EnBNhH2ERqwoXzVFwnko845EqyhySOXX6hh/fI8oolT0dw+dSYBKl6mkY0SISTmqDYyAJQMML1fuopXCSeEACQR+Jltzgce0Nw5wohnxK9knsB9JBC0rRlnlgEeX1JLoPqmf2I093bmSDRRaAS4lu3okcDrQH/2c4mO9vdwrnipKH3w3jV/O5iNGRvp7syE6sPZ8ZxJjP3Fss/XrIx7URphS7vUGLGAAQWwq34koMKKI8Yjj2uieH/0HxIGz89uxWyPsoaT2Flv3x33DAKo63D3OQ0Io/bZ6C1+KsD8bit5RnXv4TF13P6RoW8vWlMzUxsmowvb7FmP/xLKtJY+yd9NI/k3MRv71jg8PXMif5JrE4Dy2II09bcLiKJARwUKSgpIayeEY/VPIVuSysuKcdCOSzdUTumm2gzdFnV59HymOzpKCDQTyG9PLm8XzzTIvU/xs3uQ9cO/N2CoOQGOjdZ6KMy6uvJ8a65jEcefclvRljZxCiRPO457jbz8q3vs6Mg1E8kpBNFIX1kzHax/LvQErnBOQQrgsKaTe01rpQhKUiDR0ypYAAuKapxTKAS088+cTsicOjg+PDAz+W2dZksZiXRZEilIjZOUeANXaxXLqqElEIV1j008wEcW69rjtj/KVwzjmmPOB6kynBn7xXHPe282chRZwKdytJ0krixmavN6Q7BnyOrzDPUcwH6qeImIxBBeqG4/5inVA47+/ZQppYYSYnogjCZC2I4DPnOheS1zAzEXwOkrjgCQLrbMZwHAi6UOVsfH389PWLF56/cf10p3qP2lf+/BUAXbd8cPvdh0fHs61Zbdq2lXVX63qFDraaHh4+ArC7NbNHt+pmfeHiaGtr9vz1p3/xaz/29PXrT16+jOms2tkF8PyNa9LAgVfrY9s168PauHW3OJHDU721W2gHQNPoM5+8Pj996rXXv31pxmNnmqXd2x0LxqcHDsB0TEq50YgmWm2XFbGaL6zumvliJa6pihGA9amrlKqbdn4ihZ5cemLqOuke3b41n47cA21nAMqCLs4m884oZ6StZzs7r73+JpX6xaszXL/SiQIwF6VVZbtaeAyt5+uTN567sVyvL+zsGSIAK9tx3dVNvVwtrLWrVX10eATAta7UPJvsAHh4/8H7d25vj2dPX3v+5GT92ndeW7V2ujVTo8J0VmkFoBxpB0hricN9ULrQRKSVWsyX40kFwJiOmZ2zxBiNKlakNFvbFWXR1q1f7dZaceSTVPiULVprIuec67oWQFHooig6MZpUoRT8YyGfQ1j8pnNagcMpBzhB07SLxYKdzPa227YGAHJFoYutbcUKwjtbu8f7h+Px9qPF/vx0BUBYkdKlropKW2emXC6Wqxc/9vTtux+eLpfT6RaAk9Hx1s6srltj3GiqCqdP50dMToBqMmqdAaAKRaScE5+uTyBiLReqrjGIcnQAACAASURBVE057kM+54sVgUfVRKtC62JntnP/wb2T09Od7W0A4/F4Xa/FGa3UqCxFeGtrqlXZtK21FooBsObFcrmqV5cuXlq3dTke7e5dcK7rGgeopm4AOGeFRPlroYbssNdJJZ7a9t9nem7GH70GQoPcTY/hPcTo74hBCHPrmRVFpZUysU6R0SP7kWQacdj3SaHpFeBBwF3MMQjkoZYx2UusNZ2Hzs4cpVNfm4pVr/yjl0ab8nRwCjlQSiidg4lnifyvUbsMt7VFCosk1aHX2zJipPEOzBMa0CPZYEFWEsGfw2f4nGKpM05c0PQEKU83Ij/nbCg5eTFcGRgsjk0ltp+jcFDaDT49U9LbMS1S32yyJSmmQRu8lromUfXsNZTYmX6iN8pHJbE/L+flvJyXv+xFI2IWiCaQRPSJomgK31KSshL4vEjP1j0mZaXPnYHM9s/kqUM4h+q/dPBwJEULN4AGEgP1U1Ub1iZF3p64dB+VH82z7CUB4ATqo2kRDmsTeuExNAuzU2NJNNPgo83HPqI8Ts4NPsj0mVwQAkC8s9iffXbD/P+ZDpRRhijv81BFG3Qhl4aP+S7qBXlLoWfxTtbMuB7CHyIuXi4pMR2lYh4A4HEFBAgtUDQmBsinM+k/A5qFGyb88eVEJgxmEAETYdrQ1rKjfRL/pH4cgfZp4aXrdNJFFJRsX08mrzmmyfDw0IbKKxLxKxqMZUOp6NdEfAtZZRvYdRhZrjwm8mRlU92ihH5lytOmK6CvNN06kmpH
2u4RT0JU2NOqDCkp44El/4+iPlvQ2ULpYFF/zDIzNvItmO8DRjZX/ZrM719BgkGiK8VjEAO0qQc1EJT38CPjTOTPYlJcW3FR5yunVxUpALhnZiPRMuFYjwfssnciOfJFQxFKibMYFl9vfiQu6z8O2A0HxC9cUiT9kvRsMXcaJUMiwZI9e0hdIfToix/X5h1pf3GJEoYop9HjVe6eU1JMfBAGzZxEGm2+03PJcOw2bdUkRPpdQIRwJCwtvzNGgSBZqD3Dytwe+Urv+/OYIUkvx/KEo5JSivW77CPr6L9k9sl2Ja7PQCnPs4011lp/cNRaI+KKQjdtA8CJZcW7Ozv1cnFyegLI3vaONd10MvbpF3zLbdcVhTZd63PkWWeVYmudscY6C4BEAbRerer1ums7VopA4jxumRcaUlTgrztIB/ZAInDiFEIqRuop6UkiIalrJkhJ/BVzEbOITHVwHg7JqParNe4wZv9FyJERd6pAFCmIeHQ3ZXQTSEjDGikcLigWcc4Rw1lnjfVol6+SmaqqnCnF5eS9995553uvHJwefvazn3zu2RsAvvfdPztczAW8Xq6VYGtLT3d3Lly8xFzs33rzxU+/AOCv/PTPffKzH9/dG1eEsXbNfMWsUJ888/TTVIxPTpcA7rz6yqioRqNt180V8+XJdF3bOZvZxZ1HDxetrQFY11Kh3r//qNT24PBoRraoZqOpGq9VwQAwHanVqu5IFKtCwdqOier1kUHxIy89d7RoAbhidvfD956/tlsU07Wp2kfvlJeu7xTlo/VR42haLAFc2dqzUs8uTKtpcTKfr+aL196/+ewz17pWtwq7mgC4Yqska82x1TQC7SiZffLj5bjYf3BwejIHoItJeXmiNJG1tx/s7+xsz2Y7B4vl6WJdVCNWCoBddJ1yjOJg7V557TU5XI3HyrG1YBFhrQAoza61Pk2eE8eQp595arFYXri4oxQ7Z/wOoqJoTSvitNbiZDweN00znRS61ADqum6bDlBccThZCyYiYy2BlS78qulMJ+JYlyAK6WOisyJmO0TXWgIUs7Hm3u27TV2XWmutrDitCgC3PvygHOlPvfRDitVisXjuxvP3y7si9uj4YLazBQBOmLiua9K4tHNxXJVjrberUjt3YTK9994tAKNS27Yejada6dVyWYzKpm5W6xWI2q7191zqooCQWXfOOl0oghhnlFLWuKbudBUyaFvriLgclycnJ11rDg8P6vVqOplY5wC0bW261pjW2m6+PBXHdV2XBRQrATsWAFprLqlt2zdvvvn5z37RWnNweGBNDRhnaXl0AqAqR6Zz1nQgFlFJUvbKuv/Z60M9X+j185xPBg0i8omemxJyxu+/S47N+K43FqKgixlrMrURvXyOIJKTPO6DMqfVhhteQpoNn4ciqHwYlv7UbtJMou0UL3tJ/kQEKI1IbNRAkm4QJHWu8WW6MSXYPJlTfqF651xynW4ezd4sOfWG1kagIYlzmcrUC9FAh6ClJJyOvVLgs/0ksRiR6uFERApETz3lnw8GK/3PoBSlWXIOEJeSHVP6Bz0IfnaSJPuVMrUppmkarknqk0gnQT3QynOFP5sWyiXheTkv5+W8/EAVjQxVoBCFEWWlICXi6A0gStaa9NwxfRtf7O2wpORjiBgIhJEu+0B6XoBwvcCGjoGkC3hTMUWIfYQVloC6JPYGQzlbckEbf5Vksw+6kkd/ZgPa+PtM1TT8/i8wzaNa1FusCej0syTJZxglUjDbJU5jkuS9T9RflpHwiNBIgjbSsCO2gRS9llvBmcylvH+P0QwpN/pkOO44HBCSozoqRsmOT9ZzTBlDqbGcrhJkcz4UEhkQmXoBD0oBX2kAfcxXAiPjAs1sVslyt3gfMAWAbjDyqK0KxeBHZAqlSEgBk6bBhx7k6EVAuJAgktTzbNxpv4ZwrIj/5YrSEFIZDiynTNb1ASrnd8FgyW/Azkkvj31N34r0D1PUYGO9EpCsYZFkGQDZcOLgM3YjCOFeA9oH9S7rR6yOMqrEHT3USylzgUTybnA4iWlJo5aMsAIkkS2ZJqlaj1Jswrup2rAReqXSV55SgeabcqOOHuNzsVN+BbPzmiv1cxBH3u/6uHSyJZ7trbCqvb0jZ0KrY09iKtXeAxAERaQPoqsg1h+pl5ZNZGVp62fXLceZOTP2THKkDRIqy4b50XyWAhKUP0M+B1VfdzaLoccUF+hZIZCRRfr4yECRZAelZdov6w0x5X+P4mdjvI+TJrl3Jns8M1uchb9SzscNhkl1Iv5uX4GIdYZYKcUk4qw11sZegJmUVlVZloVmpaaTyXQ8NsawCmzbOVdo7azd3h51TdO0TVVNIMZa43uqFLvOdp3tjFGKldaAz9nH+ZbekJvOOiZyKYZHBCHU1EkytSngjC6iApYiZRN3OkudRLGeww7YQXA2BQ8aJMUI9bJNHHuI0XNqR/6GZueIdc+LxP9PBOKs+JukvF/UrwgHcc41XXN6cLJeLR7cv0uwo5H+4N0PFTkAX/yRH9m+ce3SxQtk6r2Cr13aGW/PJluzaaWO7t5dNh2AqzduVKjH5kgbdXJyJLC27bB74eadg+Xd21d2ZwD2Lj/llKrr02o8Me3x3f37k3J31c4/fO09S+VzN14EcPPWW7PVajKd1XU7qrbM+mQ6GYt0IK6qEsBkOrFOrBPnxP/CZBdGTk8O9GjrnTdvAvjCJy/dUcXX/8Offe3nv/b6t77+xS/95Fvv3bw3X/7451+6f7SmqQbw5vfe3r0wmlZb2+NLTo3u7z+onZy2qjW0PD24eO0qgL3ZqFs0Uo2uzNTDo9PJzt5shpFWo3Gp3ZMApk3TVpNG8c54cv3aU6fzhS7KvdlssVofnJx4ANld2psvj3/jN34LejzbuaxAxrm1bca6KktVlgUA52zbGOcgznadrUZqazY9Ojoi2inLwgc2MhfOWY+/1HU9Go2qUdW1nek6XRQAyrJq2w7OsWIL412N9Xo9mYyccT6LorGdYjWdTK21ThwROXFOHBPbeIKEhZ219aqzzjpbMHNRlEqxiLPWXxWG+enq9HTu01ZuTSdVVRjbfvDOO67pfC5G0rxaLJVSBfNifvLhh/s//uM/PKr0i889/+Dw5Jtf/zaASTW2JIBopR1sQXz56lPLZUNMutDCDh6oNWCCtdbfFqOZIWKNbdZN2wkAJtrammmtL1250HVt09TM1NRrrbVSDMC0zXhUKR9bClRVVZWlc2KtZUXNugFAxEWhlSpOTxcffHhLxM22tm88db3tutu37xrbArArp1UlcKXW1vUKSq4WZCpepuxEMZ02voR3KGexyc2YLi4ZiOihM9lzZwr3Bkov9+WssM21pViR9ByiB58k6YPxy2xE0c87ANQyL2ZA9DDQDDd1v3B/VqwMmdoJ6dlfJmp6JSfrUkaH8GYv7ONT4SWJZJbkBB3IVRlW2jsIPRPvhUJw7YiL2F/UdKIvUUhcDDKNSTZzJdC/MtDmNgcLeOh14wkv5RxEXOTlQkz5TIfnB85hiV7dTBR7cZVc90RAdp9p7CsTZ6Z4uLg0yOK
BhEpq5/lVNuflvJyXH+CikURzJgWTSM48ToNgGepjZUQGcjd9lltx/Rf2MV64cGVYz0yDt00AAalUeeTyIfAtr2FDnEQjwMurCG0G4fyRxmmQJa4XzwJJd2RnUZj56EIgHs6IvY/6LevvY8qGUTmQ0uGWCwmwRnKe9k9zhnCIiL8MPZKjV3EeA6VuAFZnupRhLUMB7CuUcIF3f51HmqMIflK4wyMqFkBy7RJ88FWwldOHofkIMbj4RZTBjyUfRAQcbhJIeEQPhgwUPMo+iWRAunykp11+AjdBnPHQnr9E4QweIEi3+gR9LAFxFD/0bXk9LS1vAMQU+5LUN0kzl0hD6HfohlK3ueDOKisDHSqsfMTowBhcl+5Upx5m3DjIQql33qrP2wpKfFJAOYVjeaomNPwxitTmmaceyY0D6uklccHHh/NQvryeFFRLvWUQdndwxvQgUhgwpSQBkjEailVztC0kbpX0VW4r9VBlch0MJyjrFaLSHK0UQW+ybJbkucmYUM9f06LPdHGBwDnn7wcmDvkf0v3gAdn2U5C10av//ah6gyft+0Ens13aRyVsODseMyIQRQU9LnjvX4mvxZWQIhd88+IjkiiuEETKSP5n6r8EIeGjSBLbp7hk0jgRty2lxiWRd3ME1K8BEHrsMl36FE03b6n1srEfb+xfVguS+RJ75IE5DF7P/pL4XKiDVDScgU6s3yDGdsQkYkVUWWqlSlJUr5fGdALZ2d0GUJa6KJSF81RpO2OtMFHbtlorPwBnHQHihIlKrdu6diKsWCn295gDIbCFieHAOsTFK5Wli4lLKOfLPSfJ9hWIiHu/qH9H8oB4cT1+3VNiMFkCCBHLxvz1sY/hSnsRJzYasUIZswqXxVMfvJSm0FqL7IXIm7zmBKUUEVvT+fthWTEgxrn3795F21XVZLY3+9LnPj1GvVgdA3j5pee/9nM/t1oe3L/11urk+Ojo9uLQzmYX1YXZduW2xjUAvby9M7tYoap2RlS6+4dHJ07Kw/3tolyXqlYCYLE8ONzf392dXX/hS3fe3b/5/vt/9Wd/5tv/+o+aunnm6assSwB37h1cf9o9dfkyW5qORoRuebqYgJrWeinYdtaCjHMCYa11qaldTrYuvvPG6x/eu+9306//1m/90l//67du/dmjR/t37h9evffgJ37sJ37tH/8PbvTFd9999fmPPQ9AV4VxrbA5Oj5ujN0/eLhYzhfz+dbW7qvv/PlW6QB88vKu7Xiur/7uH/2rL/3YT2tdlAUtH53Yel3tzABsXb5cm+bhB/fXpbIOW1Vx4eLucr1+8vLOxZPJu7duA7jw/HW5Q/vL+Sc+9uzxoimVdkoRTa11Smu/NupV0zamKEoQM7u9i3vGtmVVdqbz0AYAxYoVWuNcOIwPJrLOSdeVZZUWgnNWadJF2bUdHI0nY8CBYGznFwYRK63rui5KHUAG56CSRAIRrDhYV45LJ87F8xOj8UizMsYAGI3LyxcvdcYUuqib5p1337l37/7yeFlW1XK9AjDb3rZOVEmT6WS9Xr308ifG061V3RbVuDP7z33sOQDr5Xq2PXv48NHx0dFkOura1lnb1K5QarxTWWf8CjawelJ1beeVgKosQFAs1ahqzRpAUzdbk0nX1ZNxNRpVp6dCRK1pnTNlVQEgLlbrddO267r2/h4fvuecI6gH9x4AmE5mO7u7nTGayzdefWO2O7t82cwm08lkXJbltWvXANy9fd+aTmnuTMtU9I67GKgY9uWGMpQ2PPVfxw9E0s1lA5Uxcy0OhVUuRtIWz7gHCAD3h8A3/Bzp1r4evgxdocf0PI1oU1uIEjFxneAQDpYBYhYdAdghXoQWDlOdlbuZlpX1KvQrqZmEpNf4dqLu1fc8aYNJNctoHZj3BhfOnkKmYkQZn9ixeB9OCCiM0tBXKeGcWDY5MQYieH4ooyzBn3LIKBx/pfRbRIbzznCwDwn+CnWOerIgG8PZuepbDsYQRaUz14dlYJtyOotDweUsUX5xgFwHzu9Eqo+2cM/LeTkv5+UvbdHJEg/iww2cYANrJyn+nMmzZCJFwzdq6yLiY+khSeYGhJGYOF7K7Jvl3BALnFWiht/DIMnYCKKNop+IBjwf+R/pXQk1DG7vySO5GBSuj42ti4iLKdaQhI4IQOzrCVH1FIGb0Lsoux8HIETaPcYWF0FI0JM9HuWzEYeA9khGbyRsI6InvQCUvhe9cA8WVD8e30qa4LNqW+hHHF54IR9ID1P06GFSszyJOJ6/D3/GBggDAZoQjo2DlbKh+X0ENhMUlU0bMxmiGaAngNo8PJ19lwgRICSJo0k5i0BM8cwyUqilbyMutqC7MSCghGMnLVcIDHIUsFwfLpFRMQ1e4inLPFjW76QeM4yNRr2vxy0eX8J0h/wM5M+RSlxPMRyu18IDUhtgm3CoJuEBcWXG/RImNptayVMahY0GIFz0GJ/pJ4wAeA06y4tIidJOhqh2rrzn/KDHz0PvMp0VEDg/YwE1ECHvl0bEZ2P3ktvmTInLJdA+LoDYZmrS65TEXg0dzssZXXajJX78ivfJLjJmjDgX8aMwWo7EkBSVGwyaxJ8TBx+0H8B4iS789F0GuAawevBJXDt91XFSHj+Qnu+EeNgznMH1DN9/0Mce9r2m2JveA+Gxg+RiiKIubePwfgy1ILiUu3VAi7Dz87jpQe/PDCVCpEEORsYZZVEKCM3fHPL+XqaSX9d9RQMDK37gklAY9EQAiI3n7UBERVEA2NqajkflycFp1zVEonQlzjVt65zRhb54YQ9AVRXz5ZwFnTHGmEIVBDLGLtrl5b0LzjoAVmyh1bpeM5Ox1lpb+aAzESv+ERjTEZGz1hhbVkVgMMwpFOUs/QAws/PHoCNJ/JJSiq2N6kPgtH1SBRFH/ZT3oizBxxK4WLpsPXI9wiAgGEKBAwDeEu41GTjnWGmOtn6cZnigPx68FQAOLlYv1kHEMLMulEvzy4oAjIqOiDQfHDz6l7/3b1544fqXP/UigDdvvvmF5/YmEz1Stty9ArjT47tufbC/v981Fy9fJgCTHW6a1YODw9nObrU1XS669XxRzLgh5UQtVxbAfH36wf7+Tzz3wtH+vbdu3rp+9Zk79+6tGpqMRo6nr73zPoBJNTk8OP3wYH8xr+/dvnVxUp22ZluNm/XaU6FtamuMs64ajauqZF28/NxnnjXyp9/4w9fffftrP/8LAP7kO19/74N3mcqbb7/2ta/9p//01/6nnemsPj369rf//eUbz/37f/d1AH/n7/2tP/jjPxZpxhM2tiWiRd3dn8+vqkoV+uvf+QaA2WT22htv/+Iv/sqf3XzjhRefrkZ733rlO1/5ys//zr/8F3/7az8L4P1HnZ3vf/z5l/7ou688cemSY33/0cPlupltbx+enLz+/gcAfmh6cX1cV2DTGCJWijUT1dYRO2fa1gJoG+MlOTFt7Yxms+m6WRKR6YxzhlTYlewT11lXFIUxhsBd25Vl2Rnj111RaEOGWMqigIgxTpFaLhfjato0LYC6rdvTU2sFICdB0RV/Qpzgwv52BIC5az
orTuuiKAuB7brOkSsUAFTV6NrVa1cuXlakvvnNb5W6VIIrVy/rsrwxmfi9UJQFkXvi4kVrTb1e/+l3v3P46LCu17sX9sajAsDp0UGhd3d3tt69eWu1WDDxvm2ZR6pQXVsXRQUA4kicYnIMcSJOmLlpGq0K23VbsymA8WgMOHGmac2TV584OT2ejEYynSilfOB82zTGmMV8vl6vWCmIMtYUhVKFJsK4GAFgYDWfG2NZ80svv+TENc365rtvTyaTem2mkwmAS5f3Tk+WRNSs2qJQUVEKQX+9Apu8qEO2knnTopoRz1Altts/kfSKTBINBW3Uk0PbUehk1UXIMKpKSc0Z6OlJY6KowvVdDrXFluLraUSR6/UKc/jFJ44f9BQhNDJECSR3a3yg55kbhaj3l3sNnmLjqW7pm/4ox3dP1pyGkkgSob0UkuCx02QKphTMsbPixGc1Dc5kIkqND4JG0nSEH2e9iJs9puz5fLl4DbiHEf0edgBI1GPNkmQP+Jq9Sz7pQlmPs0wiiE6sKP2jyubf6pMC9NERPYG/38jOy3k5L+flL2fRgf8Fxj8QHRRZWwhDSGY2ojXlj3In/1AfyoJoN4Sv+og58kndEWKRQvRAdJv5zmQslfpcWwmo6A+G9sjdR1i4fXcQupyXTUbeC42AxPiWZPC5SBDJgTTJHhpUsCnOhgbiY0vmkNzoea/b5DF+aVTE2UCc75GPcPp/J5iSVjQslDSC3mk8CLKLmgH1BnPvUibmqOfF87lCefwVeqH/2C5t4HHhX8qylGc9lfy3CGXkvtJohfvf4xPJU50/3SOk2bwmzSD+6//PgL9SKFm9iBZwcmVK31IPzMPTKrxz9uh/7/eMKzBbWoGIHOLbyNsJHur1udazKQowpkjSV5Mz9vvQPm/I/5ZUw6CFSgwkjJRPo/NrL+FDeUPOWWbVxzWSELPPPJVPQuxCCGvr0amwJLM6z/7iX+txqx6AjvBDP/xedQUQz9SnqyrihUsRw5Ie5UxhALRhPfgwjUwDpZDCkCQgdqISuJRt7RgOTrIBxCdMOJtUoNenXcRIIo5NYSIkQpAySMEULrciBpwT50LUGxGYSWFj4/UEDZFgG196ZFj1YqJvCL0HI42RBuv4TElmV9oZMTdIWl397k9bpMfDe2sgi9ccbqD8Nh05m8sysk5BSL+ZugQ/22FMlKbBd/L7sFrx/rtk4Wyy+Y8u0XOUIipkQOfNnj+2BC4emLhY60BgVlVZAtiZbc+2J+/ffL8oCtN1x0eH1Xg0m021Vqv1sig0ABGnFMNYAGVRaq3LolyIW69rfVl7w8paUUqtVquy0HVTW2tHo0knIEArJRzNKlDTNOvVejypiMhaB5AT0Yp7ygztYRERcckC9kEtlBZj8IEIBT9qoljQJ3qVJLKPnqUloR7W3HBe4uUNQuk0dpJLwYJk8olWw3YjIqGYY40kgiIeRBZmjjkNtLFdUaiqKtuuAbBar8QJEbt5q6ejpekU6YmmW2++c21nCuCFF67p6a41B647bV1zeHTv3v7dT3/s5Sefufa9V19/cEAAPvHJT92+deve8aNyd3e9WLbmdGs6Pp6vLo7G6/aoEAZw5crV08Xi7oO7y4revfPGZPqZ/XsnP/7DP/JPf/ufjcrJC9dvALh49Zm1rW7u3x2N9dbu2BlXd43wdtc0o1IB0EyGBCzjSjOja+r7R26kVp/99Kecsw/v3wHw9/7233jjre8ZjO+dHF67qj73kz/3f/7OPzt5cHD5mdXf/ftfmT88AfCNP/zWyzeeZi7W7XL32lV69iXj/sP2k08UjdWTK/ffeh3Alaee/dd/9N1//tv/11//hZ//vT/51pc+9aVX//y1T3z+yz/zI5/5w29+B8DLL376N37r//j7/+AfXLp0oagKYSLhQpfjcrw9dra1AN679d7+Yf3U7lMsohw1rdGFFsd6pEzXdq0gIMtkrSHCeDIKgkdQViURda72S9Eao5TyWqC1VjGUVgDVdQ3AiVWsiMlaI4Xemm11rT09Pq2q0tju6PDQ74K6aUipqqy6rtNae1ekE6cUKyIAxlgmZcUVpaqKqqk7a91oVDhrBCBSAJq6e+udt+HkxvUbuxcuEPDBe+/OD07L0i5O5gAuP3G5a5rVaj4/OlqtF5ocKW1bp5nnx8fjcgTg5OHRe+++v72zc7Q4aNp6OtkeF+PV8qAajUgJuRqATwXLTLYzihU5sa1sTSbLxdo0HbEFoLVuWnMKY5r2xvWnv/CZz7VtMz89efOdt05P536nPHHlyqgaG2OEMB6NikJ3XVdWJYjKigEUhbLOVWO9Wq61JoCJ9OXLTwF058P7D/cfAPihlz+xvtCs1usHdw+NbQEiqMCXk8gAxYukPtoqiKwgJTNJXuD8gSD4Mv37bJ0UDBj4Izgx0nJoa3jtwcvfELfgUlWUDmSIUEgn2FeefqPeaRt0zBhb4CsccDtBvK2MokIWqkvngsPBnCGC9tEUG0o8BDUyidXHeNYeUzYf8DMQ/xBhj9EjU7azlojY+1KTDeLjlDkbn4f6aOi477m+ZP/PlazwQU/ufsCxcwiKZSYiCP7kdtCCEOmwMcJecwSRl11RBafQ6dSp1L7X22OYECVnKhCR73AUKXupXw3n5bycl/PyA1a0iFWKrQgxiQtpccUKETkHr1KTT8wNF+w6HzHISQkA4u1mFJK6W58UTkJImATzgEUpnQIPKYKUDsJaxdO4zpu+4dplESvGS2PFIXW9B1+I4IittYAwKwASL8RQyqeYjwCT9FJoEBA/lDjWWaI8mN5jQeFuhyQvGETEfZ63dHJHKQDOWkCIeWA0DvUcK+Izg4iIYoJPdA/HxM6rKQIm7t2dEACaFBBSijgR8gAFQXy3A8TAxF5KiUBUULcQDaiArDjnmFOOGQqAcYYBCxBiw7y/PuI1CeP0B4gouKUFIsTeYA7UEH+RAIGJnQ3HcpkU+wtQQ1M2M/bjTIkH2BK4EeYtajyS6SnIvM5EzoUgQ2LEsyp+YbiYH9tjyfBJ9UD5/RzJtSqIJ88j9hoUu9BH8hkHzAIrmwAAIABJREFUQhRVfC1eYg5/gqMPlfSXM5NJkUmBkMEDQGBiVl6xYAqqr7e5XTgDL/moiYgRjXEX17SkmxSidps6HPVgG7zl0bsLf3M0CD7qIdzTAAaIHYEKTcZnGZI4D2EKiAixq72S7rdNj7f2fgZfuRPRSgliDBcEYOfE+QWcFD7/FRMBzOSMozjzCBBaRELD0wT441H93qaoYPqLfZ2kK4MYA33fq/VOIAyw6lc/AMUqkjBpk6nB8EncH+mUN8E5K4aUprSuAjHI1+fEDEgEEBjkj/4ZGujLPudA2KUJiIwMMLGrkNHCiYO4EHsuAqhokCCL9ySiwklHgRwqaruBfuzdGyF+NVg2YQFEknuWyMx+azhr01aMMx53FgVGkUC5mGwDJOx5h++wiGVmazuCci7s4sAHCEystJZwg1SUTIGxO0CIlBPLfvf591Pss6RDUYSYWsq7xhSUs6ESn71WRFgpxJuREaVA+FUAAStyXmIKO
Wd9/1XKDwsSMFFALyV5LBLWHvaxIkJnO6UVETnjRMIhPxE4a/2shpdshv5HLMwz2+ikIQHEuRCPGS+fjYV1QXW9hpOqGnWm1QUBMK4RmljbGWNm21vGtbrQ5Ujv7u2s7i5NYwG4ChALEX83zLpe1/XKOlFaLZbLolB+XyxWS60UmDQVLRlr0dRrY5wuS2t8BKU4Y4hIq6JtDClWhXJO4MTmWxKS7S8KrDbcpgqIQJFz1ll/Q5GlXrpKYgDRQ5qRwolfNn7HOWf8uvTLyUJYqaCK9OcnHCAEUko558JxQEIHE9phRNifWFFwOPgdoyhwV2YADCYia4x1FtRBZFm3i9Wi14AAwDqWdr6YjEoQz233xJWL799+D8CNy9P79z7cqth0rHQ7GY8F9MLHnn/r5p2tCuOtywDev3X3G9/502eu7hjT3L/38Jt//s1Pf/xzs8kFZbo7925f2N0BMJ1uj/Xo6M7+U5/6+Oc/8aOnp6cXrz9BZH/hJ35mVOmdnQmAxemymyqQK8rq9v78me2p0kZr6ELXpy0AJyiLWWPaput2drcc1kwtlVNXu4tXn/9w/wjA7lb30rMvE99UxZNvvv7mj37+xad3q/bkPkY73/n9P/krP/05AG+//W4x2luulqx0AbGKuJqU1da94zv1ej2d7gAYj8q/+5/8yq//77/55u3p62+99au/+p//V5/+wv/6j/6Xa5fGr3732wB+6a/9jS//zNd+/df/2S9+5Wfu3n24u7e7tTUjVa7XzcHxyd72LoCyKpvuqBa7Wq0n5bR2tm0NgZxh4yDkAIynpTUignq1rson67q2DkVBzbopy7KuCYCuiCw7YzrTTfZ2O9NBZFRV/hYXAJXSSvNqtXbWMJODGU+rulVN3S2Wy9ZYAFU5Kivq2tp2TVkUFlbpEF3r2Tei0kKsdFkJSbVVFYWGOFZcltqZFgCcHB8evi1vrderUpXr5frh7QfMTIzt7RmAux980HbNzt42E2n2MaF85ZmnLGFna6bKCsDl60+RyHrdEjMR1027XK2cyOnpXES8WNHEzXrddV3dsSr01mR6cP9h1zrDxhpzenwIgMpSTFccoeHut/63f/LZL3y+HI8Pjo+6rvOaExd61a2vjJ6s27UTWa2XxMLsxBpG4fm06UxZlPWqGZfl269+bzyZOOvq+VoEdd0d7R8BeFe9e+nJy6fr+fYTW82yMZ1jVvVqbVpTlqWIAUCkvIc+onVBQPfM1+/ddL9ZNAwwLEG4evmSEL0MNor+oT7IkLz1IkKcvId9tSIQC1KIMRiR2QWPi0Royjs5pfenBbkkFDmcir0YpizyfFOi5uDiAMWzNPa5m0QcBeEZLIgk3uL9YJlPKKhD1J/OCNy5H3Uv2SmON1gK1lkiOBEmImKBI/ayFc6BiZnDvWSQ5F9PEQ8Dhy0Re8WJvXHgAEAzk1YAxIoVR3BEEIIxVpw3iKCUC5PJ7G1SJwInSimRJCoyf5MAFC6sIwoXD3oh6yCayVl/LoedDQP0kL2LcQle6ASXsBBxPHkDOGfh4J0WQMgpIhzvn4wiTgQ25jISZ2xK79LnuY4a7mMCKs7LeTkv5+UHr+hNb85fVCQXsJvnCoJVy+F2ANqsO4U1JfO+P2PQ408pA2IENh7XwyA8kjgOLDpZJhLUBc+yEyp3xofXt5Qi/Ycew6CVcJZzJfNUUoZMfF9pMBiG/2Pj3nCBUDgNlLfvJCTtElLJTqN0VhYD8lCqjDIcIFjIWfNBb5CNt852NSf9GfgpEoLJY48SlbtAmUB8ZsTbBOksjfoDCgEvYB+kdsZTTRjk+U6DQIz8wV9QNqc+vpO9SUlVlNy7Ohh7in2kkLVAIg5FEZAJjyf06LErGNGb2QNQmWJxZjQUscaQayzHcSniVWkRpkC+bGw9HJqGEuYyhO0hasBe1Um3XdPmKvDKGc7MZVyoaUdv8gfZ5Bmb1Iiu+0FYALLk4vETkTSivr6hFxybMxdGnwyB+ATRcPUkbzptbgSJK78fUn6He69Fx/svkJhUv/KVCjHpQwKRAIo1RX00TErki+wB2hRJkXfjLBvOlmDkGYPZinZKb/DkvqX+yYwuTjxg59dMZLNRKe7JEieFmKTHXimui2zeJHHUZPnQYPWkvkikouffoQtxjcfG032UgTMEcRTMkrjgo5RhlrTmQ2dSEOXGEqWzf8V/khND8s/7nZ39GDCRuGKYlRNHAiIFiL+oggTMyluAks2Zr6h3s+WdIhDIReKHaYnBtkTouq7QGkKKlWXyt12v1iulFSu2xllnR+MRERljx6PxdDJdrBYAqumoVIoVK6Wssf4y6ELrUpdt1/kOj6pKMaxztnXGOOecMevOStcZEfE3gVSjsRgrkP4spYd8mR633IJoFWAQwpMyvXj/CvJ1l2/WPFQ3rrFwxi5+gAgheMgyBGFKmiWJNCZwoH6a4iR6yd9vIyLkRMKdqwSy0R9prQfHiYNbLkIlMtj/IiCMQKtCsxPSdHTv7n/0s7/y4RvfBfDGWzdvXLvAW2NNsE66zo0rvVod3bp/63T/7pc/uwXg1jsHT+3Ofu8b3/vRT+sf+7Evfv2Vb79399FnXtw9ON5f1is5ZgA7F1pdlh9/4bnVfHnlysXr164YY95754NPfOLZhw8e3b39AMCb9x7eW8n+4f15Wx8cHL34xKW9SrdGGWt8UkKpYJ22zjDpxaLWhTpenF4dX37tvXs///TH97bHAG5+cOenvviFe985+sxLL54u1kf7+zduXPnHv/b7f/WrP9fUzfHhCYBnn7168GhJJEqV6w7WmlFVmc406/qpizu/9JP/BYDX3vjgR3/4Y//13/mbf/jtV37xK1+9++GHP/3Vr/7IFz/zx3/8rRc//hkA9z788G/+8lf/0f9863vvvmssdnb2xGkRevPdd199+63Le7sAuKumk6mCIlbEVI2qtjHk0zjG64+atlOk2qa9dOVCU6+d2KLQRVnUTa208ukGuq5TShVlYawxxvpsu1VV1k0b4spJiKiqKr8K6nrdtc1oVN299cA50koDaE1dFlqxstaJNLoqJICQXqfkxCucWBGrS+3ENU2ttdKsmrrplmsAo2l5fHi8mJ+u1+tSKU2F7bpyb3L9mWuj0QhAOS2m4zEr2pltAVKNq63ReLK9c3J62q3WftE9mp+Yda1Gk6ocaa12Rtvbs5nWZdt1zIoLAlAQiTFK69oY65wCDp7Yee27r9Vu/fLHX9rd3QXQ2E468+DuHVWV8+NHx/PTibjOmELrrmn88i60rqrR1nT26OjA3yzPzNbasqy8I7epa2dFnLRNo5RaLJYEWt26bVo7mUxHZQngnTdvCux4azaupuOiItGj0fT05ORg/8jZXgCn4Oj/D+X7PR3cSpTkWNqyvQaXWMxGpPymthMywHjMMekfBAa73qHl9cvAxjkmifeeOcoGR9GKOKPpZIBn1hdBBiNSlCLSy6jH9PixFEkstNcj85clmgeZnI5ydsCYg7cRcYA59TalQaa8pdpiKIcPUwymGqHwktm7UWOWSJdolcIYokObgsFC4Xbz
MLtJKWJiivp+wqaFQoiEb4CYYeNZlcDdowqSjYtCjEiMsyAv0SlpenElRbUkJwHCQPppyLXzx5gN5+W8nJfz8oNRdC8sEIVIDp9sGqjp8d76itahfz43+nqPZM9Jc2nTa/joJXE4a5ygomCyZoZZNCCBeENodBpmiACnXCIIcipHl7JB9daet+L7ZCVIwESe1INyiqV8JYl44ffHiYVMsEAiWTakjcToyCgIhcFBckmMEAx1RCM8+WGjcpSM0AFAkMY+QJeI+koTdpnRKYWZ+RuHEoYahh2hBz8/PvNmfA8cM55E7YWigRfUrEi3nAgZeQcfe1+3hBMKEhHhHhZ5LLlp+AlhuAQxnP6I9UjQQPsF1aNJksYb3gtnL2Q4MQh6lkSV46PUXQnbRXoy+XaHYEMaKlF/3obygXM43xHWSpqFCJOmmJ8E3ub6a3Yg/zHUzMcbQsvchsYfVE8AEBdIl+DehJcmoC+DEZBOpfYwQeiZ1wmj0ho5UEzs7SOPMtQv32GhZb9lEQ8TJaSOwmnM2GDfeN4/QT72gF7k1gDFeY6Lg4iIh7B7Ft8r8baVZBVEvdjPko1xYskw6IkqQMog2UcY++DJ0J0QNpun4T1DEvi43/h9Viif5bQvezNBEI/E9uQN61wkW6+5CAhbVSgZV0N3foJ8+jUezu32kGU2lo0lElNJ+kvpexCQ+gC3fo0leqa96BNKpB0jcW8jErZPCxLf6Bfz5nLdZMzUt5dt7L4kQyxRwvW73ieFDb/m9hslmmTOsWBcknf9+Ks/+8XXY7R+gF3XWud87kgQ7j24R0RlGW4Ntta61kKkLIp02sA5y6yVUuIcRNquA1AUWjGPR2MAZaFX65WIq6qq6dqmaYi0c8JcuFiJiHXeeHQh1kaCXyPLSTpgKP6tlPczzUFgBKq/kjvnzn4pcAw1Cf+5YO0F5cazL5cOaFD/ZKgRAnFxwnMUIisx8Dq9J3Ce1qYzCai31hLAin34DEvM2wH0PIIAQU1EzjXklJOLTz311ptvjpUGoEd6b+9CpagqaLo9Zpjjxd1/9Qff+OVf+sqv/fqd3//OqwC+9pWf+MznPnHauMls5/0PHv7qr/zHD/YX8/np4fyR0sxFCaDtzP6jo+evXX308H6hlGnN/OT0Yy/cqFfN8cHJaFIB+JVf+WtzbP23//C/39m99MNf+Pz1q3vzg5PXX7/5zodv+9uNn7k2NdYSMylV101hVGva05NK6eLB4cnz4x0ATz2xu3txz9j/h703jdU1y86DnrX3O3zzmc+dqm7dGrurqtvdbk9tm8Tu2MhDHCOcIEFCiATiDz/4gRAhRIASIQRBoAiJADKyYysJlgNBVuyAExSU2MZut9vd7m53d9WtW9Od7z3z+YZ32Hstfuzh3e93brUR4odbOrtU95zzfe+7573Ws4a9Fm1NZqSt2HZQ6EdHx4+ePL6ys13XLYAre9P3zu9ZykQValicL+tFtdKDsXDWNPadx0sAW7PJe++8Oyqnpm0ms6uL8+X//g/+0Q/9yI9lTfPZH/4cgF/+5V+pW/m3/rU/9/d+9Z9szcYiVNcNQ2d5vr25fbaYA0AlZ8s609nCNsun9XAyJSKlFBTVtdXh/kTV1INhMd2YANYYZayZTaZt20gwEogopTL26RiV0qo1lUurkukCgBUuBiNjbVVX5aBs2mZQlgSZbo0f3T9wF7F39mekaXFsSOUgK4JwBYK62/wCAcqisIZZWnZuboqYuVo1DAtgf2fr+o2rAjubjHc2tt55986imb/82ktX9vabpgJw47krgG2aOsv0aDiwlrlpzk+PxqMxFQVrDaABV8ulQDfWLFe1znIRgiwb0y4Wi93dHQBL5ul4bMQUZUZKtU21vbP5gz/0A7rIh2V5dnYKYKvcUJCtjVFWFruz7ylH42VdfXj/br1cubNarVZaZwSMx+P5Yp5n+ZPiadtaERHh6cYEQFUdrprKcl1m5fn8RBSP8mlOmc5oOZ9rnQOYTEeLxeLo+LQoR9ONSZmPi2IgIlasVj6Ld9vavin7WSWlqc/C6j1koyiwlciywkORKCfhAaUHy9aLSrLcuPoCIZAYqiUCscS65LlQuNni2+0ajX8G/hVQScdwo4Gzi3kE8i65HlupBB4GppUw+95sxZbIDYPiDXRnrUaC2hBN6eE2eqdgjTBDAoyQlAqjL3KtlyAOKAFT4igCgk9m5hwV2ZN/94TE/sVFkyi2wl8dc5U4tBbtjSJKKe/cwk4m8tJrrpXzBe3Da1BM9Rlhg4CFPXz1DgCx+WRt4zxEbEGAt813/wmkA7Xp05flslyWy/LtU7LINeJH0VmoU4F0JeHEz2DjEpI/B9GgryOAwIeRwoWkJMzRYtWZ9QQQuOu9UYJPWxVwKux30mowbYVfEd5eqyDKNpHvkhPnUkacuOR0KqpUrxKNi4JErdOfu7WJDKpSSXhWGHPXNKWQI1y+cKq/ADJchDOvhsCz2BEl1fsmCP6WqV8q6h5zj/R7hmBCpG6+KOmrkCIwOyOhdDkz/EAR6pT1qSEP4hIhW7wCuqcZcQvkNJzEXked6FtdwET0tgCctBiQXU/QjTPRKUGiVB+UJk4CDsbNbtnQ6SnDQCkcFYnVotOVhObdxvcTFnUEPs9GgGQSQZv/KoavI3RbmgOA9VPMSV8oiNoCJAlQCEkP14t3dfyIres+6CbWo7rOjyNODeKExEO1buFOZyS21J1PCgq+WF+YbOkU4b7PiuKMRSHBk4iea2vQOTildhqkMvy2Lr6kQBKdlpeSfseXQ3fQm8J0t3mVBoXOuEueFNzcXC2+jyEVidtfRIn9x9NW+DMfvBsSGLs+pX3aK2u/UrIi1F+bC0U6VWUKkcWfH+eu22Uni2g6TpdEf+2eftEfo3g6EE9XD4gHeiDiwzCFmtNJRYfQndtCINhgy8HPIYgWIgJRXq0bOAiCyxricvUnNeGKAoJAAeySM1M3q72NizDwjsd2vrgiwvDxOqyxAjjfQ0UxZGZHfvp8Kt1p4ZtgeOhkXAo7j6CVbttKkXJaa6eF2dve3dne/vLjL2Uqt2JBQqTYct3Uw+GobWpXnbWcaZ1pzSKWWZjrpi3zXGeZu1jaWmss55lWSiudrZq5IiaoIs8Ukc6cxse6IhBSpJRiv0WC+0oofss4CTxqZQM5YXZ+22nA3d4u6ciuADFiseNWpLyimEREQtAxohAZA5Dooi0QYSGitjXhu/7ZcLucvQU18AWiEHUBjkx5Ru8EZ2bx9q6OlLmRKNTCQ0WNMYOMPvmJNz7x8o3Tk4cAvvwHv/f+3aevXN/NMvX0yeMvf+P2ydnxJ1569ezU/sW/8K++dftdAMVk6/btp3/iB/6Fw4OTg6P55miiVF4Mig+fHMHy7tbEDclw843bb7/+6kub21unh0e5VkowHA32dreWVQ3g8dMTvbXF1g4ns9/+/Bdu/St/CtKyrhZ1PSgBl7KkbUgRZdlwWA7KQXVmBCTChydHrTkG8OmPv7y9tz+ZbJ6enw9Kmsw2RJSwnC+Wy2r1/HUF4O6H1e27d8fTrdoW7zw
6Asl7959snVXj6fb5qv7K218B8COf/YHTStWmXqxwuHjy6TdeHqjR1//w61+6/WXengL4xGu3Hj969CWY7Y2dxrSHJyflsB4MRotVZSwT5QCyrMi0ZebPfOYzj+4/ffLoqLXWEhXjsiyKtmkBDIaDalUNRyWzyXJlG87zvGlbFh6U5Xw+B5BlWZbppjE+DAxBaZ3lWmdK+2OrrTWWLTNrrXNkRVkAam9/b1COT46PARRleXx4Yloucs1RT84CF9Gk87wTJbDWQEjnuhwUWZaRYDAcQDOAls1gOLx2dU8R1/Xq1Vdf/OSbHzOmOZ+f7+9uAhiPxscnh7PZeGtzg5kh8vZbbwtoa2drnJVWaQBHpyetbYeDQoR1TnlROOeu4WgoEGMYQK71YrnK8ozrOs+zpmm2NjfbuhoOR8enR62tAYzyAUF2djfzPF8uV1bTqqps29bVajIcuQHNJpPGNIvFojVtWQ6MsZlSeZm1TX31+j6AT7z5hgisaRVB2C6Wi9WqKvTAGmOMbVunESYrVhdMlJ+fLU7tYj5fQEgEQmyshXN61UXCxrpTe/FyTvIdnlE8XfE0Yv3l1MchknUi9KnZepXBpJUCnMA7PHuiwDUC8A44Ixi5gkaTYmh9BbpwXyrAKurgv6NCjgJ69C3EMSrRxcQonTHswgS5YfQgip8fRevPIJJm77PoAhJJANLB6CyJMTyw3LgILJKociNH9tAuQg0REWIIrBCAjDLP3b0nIhwi5G7+10Cdq8L7hUQ02mlf3ZVykej04eOMJQMKakxfR2RwDo8ziwAq5eup6BD5e4BMgdMgVKKcROB7xN6j85lb+LJclstyWb4tiotV/xHMOCmBHYY/U+MWIjPywl1w4XH/9K6++ZBfiDq0wC/Tm90S+W5UOyHtYwDz6CQEV0d4iJP40xFBJKq27vO+GERhLjrSnrr3E5ETaHozE5Vq8IN+hhtF0lACFTr3ts61hggh+UWQjoKEnCgNorcIJVV2Y6JURvXGs+TV9QsW3fBddWvidPpIHJ3Euyuuf6HORCfqvXF8sP+uhqjmdsKel6A7oU0o9j9pP4zU/1zbDx03dwgrvEfPWguESUphW8f6w4eJTbYn+CYKLv+vd/Ps8BCCgOr8Z6irIf1JAWV4LNHputbkXoo70E15jOgYkBLiA8rvKqy97wV6QtBRrO2AVK2OOMXd6xK76+pwIXUQD3sYLoFYbNg88eh1hANh4GnfOhIUKEoPl1K3kA6zknKahOAo1tWeHHsJdm3qNBXMTMkadGhZxUCccbz+HieiuJBi39C3kGWjqzWOgLpupIETwJZ7MwCIcFAhqbA1IiXy73mtUtDXkw4UMlnBqPbsJkQQNDnxuWS3qc7lNK74GlmMLytFUVuHQOfd1OEZJa5d58HavRt6IuLD/qLbJP5GeLf7kun1tYRVT0xJYQr8yvmn494Mr/epddjRiNQu/L1+XKWrX/lD4DXIygWKDWaoIBN6q4onwAkNC5zPSznR3JGSuXDOqbcSCOrx3iT6eex1NtDP8LAAaNqGQEopgmKm5WoBwLLZ2dkdjQZ11eZ5DgJbMdY+PTwYjyZOFC2LohwWtjUiYkzbtq2xdjIaW2vqpnEtMluXXNjYVTEYZHlpW5tleVW3LGjbFkCWa8OWwVAgrVywauLUr7ab7DWWhhg6QgAl/n5rKvX2KZ7SytFH8UtO7O0zjM7bRTwRg7iozYEX+w/d5T4SMcZ4XOFobmS8SFUGFP4VpVzgthC+07NvEMHlVQ0EuVO1ugongiXsKNNiqn/+j3/tX/+Fn/3ffukXAVC72pzNzhers7OKpZ5ON3Nlb9188cG9k1Exc4mJ5yeLV164eXRwcOfd97/zU5+Qtl3VzXxRVzWubG7prATw8PD4ZHGiZD69P3n7rfe+481XJpNxrnW9aqaz6fHJOYDjs6MXX/mUAEUx+PJb7/6M+dHhcFaOZkWeEbUAGmsHg4HWalXVRGIM8mJgrM2U7GyNRJYAzhfL1kLrgZXmwcHZC+VwMpwW2cAKrarqweERgPlgtGzM0ZOjVsqaOc/0vceHJ6eLF1/eeny6PD89BPD08DAfbRDT4aK6uj1Y1fTo9ODK7jYr/fDuAwAbL7xirTw4qZ48OYDSBhjU5ur+aDadTobj0WgMYGNr/7e/+pW7k4dHJ0ePHj5UVBKhqStkINLOOde0ZjAst3e3l8s5QwmkbVudqaqqmrYZT8buGcumHA6axlq2HBZda+1CH+RFwWJ1lhmzAMlkMiFFEGrZXruxXw4zAHfeeg+CXGXM0jRGZarjTlFtISCCUlRmRTEssiLzgU0tZ5nWKgNwfnryzltvbY4Gs+l4qPV4MiqyfD5vIHxlexvA5vbW9mzsbNg729srsYvVQlozHg7nTw8m27sAtifj06MDrVVRZPBJpTSzKEKe6eXiDMD21jYptbO9lWll2T568uj49Gh/Z397c3M2mVg2AKxpRVhl2XJx3tq2gIxGw/F41KyqYTkA0BoeDkbVanV6flrmA9Ma07bFcKS1rqrV6ekpgNdf/ZgxzWo5L/OCiCajfFAOWovFcmEtynwIoKqqrBgeny2OT+YisCJta9iyJp1nubGtO7iJaaijk2tc6tkYGAlQTj4MzgKet6TVR0nEfUQXGW6/uGRi8QZDxIlu+bu+ocMhQW9GgZ3HJFkSiaVDoRw5RvzYY8PQ/wAaOzaVso410SXF4WsjCnA8MFlCF+0Kkdp5zis9dN0Z/ILbZ5AIokaXcLFhSSBpAk3danvM6T0WoHzcLoIL6RjeR9d+6EaQE7rokc405WJbioqAwtFtNuw8VOHz0LiuKteQY1IujL6faulN6rqzqocS/rsIRSJUjzC21/W4fzqYQhHnfvTWuyyX5bJclj++Rf3Rj1yWy3JZLstluSyX5bJclstyWS7LZbksl+WyXJbLclkuy/8fJeNoogHW/RKST+N3En9QuOXmi3fzIgoeIuGOQHo976L/CeCcRKKzWmqHTNyiUg8IRd4b0vs++H87g1Hs2EXHnc6jgryTxIUHfMxiX1F384284bPvsYIQ0ix4NAVHnGc6DXlbW7xvnBorExcP6ryrkPhHBYtf32QWf651LHzr7yYA6WyiWxiJ99nDn88s/u5wb6ZCPpfEDC1JQ/HqfrCzumXyvWjZErkg0dFBRBCDl0b7cWIzFn/pDeLcLpPN8dGekHFPrD8gwWGp+7s7ConbaufdFN3cYp9irJ++T2JwmfmIXZhMIaLTo0+ojXCW0jaRLFAXGs+5vVIyWxfdrOI57Y8lmvzxzBX3W1nCNHVuWemq919hFwAgLH30eezcDTpf47T78QCtAAAgAElEQVSTvZ0f9nG4JNPteYTOxG442iWIZv1oMw7dTfdFcA+NbYl/hXyXREIAg4Qa9pwNut6GlSDvCUDeUB9JT3pFzFv+fS8st27XU9wfYa78XXLxntDRKRtwQXWD40d0KRM3JgoDTO3v3j/gGZ6yJPC3YCmGFIg3rdLtTfEvglJdcqNunxIpotYYP8kqOh2ElQ/OEeGlrjfRSSyQcN95os6h0D
QJo29bYtjVqoHVeFE4l7XZXpjMAFpxlmbWGCFmeMVutMxCGw2GR5601ABQLKbfiqm7r7Z2NK1f2nz46Ho4GudYiCoDSmq11SREdsITAWlZaZ1nujARkrQCr1cpYa4Wu7l6dDofCMhwMnxw8AbCqqnc/vLtYLq9d2a+q6nS1evmlFyeTycHJySdff/3gbAFgb3fv7Pz86PhkPJnefXDvxrUbWqu6rmbT6dHxca41gOlkkmVZpjSVpdbqxRduzmabg+Ho8fx8sVxmeQ5At0VWyHQ8WS1XH9y9t7P1/7D3psG2bVd52DfGnGut3Z3+3L55777+6b0nPTVItEYuEDYG4jKJQ4zAFKEvO6lU5U9CnKRchYlDGdvYKYPLFMTlpGhiAjhgQCUpEq0kJNRcPV7f3Xf7e0+3z+5WM+fIj9msufY9V4Kq/ICqM6V3zzl7rzXX7NYc3/hGM7d6Rc+a+bwsD8Zjt7FZa+eLxXBlUDc1hJi53+uLEWNMnuUAoOT6zZtrayuro5XP/Mnn3vmud5zZ3tzZO8izDIReUQAYDobW5rVpYMAq77ze0URJR4haCX6F6aeUYCtEeBtFRqwk3fz9gwKK8yjjXkMnRcCJIGSJ4IBOV5R3UWs0U3kY3IHHoZFBNUiQWPzci3yh4ON/ROskuSOoPpS2JTzAd7mTgWe5p1Fm3fuQe5GOLP8RxF7sO8UBDyg/SkYRBDfDgJMoyuhEF5ClB1HaGmIfsyfWpwZt2yVRKMAf7hqNzQ5LxlOPPMnsq3dnXguLr5sJBBsMeC2kCA2XAFBC63xYSCt0oOL5e4IQ8dHOtYdbBAgRMVqj/JcD8MfluByX4/IXs3g60tFJrfQJ+pr/9iiJlmh+6PwqnhIhL8RI2q+OMt8ksietp9XLpaOoBOFu3T7PQZmRRIt1MjwSMpLU1qFDliFIIEWi+ts2L/Tf82Ud/dZnjIIXbhSO00R6K7UPY6cs+aSCDqS4azg+uRvUEYVvK+ODnOsESh9hlqTlSJSAjzwQAYJuH7AIsycWEunv0UbKp6VKORN5dTF9uvhEee6J1uv8iAjQj1vQmSn4nrhRoUAXJjSBJCRg4ioKcsfydAiv6DwKiA92ifbx6N0YjdJ+OMTllQ5+miJwtK1bIaxAAuM9HSKOjEPUDm/iikdhNQZYg8BAtCukXSopreHHo0O6JhPrWZP4jPavjsOjH+8wtsnYpO9Uuq6oexkdgbbD9KLTEY/OW8N7Qi6kPe0uRoe4kt89/mxfmeTadtXZ9l0Pn6WPiz7UgmQQkzFYWqlpz5bPz/kyFC0ltwTjfEL1uxoTjHw0Sk9gNnVcxmnpwntuS1yMKfxFcefsPM3v8/50eD9CaLd9BGR7z0wlezjid0FhCsduglp9IWy50ePAiZT2fusOfXZ0FDN3Y6sDb96qSBQ2Au/vFzK6EhEz29pQ9GZ0L094UMcElpRW33IN81weqcjkJuywa1JnoXRkS+c1kJTYEmGipXXqtjsGhNgfmC6RkHd1kzVCpKqqAtPq6ur62ppYO53P5tM5AGOMcf4YAjhil9ypnUlOfrTvc1FkpqkJZK1UTeWeNhqtbG1u3bpxo65rgHrFwB0dkxdFryjiwSxZlvV6xc7uflEUmc7Ksmr6jVaZtzUCTLo2jWKlWbueu/zIXvY70aCUkxF+Ats1LtGZKExIu/qVP+8iMUQxiTEithWyFP+Xzk3yQoXF2iq7QfSFTecoRoHcFyQAszd5Otczd61ihggrZzBzodlOflgin3KtMY2IWDHWGqVU3dRVXRV5rpQqq9JLT8XGNEprI2VvkM8n84fPnp7Mpz//q7925swpAKe3T1mDpjZMNBz064Vxr8juwX6/V0xnJYCV9Wxzc/hv/49funj25NNf+e71k33TM3mxYW9fu/Hym5fe834Af/iZL9x486W/873f/RM/9j+ePHN+vlf9ym9/6OTJM72srwh3JocAzum1errYuXN3UorS2YJXbt+6eutgfHt/9+beLoBxWQ5uXL949sy50+ezXE32x3nez/uFET69eerh0QqARx66NJ3Nx7NZVgwOqvlkfAc2X1P8yv6dZ7/qG5849ziA3/3Ex+10+vCpC/3BWq+f/c1v/1sf/8hH/sm//fkPfs/3PfHE+ZtfeAHAJFv9Rz/63/wv/+xnn3/x1QuPP7qx9eju7TvXrt2YVNVDDz4I4KEz567duLq6tcnMANVNYwWzsqoas762nmcFgJ39vfm8XJTV3Z2dxx559OqbVxvhZlGOhsNev6iqCoBSGQjM0JmGc2msG8AyK2YejPpuSff6RVXVg35fK9Xv9yFgZoj0egWA6XSenohV13Wvl4vYuq6V1tPxAYDV1bWMWIiyPJ9Mpk3TXHzw/HyxqOa1ARRnbnUZa4jQNJaYlD89WawxEKtzDUBpZQwIIOJX33hzOByWi/n22iqrbG1tDQBgjTHbW5vD4RBEm1snVoaj0yfPqKK4cuPm448+CeDSQw9fu3Ht5MmTeZZZY+7u7ThcXVXldD4TY101LNJUZdEr6qq8fu06rCWlACzKctgYAE3TZDo/PJzM5vOmMdPZbDQclmVV1fXmxiaA3b29qiqNMUqpprFVtZjPpyxKjGQqcxv/4XS6NigunL/4ymuviMjG2sb2xrYI5mWZZbnjPeflYr6Yr45WDvbnnXdfKJpp2f15lNzs7OZO5lEEgN77MeCEUMGShTbAO/87taLNn2YlIhCtdDQwdSRnRLhJpcsAL4Q4deBN1CHE+fb5EIEoklqsjNa8eMS25q6LWYXckXbkrbScZIhPWydJGBx1K2udO5a2z+V67leC0AvMYOuUAPhs9iRO1EtItkERsQWZySq23A97CznCPLZC2RHDnEYg+ed5Q5n1HJ8Vm2JgZhYjidJAEPd/l6mjcXUTGCAHb4gcAW29gykJiH2KFSCcuw4hghUvNNnrMl7Fc7DHO8okcw0EHQYJXPmzjvtxOS7H5bj8hSr309SOy3E5LsfluByX43JcjstxOS7H5bgcl+NyXI7LcTkux+X/59IGazvP9OjSItEW1/rABIPbPbZHBCcdH/jn3QyDW0yIrLPSWh+D60HrkxPrWfrFnbVqvYkpfOMsXhZQ3k7lbEfBzZBa/4jlCpczbESvsdbm1r2z/TJxRWtDPGLQHCieAuF8ElPDoR9HESEwK7HWirU22FmJiYiJrWnCI2J7Uze8ZT8tSU/aEUg7VyEKE60Tle+LO/nBmtAyCfUEgyy7A7bTIFifvFGCe5BPKh076CPygj0ZbYPQxlW6ziN6X7GK2XaCkwrFf+KH7sEhXCKsqWgADZOaOiTGkZNoce1OZoiDEbjkL36AYsy+wB1E5NKCJUuRhISJg8EymSVyuQMQViMIIbrDGV5TA236AvlI+SS2FABgAc2E6NaZHMvgnIySPJruQF0wUxIH6pymXMQOhz7ex9ExDMB9vvpzmlzDZIaJ7N59v4d0th5JTdtdi307SN69qRNMFV0fjuiQi1VvHSWoXWjt8+7XsE4L2t+TyFh3cRu4jLiu2lcgyVjfGZe0S+mn7fDdMwXL7m+h
i4DfMOJyar0Gvf9HCIL3/iGhjeE2BIM9hSiqdhtImhl9B0ni0kqHJjrGU3jjABs8ugEQO5dYH5MYPEskjbul2AHvDCF+q3VeCO0YCAf3TGpXT9jqO61qh5hZiY+ngjt8QZiYmJVPCBiuj+tElneS+7xP7Q2dx1L4KC4H74VtJaS9jPcTBGCCa8/a6ur5sxeZef9g98qbV1w91libBn77PUba10DEJTxk4pXV1aYqm7qeTKYH+wf9YR9AURRZngFijGHFOsvquhFYrZWIO2ECg6JX5MWg19c6A5nFYjGeHA77Q60zIsp0BoAs1VUDgVIaAb02dwAAIABJREFULMZatmClGhhjLCsBoJRyh8/EVcTE3o243TX9di3Whz94b0Q4nyCQIiaF6CQUFtrSzBLIp02JL7ZfhAF/+H1FvJS4R7RSMrkioli5tjEzIMZaAIq5rmudaSJYKwRhpQjUNLVWyjhRDsmLvGkarRWI6romAismJlM2zh3G5ZdUiusKpCXr8d549/3vedu7n3jkuTdeAfDWa889MBpOZ3PNgLU7eweNMRAxplkcTifDAYDRePbGa69Pdnd/6Ae//32PnzdFLxv2m/LwpRde3Th1PjuzAuDZ9z1887k314vBt3/w+/6fX/qFk5eeKF998aOf/Mg3vuvrBhcfWJEXABzI6GN/+KlPfPH5vNd/5Y0rD5y9sHtj5xBUNVhUBoDlsja4entvOuWTawPuKZVpEO8flg+cW3330+8AcPLc6etvXm2MvXjyxMH+QS8fnbzQ35/On3nyob/6zON/ePkTAHRGZzbW19ZWL5x/4PXLLx5c3P1rH/z20x+98H//7M984Lu+/yuefjuAV77wp+fPX/wf/vu/950f/P7bV2+ePLktWt85nMhi/sXnXwRw6uzFGjQ9GGul52U5L0tBtajqxkhZljISAMYQgVcGK489/vjq6urtmzcP96YFZSurKxTSgDZVAyJ2qU3BUpnFvOwPCrfhDId9AFVZD4bF5HAKoGkaa6Wq6+FgUDe1GAPAGFPVtdYc15LSWjFVdaOUdm7jilWW5XXTiEiWaWspy/mRxx54/vIr5cz2B4BPa0ACa2yVZTkxYCwIVbXQmRoM+gAWdaW1KvKsqsu9/b3pfL65unbj9s08y1xr+73crf+mMZnOysa8cfXae7+KX3r1laqqL7/wAgDd6+3t7z/w4MNr6xvnRV557RWlVL83qKoFAVVdAejnmTFmMBhW1UKxMqbJ86Lf7y+KXp7nmdYAVkYrLhPsk489ee3q9es3b5a6KqtSK8WsAeR5BpC1tqprZjudza1pMs6JFRG7g6cGg/5o2M+0nk7njz78qNb5rbt3jdjhYFjVtTEGwHhyWJUU8z+gleNo/dEp7ghIoPeyNtBVDwJWkO6HIoihuHG3QCp4/EFXAek4CZHWIgHfBT9DiV6ZBIBDukAvHCIMdbu/q4iZOkcReiGYYqPgI9nqJiG4+0gR1Sb39ze2YRkOvgQ0T8FRtFXQYm0pGEj+bqHfMgJblsMSL5awI7fB6j7AhNAGKzFCREfoE1EYDY6jHyc4QoiYutmDkXBFe4N3Nk0hV3eBQAikmIyBD9VmdyGRECwj4iDn8ugBF2IUdgpWwicUzskRAln4wAa2ZP2J7TbEiXlpFfTxNk0EwgqDiLo/gD8ux+W4HJe/4EUjiFQQMUNErLEEggUxhwhcv33G84kBWOtzqKThDQDAgHVCRKy1ka4D4GOagSjVKSoL1FVlo3hFexyml7JJ/FWudXhykNCubRBlI7/hZYT/kgMlGp4oQeUw1ooISIhYwcIdGC0gImssM4MIYsVFowcYwFCuVpDroQAQayyCQPJIxD/Uxb8xMYNYsffPJ7JiHKMUXO4pyNdwKBsHx/w4eg4ziChWAKy1PjoAEIFSSqw4CNO68YsVkcZakD/4zbfO5VIBATDW/0bESgWSWcSKCSQckrDiFmYFfiPQMKGwj1NgwJ21AIRetwCuJerEWsPM4Zg5cYNJADNbkITDL8hzVwiEihCRtTYwFUIhhEOMdSs8YDTxp824KWuRnLjMi0ywRgSWFQg2QBWIWCYyKpBdVtp7JfCWESB7QAFAfHB3gI+R+oEbCWqXagx6VeRiQET8i0bsD6YIHGUc9gBSQhCk43sJgDu5tX3FAugWpPMnzFrEva0+5bqD/I0N2TBDivEWM7evVqgG/sygcEcEygFrR/i5RDC6OJxQiSIgJDknCFQnc45Pwu4gPisrFmKZmUSMNe4aJnZMkx9htzCJjDSkVDLWCERFON2oC+finmF9kkF/T2JmILeSbTvKEB8gnpzk2SoES6dShj0qssXWLrUhXk0gK/5MjDAJiNGy/jI/rp5kYbDLqg7xOpUnIi1s5GgCuOeggERrgrjBC5PGIZ9HeFxrjiBisZSmnZBA3Wmlwi5mowaVBOcSsXIbSIhUIjQWMXQLHRsL+9QUyT7j2mLFgEI8d/y8PVJzSbVwb00DE+cNIFIsECPeTiNOPeiuVICU8mqMo07j+Hu+Ne5O7BaKtS7vICjqiuEtEhHLpG18G7gVRhBRRGIp09qKffWl12Bka2v9hedfCAsARFDEAoiIJYhYBbbWKq3iYDKzMVYYOzt3FCjT2pp6OMwbWwEgsZnSSimttQgWs4Xb7Rewg2E/6+UAmFVjsTMeW6JFVYkVU4sQdg8OptNFXftBzjItFqax1oIVkWaxrJhIG2sbAFbIJlucIrckicidCNGuYiIFBeXWHxkmxaRCNl92TbLWECljGqWZiJqmZmInB40xPrWXNzO4DM1urbOTzKzYGIFYViIiRaY92DDWrbB4hyIuer2qKqtykWWaoayVjLWb8ZoQsIdlxcxoaqO1Vsz+jBSwtUYrxYpNYwAUeaaYy/lc+Z0NTV0XRTGfTUvbnCiGsBbW8nB46dIDejAA8KfPv8C6d3A439xYzZgMUFlYcN4b3rm621ssAGRrJ/7k8mceffpdX/HVb1+MxzIHFvXVV1458+CplYdOVy99FkBWjB586uGDK7e+9qlH33r63YatevSRnZtv6hXZ7PX2ygbAieniNz794Zdfef7i41/92mtvPHThXF+pFZXf5qwYnQbATIum2Z8sZuWtF6/Oz53eKsssz5valMOV4o2r1wBM5otzZ09sDFfLugH0cJDPGoN8+OjFk8+9+LKDnRdOnl9ws2fqxcsvfsUzz3zxpRf/9Hc//ey3fk3ex+/8X7/Cf/vbADzz9NnbX3z+1MWH/tFP/eSP/U8/PisnvdHwzu3bZ86dPX/qDIDK1qoo1geDuwczZWzR6zNxUUBpXWTFZDoDALaZYiryldWNsq7WR6u7u+NG6V6vMDWaWgAopeq6NnUmEKC2xhKRWDSNMY1xzLdYw6QzrZq6YSatVN007uAapRQAzpgY7kiTPM9FbFWWaytrVW2stVmWA7DW1k3TNHVR5GKNyrQTdg8//uCrL71+cLgHYHW4bg0bGM5yS2TduRaWWFRZGcMTt67qhdVCmc4P9w/u3L09nU9uvHWFGE8+8jCAfq9YlOXe/sHN23d21nafvPT
Q1plzB4fjzdW1J596+s7V666S+XT22T/59GOPPDaZTm7funn+7NnZ4TjLskHRqxclgDzPq6axjOFwZJoavZ7uD0hEKS6rqmoaAIuyVI0p54trV96ciSCnt956Y7CyJsSHs0MA03JBQopJc5b3srpu5qaZzSdorJnXZtgDYEt7t56u7Y+bxmyf3q6s0cRaZ7Nqvn8wFlEARJS1tChnQEOUWyuCSOP4DUUMIhkVmMLlTdxt++4bBmRZQHQvdYaNoC1wlDBEYkEkwTYtAZ1TU9cRyPlWQSCwEBcjHLY6iPj86ZKoUAg7vL/O2GiCa9UvWAgIOpj3Y1Ks0PcOFmsxQ/zE59JyNfq0sAlsC3HfTtLZGkq7vTc2jVrPBX+ls9Y47wSPE5fI2QRlUWDS2gNdiCia12ywTTq9R6wILLsd3lrXBIc2rBiyHnMEtcj11ithxEzCToKEA7GtgNuWWc8PuuxRlo1tBARmn0nEaZi1NawUCQdOmRhkxcK6w+WItQgZ0wiElWIosgacIBIRETHWuRcQ28bHYTtFjt00OUSpGFHDc7ZaiM9IQGG0PavdahsRXhyX43JcjstfruJRdZRhQWkC4DbAQDv4v9tCIVVgZ/NzbgnWtld6VslrYrA4usgSGiAvHOE8YpIvWleZZfogaWpQo1uXvy4nEuryPKv3DUxBCSXNgNaZsQbWuuMWrPWJq7TSTveJQjcAlfvIhOgW4+5ogYknda015F3vQFEcRZgRuxjIGUpwjPjqA54IZCsF+1n4M3LKnZZLZHoDoRDbFxP0x+d3/aTaH2nKTK/g2aiEiv+XPHC01jj0wN62aR30ceDeMyvWekKVVci52U57ZBLCwQ5BjQzWZ39CgVNLJW0zgUhs4m/bDmAU8MGZTuLQkw2fJHxMyMAZxydcnZI66XpIXxuHmSJmbhsgAsVHLvKlEtLYYImjWSrJI+9J6Y6EhQofiXcFio8JzXMXMt/zzkbEHlZnsry/TBfaMfNvZSRPk7cK4cvwDqWvkN8Z2uanIxfY4M7oLOdJunczSZrRXpHU63xal8fhy/Q1HY1ukkq694+l3ovnyQAJ6aHateTBf8uOBaXknvxOR60R+bIr7d7dMa649gVqrya/iCX93OtATpcIQD/Qgw7fs6P14xsXaSW40Y7HObVyykkZSh2ku29Tp6OdPiVbCd379f1Luwt3x41iAtz0KXLPJMZKpH1P4kAGuURN3QhRUeRZni2qxfjwwBhDpJYaE1a+ODsCwj7gbAzis87xolxUXK6ujZixu78PYHdvtz8cNcYYY8QKXKJ/8ruy4zQhKMuqMbUT66yUNXYync7nZV3XTWP8CDvajx3pKD53aSvFQM6gFqxZ8A7uYruvT+sbm2itySsWRBHB2EZrTQx3SjizcpWnR76168EJPivOQiNimclxEU7c+DEMS6iFDUxN01hjtNbMilipsAURsdaamUQsK8VMTWNEhImqunJzmeWZGMOKM619JYS6aVg57pQBaE0ikud5T/csU9mYrUHOxlCW7Y0nAG7dvHXuwkNMylirSa2PRjt1aY2dzBaUZ2tbGwB2D24Vo9H3f/e3FxmXo5XesNi7fn1tYzg8dRbX99+6MQeweWZj9dzZvZduUFm+88kn/sPHP76xPrjd8Cc/f/nBi0+qrADw2t2Ds+fPf/75y6Xl3fHkpas3vunr3newNx2OVs3tPQC9otC5EGFjY21lten3eysD1diGwJ/+0xeG+QDA0489+eCDF6yY8XQ6HK66Ux2eePTSnb0DNnLu1GkAn/rkR9/9ld+4Osz/zc/99NufeujJS4+8tbPz2z/3q9/y3f/JaHXtn/zkTwP4vv/qh9/57COvfu76209vfc93/ef/9J//zNvf975yf1qND+8yATh47vnpfL72zNPCRFqvrORVY0Z5Ly961sI5sWZFj2g6mUyIwKzeunKtbqpTJ0+KFWLSWgMgZjQN3NnoIoo1x/UfrGUUgJSxRumciMqyHI2GmrRbD1rrxWIxHA2NMfW0stYaY0ZD45avW3jGmvAIZua6rjKdKcUi6uKlC6++/CqAqq4yndd1Q0IOHbk4kqapm6ZeH6wCyLNsXk/HB+O1jZWyrqqyOXdmvX+Jh8PhaNgHsD85HK6snjp9ZlGWeZavra2f3N4SoulsfvX6jc99+tMAts+dX1tbm86n7rzBQa8/nU77RS/LMit2US7cq5MVuTXmcD7Psmx1ZXQwPii0qutqPp/vHxwAmC/K8XS6tb6hQHuHk3J8+NQz75iNp7s7u8VoCGB7uDavmvF4UjdX+8N+fzDq9Xp70+nh4UGvvzWbzgDYymyf3BaxeZZNDienT26717Kx9UMPPDAabgK4u7N/d3e/3x8sZmNrI8YHERO8ARvsvO/FSZtWe+nAiHbjjBZO3KeEb30MSmuSJwo5oaOYCztOB2i4iBuKSbhTQSpAe4Zj+OSoEpEXpfwj7qtQLd2c/uqjByIooFZgSeuvF/hav+o5bWNUSVoKEh1BL1YgIhTzU3twkmAnv4VSGGABkKY9TjF1tyu0NICh7ntHjo78y4lckQSgxwc5Ok/i/KbeFdTiUaKgLbS99lYvgjCJgQBO+rZpTf3T0/zlJlmeLQ4QwOWjbP+OSy7U0ILRsBSMaT0njstxOS7H5S9X0UuCmFz4gDeTha22JanQuTalLlvvKYQzCYLtq1W7he6TrTJyYZ40dJpKFBqdAz28yidH0RzxIu9CEtoRCaFEe+/GZxEEJpJB7nHRrU8gLme5FetktndztFGgdcaHQhQnEIVPIvzg1XSCDxVkIuskIiG4oHmxG1CVY/JauiEZ+sDPRGiUaLsQ717amkbJPS4OYIhPDrdEYR+RliteTrd4wpN+95F+nUaEFeSBkP87UFp+VCXYG631UVNAPF9I7lFc2yItNhLvPeXC4v3z3QnjYfglrCuPlNr2JscvLiEe62lQAYhZObTllolDqdIG4SbgQ+JAxackoxV0WgfJjmDk00/S+Usv8i3u0C5LIFwCX9BSqf7eAG585QlnGtiNTn0SWNjEu7Nd0EnDO/1PSuzE8iJeogYj5A8ZIJbqcQ2KfsP+bQnOA56dDi94QJVskTi7pWTH/YtEjScddln6K7omtJUuVRP/Qed5cp+X56iZbLvq17VbgK05geJ/4fx06rb5iLbH5xypDrW/fonRisRo+hXHEQuoPzBEbko8Zea1FjjVZYm8aqtYHhYCRUAOnz1Eoumm3fSj/GoT1rv6yO163Y/TZ6SbwtI1EhQSEf/k+Hoh6gfxzQewdBZU8qbHK91bHFg8rw2J2CzLWEnTNEzUK3pFXqyurs5mU9+/VEEkAN433BobB1tEnLGHQYPhkGAH/cFwOJiXCwCz2awsF9ZaY61/hSK9bBFUU5ovykU5r8qKQFrpqmn29/eb2pZlaRofbhx1dKFkP0pIbiJxZ5fFMXZiGSJdUCBx6iUofpIInrD/ACLM/hns/HGcT2jYOYPpzQ9uaIYHGNHJVCvlPJf80+OEkx8QaxorkmkdrW1N0wBgtirPnHh0h1dYa935NkSkM89yFk
VBLkzbZzQh09RZpiV61THquur1B7nivdm0NxzNJ7v1fC6sWWcAXnntxjPvMkOttc6qqsp0prXK8/zMaLj+4Jn9w30Ar716+avf/ezZMyfqg3G+vr5/44bd3x+cPCHjA5rOL5xeB1ArM9+78aEPf6Qs5eu+6mu2tjZyjccffOTurTdv3t49uXoKgC6a2wtlbFE2aJBdvbNXiYLq3dw5nJYGANi63XmyMMyMUmx9OOgP87wPwqKpARiBzop5WZ7uFdba6XR66cGLs/liZ3z4tgcvZL0hgF//9d87deGRtz/40KtfvPlPf/bXf+C/+OtVNd3eXv3URz/57Nd/5d/7ke8B8JEPfeTsib89XNXz2fg7/tY3/ckXX/jCZy5nW9sG1AgAvP3Jt7G187qZTWfGWlY609liNiOB0pmpajiYQTBotFZZ1rs7vb0xOr29vTWZTHpZ7kg3x2RXddXv9+u6ybIsyxuA8iwDkXOOb4wVEWbuFT1nNCVC05gs03VdA+gVPdM0vaLHzFVVVlUlYpmVVkoxO96zqWsmMsaI1mVVDvsjUqSgBVhZGZ0+cwbAret3ia3SRMTWWIFRnBHB1MaKLacGgNHGikxms+0TW5tr27u7+ydPbK8NR2dPnn7ptZcATOfT9ZXVjFW1KG+Nb7/4wovX79zZP5zu3t0pG2MgrjF5lmulxuPxZDbp9XpKKSvWGHNya3s6mQCoqpqUybJcgZrGVHU9AJTORGQymaxvbADIe8XZ1dX9vb1+ng8y9f4PfOCb//q3/eOf+MdNU/NsBmB3MpXKEPFisYCi/mDFNA2B6koODyYr+RoArdT6+kamsv39/V6vV5ZVL8uMmP2D8elTp4fDIYA7O/uLsrTWONuDBNAYESJaAdnZjJehlSRI7MvRN5H3RFfgevQsCT4JppzU8c4hsI5Zs2tTR0DnXbC1LG3DtpTiLmnRvuvRPdI9bndJha3FEoL2mLCAkAOekMB0Oc6REvnZ8VORkHIKKVEbnyBeli8NHiiGBfkTI52WwYoj1Xav6hemIelgt9VhKCIKRHuGTATDAoiP9EvMlq1+Q4TgKBkkK8g6CR30SKE2bs0/1lgbb3LqHxN7JTF0OjEcAsKewgxpVtwsiR8dkGt89MX0aqmVmGjLo14hwIpSfH917Lgcl+NyXP5CF+8d2ZXXiZKfhnSmXwXRtST8PEXjeLT2hiCul71Guo8M8ja53auUqcIXlZvl428BpNn3wr7fIgcRAZxPQrggKE0AAOd8l+qToQKxYpgVMbHl1CXKiFGsY1vb5nuR7BocBQ3aOpM++nud41DwR4t6tQRJG3z+gwrsvkzBCZxWZZ1R3YqN1tsYDBsUPQJsO++u3kA/xbrD5wFhuFEJ0xrBE4V56qruvhqGzwWQtpiCSo8WzwW7M5NYGx2dWvUY8GcXJo+SMJNxBbTUbLt2QFqH66OjFeBoZSQEr8vX5upxXY3oLRkZpnYKKARMkLSpHMOTEt2260YsvoEiEGavybfsabwsLJwlsigd5djxdFI633b9l1PwHGvylJ+k97o16qfSo9Kg3bemgPsRxN2OtPVK59sWWy89PdKgtPyI8JIh5jGN/aMQdw+QkA+wImIKiJZAAUgnXf9SbT9iOJcuaXeQPzMMbAE5sBTuG38kbaS00xE+xzo6eg8C+dJ5B1tDiLSfdhYRBY6t233/WWcG7u2m35WAAJzjfURkE/cND/XT1kVWkYJ0kHuHslVu0nWSViueH281M7QDF6tvTTvtHtQunnDpcv1HzGv3JYw7W3pHu3IIHT5SUmec0Oj4kvslHTJmidjGp/2QqioPpemM8bI8Juf0Z620fs0CZrbWWql7Wd82zf7+eH19Y2N9HUDVlIKa4LxZKPCPEEFjrDskezabWZGyWpSLEt5zhOvaukybBhbBL0OsmMawVu16o6WxciPiOt1JlXvvALs7iNiKhQhYwb8yAggxa9bGGCJkWgtgjXHvPDN7hQ1xjNvJoDDYXrqEQG4bzGuxDcG7V+COzPaqoI0LLRjzoJQmJtsYpZlJGWPi9YBkmTbGGGsyrZmprhuluWkapdlNU10bl5Jy/2Bf5UWWZWZiy7q+8tobu7t7ANZG6nAyK4oRK5pODjWaqmr2Dycntzen83Ln5g0AO3v7Tz3yAOYzqBWZj7FYrJ84pba2yulhaZqMcwDF1rmP/8qHHn38nZ/7zKf/4+/8/hOPPvmh3/vwex9/bDG9c3tn59z6GoAbN/c/c/klsjkhUzr/0xde/cIXnu9nw+u37q6NRq7XZVVp5rt7YwFWBv3pdKz1tKzN5olVBzQ2N9aMyOkT27uH043BaGW0srWx+cXX3jy1ub6xfeL2nQMA+wYf+ehH5FvX3vtXvukX/v2HVteKt1+68PVf9/4XL3/u8h995h0f+GsALn/m888998rXfesH3vrE5Q2UP/Bd3/HB3/rwAGqt6NNkBiDXuanKqqxWh4O7ewewAGuxdmUwOJxMDycTAKJVnmdra6tlXWZaj4q1ze1NgS0XZZEXbrK1UiJZVVVa6aZu3Ixba/OiIEVVZQEwUVVVzGysUYqV1kVeWGsJxCFbjgv6zvM8z3NHSpZV6bBnmOuaiLTWrBigxtQuxj/L9GJRbm5tAKjK+s6tnV6/IFBjIdaKsSAmUhnrxbQEMBjleU8X/RxMOsv29/b6RQ+g67du3tnZBTDs9968erWqqru7e+urq29ceevRRx7ZPHFy5+7uYw8/vNrrA5hMpyqrDg8PBv2+Uspac+rEySzLZ7MpgVZHIwCH05kRq1j1B5lrf6Z1URSz+aJpTL/XB7C+tkbM5Xy+ub7+6muvPvLoI5sb69NynuWZEgGwmM/Lg9nqiQ0rdn19bXVlbXfvrlaFESkXZs1F4JaNqRvrKB3B7bt3TVUNVlf2xuOqNnd2dgAcjMfW2qqqRSyRp/mWN2iHxOK7nKDsqEGknn5hZ7iv/F6ygouELSXIo0SghxDl0KzwcAmmDUdOtaJpaWsMO8+9UqgFN2glTALXW4Who83ds8e6lkfTuZOLYTxc04LtPSAOH3sUWxwwrRdYSax1IqoDzqK0b8s9a6ljZrLWK3WtU0hnBHzm6SQXkWsOpWb0FN53BzAgjWSu4k7uIuglqA2ONPSqnFdS4u0RYsXh9S21YhPeue02KQpHJnihEidagg1SAqhGwIRQFMY/KDKJTtR2y7UfsIzAUv6ZkehxOS7H5bj8hSk67K7B2yLSd36vTvUd6Wptwb0s3OA2aQ7Wm4RhiUwa0KoJ9y2BMJHAkXgRlOiagaJKtWj3T2igtQkBt+QFcx8mwvrckU70E7yHoxCxwFjbEBExk5C1zrrsE9u3dbaSplXsl7lI90sQPgmziODekzaqY/GUEF0d7gAA4s6ZFa3faEwYlzQgkZOR5PDqWUrUBK0YMWBBHNXZ6tH+Zj9PnfHtFGIVhXn0rnNepQ6fERFxkqLSWMU6sn/WOsdMMCsiDucDdIubC+d+w86cCKZ2edrgzIgQMOgWizW24w+JRNqH9OQevAa4BRfiLc7nVyzZCBn+TJmk23URKnSYx
I9knMlk1hE4GgoT1e151Hvvp9y31wbo7Ib0qBuSIBARabmk8EaGMTqCSgiA7x6ipi12mV1t+5cuyy8xju37QYRolm9fO27bGVrR/kYkIcchOjd2jhZK23VvPfdc44ei84omHgdfoiOdar/c2gntTRaF38PiYko0irSFkl4U5rGzbccfR/a93YOOWmBC0jVnJfuZX9De15WAkCXJFeuZHece7BzlrEhIrtX2i7yC5w7g8h4T6Trxfhd+gCgZp9hP8WsaUXh8qXLk6vWNSuw00k2X0TY7Pt2dHCQhSYb7MCVVo+oXFFn3YVBcBSR5ljdkympxcDivyoappRqTZUZwCauIxGcy9m+8S7Whs6yuawaqqlws5oP+AECv13NC1qVipMSqYoydzxcAyv2F0ooUkQtIZh70B7P53E0KJ7nEghIZ8gmQd0OPXh8iTuMM1gYmktbelgx+q8shevyGD4K1D0y8KBc6U1rnxpjG2pAYJjphupc+2ZpEnPu8Uuwao5TyVq7Wr6ezqQJCTMqF7rpYRqZc5662uq7zPGOlmqYmQq5z5zhJRO7wjaIojGnKqnLnb4jYxjS5zmtb6xAObEwkqln7AAAgAElEQVTT7/WqqqrFbhRZX6mduhqsrv6fv/YbuugBWNva3jucbm6isQLiTGe9Xq6Ic50JWNkKwAPnTp88uTHo52Jx543XdW+diqI+GM9EjOW1zZMAXn3lyj/8X//V3/3h//L3/uj3nn9l5+f+5U989bNve8czX1Eau76xsTetALzjne/87Ms3/+jarf7pGem+2INbO/sXTg2n08naoAeACQpggjGWANOY4WC1NpUFesVIMwNY1NXLr766MlobaZWTXl/d2Ducn1hbrRelVtmV6zcBPHxy+K73/pVzK+vf+Q9+pOqrT/zup9dW13jt1IOPPXLlyrXbX/gcgL/xPd/78//bv7rw0EMXzqy9ceXK0xce/Za/+S2/9ZsflnPndqdTADu37wjz1ta2hawaIVZVXfeHQ9K6NxqdGY0AjGezQ3MoIlVVAZlbtPPFQjEba9xZTCI+nMUlH3CTuJgvtNYs7AhKK6aqGq31nZ27J7dPiFiBLBYLY4xbKypTxtjJdJqVpdKqyHMBpvOZVoqt8gZsYwTIsswRavPFIs/zIJStMRbAyVPbB/sH0sClPSCwaQRimJm0csfm6EwNRj1Ssru/43a7g4O9w/ms3+8tGgNAGdsbjda3T2S9wcntE5PZvMiLXn94eHhY183ZM2cB7IwPVvq9LM/nZTkajizASg+Gg5393aqqZvMFvN8xzxdzo1SmsyzLp/P5wXg8Lxfnzp8bDgcArt28ubKy8k3f8IHbt29/4fLlP/rYx6aTaj6dXzhz4fbuDgAzLY1wnudlVSml66bOsqK/PgBR01jnIzydT/b294aj3uF4urGV53lBKpvN5wSezea7uwcAbt3ZyXShlCa24uC6A3RBSrRvrqRBP0eVrow+wswVt6OwmbcwMJSQVJoipHEw1oPeP2cJHhT3yicfoR0hSmy6jaLc+zIGHrTNRJQwd63kiv8FvBIUKUJ7RIDTs5gISllxhieIszO2GUGco4SXYNZLPxc15kR5eOSR6MFFRTgdQDGI6rpagoLBgNcCm8gSdtjWJViZEJYtJHV6rg8itxTMUiECHk48qEQEJVi9FfRRxoVxFPg3JfhOSji1T5DmnvIJq1y9AgtDaNnUyOB6H1v/zAC2rQhb/0x3HqlLKuoQljXhruNyXI7LcfnLV5x3ZOT8vLUvZERJgwndLhy3fwqHFiD8Hc1GQTEMhp2E07z/XtnCf2l3/SUJ06kmdTG5pzIkCezCv0HcBHARDVbhLnfgdawAgdP0ktAFjRlj4bKUBamfCsSg1BOWHflTYexutUFfD5+IAC7OjkAhgzNRsGRGjw+JMcVelWWbnJvhusng6B9IyUM7KnzLRHhY0TIa0lZFIT2da2+cpzgCwZwXZ8tVGzNqd+YnNCmguO7wRIEqSXsDihDA3gdyeFUT7QQLPOPmhzcMV2IiFenSBNExS9reSNtP/5nT2Nsjld3nzIzojhtmJ1GsY9hJgH0SNd9ghW77GjrXWq3deLYpH/2DEJRtN1Gp8SAZcQnunRLjeDskd7o4KW0O4nksS8id0IY9E9qf0mb3O6IhoHsXQ3Lz0hdxEfn5T6vxPxyXFTqJZKyOqLK96p5GoYOWO8A5LIV2jRzZry/17RGXt024h5kNO3Hnk/gYBH0nfXQCu6l7h9/6uvVIu3gARHa7O2hHdOQon43w2Hu/ihuIhF0itL5j2/dtcAs0JJDokN0pN+TfEmn5ynTGg1NFW3ubLtbdl/yeTtmfE8FTK1+8FuOPPe0+3m8EQPcB7baWxMwFvYPimBHIilWaidAYY8XmeV5VpXSXWhtql2hLlHTVb1PE1khVLgb9PMt5PNnb3t4GMBqNMp3PZW6scdnXBEJgAonYugYAK5SxFrFKsVIaFvP5YjqZMZM7IBjuNRQAYMUSFjCFnSJZVUEjbrc8Inf+WDoUWJrGduegREq5PMshDbEws/vduJPQXLO8NPX1GfFLn5mscXQqNY1fPs5XUlpV27eWFROxMUYgZIkZpBiANY0xhrgApDFNkefEZIxhdg6S7vANVFVV13Wv12PmsiyZqaqrIs8hUjYlgDwrBGKsGa6uatj59LDoD8Zlc+Xa9aw/AGA4z7JsfW21sY1WitmKhVJsrC2rhTsk/aFzDw4KfTCZjTZHTSMGenJwYIX6vSGvnXjlzRsA/uf/7se3zm195OMfLW15/kT/U5/94nuffer8w0/LZ5773OXnUVcAvund78szneVFI1ar3rxs7h5MnnpiY6XfPzg8BDDo9fI8a4whgImtFSHWWd4Y1Ia2vdetub1zkGX9/mhwcDDemS5GQzW+e/fNazf+xjd/85kbdwA8/MzbLz3xjunU/LsPf/If/PAP/cLv/PYnf/+PvuHb9k+ubKz19+6+/BaA7YsPPvXY4//vf/jtH/hvf5juju/euf4D3/vBz/7x5+dNszIYANiZz7e3tsbz+c7O7myxIFYiKKvJzt7e1ubWQxcfBDCqyjtKA0TMAunng16/z1pTAR8sGZBeUzcOAWqttVbz+QIiItalfcxVYYzN83w2n7l9rNfrz6ZTY4wiAqBVz8V1FEVRVtV8sTDGsFKsFLtsAoA2DRFppZpGtNZNU5LLjiTQWbZYTAEMeoPTZ0/eub5bVTUTK6VMY621IJjaNKYGMD2cFn1lRZTKqmpBYnf29p984snrN68/eOEBAOPDg5Xh4K23rhZF0TR2a2Pz1p07Jy9cunjxgbKqSrNw3XTI3VhbVuXd3d3DyeTRhx+tquru7k5V1/7tY3esFDWmqevaNM2du3cmk+n25ub+wRjA+vrae559NytVlouDvT2d59evX62b5s7+7p07twGoIhsUSiwP+sPZfAHKB4O+bZqV1WGWcUgiTFXd3Lh1i5Qy1l44e1GJiKLBoCdi9/bGALTS0+nscHxIyJh1uodHuSjeohLy76IrBiP2i5togJ73K26LjdC4AxeT
XSvs3W7JLKd0lKN+xMfGJgToGJWb9lnS6Ya3GlGMoV4SwZ3GdfGTU+ziDbL0IOOBaYivivI5kckSI4tAEBuFD4L8aiGzs9ndCxASAjCe1ijegzRFpsGe5+F91EYiuLDSQdoRB6TpPQAIOFC6gsDWtnmMvAQjD4iiV2t4UMiBaQUxD9TSYAtELCy3w00AMdk0xRWJD6iKVrbohdrB0rEhyZwLrBEApH02SS/tEdz0j7nI43Jcjstf2qLDHrwUUgykTAei5IrfIShyqWz2uyN1RKBY+DOHLZY4zM7D/NWex3Puc4LkloR89BqKp0tkibwKjW9FuOubCESFcwBkSRH1biRAojATE0SIXTZDAshYA7hjhwntgTmUqIBegBzhPrZc2gGMrngx9oPEttSOZyuXEJUbDOeAErocBJ0jvKRlziiRVQLvxhoJxRA+F0ggkD/ZLirfcZqChk2tJ0mclk6Js+bdGxFtrUFuKmbnueNOsGXypTGGicLJ2nDZL41YEsfRSmxKF2X5tos/gaYr3yNjES+7Z35Ci4konCLTGkET4sPnHQtukc48zD5YIqbt7mjUS764nTj4aNL2KDA8JrLhR8QWxba0zfYGgfsk9U4SQfrU5X6I/H8tB5kAuxC1s4R1l7oS6g+o1lfa6X64NKHBwrPiNEoyMnEIU6jcKRR1BJeCsPsqBxam5Tdcac8GCluVoP0raW8KA5M+tMhwuTWSdJPuifDqXPvlUaMsPw8uuZID6IgAWGDv4VcFbV+WK3GPB9qJSCj6eOURzaOjPgz+1F4tcdkmwiP9TxvO4xbPwSMxVIS3MR66jdT9O451TNcEn8siLMvoESnA0g5HnQ0zCKeI4e8ZsWWe+n4jGCbaLzxnbHN5JLuSR0LDpdOyI4Y4THbbIe+kwgyRxhjTWPd2WRHF3JUDiXAUCOAS77abJLEARJRpEqMFaJpmMpkUvcK1tchzVhRElrVWQpJkaYyLWlVM3Bjn12WttfPFwjRQiqy1wZVcrBFrDSt2zjVx9GKmDmJLwdi35E+fOJ9LuykA8Ge7cSvW0Z4s31irmIncPumSO7ezHythP2Fuo0bMXk3MjggiImuM2HD4vLuL/PqmyNIS20BhOs/HxhittbUGfl6krms36cZYrTMAxhj3u1LKrVEiMk2jta7q0m1TWaarqtZKKaamrspqMRoMxtPF9oktLvoA9nas1lmR9Uw1NUbcWfSZ0ouqUuXs1OkzAE6vb2hp8mLl9u3rZZNfOn2ink1u1zSCpsH6hz/6WwDeunL7B//rHzSz2WT+9AsvvfHcCy+8+51PzSeHmxvDT7746bxauHGBmEzRfD7LZdDLegcHhxq21++7aer3ChCx9ZlAiywrij4rGvRXjMD5p2tdVGUpgnnZTEtzZ/fuuZP0S7/6m2ia7/7P3uz1CwDPXf6T16++8Y4n3/XL//43H/s3/+zvfMsHPvaR3/uxf/jj//zHf3S0ub6zPwewuHb16z/wzW++9jMvvX7z3Prqjeu7Tz5w9uJDD75+7ZZzy33zxo3pbHph60RTVePxWCutlTYixsjB/t5saxtAzbK+ts5KV3VtLNa31/r93mSxgBWpmyzLADCzYsU8dzZcdzx90zTG2ggVWJE7GCrPi6qqs0znWbZQSivvCqe1YtWr63o0WtHlYjqbhg3KhXd4mrtpaptnVmym9QJVU9dKKUda5nkGoKqrldXVemF27+waa4gVsTtKhIxp8lwDMI0Ry6YBAZkuikLv7R5U88Xrr7+xvb0JYDafTVdGe7t7F8+fhxUQ7+7tW6Df61V1XZoGQE8Xg8Fge319ZWXFWLu+tn4wPijLBYCV0cpkfAhgsViA2RoLJXt7+1XT9FdXd27fKpSu67rIcwDPvO2Z2pjf/+Qfrg2Gk9nh2vb2u5599pNf+Pzm6sbFU6cA3B6Pr1+5WlU1M02n0/X17dXVtcPD/VNnT8I2WVEAKPr9vYODWZVdvHhxa2t7tljs7tx58smnALO/t++S6DiGtD8cNJXAhpzqbtuIqXyMIaUicg1RtoQWKHvolsKOmKfiyNICbCDWIzFBfJRkDhUuy5goFCPuis/1G44n4YiCyDiieKzvWus2JyImjp9JB2VSqyEkmXOXoWHEEf5bghgwAeSGU8S7x7awQiDiUicGfwor1vvI+wZycJ8UiZmEqRXJ/mkcgIjjWl0wgLDuHtQWGMn4Z0xlkrZ86RY/Hg4quayPQkESCYWAsKTWSLA6PGk8qx3glTuzGhQspeIn2qsjgbz0mDrkKCEmVtTY1jAJEbiAKsCKFXE5KZfALwGu9SIS8pwGb4vQQbeeW0XHWhF/DuqXWsnH5bgcl+PyF7NoeCKh9Z2SRO62kjFIdETNZYmViH4ZnkmIxR/VAZcN4z5KeirC3TF/UUXzVGmXb3Ak5ZeIsBBro27nFBG/xy85enWqZFDHqTHQnQbOU57I6Rg+rBtwOatJolkPCEAnPUy5lXze/c+7aUlgb2JLoiOiG0WBiwluJVFUP7scQlA+28cRQay15P1EQlQCUrYq+rAEfTrIXvJzEJ8qfh5CC6JCDulAjO7IuuaE/OIecIHIp8g01pAfGxW0WhERl9/T1WvhTc3OdcXlZ/GT09oD3QxzgAgUAwPd18YfOeQa1ebNIQTnmfCJX3XtEjninFyxxoKZWcRjYMeSdkJtlhwdI03YImAPlcUnvyGkUyPtf9113603Tkf7+oSBv/fNCKyzRK+xmAAnqt6ULK8EvrdPCtCr64rY/kKSNH2JzekG2KYdi2sr+dBfe2SGucgz+lUdX73OreiMjMe9dEQLljmn5a0t2TLoKAh8Pw3iPiXQ+GkW2HZsl8c1QNUIPd2m0Z2ae7rU/hH8gcPK8q94QnF33QrvqXLps2Vtql210ce3c6MgEk0I1CIB8DkNRWx4xYTihFNoaIeuFlbKY/5l24wQU+qF0dnGcdSkLZXu0C+ZE9K60rXWGkfi5uvuDp31u/g9VaULsf2RtNHrbixaKQKDRGsNgTFGsW77lcxa2KOFiKyXY0JEtjHMnOVFv78Ga/fLEoL9/X0Ao7XhAxfP7+/tctANrVhYERFjjWPuRGCahpVSzFVVEchaJ/TAROx8AC1EhESUUqZjL4qSB9SmZo5zFDeGdkTbtxoI2jElH4CZ/MZrLWsFiFirWLkDeRBeefJzZeOqAgAm5Q44NpbZywlmNk1jbepwFJoVRK+1wkzGEjNnWeY81Jgoy7KqrgTSK4qmaaqq0joTkSLPo19qgybLMmY1mUy01tY0g36/MQ0IvV4PjkFmYsW2MSvDnosiNFYMkBc5AGPp9StvbW9uDwe9w9nMVLJ3cKhVXizyauf6uQvvBXB+e7MQqRaVzPa3ty/N9veN0VujITHdfOOVy5/+YwDv/pp3SjV78h1PvP7KWxs7BzsvPX84mRKaw+nhzmH14uXXAXzgxZfHB3t1VS/G4/H+YTPef0sWv/Kb//HMmRNve/ghALUxN27dJlY6y4y1lSUznw2HqyKqX+Ru9KworXs3xjeGK2u39g6ov/IHn/n85HBRaP7Rn/gXzzz
7DICGbEa9F59/YXtd/fS//rlv/KvvH5448Tsf/thP/tS//vt/9zuKngLw3IvXv+I9a5eefscf/PpvfPd3/qej3v6rN2589fve88Yv/VqxsQ5gPputrKz0RsPmDkajlcaY6XwxGo2aRZkXPSgNwJi5bSBWBv2hzrI8y8qy1DozpirL0tGRpjGUsc4ygPr9flnVip2RkaJNUimCO7WJHMqTuq4VMwFV3bg3pVf0DycTY+1otEJMdV3Xda0K1ko7b1mttDtnXER0lhHRYlGCRCkVY26UYia9sbl2OB5Xs4UySqwFESsyjTVGALBS+7vjrMh6g7wpSwjt3d1//frViw8+MBoOAQz6/Uyrs2fK6Ww6qytjRQTG2qquI3Ff1fWtu3c15MnHniBCUfRGQ8OsVlZW15hvXL/u3gJm54dbiQgrJSL9Xm82nQ57/SceewLAzs7d6WK2urKyWCwuXXz4l37xl2/vTzTrl19+2b3jtqyJM2IBS9ErZovpwZXdxWx2sD9ezKaqnwOgBmvbG0bqnf39sqpu7t5GY6ovfn5lZXhwsG8aAdA0TZYVk/F8crgYDla9AdbhuBb9ckBOySaeZpbq4NqgJdxfPnhEGR6RCB5hVhJQa8BXRP5sRn+v92CXVrHqaEf+Io9ZPFy5B1CQdzUXRttpd38rR+SICBkJ+DL2PIiwFkmEdnhECuckLn54IqaWcAkCz+i93dkrdhKUASuWbfD6i6pMKwUlNIw62T6cUinpNEE8r9hFOynu91905HH0qoyD4J0vXU4YgFiUYokUduwsOagO8R8GNS4skohuW7fGkJg/Bmu7e71iaJMzTd0zLCyJiBjjzGlEJPdMnASCFvBn8QASjNzW06J+wKyASdJTgI7LcTkux+UvW9ECEWMT1cg7jgVZ4D4MEtMdus2EwBkZ63z7wQ4EAOKid3zYF7FSJCGTozgt2Ls2wp2oJhYQNKb1j3fMGIKvjTg/vlYlCUI9Sdh/T3Eqa6zfpawPct+ZqcPDRJhYKefyIIA1SewzmCkhsLyWkzzXu3OKaxgxHIYFmdZFixInLE8skDCxWAmnilLgBDgcxCaBBomZWrwFEtG9CBCQsT6bmANMWikrYoyBiNa5Vy6tFR++Rg56Oa0hmBj9aQkSZ913oqPFu9ELREDLTRKTdyJA6r0IQJjZmtS91ct2EQOX7dEPis+Bk+KN6PpKgCct3Ew4cIhg/PZN9gPJHEIYiP1QwScpA0dQE/LVNW7gnQ+pZ+NEYI3hZLVRZErcunEhThAm+GSFEGmMocgPdImGMGvtSMbF4IIBwQ41KubgLyZELMZlivFzYa11XIMLFXSAJFQVmKrwJAprLzbARvZWApfuWCH/Xvjl4WGOdbxwgGKSmIIhznOKmYg5OLEGOBjZo8hRtSjcJ3U7IqxEOj/jGIbHtld48Oh2KEVGvDpARCHOCKwY/ixIEBGY/UVwpNVy47xVX9h3tTNTBDCDrFgmnxWBiVySLyYijrx4fF+EyB2YFduc0FMAxRD4Nu9TgvXvpxeJbRUqissTITYNAfsn3YsrLYXvbaI8SWannY+j6N82/UR7XaeKcCtR7J1rA8NzOuQ8ygOKBgBr0Opi3pwAwPNCsRWJLDDWhGFy74gTPQCRtalO6g0fTu4wBBwVQmJPXRmnrka1LmY1ZLfhhPy2SCjxMM+UNAN+1P3+bAFxfWHvvcM+lVb7XhBCugMKbsoStwM/kpLluixLsSh6vbKe7e3vkeKyqvIMbriIWIIGxSRWRKwQWKywf31ErCG2Vsx8UuZ5Zpo6z7LGNHVtAWxunCCiXGuxwkwWhpmJVF3XipXPFgaxRjQpK5bIpUsT61Mjwp2s7QortkZIARwMNQCzceKSyefwDdYPCtQjhTQX/qX0AYgCsaKVcryooxHrygBGZVQ3Jstza621lpnFGmu8uxQTGRGlFDFELDjkUAYJGr+ECCL+XBorImDrVhRLnhUArBFjbZZlApeVkkRgrWVDpvYh6r2iMI0pstxYI0ZMbYbFYFEtrJWs13evaLko80yTyHw2U4qtGDCDYao6y/PaGDe+tm6Gea5H2frKRjUvb+/c/toTX7m5vXnzpZcB1Hlvvrdz5dr1SxcuGCFLsMC0nF06e2KHpkUzA3D20jOWiGaz8U1Z2y5v3j0gLi5tbX/iE1/4qZ/9l4+97WsAPHT+gdu71+Ty6y+9/NkT5x/7kb//Q3dv7fzxpz71qT/89POff3UjFwCPrA2fftszv/XRT/DsrjRrgOwf7G2O9NZTT7h8o5PJoTWNacx8Op1XtdKZsc1a0+xMppuz+craNoByvtjf33nt5s6gpFs7hxlmN966vnXm3AMPP/Qt7//6eVkDePSJp//g4x/TGo898cRLb7315pU773n6GSnH//sv//bDly58w9vfBgBl88bVO1/73qd//8Mfeu6tty6ubu3vjZ99x1O/+Iv/bvfOdQADPZiNJwfDESuds5odHLDYpqqsaeaLmdIEQNl8Xk9q2M999jPnzpyzzDBGAawVNez4dCtixRZFMZvPlXZZPc1w1F8sFv3+wNgGAIzq9/sk6BcFSJgJQnVtmLG2vgpHIzJlik1Tl2I0c284GB+OG4gilHUF510slpn6uliUpQJZawb9Xt3UOsudfLFWwCIajz31xOXPfrGqpN/rl/MFaSJmKwxAkRYjigjWaq0aa7J+DotzJ89dfv6LAAb93mQyGY0Gs/lsfTSqjbGAMraqml7RN9a5mWPYYxDfvnNrZWW1qqtFuaibulcUi8ViOpkCyPKsqioxxrJeGHN2ZSUDkdJ2ODxx8tRrN64B6Be97eHqwd7B3nhqFZnGXLvyxtuefKbI88PDKYCdqzcb2whDmqY5tDllWye2/z/23jzWluO8E/t9VdXLOeeeu7+d63vkI0WKkqyxJWuzLe9WFm+xDSROvEySSRAHWYFBgABB8l/GgDEOBjNOnMBOBpMESWaQzIxl2bJs2QNblkSTlCVRJEWKj3zr3e+5Z+3uqvryRy1dfe6lbOePwAJu8fG9e8/prq6tv+X3bXuNttY0QE8VABqrTWOEFGXZPzw8ybN8uLpiap6NGyV6jAZAr1AXti7s3T8mWEgnjpNzHWcbpGKViZg7nkFs4bwIW3inbR1rtKcRqcgbP/Se4O47EeRy9lQ85jtu+VYr4TAoVtz2OkcqYURJt02A4z4XnV4S8ii8siCECEZwj4k6whoHHQcQ5Qx3bZTj46j90gkIkiCnM7CzuNsgDkaRMnZDCEqFk+oYTrcS8K4NMStvRFjZY32QRMbxbOdkQESCrGUZBcggl0ahyBomQbBBxgMEWROzgjH5iC7Pi4NM6/J7AswmuB+yNS5vI1HA+pw2QWyZDFmnd5BLLwkvGIN1yFTqOrQ+2ZPXU9nASg9Lk3ZyoMsCDfKQMFt2kiQRKSkgRbB4EYIqSX4CHUHONY/dWl/VIJMqiL4slVrKuX3eztt5O2/fQs35WdAyDaNlpw4PjTmgh5ljNGJgdcFfiayAYGr9RrwNp4XsmFgkeEzQwpaEgDYHcqIzAvGRscN3bhQg0R
aW6WIa8TKkv8fZh2ujnSoaFTtDCeps8mH7cbghoChJBraAqaRCg0NswCncFgAZD5os+Z7A+QS2U3FiAyWiDwKm1hlgK2r50bVfc7ACdwW1DoiVdBA/jqMAEDxG4+P8Nw74cl9YbvMhJjOAbSMfnY4IhHOQSFbO6hx+EWFvzvSY9ecWwYobBp1au6M82a5RsIZG26VHJjunNQww7DR1z0KyxEvD8UIQtwPgritgcrLCc92FSWaicKIDQBPs2metQYht6ozDdeLflXDAwkASSKidKANAzIzGiSOAIOH8hdPHxjWlTmdnUJ30sij+hrpY8dQF8NRpBEgTCXRld/8Wdt/zri6QBq2fQj3bZoy2zCzYC6lEaTC4X8mUxDB3n4IOfWlfOObTh+Wbt+DruPxyvkPzB23JqhDcEdMF+8v0duYj3/HOsPbxsJOnbL4jlxw3Yn2pXWeJKHLqR5o+lONb2e5A9MCLaowr9hXNO9Ya9tkJfF6LZHrc+Y06T4tEIT0m3QOVjij52R+N8Coni+noZnt84ndEVVXled40uqoWUnksV4jOJsRJWrbWWiUVkpclBssBTELN53NrdFEWcUgMw6yFEAw21jCzIHKOhwbIitwtl7VWNzoCh102GNhRGJcD7yIDQ6BXJqGaZxAFz6/BYIf1OwXVLaIUwjrwWhARhcyMSwvaVvlyZ5wtSylIkKt6IaRqKymxX3zrbDrMUgprTZZlbiKWrZKCiKxhy1aSBHNR5FJKa633AbTcaJ8KsKqrsvB1lnv9njZNUxkARVlY5rpppJCNNkopKGrqRkqZZapZ+BR+/Tyfnoze++hTjz3++MGDW8V8ur65vdaYP7j7BwAe2t7ema0cHB72y6UMX20AACAASURBVDwrsnll+1lvkKsHB3dY6+H6KoD5fDFcWz2pm6bRg9Vhv27Iinv3dtavPPJzP/eL//x3/wWAz/3xH37P939iPLrb37jwgfc89eLzXy2r2T/8P3/ztdfGGz386L/+rwHAevGJjz775hvf/X988g/XV1kUBc/qtQuPLhbV7sEhAN3Uda2lIKlUKWRZlnXTWMNKqsl0PpnOAJj5/Hg07pU9PV+QUAfjSW3t49evi9k8L8udo2MAF8vip37yZ/7+r//aybT5+Ac/+v4PfPgPPvPJX/oP/1O2f+93/59PffuHPwAgIzkZTdR73/ee5959e+fgxoXLi1qvD4vty1ce7B4A2Li2dTw6euyJG2Zj/ej4eLi2KknUWquyWFkZiiwHoAwuXrgohCjLcjyeGGOysiBQJqWW0sHTjJDt250bCwPr9loIcsi7g3yM1ipT8/ksUzmIlBIIMIGL3O/1erppwLLslVpr6SKdowhGZI2tqyovCs//bXTRYg/cM5MQRFzX8yefvvHqy69Z1nmRWbZAtBMzQMZY0cAF7DKLw6Oj0XjsUOMLW1tCyMGgd/niJctEJISQWmu2HGWJ+WK+ub2WZ8Xm5laeZYujhSSRZTmItLFKSQBFUTBRv98D0cpwxeU8BWjYH1RV5erqCNDbd+7cevPWzWfefbJ/WOYDkBJCMVFVLwAcj0b99YEiuagWaFgQrW2u9/sri3mtNY+OxgCEpF6vFEIqpVZWBk2tp9Mpw2aqyjKldQMArI5PRtraXq/fISWcMsSYHsNTIB8rhKV2Fg9NrO1dAX7p+oSgU+BZXrTsCgZtJ60ozqe4zFm+mdzVioJNM7ClaKXtSBqMIIFylJHS4QZRurUIU5yB60zEbJQBKGOQJeGcgt1F3sfBL5Z3CglBxelguJ1cGLyXynwPp/QmDgb+yMfb1RfSVQNjaxk+8QcH4dWzAgrZJaMS1BV54W2SiM6kfr99lAZbBgTF8KwgTpPvkNtFCYcs1XMc2soManNPpQyTvfZDQsCN2Rk+w5rHV5sD5B3PXyJeRrkoHgCnybyD5H/eztt5O29//dt5monzdt7O23k7b+ftvJ2383beztt5O2/n7bydt/N23s7b/09NOftQ4p1BMYI2NQ9S6+sRzYeAKzGdeJQ5Zw+LECzG3JYt8LYzG50gEvNO60ISA1673SbPRGsm/AuNQS7Tc6yqHFyyoruS88AgdqkeQ11RhukY+ULEx2nXKXL1uMN1bRmT5DJn5IoP5WgGTArGeTc9Jh9aHurCed8Sb4Rs0+AhGNkYaGPJ/f4xLLssJi6Qk7xpzw8qOq2iM8ykB8Qt9/veuh3FsA4KsSe2dWRzrkapiY7Zr0tibA5unejYhL3ZMHo9JY6C8fyINq429JWcCz+trlE78efjaKh0Sx0vSvvguD4cRuM3Kt7oAza9wxa3z0r77thj0xyNnSkxYqQmOxcndG6lpNNoV6cwydbfa6nzU5b51DJ9tgU1OEPGo4xO50uXMXyZwnieYvJLal0Fgok+jo263kzt2rcfpleHxQP8CxQ+aX0PWmu4f6XCKNtVWX5nO6cz2O+d8Z1botSZNEiQ9FkmuiuSmMrDa82tq0R7f/DecKeKOz34KZ3prHG6cXraT3/Z/bg9xN2uE69OxPU+tRdLbamHs69pqTfHH8OmE7x7QfJ06j68zZrFSYcIPhbtQ8MLlDgLIJ7E4DISaDGRS1LJIjhfd842xT7SB8c9A5JjFIbnX8D0vi7d6/APkSxEmKjv3GcZa4O1w7IxmNkYLZWEgeWGBMq8N5vNUn98V06aAUnSMTtisuEgULJ8utHMkEoKQYathSslXDe6anQDQEkJgrXQ2lg2kkRZFgDqpmnq2hhLLrmdcHQ6jHOpeS55BnuhhCxQGvKeLDk84eH2jBKEIoRcAS49lsvkyEDXFTsG3pOrxiOEEEJYlz4GsMYk2V0c80X08hECQgoppffNBISULqmIkkprzUAvK5jZWpPlGQBBFLNnEpEUYraYSymFEKbyR0BJVdU1W2uJAAsSYJrN5qsrfWvtfD4HsLY6zPLs6PiwsabfH7z1xus3rz/cF7w/PR7v7gBQDz2U9/J6NjmejC72L65nPdaLLMsKYXprg4euXXGzHp2c7O4d9IcDAqksJyvk9tWvPv/SkPMf+P4fAfDI4zeefvJ9r7/y/P3bt//5Zz9749HHL65twVQrOf6VH/2h/+Tn/00Ar+98Y6NQ/8G/9wuL8ewzL3zJzKYX11duPnXz5GhvMpkDmM6mxnJjLMhaJlT64GRcVZVU+Wgy652MAeSrK8zQxrLmyWy+c3gs87Icrv7JJz9587lnnAPUn376U//Vr/zqD3/vd//eH3/u9z77++jln3vppWeeufmL/9G/81//u//+g699HcATT9986/79x15/6/oTNz/7mc/evbA9XVRFb/bQ49df/sorAMrhlrJ6Nj4RzKbRg14vU0qprCzLqmkO9nYAiKx4+NpD165elUIN+sPyeLSoKoBAwtVAd+9R4PtsrCWGtSyl0tr75IZXlbUxWZ4Zw8ZYpVRR5FXdCKFcJyAIKbVuGCzr2rJttBYSgkgICYAyNkbXWqssZ5DLnwvAfetcomo2sMayret6uLL68GPXHtzZbbTWje73+sa4PJWGrWyqhiGyTAIsSB7uH42vTK5eugrg6
OT48sVLBGirGZTnudFGCGmZCcLVhKnqKs/ylcFKoxuAi7xohCSQIFkUhZu1sSbPlRACREVR9Hu9+WJRyAyMk9HJ+to6gKaqv/DFLxSqUFKyEMWgN5vPdvd354uFUgqAygvDMMYSSZWpuubJdAGLeqEhlIEGAMiDvcP1rdW1tdXxeFo3dZ6r8XgCRl7kTdMAyFU5Ojk5ORqvrg8LVQTRoEu6Y/hK9Dl14U6WE+8LaqlXlPySX2O1Py/EvDNnTui5p0Ct1LZ8aRD101o30UufT3HvhHa2nv5d8TBWzPNULBnrNxmzl9eCw24UC9kx5JDQhlJZJfVVDDd7sYt9jVIXmxWv55YiIzpqtmSfbZR+3OTJc/lW/ms3EQC7dySQbdv2S36gQdDw8WGnEykGLuxj3dEGljlu1vpLBs4U8rcQ+cpdsEHJC7sZ14UoVOaETR0zLdrjFaYfQpVcxodwu8+NZJEexm5tIz8YRCEn7s5ZRfrO23k7b+ftW6XJqxvXo14XMaXANP1FHRAh/EwkQ+oQz4KdXOVKvqSAAcFBYiASTNYjByEONtDpeEPI1hG97c9yuY+E/Zv+cQBUJ66SkqIf5NIPR90ygmqJzskQRBDk0n1Q9MBvm5Myw9WdIIgkZiQwwbjOp5ENEIU43AQ+9AgZMwnBgZNGiDGGAHU2iNpOCXAJLbvXMBCQgVMsjGOwS+CDfhe8qkzx7zC1driO1fq/EgkBYTVcCrl2g9yfbtAkkmCWsHUh8MFHTCAVNcJ6tlEhLSqy1I9/tJcIZHLi0wU4BVW0im7yC3cuWX5aeBRweoXT0bdnDm1lYofwxtrmnVPlMnRSWJEYEJK+cu2kYu8BtY4IOMIxiqkJOqInQBAh4WpCDji+uKGUcjKEZL9PEY305Qgf8JmfJwvjDq+fTdjZ8Coto74tsgbh6g74yFqOF/jppqc+HJ42P0W7DuG0J0sR+vP/U1BXwpkInSxNxl8T/gt/2vfrL2rhVUk6bG9s59quRfLss0jD0q9xTmfuRZxAdxBn9OXDYSlyBZ/FySVdIodABhXKJAHVRETC55dcei7iuersNyfHMqhhlJjLIjLeRjfGXiP1SlYwgdA4knPRGmACJ2uvb8cCtPpPIMvJz9TueUs343sEwAWIdgxZJARJ6SAfJsnG6Lpp6rrOVBY5lSN6zNblpPNlo6MyE4gsiEiScFWvXf5TQYJosNLf2tza29ufzyvHGRutrbVSyrJfugcZa4wxHMq1UYBSTx2C8IGAo10ulyVCZCi1abfigkTaEiB7gAQJIYQkch8RSymtNSQ897VshJQhpC9yqnA0/ZwZ5BNKWGNICCFEo7WUytctZR+Z51ir08hVphDq1EkliVyGXCGkZLZKZUKKpq5BlGWSiKq6llIoqeqmLopCa22s6fVKY0zTNJnKXA0QrbXKM2ONr5piuFcWSsh5NS97ZZ7nZZFXizlbu3XtwsWtSzs7d/aPdvtliQxvvfpqRmjyFRDffft2UZRlr7j39ptZKd7/bc/QdHHxwsa7nr7hAvAPDo7u3d259vCVRdXMqqY/KLefvPnFf/rJu/fvz2s1Gc9/+5/93k/82I+cjGef+fRv//nzXwMW3/2RjxweHn7wb7z/b/7sT2W8b/XJP/m//9mXvvr6u55+8qd/4qe+cfvNb3z9Gx/6jo/euPbwbDGr6qrR2ljby3OpstsPdt+6ex9C7hwcjifzuqqlEtcfeaRflsT2ZDIZDIfHJ9OD45PV9Y21zY3trY2vf+3lBrh6+RKYP/07v9fU1WC4+fLXvnrr9a9ffejRRuOzv/87H3j26ZHW/+Ov/W+f/syfbD5y7ZGtzbce7F3ZWP3ciy/k/cHa6roQkGsXf/9Tn7bGqrKv6/kTN56cT6fzxYKIFlVV1810NjsejZi5rqrpfCFlNuj137pz59FrD5+Mx6PRSV6U1lpjjDGGrc2zzIl4TdMIQUrlzKyk0roR0mPeQggiaF0rmTmYMiuyLFdVVZe9PohUJoUQLrlBlikGK6W01gCKPPfJc4mMbhqtGVzVlTVWG51lyp0ud1CNMSByieEWizlbC4LRVpJihlA+U6wzhEsh8jJTMsvzUhu9t7fX6/ems9nrb3wdRAxeVFWe57P5/PDo6KGHHnrw4H6e5Yu6AWCauihzbfSiqlwhZSLKsixTyhh7dLAvhKh1k+fF5uamtXw0Gm1tbMzn8yLLp9Pp9saWrjUbe+fu3d0H95nt+ubWvFq88dYbRdGzAlKoi5sXhyurt2/fqRrtXrrJ0fTw4PDKQ1eYbbOo54s6y9xLJiyz1nV/MKibpm4amclM5sKXEyJBolcOAFSLpleWLoN2lC5bmS+IOSltigUGl+kVLTHLSH1b9uqIO8f8JowzkK7YYyL+pyy6BZi6sBG1rCBKOKHrlGG1NM7TYCcJcsuyAvtKVI44uWSw7klpJv1WlYiPDCKi5x/OuWFJMAjckG1SFi9S4cCdWgk0XuEucXmOnfQfqTgDAoLaTJPJpgVG7EWsmBkcECTahNhBXG7R1uXt8NJJMseEZ/uMQ9LvWZAQPPcRHeFk+eBQQIXDeQ3cOZGug3bLzOy8RsAASSIR0kWiM/Ll/et+lNzQPVeLH/qh0/ect/N23s7bX9umzsBKfCOAW++K9tPAGS2nwBAzOy4iLIQUwuf2cLzDWm+OQlA2mSNiBTjqHFUswGWJbr0Z30H//WZ6fCzHRghGTup6ClmEwqGIIEt7O7lJeWblmV/nqV1hIcoTEevs8hRLHH3s6HQvjmvFijtRc2VCSFdy1iR9KsTIp1vcBUxAqAmRoHtO1noHh7049yhXtZlTUlXf76sfaEgl3a4bggBCSEfAIedenLfX+ZeH113gJcDBf5Jo7t6SLfwucEDTohTkqmy36GiU+MhZQ4MA24LyS61dLwJcufMoM8aTsbRJzJyu2PJ3YXLhiXyGgbO107bvADN70MGXyOaAJUWkriMrR4nOVWJhdgm423pMRNSmzUow1vaoBgG36wDmdjTuOBMBXpBsZdPgdZm0duWSz1qIKuxefPnDJJITE33VAgUJe3dKs2D3WiT5JSMFC+5oHt9FSODV2UuGBWCNDe8E+fVOjrlfjbBejI5fgP8wiMpLhyv99R0p8Tu01Di+POnT18WjfwbRPHPt/sot7do7eHsoKmgWwbPBP0t0N4cCF3D/LU2jSyMi1YY7mYmuQiENansTu9xp/vq2MJR/sbqz94thgwtleqBBoWg1L32B7rp2f6ZTqxtpNXkGhbBQhOBqopTUugZAAhsb68ej46Zuev3SNDYMUzjNkv3Q2vPVeSsFAVyWuVI9sJlMJlVVWWIA0+n8eDTR2jRaA8ZpNVJKISUJqpsagG60NfGsOQTPSiGXOMgyA3XzSqk9IyiRiOw+JZ1nmiRIkoUrdCDgsEWCAyj9NMNbm9hywMSSfD0dx7ytS7uZuJQEb6L2oDCzCR6UgtAY57SFxWJRFHmeZ03TQFBZFo5gGmPyIotArdY6zzKAtGkgLEkAmM9mSmVEJIXKs6KqasAoqepq4VAw13+zWPT7vTxTD+7f
zsvi4NXR+vpKORzIXg/AeHdnsmhqFsfHIzsfS2muqM2d/R1lqrIspcoAjI7G1tpFVTd1c3h40pDQlidvvDkx+JMvvHT7lX8MIBvkL738/J996WtPPP3sD/7wEyejk+lo+q73ftfFYbZV5C9+/TaA1V5+jGq8f+vVybQ3kI9cXt3aXu9l3MvzGgAwbfS80XVjbt2+12izvnGh7K28ef/WoMzyInMzurC6VmRS9Ya3HhzYRpuqXlld21jd+Mkf+/EvvfzV2XQCYG2rePHPv/TkvHrm+mPVbO/LX/vqw5ev7O3t/cY/+idXHr32Iz/+IwCmJ9PJoj46Ov7Qe57Ktzf2TiZbq/Xu0eHlC09cv/EwgJN5o9kUKlvf3GoYeZELIYUQdaNLbTY21gEImR8fH+V5ydbu7u9lWW6MaZraHTjnHck+XSNlmQJDSanZ03ytta+bIYSUoqrmzFYI2TR6ICQRhJLOY9EY0lpLKYhICiGVBNAve4uqMsbI4E6lVMYBQcjyzLK1lqWU2mgnklprAcuQUmaCZM316toaQe092M9EkRW+lj0BWtuKLQhSks1YQPT65epwDcATN25kWaaUPBmPD46OpCCl1O7+XpZlCHy5KEoG5otFr1daaxgo8hygRV0vFgtXp75ptJDVbDY31q4MBnWjlcoEiUF/MJ9Xr73xuiNDK71hmZdEYnW4sbV6qZovwMhzNZ2MAGxe2BhNjptFLYizUjz73HtX1/q7ezurmwMt7eRoDMBaGGO2L24LKRttiARA2tgsy5gtSem2oMjLoiyFlM6cgEh8KXIFQFBw13P8heDKEXeIyxIrXEpOHih8JE/JjV7Y8B4EkbhhqYfYc5e9dHqzrU4QNKAQNJY+MQHCEgfC+MBTz0OYue8whrIAAIT0fC2U5hPesYQJYUm9KBw1oASO9O6JjEhEmZwlrBWzlsX8wE25HW/UseAN5U4aD5X5Iq2O0h7a0XQkKbaBnXppntI8li2O3N0RNswUtc6OxJysY7hxmTdxd5ec4uGLD3nfFXfqkMgRrvghyDqTl2EXbOUut9aGNUW4+9SQvln7JnryeTtv5+28/XVvynKInDqtpnZ91DzvCwG71lhXdcVhIwQChIBgYjCHYsYpL4ms0wdJJQBWGiK5pL155tId3xIYcEYjL4qA2TrIgVzNM1fIG7H4CQcEgSM9b0cQcyR3YLsAkbTr4vXqpcfHdYxYTlIRxalSPo4vmBo9gMcA2IJBlGBn7HtyjDtdhBRFSrXDWMpnSQZyiCfTmaIPXBxcEAnSeyiKYu3UAAKMK86eiEbe0ijI1dym2EEi4DnHH3ipiqI+7cSo9DEtrCDasIxEFnDDWEKWE3g7CpeJ51rU/5POOMhNFLFrnD5myZKl6OtpqYGjm5WLLYkdJdJpIvJ1bnRwCYVtdKCZL3Pov4riWgQ33HyYYomgLo4Z1orTp3loIJRXjyOhGJLJAeqJYyUmXwGRYykbwHu3dY3/3QMWdYakdcNww6qGD02YbPwu8WlzC9OeyXZSSbWfCFo4BWVZM2ASboZBvejWTfE+I8kLzYykcnHc+rTr0zNiRhKs3u5W0mlnAu/cuDODMw/n2belf7UryGFIf9Gjl74//RwvvUdy2F4Yhez2V7hMGqF4kvcqoKSqS6uTILi+dCDqzkiYINowqzi51GsDYcMpanScWCNaqt6uVPj1LF+YqHcm+tU7rFbH+5hbXsGRKLGz5sW0GCDYtrQ3k33o6kMyk8dHJxsbm/u7+wCMZTDJ4H/htO5A732xVj97yyAcj0YXL2ytr64ZY+q6KTIJoF40b799ezabN7XOsowckELCYbfOsctYy86SFGPJIn8LL17nb2shXOCZ9Hqro/ZslZJu3om61Z57T4WYrTEMSCGEFIJIm0bJzJEZC5ZCWOtKrLZb6X14/HKzJZAkow0ziiLXRhuj8yyzgSwE4SNxyxe+WLj3qWRjrVFKOV9RImGMMdYopdjaqq4BFEXhUcg8b5paSpHneV3XgkSRl3F6mVJNo/M8N8a4QG9rTd3UgxVfjqOqqlypQqr5aPb8l7+8c+sbD22vvu/dz00Nnnr6aQCf/YM/2lx/SKxQU413juYf//jHbt64vhgfFpm9dOmi89Q7OhyJjCBoOlvsH4yyXv+NN+5ulvv3RgfbNx79+A98P4AvvPCFP/z9P+qtbc8Pd19omm+/8exv/f6nnnnqqfvH4994+ytCZACuP/qu6zh68cuv3Nv//HZfPvvMM0KZmuxsMX+wvw9ACtEr+/d295vGZmV/fXXzcHyS5aWUosiLIs8ArA6HjdYTw7XWTV2TUIWQ8/n8aFE9+fj1yXQO4F3PvvfZJ9/TH/S//Mor3/ldnzh+sH90dHT9qfc9+ugjWlf3pycAfunnf/Yr33i9l4lRrS/1V8aTWTVbHE9ml4fV1YceBjB+9XUhMsOsBO0f7EuVZVlGQkghlRAH+/sAtrYvCQIY73vP+z7//PO9oqeklEIYZiHIHQmtdZZlzp/R14wnYmtJiKbRhcgBCBIuV4DjBsxWkLDW5Fle6xqAUmSMVioX4Sq2VipppmY+mzvfWyEEwFJKNwZrmYSom6YsS62N8+lXShrLuSyapmGBTOXj+WS4OjzcOwahqRp4tsRgZgOrWQpBAtW8Nmz3DvYArK+vzuZTqqksy7qqWJBDwxk4mUwWVQXg0WtX8p7SWmtjGOiXvUw5jYD6/UEotQGjeTZfSKXKoqeNKfJyvqj6/cErr71WL2oAUsgiK69dvQYhNtZXnnvPc4vFpLeycnh0eLizC2BluH7l0pXJdNJY+8yT71pdGY6mxwBd2N6+eOXK1778NQD7Bwebm1uXLl2eTMdgcg7ZbDnPC2uNIxSNMYIIzMZYkkm2nESG8MvC7RdR+kvkgyDjJwIrITHcB3EgglWRjbFL64JWslki76Ag1Xe6Dz9FihcZZIjWOIuvxeG05io/GA7FY8gLaRzEtihn+zp9rcjbyg5EsC5ridPWEG6JXueBpjvJBYD1ip6XMhOt7JS4s9RsMPxQR0MIWG5Y6FaYCtwxEaHjurmN5NhzcIQhL8ZQAnl6bsp0RhyKVzda1wsE47VbBErWMmwU+8qKMQTG44yJo4RbQB/ZDQhi62BYIoRoPGYQDHthh+LYozBGy44JneYPTLCVth+ew5Hn7bydt2/ZphwRbJlnS/md2aoN/vJaHxBZKcX/4DmkI9/GGEdm2dNVTzQJgmHB5LA2BGyBEq4TCHvLhk6R2GXGfmYTRMzEsYAbASSC+5lnHNQqcv6RHCaVOsSAnILjddnTeJy7lQLjiyvS4df+Wy8kxN6TwQgisl479+gQoQ0LYQZ3jHkUIN2EDQW4x+vwFByQEinEMVnrpYyOOBYHFF2aOA4xCCVLPM/JM8FDp8Pu3d/OKSkBGdKH2kTX56hLpwhhC+Bwm3Rume8mqIzfD2rhgmSrQiLTONuozCLgBFHCSDPcdZ+YAjdhVVqpNMiECKbz5LScJWFwLO+X+DWe2tdW2AbaiH7fgXUGag54DZ2GlojIOoDby1geBgHAzP41DyJ3iFy
BqzpK8K5JAS5xclwrfbfL7pMIpYRkKZloOpelqcWhwmMrAZ8KQmFcAq82RI+B5FhQ6MF/QdHgEJc1zDoY9sNxieb7jgclk1NE3QL6yXJ0l3aYUQpLc0f38L3E+8C+eOUSqthdoDMbIbnrbGH1He5upXuKhya9NFnc/4/tLEHYPVV46hMimTvE87RPKwMC7afcUrMWrnP/c3zbAPicv/BeQ36tW24W4U6nRrG1/hwhfX6nxyV2uPRh4mvdmfCpi/2/HPcuqIDuWwt47w5OxuLdOUVda6UUs5lVU0G0qBZ7+7vWu+ax4w8E4SA/p2FSfEfDErta2UXRm87mWjeT8cQaYwQBqOpKZgIgJRVBWMMgIzKZ54VSpJspPLUUidbVWhqX18WvPAXbksv167PjBepyugWuRp5OxVfQbZYQgh386pPrkbUsZXhjwwZQMIexy/PlIQkyxoBZCEGCwG2yNQQVPKBLZMDSlTAHjLVSCldVeTAY1E3d6EZKQYTpfObyS+aDvKkbd2at5TLP3KoURWEtmqYGUOYle0HDaqOFpCxTs/FYKdkryqPxCICSatDrLWbT+3du7z64C9if+7d+dpgrYcQPfPA7Ady+d+dg96RgfXA0fvezT37oAx8q8rx3YXPv4E5RFKPjMYCTk3FWKm2MsXaxqDnvPRid3D2Y7rz5dr8c7h/WAK5ff256sv/ks++7+0rvX/zpC+X7vuOhtSdv3bszKIpLa/1Hrj8M4Nn3vm/81le+8yMfp6ycj6r/9bc+/9rtndr26qZ223cymY4m89t371umshwcjMb3dnelyq3V2xtb66trAA6Oj2vdGM50XbEUxtrDvYPtzY179x9870c/fP/uXQD7u/vP6y/fePSRmdZ8MHrqieuvfeP14dbF3QcPTk4Obr/8CoDXfuan6tFkUsrxrOnnK0ezkQZMbZrFfGW4BqA/6M0mtSaxLmW9qIqSNKC1kVIys3YZErUtirIsaH11c2N9/f6DnbLoOTxBShkKVfsMb0KIuq61aeIZqGvTvsxMgiQzKymaBmxBQkrFdVO5t7wociLKMuVIjVCZtdZhfC6/gCNlwvE4ITIptdaLqgK510S7fqw1shCAms9njTbWmn6/9/Dj19584y3JEoCU5KI0kmic4gAAIABJREFUrEG9aISE1pxnednPy6IAIIW4evnKnft3F5NqbX1jOj4hgrG2quqm0VXdAKjrSmQo8tIlJQBQ6wagPMtJSPdqGWOMsswwhomENZYl9fv9qm5WV1YPD44A9MueZezs713ol0pNyrKfy/LBwe58UQ37AwCHh0dEvLI+fO+zz0xOxp//sz/d2N7ORP5gZ3fr4sXDo30A62sbWa52dneUUiQECWEscimVUsw+Xt403OjGOtElcHAvzbTcgSynnIUTcthKJUjlhEBCWmKS/BaE0qDGRLrngKvWyhvl8jasDEl/kdx1OEcqgHu1gf1Tu7JjxK7CRLudB4nXU0RBsF798kIJ2qSa1lqXeSCyPB8FwiwTfS61/rH1mUB8MhYGXKojJhsZa2uCdLK6xx9doBYHptwKcp4wEsWEzgAFEh33L+xFlMdTRunF9yDTkki1lJb9gxFNjxzXG+RDJdB27QbIHa4UvuNYYCAoSMFfxfWpokuEZQuOmKSPYfKbS45BCiG83uStpMF4SoAgMu8MR7bQYzhuTnXoFhE4b+ftvJ23b6Wmgt6eYi9ItN1I0TsqKwEg4VGMoP6x9/pgcl8REk2LAVi2TvRrCa0nzQR9CpXwf7UIzV+pBX4Rny59OAL70ACngnCAyqJrCSfht750SNDDkK6C54ktL2rlCnbacljKCLf6Fpk8R3jAm85ckkdmG6CpGLwNwHpJ1aNBYdHbraMgjwSYKUIPreYNF5galoc660qdf2l5zRN41Q+A3a4yBw01NesxM9hap0wmkkVchMhS/Q45Yze7wiExOrozOs/N4wjCiMix6KVCJExLU0J7R7gvunQuK8vxzgTbo87X/jbufuVBLRd53V07PjUaeBEtMcK3Ih2SVNvuRXFJDGKe7AiXhP2lKHilnnr+iFhrIxZJIMD6ZK/WSik4CJlRQgWR1RZw+fZb26s7PpYtgYTwETpOhmRmNixEe/DS1p5W7i4BOlcHRcIjkk5NapFZl5OOmdkKkYU99+sXJMigOLhPfdZ0t8MiWWCGy27vJUwKyxw32Z0ga4xmISxbVzaEvJOLO7teOXJ328QbNf0niOdhbb1+w2081V+iJVuaHJJ3MonHxe7AweGXU0cxFbv/Mi0lD367l6cSNaHITDpWC+PoXNil2Nimyd/bgVPc16WJhiEkWG2XMrpDA3YJQINXiD8i7akOPXBy3uI5REtyWlWnwxSX2EDyM7cdt/+7QUghmASCi3HQJQVAdVWRO3VSMPP25tb+wSEQcFe28LV5OntI1L4IcW+IwJbuvn2/18s2t9YHK4MHDx4AsGyuXr18qI+kbNzOqCwryjJTGZMN6QucwYN8MSf3xp8CkqPC7xU88lPkxIbmXG0Sgun696vtaSSRlD6pr/OtzHJVV764nFDC6JBmxU02VLmi4OoCa0kI3WhXn6Su6yzPChdq7RRIT4EYjsG6VbeQUiilXBg4AOcaSaBG66ZpmG2eZ8ZaZhfTCmMts82LQjdNnmckxKJaOMNPoxttNIBer19VVZEX2hiwKIqyaSophZRyNp87CryyMtRas+FeKa9cvvSB7/me9/+ND0333ty5t3vl6mMA/uO/9W//g//lH770wss3H732/f/Sv3x0ckK66g9kbnH/7o7eGgKYTGd9Ko0xda1VnmeZure386XnP//4Mx8erm+98uarAC6sb7/+8itXrt5YFNvv//B3s67mc6Fk+W3Pvbdumj/4zKcAfOz7vmNDPtFMR3ZQ5r2VckiqpEYf1U3tls4Y82Bvl0gS2clkNp1VjW2GvYGp9PHoZDQZA1gp1KKqhJTElqQQUk5Gx199/fV3Xb4qVtZWV8YADh484Lx4/LHHti5c/t9/8zfW13507dJDb997c2N1oz64PzMWwB9+7gvP3nyiHu0eHIxEMdzdfRNPqWpWiyzrFX0AJ9MT2WQiy0hJVRRFWQqlrmxtPXz52t7+7r379wGsDVcXdT2ZTU4m06eeuDk6Gc+mcyGEUMoVOwLg0P9GayI0TZPlmRTCMkshETP0WRhYIaQ1RioFImNtlhVGa/d6a60dDOpCMZ2fJqxBAIDgHMWI2bI2RoV31VrH5oILFwlrG2ZTFPl8Mc/zbDrlg6P9ixcu3rj56Nuv3QOglGQ2RMJq9y6SZUsW49FksZgDGE2OrtDlXq+3sqLWhmu6rtZXVlfWVnVdZSpTeQmgmo2zXkYEEqJpGmOnTnIwhcmz3MXdS6EIYnVldTSZFFlhhGHm0WR8cHS8MVjd3NgEYI2dj8bjcXORhNW2v9r/8tdf2b58eX24euuNNwD0+n0pafvCRl7QM08/MeiXo2klWN25dccKDAZDAGWvGK6tjk9GADGTUrk1prF2Np8TsUeNrajrOvgQRGtS1GESjhToHlLysyyspdTLSW
uITDEaPflKVP1AY6lE67gy36IRIVIU8/cmZdU6LhJROyhzlxybhYEo0qYW8uRME5CzTklGARJgZlp37NAksZXWVBufDrAGtFQalHCQ+J98RYfLKK+LLfC4KCjwXRxiUUiCdLqI5HH86CyUiuY6i4BoFV3tpRWbk+FgEnHe4SFKT/Hc8vtmaKTsup3ujAvdmES2necU0cwWQ0vkumyDs2d4odD2RrlCDrGdSOE1mi/J8+Vi1RE/V2Tqah4JklZ5MkyxSEN/Z9qXTFsq35VxCXqBlI5daiwJlIiauhRAzEUWZWFWFcv0bhQpgyplTBl3nlhQw1uYXa06zkCLBY4wZ6iooFTFxrW2lR0B30X0mEHIKMaeszOQ6w1qnoxws+ZdpN6QOfo30PwF2dWtNp1R9omT3paK91OjatHdzDqQcq16yCSLHHIGYiDOinQ/dY5Xj6DiqSs6czji66yhqjmaDfrmV09GWVZ353xFsrAC4nuxalnRFPMsrtHRAUf9PU1cSKFtUlvrtjKmoAhi975rGe68As5GoMeroAwBrTGW54zD2h2Ec/dnFkpmatjHGAOgPBxCcYR/GOk/lOEWmCAGU0jqbhEgycyryU5cK5SMzWxm1rKNKYdWpyoAKEXOaZWNYkaebmQxzjKFxTYiRiJ21IURjbKrGpiqiYo1JaVidc94H0bSSNUoEwGyIjCqYjJJnw6JRopRtogDYEBF8CGwMEYlEZjbWqAoBTeMAhOAJMIZDCAnf2e13jcsldwE459q28SH0/cE1zlnbNG2aCGvt6IfUYWvs3eZuHIZF16WCzl3bqqofhodXDxPxbg7X52dnqrq5u7t88MAY/vLLF+fnZ+fr1UcffgHgtdcfX1yej95vbm+IjQF5P7bOMNHhcAAwtJ2IvPfWm3e315Y5ggzzzWa7XNgokud6GEY/Xm+2j65GQF9/9OCN15988MEnH3z40ers7MmjKwCPzy9fXN/87f/5fx1DfPT4tdVqdba+OIxqm/V+zGVJROSzL55ZthdnZ6vlUkWu7+6W7dI1Tcpb17pms9mpaOMaNvYnH/z8j/74nzprz87OXNOlFSxinXUxROectTbNAlSJaLVYpPVweX4VNYJ0GA6q2radNQaqIYQY47PnXwEw1rSNW1qngHPWGu7H3lneHw5fvrgB4KM8ffqFde7myW3j7KEfmKlp3Ha7s9bEQwBw6PfAA4U2jSMoE4ahPz87Y1DKcJq2hiqG3hP3i647Pzsbh9GwJdLlcjmOY9mcqhBi0w89WzMMPZ1dWOeWqyWAfn+wxvgYHlxePX32+fX1zVtvvnF5cf6Tn/z08ZOHaeF1zfrDTz987bXHb77xpnPN7rAfxrFpG+99ypoKIBKYzTDs0gYNPpCjYRi6xVIppdyFtW4cB1VNBcdDCMGHVL8jw4iGJerYj2EMV5dXQYbRj13bNa7bxO3d3Q7As8+f3t5sQpS2ddvNFwBbbrquu765Wa/XqSSOSGSJzNYa640RCRFeYjw/u9AQP//qIwA/+I0f/nf/w3/7x3/2j15/8P7v/s7vhBie377wYXz88MmiWwD4Zf8RczPGMA7j62++dvP8dtj2Z4vLu7u799//9SgA4MzC+8BMbWdc04TgrW38ONzc3HSLdrfdpEMPiqHv27Y9bx842xz2/RB6Aj9/8UUa+Gpx4YeQKn0lS5hAowQmSmXfrLHMTEwQtdb24g3ri5tnBF6vzsXkSkEqQmyMNZWNZhmUZnJfyWxSDuxTof1l2ZImiWP+OVV+ojMhpN6V82dTxs+rijbBaNMbtB7JVRKkV/HIIhhN5tXpm8LDygtJjpPzZ0MbQMwqs1FW8aBKJ1pkx1NuOxMrKr8vmTqArKsdGRePNJ0TsbaQN6GxxSpfv3upD5O0iMyO6w253CcUcxMh5UpKkrNap0ZyPdX0aqOqqgJB5pTMDEic0weFqIXOM3x0PqhcdKEy4SwpldFSUQQkdV2JKET/MmXur/vr/rq/fiUuS9MRV3CBGcd7pVNSPfIK66oq8sk90OIKNNP6X33NsIzys/QnSphgR+QeUlWPgcolVY8bnKEPBfLAUfq4l3oxNYfpfirPa+HdtQ3VCikVBlNZenl5SjZTBAACUHIgzoDDSYxA1mELY53JKlNWuyT+JMark+dNqV4xDV0mgla7ZfqHpokr/pgzVp29WGkmNEzmu4lUhdZF49ajUQAgSJTJByjVSGeqCEPF1yo8SECEoEoRdcqy7+nsxVSw5fRTtLZa5qq8JNFhVv93nrrl5euVYkuiGpJmRRPltKQlnKPDE+ZeoPPcl2lZYMIwjhZHbveVHfvaKztukdbMdpPJPZFqhoZPIytjKGRU0ZSFhpnmvSy7S4u3WG50oieBU3GPJFQSHWfyrP2cZnDuFzDbuzRHgOp80pzisyGkHJSigpfKWc5OnVRTC5i/kAh58qZHTgg/bZh6mJRlMzO0pDzoBayp+sTx7KCujZk/xTSW8lJRtZzrVmme1Zy8b5Y2snaMAWiMqZmMpE3/g9Rk9lVLmspVlfdmEuuRIEvzn7nxk6/mjRRM/YhnHFU1L58cKUKqSLWu82mYTzPFRPBJLdPpx3xDFqLi5YmrH5zspbILUc+s3JtZPmSRuo94xmoqjE50xEcUyBWHyhjmpjwiQnXbLiRIysZEkKNEyEUJMcaICjFbMh7eORdDSOCIXSwJYOYQhsOu996rRBi2yY9pHAHElDAxhOCDM67Qc3LLRPKMLAla58oQCrpNKSmjqkJSOrakrR3NLKpX/8SvUC1WaUqJFGqMd7OXjAAAIABJREFUSS0k3x9RsWyjxkRzTjgmQMQiQoBokIjkUAkghsjEROxDBKsxHENUFbYmoZmpNyEEVbHWSgYx2YcgIm3bpm5Xb0dRcdbGGK2x1ppD32eDH/PoR1V1zsYYG9fEGEMMbdsy51oWq9WCCYf9brlcNU2z2W6cc8bY65vrrm0XiwWAF9fXXde1XbfZbBbL5aJtd7sdCNba69vb7W4PwAffLBaPrh48f3G9Oj9jwDnnhxFIfoXwMYrCGPv85u7BatUuUku2a02Mksxgg/fj6J01i7ZtnDMW0Xtiexj6QHp1eQ6g67quG0j1jYfrZdf8kz/55995911qF3fbYevDWMqSpH/7cTTGCjTE2LUtiFOiQMN86IeUnjKIGtcY57rFMq3m/aFP3XbO3dzcLhdLP44iMQQYY2JkY0zCT8dxMNYY5sY11lgfo6iO42j7vgoS283GWh7H/vHjq6Z1IYyPri6IlI1hk7Kjhvfe+cZ6sRzGMbk9imgMYRiHruusMQBSfafG2UXXNk0Dwmq1bKwdx6Fpm3ows+EQ4mHXp0M6BN91ixhipJjSgA7DsFotjTEKJSZjyFobJKUibQBIiKMfXdOsVuvNZtu2GcBl8DAMaT2EGP0QvA/WNiLx0PebzR0RO9f4EFJvrDGpUjxAzjXMB2YjIkRsjE31nZDZEIzhlFMyRLEsQF6c67N13w/JvRWKtlmM4zCOvj8ML766Hg8DgGHwfriNLN98590X+xdd1/V9H0JIJ1fyI7bGsGFV
EhE/Dkk0axrbLFYffvCzt998B8Ch33p/+J3f+tcvH7xmTDOOQmS+973vm1TKCmgX3xaR5fI8BC/irx5djaFfrS5A+NkHP9kdNgAePDl3rjGWRHSxbp2z1y9urV3c3ez2+z5VTPGjxLG/fvbCGNt0zccffULKhuwwDFdXj4bdZwDGcbSmgdBkPFcwsTEcxgGApmItAgmy3929+fbr//Zf/av/5x/94fPnNxJiAo+S47aGCGIuzAnHEmMVxBPzmYSJrxMry1NHgng+K/N5e4y+1eM188AsX5QXUxFIcstHdt9jGaSanlGVkdP2TzpZxdEsmB/3S2cjOTLq60lPtA4sg61zLpu6NLkknGqQVfzIY0hV16g0nF9I9U/KleK/ZgLK+4/an3e+ii5VFJRi8UzOqYyUuTXRN90lxTiJCgFXiUtfGlXBtMsDWXTS47xpFYXUSWyoAta0HkA1u2QmZDY23l/31/11f/2qXfYl89ZM5Tvibji6ZSqXO0M1yglfQbqij1U1tBzFR2pref7oBROXmDSnUqmZwJriueYh3i8DODo3eeVHq/PX8aFdGdNJT3D0WWGPU3GOxJIKkDvTtal6z01uWhN8kyovTLUNavtz7lNwrdnby/NVJCFgJgUBmgvYpU4dc8PCsPPkFKfTlwhHKJBmjZRO1MycNs2qYt7/JMAocPrC3MP0I8sdk6Kayvtm+IzqAhJFcYtLqaEVqioiOeYh8+CZ5qvJmy93FHQqPxXPv5nFlOglsexoyU9SzrRWCAVdQHmvpmDDIyoSTXLiHEHGtK4ptzYhuMUDeLrxFc6R5c7aVrFtT+3OH6w+Wkjw1HTLfKK0gI95N78MpkFFplVfE5xrlQZzpc4ysVo8xmakTOScPpoQxpkNuqzL/GERVI+gxhNK14VNPG8wbU3Jmyj5os12JXLXs3d2nWCdrfnihl3XgkaN0FxDBoCAAaW0kNM6OML5Ty8lzcfYJJECADGxMnLydCJCiKVmjiL7hSW4Ocn0AIpHcmm20rP0dYY5TZh3+i7/o9NSymuoqhdU1hWQXAROXCePl4iWw2m2DUl0lhhCobnwezJQMYCQCkrmxZBTfSSn++O2Mwasc3xzdkxOwnul8bEeM4GbRZCfTorjlrTcUfwravNUHyCa5jk3qgVSzTcVOLIYBlRlOqyKnUfLgZoW2mR/S2OeYawqApNcNlRVE9yQ14nqOI7D0DNz0zbW2qZpRGT0HoC1jtkMw2CNq43XxYxy5iAFL6syCKAoqZRNJmly+hUINAflJ3NaIW6mU6UGM0QCGWLiZB2pJWhjDNbaEAOgIA4xMOcA7TRqZgreszHG8DiM1jkC+eCds4YZwDiMZIgNDf3Ahq21CrAxzCYEn3olIiJqrWWmGNU5R0zjMBJR49yhP6TxpsDVevQZa1J4firPzUSjHw0bJmZrrLP9obfWNM72Y4YsDdvdfmeNaZtmGAZjzHKx7Iee2SwWy7vNHYBh7C/OLxKI/PDhwxDj4dC/9vg17/1+f0h4WRQF+Orikq29vr11zlrnGucuzs9NKlsUwmp5dnm+e+3Ro4ePH9/u9uv1+cOLZX/YEnPXtQC6thlDAOBjAGCZ2ZrLh1djGA1a07UA9jGMxIslH4bw+pMn394fbNOabhG9GttE0wAYRr/vR21ZlIwLMdWnZcOGh9EDsMYN47hYrA7DMIz+yWtvfuPNt37ys5/d3N4sV2dVYVZRgFItl8Y1CYY8DFtmTh5zItIYG6NvrDWWu7ZjJmctoMycyt00rt33t6IqEFJxzjhn1stF34+LtgFwc7u5vLi4ubtdrlbWmNWiE9VF18a4Msb4rgXgrEkVap2zIrFrWudaZh7jQHROrAA0CJExhqPEcRiVICL90BtjAE2FX3wISQoh0Pn5ubVmsVjs9jvDrqTMoP1h/83X3gBT2y3W63MRYWIlDkFiEAA+7F3TjGPo+2G9Wi0WKZcFNW0jIgn3VFViIjJlGRtjjMTo/ThjoMxsVBGjZCtpiOQohkiGALim6QcfJey2u88/+2w/HMZhhGgYgx+8Zp5iQLRoOh/Ccr3aXm9iCM661WolGkXySeGDqHLTNIYYhpIBYxzH99791ne/810AXgNEDJis2+1un37x6Q9/618NcVytV/v9HsDF+YPRD3d3d6vlqu3czd3d5eXFJ59+9vjxkxixWC4ArBaXDy6vmDH6oBqtMe+8+804qo99lLA/HNKoIXG9WsYQPv3409X5+s033rDW/sWPf9YtmvXyDEDjmhhEBMVILEWkJXYmnVjRx7T8Xjx//vpbj97/9vs//fDP7+7uNvueYAAYwyIRmoPEKaWsOJLkcpmb7FcwCVrppD/mYUd/HfGwfO9MIku/ahVy8/MEUHXdrw19Xd2SibElHp8MlvlNOjVxpPikV02PTpZN1Vm3T990Mrj6/hkpJn78kgGasoScBMjT7py+LFv+q0w2yY0oQuOphHKEDGKiX5HUTghIqjlJSZaQkm8sV7Iku3OZmfTvjKWWKBAV1RT5kLLQzCyx82w1xQF0JoROQ6ieAloIluS0mdpLnCUnghpjmYj4viDE/XV/3V+/epedjjygKHbp9M6flK8KqlF9gESKzo7p1qrLVXNiOn11wkwKpJHdiPJnMnNmLDy3gCPMRFqselkj1MqkMevtdOUUJ4VfFFY/3TcD9FAQja8l05y7nhAFpdhcZqwvcdMKvFABw0qRWcoDryNVkuI+dyJ3ICmlOZhzTnAi4jyYo7mqUGOVoCbxZB6ROO9uBqqo6qxzAvCkPGNGraqNljvrVCf0INuWJ/aPypurnXACKGciypHIdIoWHMs9WetPiFiqE6w5EKK8qNh4MyQ5QYqvuhTKxZGPZihmeg1mE0DFJk4oRtLq/jYbQV6yVSgpM4c6/vTp1+FYU88mcaYCLBM1KANoswVa6EC5BHBJSFR3EFRJIZTmlwCuefrKeoAoFCrFNTD3HqqqkryWciH0mCJVpu7MfuSX1jgkyr1TVWhNelNkthr6qVVgm92RRlyqHCJpBMx5IwikwJ0J40sxwvSSXR46W0qzDIp6tJ5TwoDZhzPwEiDNEegQAjMxCjh97NNdZclsrZkNt7RGzMkpAyBiUakBOwBKjHDKWKSioil0qLxoDu8m/I9eqlT2il2aVsBsQaGcylTs7ccg9bHQXClGxRslbcPSPJUSkChWGZSdUcLIsv9hig1kw6lIZFY3igNhPcXqmKgaYupx95JmVP2l0yyX/Zv3qYb5aV03Ujlka8IDnd9TfsxsHunwnfdl2vz5vEsbs64FFWSFhWYWspN5YuYoQaJJzr9RYoih4S59LSoM8sEPw0AE6+xiuWjbloiHoU/DYqJxGP3orbVp4+u0JLUuA0rpqQrnh6qSKiT5duXMqMTZj0WrUXF26E3LTxUKTvC/CtX769pRpLyQKTTbOmgxMjGjBvtLypGnImCyjOwdaaxlNsjJRk2BMmkcx+TnCICYoULMJfkdQ5WYnHMh+GTUdE2jIlAYayRKMjCoom3aFC0YY9ScnlUMu+CDQqx1IXqoLhepAjLFEK4eXHk
/juPw4OJSVPf7/Xq9VpHdfgdgtVyp6qE/NF0bJSaEK4Z4t90QUdN2if5DP+76/osXzy+vrqLIi5vrRdtePXiQzsVn17dn55eNNV3bWcfUdK7tbm+3jZFt2KfUEG3bhBDaxqmoNRyF+sNBlaFhtViEmL0s2ZplY84evv6zT59eXFyoa/aHPvb7GHy6px/Gm+1utTpru0XTdOvV+jAOZ+uzu+1ms9sBOF+djaPfbbaNa5j506ef3222qebs6Mc0x61rffAShQAJsZo3o4a26RK6BwhUrCFVCiG0i47ZOOP2+50x5rXXnwBYLbvtnrlx17c3V5cXzlnV2HWtMdw4C0DPz7z3jx88GEWaxqWa5qvVipn3h0MICZQ3fhzudtuuccH75WLx+VdfNrZh5uQ8WzedMYYNM7PE2FjLIMMMAqVS71DrnALGmMVymRJKDuPYNpwPPaah70VFI/35n/94d7u/fHC+Wiz67UFDgCiA1dnZYX8g5i+/en716OE4jLZpgh8T82IyyCyWoOr96KyzzjVNOw5DjNGwqd67IFKoDwEE55woxn5Q0pS+89D3+/1BfdwN4931bR8PDS+gUBFnbLZQkXRdO0af+MjoR5UcyyIiadSudck8ZoxhTkYxvttsf+2733v+7PlnT58C+MEPvv/FF08/+fTTAGkbd3ez+fFPfvzjn/zp62+8nqLCf/MHf2kYByLEGH/zN37z5nbbtsvf+/3//Yc//K3f+sFvxwAAP/nxz5k+WJ4t+kP/6PGjm+vr84uLYTgs1u3l5eU33n4HwPOvvrx58fyw3/Xbw2px/uDBpfeBgH4Yvvntd+OoAPYHz0QxCrNlIklii2iMMfG5cfBsyBgigm2dMfTer31/9X/9Ydctd5sxsSEFiCECiUpczuycHDAfaNXWO/m7Fa56rAUc/VW56LH4SUUer63MufXsds2n9tEnxw0V+XAuQNemtIii1Uh51BEtp/pciKsKQWphLg0c2eFfJVFP6sykzRRONHVwenJujz/tO6rWUjWmY43gqB0UMbwIytOMZWlMj3wFFKDCkSXP80RkAkSRnGVJZ+Mulm/EKMmjObcuWiZOS/vQVFkBRRzJ7ixFcqdKASqzUNlqkunylGQ9JvWFs3yVsek4O9Pur/vr/rq/fkUuW/joURBfVSy+BrIhopxK+eiWii2ieBhl01eBW2aa3aRAJy+mpNGnVspXM1tkMdMBIDAxGEnX+bqBzdw3Ei9ImpSWtJYTTPCyqPCqKzO/KWCyYDbVUnYiXqBY3ipYULwlMypEFUpBQmPn/SLMfEvrF1njPRZlDHN1O9SJX825WGKRJ+rynDpFC5+RjmYQQGbUONYtK/vMrB4nSyY7UBYHs4SSaAnoxbx8UZKAC4c3bDSDX1CZpJ8SHjGhGvNBZJOmEqGmc5mFfM5RBX1VSfm5r+epaHjk6lid73QCloiyP8grOoaMllDG9XK/qLSXBSXC3DDwioV97IFZX5QRgTIXecmlmyU7FCtSRj6a7SJVyfCAqChBUhA909TW8au14lYp2l6ydTe/nYiiZF/CyRKdf+Z3pjhRKgNHASRrzytiWAW4dPdMRC90Kg/NFkh+REWNIRS0aEapo16dUDNvubmoX52ZRcgQMSVnJao7DkJAEQcxHQ9VEK1dqutWlchQiX/Nsn0SNVVAHEUAGMNVQqXkQuUFnLJ8VpcMArNmJ5ZpYNmJDmqZ8iCOTspCyRk0nPZ17T3N75soc0SrV11H70jMQaJQEdiJKGVfL6u0tEaU0sgSgYgFejw3lR2drv+i2EyI5UznOdYCi4SPcmLnceP4fgIRiCnjdoqcWg4EICnjWhQZaAmPB3DkL1FObkzLdlqJWmij2dCRPnvJGKbExMw++CV1YBr9mLCShH1IjMQ8jmOM3lpnnV10nTV2HMdxGLuuA6BKwzBY64wxhSHPe5QJzUwJTsrVh5hEIhU/CxGhlL0BVL2Dpyi2urXzSiNVYUMJyzOcookFgLVsiETFGEOgEIMpRSLSTBnmEIOxVkRC8E3Tjn7wPrRdGzUkJcs5R4D3ni0pNI7BNc4YMwwDCtElRmcdASEGIooxxBCJSRX90Lcp4ph4iN4Yw0ReQtu0MUaAjMmxIipijCEiwxbEw9A755hpt983Tdu1LYBDPzjnROTQ94tuYa293WwW3WLRLZ49+6JpGgDnZ2fPX7wQ0fPzs0Pfg+hsfbbf75eLZWPMTfAA7jYbH2LXdUEExoQQm24Rg9zcbVYJuWMax3AYxu0wkCew+er65rXz5Qe/+OD9b7+TVp4zZt/3McrgRwVE5cl6/eGza1ZZN416AXC5Wm5oHEf53ve+360vfv7LD9565+1h3B12m93m7tPPPwHwxuuvLxaL1x4/WS1WF5cXjy8vzIc/Pz8/B/TLF88B/OwXH+72+7/zB78/jOPN3cbHp9vdjohc00TNwYzOWedcGD0IbFhHRIlQ9d4TYRhHAGxo3+/P16vdYSCi1Wptrb3d3FxcPhjHMXkABRGRSKpt6xaLjgiqEkW6xm12PYDWNV89f8HE5xeXzBxjSMKDNYZKFsWua4ehT0fc2dnZ/rD/9NNPm6Y5W5+RyVzbpmD/oGwNAGIeB6/YNW2zWq9SOz6E/WG/WqyixOD9Zrs1xgQfDAfvfdoLd9vts2dffPOb33n99dd3q52K7A+75WoRRXbbPQBR9T48/+LF8+cvvvNr39rut4f9noCLy8Vmc4vDHsBisVCoqMYQVVNWYlLVYeiNsYllWKLkqpmKnLRtZ53dXm/bVZuA2tubu6EfnbGi0YBbs4Sk1KuIMdi8tfVw6JVVVcdhPDtbf/Xiy2yECOqWFkDbtswYfTSGJYQQwyD7Rbfcbm/+5M/+2acffwbgvfff/fizjxy6/nDw4/Dmm29Z6x4+fPjw4YPvfe/7ANars6fPPvv0s0+/861fv9tuFNSP49tvv/Xg8mqzvU3uvQ+fPGiatmns3d3dYtnuDnax7KIKs73bbr3/GMCL5y9WXXtx8UADxjgS85dPv1qsm9ffeH29Ont29wyAtY0qiNWwYSYRyRU/QoTltPCsNd6PTPTakycff/LJf/lf/Of7XjZ3W2bWlDhCAzNbtlCW6EEkolwSsmTTDpNqkaoIMnES4JhVHXOuSfWZfaBZvK0ynh7xM0oSemLDU3DWK+XEyU6NmQw/682pHPk1bVTMNWtyR2PQ+vM4GX81c2YNrsYjnYpfXISoqp+dKDenUsjU2SPSVcF8Ep2LQPgKvWwum9IUrVYlxURflZhF2jpRJQiJiUEEJZFs804APQCQFPttCuQAMxMlm0edxyyEI3cyiamUF8FsZhJbrS44aaqLVJ9dfk16MyVPAyLwS3N7f91f99f99atx2XxukpmQE83gAylPLjBV30D6BYnhJJc9AFmISR4uxFXRSqpIRbgK5DRpjUWpqZHFyswpDpGImFhURKSq7qqIU8m8Yx10QiVQ00gVN8OMS4nEGbY1ayPdfXxV3ljZVmWpM8pk0ogKsmta4jAlJXPSuBIUl55gFgAaJ4wssUEcaaWSxYEMZXBidXkyqG
ZgTIm3ciNVJEixUhOkRjzXxSegCnN3yzK5WuQRLsgKF7yYq4ONIhaacDIkqiKHVqcvOMVfkKiqSK56TVOR9ORJByDDsXWNzQGmIlYQZsjxhF0VYQEIJW2iIiFE2WEBQIwxoSG1DnF2Hzuu4VvkREqUSzgdCDwFM4tAmXKFBFCGflJJu1pOQRW57gmTRFFkT+JqR62Iuxa3McrST11OABBVqMiomSxEzMl4m7PPMSW3OWS8RouEUrBRImSPPZHSRl4rdVda26YXiKbCGnICGOUIFKqzlTYgG841qEIqw5Ke1VMhcu52CE3B7WUTFct5ph7naoRFqFMq/iDTcqUqdZEiEiPDr5pPGxiVXHSGEumIEoYoFYbO5wkRgUQliljD1WZQ20+Jt4xxQSITMRlRSWZyAFHUGK4LJC8SBjNLmCcwn6YexFBViUQsEsqxGZkZYIANI0qMMTInPx1J+5sNJSSoOBqQRFGFsXm/Jb1rOsaPsr+Wc7bi6inHbD4k85rJWlbxyqyHxyS0T2pUMe+kX4mZQMRUXQAVJpX/md2pyUGoyuLM5VAsiJRCVYimjak5Fz4UymlHCIO5nG1z9JFUlTgHNYMJkl01NZ0zlHeIRtSDY7auyn5QUESqAV2Ui/wiiWrYIKU1JEoxy5I2aaGMlJWtWnzugBgVNGNJc4sbCRFRDuPX4moNYvLjcHG+OD9bL7pmO+wuzs6e4alqTDndxnE4Pz+7uDi/u9nEqIvlQkiHYYgi3WKRdpMfR2NM2koCOGcVGmNIRVfSemia1qcyIyilOsGkZDkDHxBRJihijGkZxxiJYI2TKIABwMQigQgKsWxUVWIkIraUECUAZFrf+5R0L0HVbBM4IiY7JAqDE7O0ZEjVsrEtN9b145BzGhIf+j6G4No2hECGiDH6gQ0Z61JaveTA4seRidq2HYZBVRbtsu97a13yHRtDAJFzTYjRuhbMMY6GTNu1+34PIECcaUBgZ/e7HaBEOPQHY+yi6wY/ABjGQ9d1h/FgrGm6drPf7Q/7qwdXfd+ToW7RAdju94dxWC2XPgQ/jsbYIQwKjTEOkHa9AGBb93d+7+++89Y3rpbLeDiIEd0dvMTLi6sAAdD149hvmUiiOGKSkSMt14/efucdkzLnAkrExipTiGINOTabsTeWTOtGZtntAHSGr1XHqE9f3L7xDfLbzYvPv/iDP/h7H37xjOzypz//AIBZnHEU74en281rrz2+uRao8T4Ebg0xgLOLy/VuK6p3u/2jEDQOH/78Z29+451I1DhrYAFAiAyhcQhRQlQQaRijNGytTfXU4YfRLZwweVVmlrE3BCjt970z5JoOgDHcLVb7zYuOmnHsox8scQSJqHUGwKfPnr7+1hu73WCtTfWpfYw+eIUKJNWXH4eAc2OMaQhB5dmXz5xrvvHm2x///FNrHeUTOJsZRRF8aJqGLUQRowz9sFqtAAx93+8O5+uL/f7AbKOPMWjwHqC0gDeHg4pcP3/+rXd/7cnV4xvrQoxNt3j0RjrL87a9ePCgbZoX11/FINbYVCRte3c3DkM6IBtrialxbr/bxxhJdTjsVRB9MESZr8Q4+nA4DOvVmiBMcNb4JQfx450HoKIGFH1gGFFl5BM/AcRZDifSCFH1KkoqXqy6MXo23BiXTrTtzdY1RsHc5YpYjrr99jbCr85X/8Hf+PcBPHv2rA/x7ffffM92f/GzP9tud68/efPtb7wzjkN/2APwg0ek5189/+63o/T94fY27PuwH/ebfdc2aa5BfVS/74fFarE/7JfLlWhoGzYgQ7zf3AJgkaRxmMaNz7dQWZ0tt9f71VV3d3uXNoIhEMhwKnJFkCwosmlCGAEYY+HFsQMJMRm0tzc9KfXb3jZtMmsrOAQiiaxCzgCw1nAWqyEhBSWoYQNVREniSxJnmSjMJZZJqsucETOlg8o/WaTVLFuoCrExNfA2uXhKBKVwDE1O31xvoMRfK8I3q0Ra31O7oEV0nz7OvcxSgZbIiMIWo5ZQhEmAzbK3JKGzWEiRhStIErBnOtrcZI4wUSVJCKUvhftrSn6d5FdN5RZrjZosLiahM+uF5QMAxX2idBgV9Sy/ATEUVSOPst4oMUXrK3GR4lSdcyD4GBLH5iwZSLF7o4ER0WSPYeYkykr0ycO6XDkzeOprCuTKimjpSxK2U+rkKr5mkicZmQmciJx4MmcoG0J0hBrfX/fX/XV//apcNuETVd3Uoi/m78tJn3EenWxzKfE8v+Sa95Kid8QJZ02n9jNvrp+nY1knxpWBkYKcgLJBroBrUJ1ZC+tAXvVyADmRB8rDUzegklgLEU0EyYPjk0HNxqAvoS8T+88ARC1rUv7DCZUnKGN6qpKr4HBTKeLTYZ0SlUofqQolxYg5UbcSVE/+K3Pw8sQlrbs2X4SVE/voyZXNqxN6MENHTm9MzR+luy4DmJDVKd6CquyBBCpRgTWmAZ0w59oNPR3gcYcBpbnv0xwl1JIuMJFK6yNVnqom4vIOmpM3fVzSGNLJUwXNybEiRVJJ8lpKBjBbjTxJkQBARBnELrNEdY7LHTjuUx6DZlUlb6Jjynzd4p9o8mpS/osvneX5m1Gmtla6qvNvsihPUE31RoQSEJaR2kqIPLCps9nKotNfNZQXGW0FkoElw8ecHBGZC801QYOqlBJx5oMiG69TnaZSuWo6RDIcVxZTcdqbZRwoYGVF+GbX/OSp1pFJnYFkS3xuOK8pArLppaLOdaWUPXB6CJ1eswUyHTzHXcuNRn35FJzQbiqjTbM36SV1sGXDzM/jyRJ0tJZzqJPOm6kDO6ZWaamqGjN9an7CTPdqOVl08qycu5bW6T66OR+q0zqlEs6sOssX/MprdnLq6cdR4mq1urp8MPr+bLWWVtiwRKEE+MTgmma3P4QYV6vOte04DpwsBEAqD62Y9FUihBAy4KpImGaCU5lYNIoIsgmQhJJlAgCYcxh+wlwUsNakYr6Gbe5qDMh5UYWZYoyGDBlOxeKttQCC98xkrVVJt3FCvlP1bQAi0VmXrXKGkg2SiMZxNGzSfPnRGzbsGJK6ihRqnZRDLFZOAAAgAElEQVTzVCOFmYehJ6LGNd57Iuq6dhxHEWmcq9PRNI2ohBC6rhvHgYkWi4X3wY8egGtcCofvhx6qzlkCQXS56lRkt90CWK/WUSWEcLY6E4njOJ6tz5y1u93OcJ6I7W7bOtc27Wa7NdYuuoX3IzF1XRclHPYjgMvL89GP1rp9Pzx+sBx9uDg7M9YaY5ICapZLa20/9F3bMukwHqLoOIxkWKHJ7OdjNIYp+fCCJYKNiVGMseM4GtsBaJoOejDGdm17GMdD3/9b3/vu8uzCNo2FddYA+Od/9qeb3a5r2mH0fhw3++0wjG3TXJ6dvfvWWwB8DE8eP0IMD88vRtXHj5+cnz1IVGU22bdUkwlAQDDGxhiccQQa2SaPIQBMxlorotZY7wOIfIxR48Wq221vv/XmO0gAuuBsuZYYjOn60ZMx425PTCnN4vnZmhUEOJuXorXGWedDsMY8uDgHYK1tu4Vz7ub2zjrLxF3TrFZrkRhjT
KeCSNn2rCB4741hIh5HP/oxObo61xz6wziOqhpDUMXoR2ddNkUDhtktFsMwfPzpR7v9brvbOOt8cOPQJ3sPgBC8sdYa45wjpRijMZYNE7ECKS3sMAzE7IMMg9/tDiIBoiGIYQZIisVCYhz63hAxm8TPJKoqsqFZyklFJbqzWP5m3AMgNE1jjVWOARLIAzBsxOp+OABoXOslxjCSbQzbKHHUIaoA+K0f/OUf/eiPAfT9sL48//yLT//av/nXX7x49vHHP/rGW2+b0W33z7/7698H8Cc//mfb7f4//Jt/8/d+7w+Gw/DbP/zhB7/4xfp88fmzT758/uLB1SUAZ62Pg4o42yQz4TAMDGIyIcSUwzEaJeJhGEOICtjG3V3fHfrhqm1HP+a65DGQmnyypiy3WWDKh2HmgKIpDQuBnGv84LPfwwzPyRLF14qKxwc21eJfRWQ/Os7rr0e+ksenPh3JJEWlQuGGWuVvyry+2n6piHBZupxyaGhNlkVF6JzZTisTrJytwmvT10fjL8lzKp3mPHfubTIJonO5t3w7vW/O++Y5m47Vs/nvM5ngFcyVjiTgk0un7zGfJspCeWJ5KQClonxpM6XsySZlBEo6VEZbqxiDLE2X2tyzfNCzHpREZYVkpU+TgEhFpp9y0MxekXt+lMJoIuDXC3T31/11f91f//Jedv7HXDmcYwBF+83MTugVR212ZcrQQHmqKmwVzcnOXlStV1lMOjm4VQtGWbOCVP10wrImwCIXOC6oUWarR9peVqlpZqoioLwnua+kWEwcsTIVFUOVUFqH9vIHM7WfjtnhMf8o/Hj2pvxrcmCjKXCyfjnLTDjR4viO2nR95fyWSnBkN6J6T5FapxtKi7MGpj7p7KP8b/1ookwlCLT+UQSZikaUABdMwgfVkiulmXn7ecYrA6+Dyny7Jms54clZJituhAX50xMyzh6YhDWa0ygJJ1rTTOchpcrdU7rSggLXpzKcP0EsVSyagM5J6kDRyYkACEhJJzImx08FCFIM0Mmdc8pqVKl5BGdNGyetTyokn6idf6Y98XJMe3WvK1Hax18nKP9rBaL5jq6vrlM6u2t2e/2Z6ZSjYUDIICSBmZJjYM5nSwqSkksUSLZ0Ok4WQRNup0h+wMlwTdOqAojATDGCiFPdcSZSyvpnOkvSBsiTLZKPLdRzhtLKqxhiATEnubh2qZb1qJ/PdIYq6h9JpsdPJENDml5SqV2oDU67vjY+W3I0uwUzhwWkwc68CfKzeStLKFrFUd7M+STWH/PVVif4646zSobUoXT61e8m14YTU4/WMx1IgFoZ2cvqy/TBzHXi5PSr+6QemDOz2RGmKqqcU4yhaJRHr5yO5Wl5TJ9MTI7AzMM47A/7pnFnZ+cvnr84HA5ra1P9E2EybFIuuQdXV23TjEMfJHZdKzGXsiEiNqb6wIYYkisvSJN3iWETYkw+F8lf3mRoT0SFU7pGY6KIinByexRBucFQHbUYoihRM2SvKJ7idenFKM411aRVnWYqr0wbNObUVxRjSOHSIQZnTepeCME6B1AMwTU2+eyn+G6JMaV9VNUYQ9ctCAjet23LxMOwc84ZY8ZxANA0rarEEK01IlEVTdMwk+/HhFmM49h1CxEZh2HRtK1rd/tdytKwP+xTjemmafb9wbAhokPfW2Ots5vtJop03WKX/MK8X18+CCHEGC8vLmKM3o9n6/PgQz/0rnEAXnv0kInbxg3jGEUa11xvN7A8+lFTFaAYQTR6n3Ju7vaHxlk2Ztj5ZmETTOx9aHKvXAw+VThx1kIphmjalAZUu6ZpGgeitmk/+PCXnz179v2Hj/d9L4M0jUU+hfRstd4OQ4wxQdUxhrvbFx9pADAEv93vV91i8N40zb4fwGBrRPVw2DfGAfBRRu+tNYvF0jqLKNYml1vHxmTZgykhcev1mplTtZbkW6SKRw8uAZBhNsbBjhIVOsbYj+Ou7w/DeHVxnpbPn/z5z588fBxi7FOHDUcR52wTm+12B+Dm5qa1lpn+/h/9w5ub22+89c7lg4ebzZ2oxhiIDDJXypuZCExMzDEENkY03m02aYMmUltridk5t9/tz8/PRSSGmBawNTaEeHv7IuVMcLa5vXkRg3ApGBlihOrQ9/3Qf/LZx6oaoxCxcEqnAiSne6jEKBKHYTA5GYiCyFiKXtL6DN7HGId+SNV1YowxUklyk8+zGlqiWpwJjg9cJfKj3+/3rWuIAimJaAz+4uzyensNIEhoGte1bRCfWGFnuyjqXBNC9u5fr1bvvv3earn60Z/+48ViCaXRH549+/Ld9976/f/tDwB88tlP//q/8+/94T/4v99//5t/9I/+4f/yd//2b37/t7fb/XK19mFIUKOK+NE3jY0hJCsCoGTtOHjXWGMcgBh7w52qpnLywYembdouRhGm7MOQqFEPYQUMEWhKTy/I/F4VUVSJnGuDDwW0zKzCsCEwJ6tels2O2AIdidhUoo2qaXPO7iaucgq1obIT1IpnBSqk8n1Cko+l1cTsSnTaXLPIgs6rgDBUCar052gMiSgFhJtbyo/amwlwdciJeU7W7yTnV+43w9Fmg52gz/wkEXJkS+7vv0hGP+75K8g5U7C0yJgzhJcJmOk0VAWsFOzBVAJRsiivgObs3qpapD+F5iKrWmzBmvU4cFV1ahfLFGCyZL8s/CRSpIFIyuRcAMvZ2AlTeqdJ9n9p2u+v++v+ur9+BS5blfz8wbG2VnCbwoizdpGVB8yemZ27LzVD06FeDn6gJvBKTKdocOXxytWOAIR5r6YgwtLGjNfMb0fp7YkgVvDSKcdh6c7RRVxq6dQ3H4sFkw9nhQOQmFt1Dyp6cgnQm7OVV6ve2aSdxJBTDf/0wZc+rBLTUU+m9yhqrkUqvHJGOhzdnAGVV2FM+lKnTr5P/1TBMEscGcWoySIrrFyDPeaSRqZ5uodLTe+kBqOsT80m07nvrFb4RJHUjQk2ziHBrxpUupKAkVn85K4LkKokEzPqZsjCmEzUVWRfuQQOVanshDwVMqlLr4oXM/ElIZKYNo1MlFFBlq1VoSYXCp1CsmtT9Z9JsJktFipLQDUbd48n9nRfayndrKe30Oko51f5skj3Wuk4E9knVCiNEjUIN5Mg38AmP5ineWY0nzxbi6U5yczyihy5xYIAAoGloua5ZRFRwBALCNBUCzi5oFCKycFs/7y0d47/zhu57O5yN5GUOtrT4VL8QMsgyxKc5iDddZzUaZpnMUwnO18mAwDVHzNLz3GHZ3YAxfEJoK9YEOXvqa1X2u9nzZc1OE8NCkwbeN5RlKksH85WqGb4vP6sLdUxltZ5Fs10Upb0dMFnZZ5oSiQCTSXcsoO0klZUrQQHFC4z61zdGLkX8x1St9B8Z0w9EY0xQO0w9nfbjWO3WC6NcwCcM8zEhp2z1hrvvUKZOOWbq7RVlYTZU1LLa3qKspiqKllNNZJgbKJYqjrUAAikmtREQdRwBuiRYBqCxoRvBmdtjAIVY12MMeQy34aZ0w40xiTYkZmjxOTVaKxRkRCCMSa1bLJ/n9EoabKS/10USb4rKtEY
myCzSvsYg7XOGjMMAzEZY8ZxJFDjmhhjmg1jzH7fE3FjXd8fUunl/eGAYk6NIRJIYrTWOteM3oPIWrvb7wFdr9cARj967x9eXQ3jOPT91YMrIr4bx/V63TRuu90CWC4WzLzZbtumZebb27vGubZpDoc9G766PAfw/rvv/T9//MevPXp8eXHx/Pr6weWDn3/0y8HH3eHw/nvvARg2GyJaL1fDQETwYThfr87O1tc3N8RFn1c11hBR0zSHEAB01hpjurYjaGMzjmaJLy4u2qZhY959++1DP+wO/Xa3X9gu5BkxF2fnXmSz2f7sww93u90wjo1zIUSTKKNqmK01Z6vV3ofNZqOkPgQy3LVtzhKgfr1cAnktxRjVQMFCsSwZNE2DhBspszGjD8bYrl2qoG27hBIaNm1jOYxE0VlYa8lYIt7u94e+B/Di5vav/PYPv/Pr3//gww8/+OiXPgarbhiGpm0BOowjgLUfP//i6e7QE9HucNjsdt9+8tb/8ff/njFcj6tsUqIshHjvW9OlXWHIJJAURKrS90PXtoT/l703+9UsOe7EfhGZZ/mWu1Z1VXV1s0lq4yJSEql9tI8gWRphRtA29pPhJ/8z9pMBPRgGDFjzNvZYA2O0jcZabEuyNTIlS+KIZItks9fa7/YtZ8mM8EMuJ8+91YJfDAyBm6i+/S3nyz0zfvHLyAiyxmyuNoeHRyoaTgiCmS1UrbGOnXgZh6HvBxNMLgEA1lgRT0qVqYau14Qv1EvaG+C9BH68aVpAw6phUiaoiHcOgHjx3pNiHJ13Dgr1CsG0wRd7X6Iiy10/tpoAFTo/Pzs9Og1x6g0xKrvr9q8+fBWAKp2fnde1bcmM4ziOo6p0Q89s/vTP/2zY7gF0+653Q900D+7eu7zanN45Hsfx6ZMn3/u937VcfhXA5z7/A17cf/jyF3/yR3/iJ370p77whb+0xuw2e+LqU9/5ybF3YQY3VVXZarfd28qKF2ONNXYUt1gsQquD9ahCnHciMowDkjV029bhxIKsDWgoc2g5PqRGnzmIRgkiqhLCl4d7+oFbQhR9HIJxadllxesAQMP/fSSYSgk++0Upz+jGZ+HzIL6ylKM8iMHcYgrccg11zSRkflEIwPRMAapyEddxKc2fAKI8vnasXbSxRAmFXUbA3PPMi5TrlGQPUaT/Cr3phlKSpjG9DG7h2idZFmesMGkDmE6Fw6OkMxx1s84a5odkD/ERE0RPjkCMpDg5xJmg7c18NRU+4fD8SKxfoHOnLyfsHt+lM9cCF9LN7G7TbbpNt+mbISWjv5mmmTTaYn8uiL8kQZhL04/g5j+41o5xGOLvJqYPSUDFA8iJD5yIBCrKmcTRyzbYrHjOPptb79xoVGJUZ78J+3w6r0K614KI18JV0MzRlFUquMLrSW++TKzUDD5MqCFjkCSNUNBn08+Kxz48hSKD78b0k1TvZHkahXu0JCrre43EyJ06wweJ8SmfLCDQRL7kzGnKfuoAwktEP8rnY61zkfM2zl7pTYI0IZJrzXlZsWWGkdVURAqwnFehGxhxIjOppOu7E0jNsE6JKPgBmIif4NIQCkjyMaqzgY1IzevURXkMElxTLTIkArLboVkXpl6fdQIVo0CxeQqFSlqiN0nzDxum+Hgce9JEEucena399DioqBwVd1rj60xQJXKqQF2aYg6yqgqEVFXIGhMdzHEOyhMZVgR6Q0UkcYCUOMFUkGEmip57shYQ3N4FH4QUB7A0Bta4BBJhM3XZjNAlKiadBLuAtDViwvqRUadpmw2GzNHMs9huJ+g5NyqP/ZN8P0Yrh4mfm4YzaQthwqTBQ9R9Mv2vmTBOVGaxQ6eAU4EXJqB0tRhqS8GoMk9ClApUlhLlbHn51h04fYl3zXKXKcK0FylN4aOyIbO4W3koJhXqJdtFmujpP0qkcNFqxOVR8pG52qVPjfxN0qWmUkoj4vhmvmwBQNmyApebq+FyvLi8DHt1MIJr2yZkfHRyPI4u2BIymX7oCbC2CrV03jGzYSPeG2MVUXkKbKBXb4z14giUF5iX6PUykiMqwW7Iiw9K4+hGTpRitEEmiIplA8pIwAFUM3kXfUe2VRvmCyPI1OA5VPJZDxQeakzwyqXWWlUV8cxmdC6sAmutc55BtrL9OBBAlrxzBKqsGcchdF5V2WEcvPi2aQMmWS6XIeJNoD77vlNFVdkxBoOmYRydE2uMH0cA68Vy6DtVXa+WXdcBqKqKiZipqupAul1ur5arpTF2GLZN04Kw22+NMdaYzeYqWVA2u93OGF4tl/v9vqps2zbnF+fO+0Vbb7dbAD/3T37hC3/913/75S/dOTq+3F1BZHT+9Y9/3Bhz/84rALrFQrwsl0svrqnMwXrRNlXX9c77EKcbIfgVEYgqa3ZQVa0r2/Vd27Zd70QcAGur7W5zcHjc1vXjJ0+Z+fxy03V9Uzcq1PUDACd6td2++8Gjvu/3Xdd3+3F0fd8b5kXbALjYbDS6TpMX52cn9+7fv3fv+dmZMfZgtR7DvBK/aFs3jsGfD7I9OzhESQIgKq7363otiqZu+mEIkWKNsSrjfr8HYiA7Ve2HQVWauvZuBMBE/dADePz0WV3Xp3dfefz0aVXXpu9tZZnNOI5e/J2TEwCf+cQnnPfb3b5ZLJV4vVg3dauiVVUDlHZXVmS/eMTGgGCMYWIv8aawMWa/221w1dQNgrsDVT+O3vtwm1tNdA++221DyPhuv2cKjg+893H/9N6J98w8DIOt68j7TxA4nIApuKpsPfQ9mLwXUhrFq/ZhuAGQQrxHcrwQtqtSbodgdRQwLSake008101lqna33VfE1hgwE+j50+e/8mv/HMDjJ4/++H//Y+ddXVfD0ItqZWtRfX7+/M7JiV+sAXz1q29uNttf/umf/6u/+ovzi/M7d06HYVT2f/PFLx4dHwK4c+fkZ3/6P/m2b/m2P/iDP1407dHR8W43PN8++uznPmNNQxUB2F5tluvWjSNU26YVr2zI+RHQpmk6cQCapgbp0PfWGjJcNc2u3/R9v64OjbHCAqCuazd6Saw3FOo9MRnDlM6LNckPUVWCqWpjLRnLxBlxhwMPJEmuSWhOu3S+p0ATaAJBXmaglsRtiV+DfJkkVKDNiGZwqMwg/g1SGhqEzc3rwCgAVQJVhd71spTOepFIwCxeiwvoKe/UiixwaeqjXFkgXiVR5MaEw/TkLCbN03y9imPWyS1khBFIwkmuFTDvnbIzy6/pek8W6lSqsyLcr0sQNetpBQzIgh/xhBKSLJGZKTUvPxB+o1NXpqxfojHFXi+mhmi+xRcPnMpT8unHswGNhf5DOP023abbdJv+I003Awzfptt0m27TbbpNt+k23abbdJtu0226TbfpNt2m23SbbtP/L6mwzCLMTl8onfGEd9M9uWRfkq+ZqCYDh2TkoJO9TTKW0XzFcrKYivZRM3dtiutnbCnHEPE1XkqdPs+2Palyua7XTpKm32D2L9RGy7O1VBS0OH7MBj8AERjB03Fh4lU+NFUy1jH2EkAhRAbPjjU1FRrrXpxF5hwp3ZGIp2QvPQR
Ln1M820unmERE0YV8yCNVMXfA9A/z87si05uHq5N1p6ZpdK1qs+sDcRDiqBGnBqVZpvksMvVsehmmIommEZ8+p2QjmH4Kyh/FxqdP0vOpG4p2z7oz35NN2Sipxsjg8aZPHvkpmjCigUUa2eTQOtd+alRwbS/JTFJSo+I/4hgOO82LeFgbZlOw9xFVURGVnEOwxJw1FbGz8+EuwomtTqOgKor0L19ymp6fL5RsRDWZi2qKXy3JWu7D03S0r8kMOX1+45w/TVFJFUgLQwnKACf7BUrLTIiUYn1SvkV+AECSzAo0GUeGBqvCx04gTb0hosG2y6vGb4NrPModramemuussY4fskYRLyUhmmdick2RplS2f4iZloaR0wAin8lPyyqubA3X0ERERQVp96DoeoKlNMebbzZx9w6FpuIBxLGddtw0c1TywJFOKfeGXnuRWhQqF/dZlVTp64Kg2GBnF+eK3VODEUJa50QxBhGFnbYUHlNvykvcNuWJWE5jBqV1qVMtShkXRzUWPVUqzrQp77K89C4v62lEgkkFs1kulsvFsuu7y82mspWtzDCMzjnnXD/0L85edH3fNg0ze+9F1XsffhwGSUSMMdbYYAsZ5Hno4TA5nPPRFCtOuxjkgREik4uoiPcxdrMKoNHLfwhhR2BDbEhUVISYvfeGjfdKRMGvJQjGGGMMh8ALRAIVVbYMwIUY5aqq6kWYuK7rbCUX4svHi9vMhllVRT0bVlUVMcYSQogeYiLvvHeeiStrRaRtmsraYRiIyBjTd30uaxzHqqqChZoxHKztjGHnna3CVW8zjqOxlpn6YaiqmoBhHNumraq667qu66w1Td2cnZ8759ar1W637/t+vV5773e7/Xq1Wq9WAEbnmroRld1+37aLcRyHcbDGODeGypzee9DW1RsPHz68/+CV09Ojg8OPvv76pz75CYXu9/v9fn+xvWoXi77vgzlqVVVEtNvtmMkwh5VirQWRMeEtiap6X9fWiRI0bIz90JN4W9dvvPb6MDoFBjeyMcQmxJFXVfFydnG13e28aFPXho2KjOPIVUwAjOHK8LMXL4a+qw19+7d8y26zYYJ3MS1WS0mQighMFEQLk3Heh8k5Oi+itqrquq7rWgE2xonzMjoZ33/6+P2nj/uhIxCRsaYhVKtFY5nvnBwb5oPV6mC1+sc/+sNPnz/7nd/6N1968yttXR8eHlpr27quqso53zZN2zSLth2GwavUVd02Cyd+2bZNXTMZzbJlQsAKUNu2BAoOAgwb8SJenHNsTD8Mu91OvKvrhpnHcbTGjsM4DqM4R4SqqnbbrWqMPBMMaaOUTgiQiIxhZnZ+FO+8dzpZgRGzCaHqDVPIJ+AGcb7v+sqYypgs8IMNOAXHIRGoTmB7QoMRFjBKORv2G9HlaqVCfder6n6/3+12TVOfnT8/O3/+6PH7u+1uu9uKSLNsRz8enRz+4i/+6v0HDzbbzW673W23wzB89PU3xmH83d//7aapD9fro8Pj09Pjbt+9+fdfevPvv7RcLH7wJ36SubrabN5//9E777xbVdWdgwcieufOSdhmhr5/+vjZarl88fRsv9t98M4Hhk3X99ur7TAMzIbZWFuNowvhhMM6aheL4MglmGAjhtWKnRDQQZZbMIzgVoLUi3j1IDBRZa0kCZq2eiDIfu+S0CrAdVhSWfQmsal5H0//JumgU5eX/zLk0uRhI0vJayAm22KmL5KdXgK1nCBUUtCyiEx6XCGJcotQYrlJ6E2IJq2NSbOa4PP0qxuGe4UmUvZdTjTpG0VdiJSmtViqF2UYnzKbCRZOqlcxAKoBoWKeFNmLarhqE3xPh70zgChBRAlhIpGmCz2py2P5xETGkDEh5mHSXBIwnZodJXwEBprHcuoaLRsQEVNSaJJqgSTC0r2oYmLcptt0m27TN22yABKImfbsYmPLoifuh0kVJNWJ+0Chh1NyzpWFXOG7UFEYq+ePqNRoYw00xMrM3qMnClJBFPiAEFOEEoNGRa7JX8w8UQHRYuHhx1l5zk9OBJoqyEwk2KxnKOap0JkTQYrRK6KITTIj5S8xuHUuiaYiCwySJNa1QdFUaLhccNP/4YRjYutmQCT5u9H8Qf7fdI1i9ptYKMeGScqbEyk3Ke+ZJCnIk1luWiKXaYjCxVVNmOTG0OV856NXfjHdNwy3XUhJons4jnRgusOiU++g6OGy9+J3jAhT8kgGKBLcoqsEdBKGny0BUKEAbyQBC1KCSY6F8ihTCryR7vmg6CbFxD4XjU0vJblNB5QVEEpjEP6WxHpcfJRHId8RnsYs3ArPSziOwUvY55gFlXA/TlOJI6vXZ8480XRYoemWDyXgl8aRYqQgIqLoyQ55n0nQbLrYlGd4ijAjSsoSrwrl3QsKZYoO61PmShL9gok6gFSVNXivn85diFTUEwhKEhyHskFwRKhpM0yzJtTJpytboX2xCgnVIjPHaU8t7pBTVIsSSp87NUorKz0exi717jTIYZfUeJNs6tv0W43XioulGR6SFFtsej4+dV3lII5hxEtt6sNHPk6+aa+T7NIp7uxJcMTli+jekZBueKU5gHIjz/2vSO6UNK7ZcmkHxWRSfRQ3K5zfc/bPSpQ37pDB7OFiskdlstAmaHJaVZZR7sWzdlMSIkyqABvebDfLtn3lziv++ePddu+8WGuDi7S+67uhM2zZmspWzo2CyJgQKPOP4W6yiNS2CnRTGMYUZFa9iGFAoeKD+35lUiJVic9MSi2DKATg1hSkQDNTSUiMAKl6JmOsEREiqioL5G8x9Uxgx40Jw8RMHG9kGwKF6NtE5Jyz1oRajN6FnUlUqqoKXDMzQ1TUV8EHJVHXdYHEHN1IQFVVFK4zM4feC0F1JBCaRAo1xoTNpKlrAF3fGWut4X3XNXVDhMD2GmOy07rTk1MnIiJN03jv+75fLpeGebvb1nUdZkbX7+uqCuRm27SGee/carkCcHHx4sH9uwDOnj6+3Gy6fvi+n/n83cuTx8+ftYu2c/2zR49DJ58enxwcrD/YXHkVQK82m6a2B+vFWoXiRUEYY1TVGDMMzhh23hNk0VaqQpAo9UmYQMa+9uqrz7d7Nvbs4vL88sp5t9/vjw4PAdiqqqqqrpvtxXlVWQW6oa8ry2yqqgFg6+pgvd5sN9vdxns5PlgvXzm6ujw/OjxyznVDD+BocexGJyoq5L0oAgMtAIZxCJOzrmtTGyISxW7fcQhbw7aqq7axz549A9D3fV3X/bCpK+O8J9VxGPp+r9C2bQFcbbbfeO/xZtcdHa6Xy5U1ZvQeRE3T7Ls+3I+uKjsOg3PuYnPZDf3rr35ku9s9e/qsMu3ZbwsAACAASURBVAs3ehMcrhieNhRDwS8BoOMw2qqK61VgTeW922y3In69IiIah6Gtm13fhyFw4xhoykQiKEcfrBTjiTMBhkLEFSElVeYQycd759QDYOUgYYwhhVSVFfHj4Pt+gOrBcgXAjQ4ptA6BnHPpbEdmrFYQMpGxzHt4kpeqUHjvL8+vKmttZc+unnXdsFgv6kX7F1/4cwDn5xe1rauqutps2m
W967pHj9+9/+qD1z/+S3/x7/98/+IKQGsXz549/bf/6+9++pOffe3Bw+Vy9ddf/Gtr67quDtbHAP7z/+K//O9+/b/6zd/8dz/0Q59/882/F7cS9Q8evPrWW2+vD1ebzWXYXper9r1331+tV+PgluvFkw8em7bpO3f24iI4CthtOxUPiHMSbtkvl6t2uXXOpx0CznsCA+H7fDYMUeEkpSn7HmGKXjkJIDBxEMTMUZZIcDYSRPK0709beOH8KYKMJBjyE9PjHMXrjZRPyOMhoKpojsaeB2yCy0lSJnAzfRZeh0EPL5AUF8qIe2Y+klWN2F1pYy9wxkt8x89gXp5dmVdLOfAMuYa8MpCL+SCC1BjcJuuNOZtQ8SIe3c3Ou+lPZappDo1J5S8KqR7hUkJKAfAJiLIn1sxthhUkAWtQlI6UrmkTEwnAUIkAK0DE6ypagmXEUYErvIzFVSnQ6DEl99sNRS+hp6y00Nwx9W26TbfpNn3TpOQ7Mm9i13azIACyYxCaxKaqEBlmTrokkhNoUzAdwVJj8luIohyAktIIKqV9lkJUxjiYCAtEkJHV9MRKJFmnACUvcZPMVMyBQN7GNRUbmxixW2pBihZS9kqho0WVH1H+Zk+DJQmbnE5OwCDLjzJLRZS5VAzDJI7iu/zdDYBwQ+pNorZsNFJdJ/oid10qaE5RAIASuAAyUaHUKYN5AUVl5nOq4HxjhvNHps6d5OwMSCS6pDxjBBCC85QUVfZkV3ZAYq8K5q2ssSZQSbkmU5fHA2KQj2sAGQ4G+4SUEauCZDoi5ql3cxtzvjcJ5wRrp0PfNHuVMPE+hHgkHryxzfPIgEujaVzoTUpOC8vaCATJSTdNA0AKKTKb9aOqRr9AsdsiiAUJweAfSMXcDQ7aQ25pQs4OwUsKq8ghLk/nvUJAMZYxMZP6WF2d8FvoYVEREU6RH+IephL938fAThARIQpbDUe/eAGdxgiH3olhU5kqVS8Nf+6ouBVFveMmR5eMYDARzpmXLRSEWMGi3yaFORZNBPgQxONa4ZGmodwLeR+dHfEUqz4vFoqDC8xWtwYvkJO+kQoKpk/Ffpe0iaTI5eoWyyXXd/peaab13NzNCADNHBanJ+MuHEncQGEGHjvWcQqaGVukKlBmk2s43yGnvTH3QlFe2FIS0R9PJqKLz7QMVONZBf7BlL5OitE1tuDs/Lyt64+/8dFtv+12nTVmdCPXNQAR1zatG5146X3HzOJ9qICohKlb2UpVvPeG2RgzjIOGyBgaI2uDMY4jkVFV8cKGmE2wOQ4kEoBAwKkKGwOoiLAx3nkCiGl0DoBhImLnnTFWRAFKjAxMMtoS8Wwq50YOtKb3ClhjiSmECjHWQDGMQ9u0AI3dfrFYjkMvKpajgaeKVHUdqB22ZhgGJrbWeudIESgqVR37/aJtnfP9MLR1oyL7YbDWAgj2fcGuDdDKWmJyzlVV7UbX1HWMgOx8Uzeq6Lr9wfoQoGEY2rYlou1uW9cNgLZtnr04C+N3td0YY5aLxeXVJVRPjo5Dz4zjeLA+EBHvZblc7LvOGG6a+vLqar1aB9+Rf/Fnf3K4Xnf9YK113tVVIwo2ZtEunHcArvbdweqgH4eu7wxhu9u0tT1cL8MEipOcGUSGeXTOMIuIQjbbLRtzdXW16FoATdNcbq489P5rrz/c7P/6C1/YOXny7OliuWBju74DsNnujDF37tyt6nrVNt1yaQh3j4/r9eHDh68BWK1WV7urp5uNd+PVZvvq3bve29Vy2e/27XLZLloA3Tiwkq0qJWLDdV2Tehgr9UgUiTmoVnUVpPi+78iwF++cH7pxuaifPX0B4OLy6sG9E2dIgapSFVktl+L3TVVFTrNpD48OTNV8yxuvP3vxwolYY/phwDD0Qx8W7mazMYa9SE20Wi5fnL3wg18frPdXg3Mu3RCSLN1Dp3oRNhYkzsWYy8zGex/o/s1mK14ItN93BAreRQNs6/t+HP2Wtt75cXTGiooQRTPJAE5UVb0wwbMSEzMbw94jLGGxYsh4550TP3oiw2RUvDgghXMBYJizLbD3ftiOgY/MImWG/eI512Q9UG5mwzASsFy39a5pF+3B+mC322yvtgDcMO62/fd8z3ctF+v/8wv/R7tsfuWX//l/8+v/9S//0q/97D/+2d//7d8B4AZ3eXHJlb13997X337r4uJSyb94crU6aJ6+eArgnbfe/Jmf/Sd/8md/9Zd/+beG0TTNbrdbrdEsqnfefrS7PAfgRmcseeeVsNleslZQbYSg9Ozxi9XBAkC/640x1hLAxpr1wUG374Z+MI31nnLoaS9i2GTRxsmgLJ5WIjm4DfIRKio+mOqzJnzA8VwwWCEU53jlgWtxUDbt6MUOnrd0Qppn+cT8ZXKAgoNEVYgqT6G0Ex6IbzWN23U5GFWZKOYSm5nqWADm+F/QsK4J2tDsOToofVnO8DImIRdP+2P90le5rKJXijrfEPfJYBAlyI/tzz+ZIYeElsv6X+uWeb1zTpOIp/JBDeesRChsPUJnEkDhZhKgHJ2UR4AgCmZiQxLcMosGpUDnY5XVxdAWLjTL9CQpoFK4vg4KC0rS8sPT/4dHbtNtuk236T+2ZJXi/amw4zFRDs2RpHU29gswUpMlFGkKJRxSpIHSHp9ssooUlVbNtEOKczodQwGKGPwguLlHuPM10/knC6dQbriLOmNZqOAyg+IY0WC6DUj5sYTovdI1rKBJ1gWX+wFJa1lXlUyWRLwX5GlgGEpCrai4oRCCYwr0G3ImptErT7xquvkbztXj9UYRgI3hwtN2FmPIrBkIgFcfuVWeqIFIAMlNqRU/8YMzJgRQjejcq4j3iAFCDMIhfxLDRDAS+3nGn4SLDpZzT+W6Ueq9RMIC4cpEYLXjHaqkZaVBIGINSulERBedGyOgCATwkVU2ZENBomqInXgCmFgExImxiLVBvJ7PTBSYinyHMf4RL4AIeiIKVghEUBK2DIKOed4LQMwgNplnU9UcKIbTmbUGw8nUgGIs5vgIRfeqMllklig0OfUDZ9Ynnv1ruJ0EhaonCks8LLAE+1jFazZNindaKS6JMDlzL8XJFg6sZSydtoebKlAzI3rnlJwqmI2XGFQ3KGbMTEowSU/SsAeAiJlJxQQLoEwsh8tZwW8DQMEUAqSiHpxoJ1KFT/RXdPDPZBTR5guADxRjWEeqxBQ+gJSWFaoqlO7JMIGrsOgcACJrKxNJzXLyKwyZbGWQJnjcaUQVEGYl8ky5vgwhQEAIDvW9eFXlGGEHI0akpUPxlrcASjCU9oEwSpL4Z68SQ0kkTSxsVwx26iJxHy3gJq2EyabpNG3UCoj4vLEkaB6tAGASm4uJlFdVZaTb7PFHwSAyHh1xNFXWpP2kTkKiwtLsS4tQvGfOAYgSf0pgZvHJijbYiwV1TKPWpamFqlPW6n3YdRFo1aR9ajD5jL2qmPaIPHxAcmCfp4r3bmoAiNkgb7M3LNiz/JJ0R5OglJRhL855Xxkd+/Ebu2+M4wiCHz0YTV2FsCS73X69Xi+Xq+1mJ6rL5XJ3viOCrWrnXSiAmbwQM
1d11XU9EQXjSpl2f63qRrzzomwNQlhqtkSs4kPoB1JW8YaNOK9BZDvPDCLyIonwNV4VxMzGeWeMEVXnR4obeBxTo7BsVdUwC5Go2MpmJVlEnBsrY1W8F6mMHfvBi6yW677vQ3XbtvXivffGGla1RGyIoMPQr1YrNgyg64emWSjgnDds2BrvvLXWGKvqY9cbdr0o0DbWO8dsRKVp66qyfdcBUPXW8m67a6qmaer9fm8tLxdt3/fMtFwuAJydn+122/v37m23226/v/fKvWEYrq42d05Pmfni/BzA4eHhomkvri4ra41hNuQcDd3guq45PIQDgL996x2rpqkqkO770YsXUWN41/cX+ysA5+cXoxtXbavQi+12u92u7t8dvSrMtuuXTQNgtVi0VeW9b5vmarftvOzE7PvBinZdJ8QA1rWVYXP/5HhV08nhou878fLk/fe+/3Of//p4dnBwCGCxPIP4j7zyirl3b+/GF9tLa0CEB6d3XzlYAqhZNv3u6vLs+fPHr3/0Y+t22fW7H/vh7/u9P/jjw5PjhV0A0cC278fVot3vVMX3w9AujGe8/d57R0d3AHBtBMxcjeOw2VxtNxtS3WzPDk8OFKgsAzg/P/vYKyfb0fthYGVl2zuvxi6W67PLCwBPzy+Jq67re9GLza6qayatqnq73RG42/cAlO0g2vd9Y+vDpn787MXZ2dm9V+9/5embTd26aEFp+tEZa9nyMA41KgBefDDRNdaE5aqibudQo6oqNzjx3o3j2XYX9gRbr4hD5Gsa+13Ye/ZbDxU2kZQPUxSk1lrxrmqt65wnqWthQyEojRucpvOkuqq2V5cA+v2OCdbYEHEohJgPcGp0IxE1i7rrO2PywR7IULBmhip8wMCUjpADPaQCEeLlovXOb7qe6iYIGWPsOIwAoOZye36xufyxn/jxf/E//cav/NKv/vRP/9xrf/c3v/e7v73dbl/9lo+Ejd+yAfHrr752ubk6Pb3T991rD0HM3/Yd3wbg3//FXxyfnL7++v1v/daPgWgcnTHW2rptWiL+yle+BOD4HtpFBSIIj8MgKn4EsYyjs7zstw6AOB27fd00bIflorGCt956f7Gql23tRzVV2HLJkkK1qqp9v7NVJU6jpyKJ+F7EO+fCNkIQo5UfPRtyGEOQdFFVESbUpvHwCfOzxoAqYVfnYItKgbyirM6IJlaTCFHhoXi0a8CTBAIQT9eDMJV0HEuJI0/nwdl9UNi9kslE0NSiMIn+YYIMCyeSiJxj2KXTlRxQvJAmImEiBa48OkuhrHNkOkzLs8gEZjITmIrMv6BwAB9M6UNXBWGJDAaoyDzMxXC0DglTFkqRqU9IN1pPKpWxcQLWIZ4RphmzFG8p4QUK55WFsWXScjUbrorAkwiIDZmgWylTrlSkXSnGAiQiA8BjFPVeFAImYkNI04ABpeQYTQFWCq0DhzGT2AWxRwhQYzR8pApRImaK53ZqIsDJSoJqul6WxuU23abbdJu+uVIgayZL98ifJWPygvMoTJloMtHKBz1RLBFYs4FZcQY40XxUlFXep/vQPVQhKM/wKB4wxSM0AFFUz+mcdDM2UXWZsSNKJ4NTqRq5xlSR+DGnQm4eSunsdSn5Enn00qYExUwSNUeUZYqKRL4y9pBmLjOIz8DOhtfiRTl6U4kwJDxMs5ICZlEA3mfmiAghXuTLuzs2PtCe8X3SyAnp6DKC2YjMKHgau54PARRdlc0HpwBRWhr7xZoneiJAu3DmGuaT3Jgm0/s5+TuzaJLgmY4zFhJVIWFEdzux5HAbPXtXyndWJ0aNAESdIdcWgeUQxPPvOBMT6CprWoJQ0E2Souyi6yuCwv1eTBN19nheqIgVTx2cuzPOLcSrwwGZxYeKXtTUrPkM1rLFuPGz/AXdqDimQQkEbLg7k6g6TST2dDEGCWRqNoQVnXaAdBiRkDshUqvqozln+FnawgqzQmXimV2IIgQ953SInXnD3L75Sr/2IgZFTVA8TWTKaxzhmD0f7YTmhb9S9EtYr5m8DqcTAoFqWKqFMkAhU458mbxkaEJH5aGgaDmiyeAizOFJiyhWKCeXpUhmFon8o2IoqZhf8f21TTIuoel9fGrqzGJLJxSeDIoNVxOJSHjJrC+2DBCK21OalJZZNeOaTsYXswEuJ2RxchMVvEJo/QNrNlXmej1nP5ltAwovwfYqba+UdlZSkPHSn56e1nUzjMPFZgPioXf1sgVwdHhIzM45Ja2rarvdgGCs1XBHjBmA9yLig/4fnGk67wN7q14ABKeTofiwExtrAQSnjRRljQ9kv6oyG1UhZoV6kaw4iwo0HPOIiWcwceyzuRazIcI4urZpFOi7fVXXAIZ+qKsagHO+MpWxxjmnAjbGjaO1VlW8+CxzAkNUV9U4jGzYsBnGsW0Xqtjt94h2ghiGgZmMsX3XE5M1tu87k5bEfrMn5rqqnR+ZyIB655dtu9vvw4nXerUeR0dMq+VqGIau75fLZd/3u/2urpsQ93mzuTo9vdsPQ9f3h4eHALq+Pzk+Xq/XL54/D5d8q6o+uzi3xhwdHZ2fn/fjYIw9v7xcr5duGALp9ujRB+N2ux/277z3LoOI2I8DG26b5mh9CODx46eq2g0DE5qm3mwRXIh6VQs4ibcRjbVsrCJYtpK19p33H+/7YXT++OAAwNVm896Tpz/7Iz9CbvzMd37q5JW7Lx49+cqXv/LgjTcaW2N0AIbRf/FLX/nYR9747Kc/XTNsX213nbXWe3f33n0AH/zN4ze//HfvvPPuarn8pz/z05uuW6+Wv/aL/+yLX/57L2673QBQy2wtgYdhMNaqccv1arlYHB4eNnUTCV9guVy60VXWqOoHj94j1TvHr9i67sfh4cPXAbz7+Om3v/H6oOq8dl66vh+GEbBEaJsmzCo3YtE2hpiY+6631i4WC2N4GHzTNACODo8+ePIEwL7r/OjOLy6qZvGpT37mi3/5xaEba24BdPsexP04sGE2PMgYVoSKOHUxdrm1xnLd1swEVS8iXoioqptAtngoSEPEeFUVidsXpa0dCLHCFYCDAvDivBMiHseRGMkEWNlY9VI39W6z67o9gwlkjBm6UXcKwFrDbAJgMSYcXwmV209CIRPpgiSpShkOUqVxdKJCI0RDAOtWxDf1CsCL8/Nf+Plf+MxnPzP0m0994jt/6id/9jf/9b987eHrH3n4et/3pyd3AFxdXRJQ2cp7x4TlYrFaLoMz0832CsDTZ0/ffu+d5WJljRURbmxd1eM4dv2eidvWAKiqdV3Xi3ahwnVdh3P/bT8CstlcjscOwGsPv2O9PPjbL/5Nt99j1K9+7avLg5WIr2q73Vytm1Vod3DvsBt2xBj9qAAExpCpLQA3jnVTLw5Xhk1V1baqBr/N7lmSAWFEdsE3RRQP1x2Mlxt+hFNp39PrT+lL4EQcpyDhSpCccUEBOKKsI1y73XMT8UYQSFzoAXGs0y8KqFxcc6a8yUZwqrNiX5Im/12xLVNeAS3cFJdJAMb6kiawHFTMzKelo9coz3MnU1GMBg8ngXxNuV+vokBJCk01UZmh
6gXenTDMZKs5nTFTeo94fhhtNVS1wK6p/IDUcx0mrEHFvMla6Ut6dpo3eYhSLzBHlB3X8nSmS1QqgbfpNt2m2/TNk+x0VgUgyp+494mmEx1k93np1nF4gq5L2IldKhgRoHiQ5ttxpp4Ksiz+TRp+Eg3XubZC75vLREyfKpIHx1LkXns+ZqeJMKBCU50cK2chCi26K/9J5MlNxqIoN34WPGpFrnDKKDj4Nxk0hk9Foy1LOGPlLOOSj7ckp+MLSvVNH6V+Kk8vpxGZIdOQ2JhAG4T77sGASo1hncLURJIgUQjB005U5qcSIl1CxVsgGSAFXqmkOJCxM+UKMVNGEhSPo/GyNNEIYQ4mCDm1uiQK4tly+gEwTUSBUnDJFaflxAunzPL9Ci354tTuAkBAUUwPoYKeoRtQ7TqRUfRK0eZCuyimW8akJLHGE38SQHK4TDpZEkdyZqpw2YQZlEzrY16vm/Nn6u78Qa7gBMxiX8US4gxnslQUGpZGSJK8vU/kXTDzQJrmYeIhBNKRWHAYo+w1KM0oAqtMn8aeIWImkcwqzmA1ip4pAWv4UvPPkKBzOvGIP8nsaWZfA4RUKXguJWVEAz1osOKgkhlMVmbTKLwEyVLxd2oJUVaSCJxy5kzZJWYurtB4FSkiZwIUMRzMtC8mMD21GukAqNjPpu4rJi3yBpVFQK5sIuVm0y8WFa+zU8wyainFNnyTrpyfhkU/l5RUrSnFtiim8Uk/mFUyS7Ey26kCKPunHBKaD1N5fIRgjBP3K0JUbAgARueUuBuG5+fn/dArqG5atjbejzbWe+m6rq4q77333lpLUbhEyj7EuWLitPxVRaIRStoSRbxhwwQvQszWGC/ei69sFU6YvHhmDjaVhowE20YVDYaXRACcFyIyRF7VGOOcm3zYpRsAxtpw/2x0zntXVZU1pu97ERmGITxpm9o7L14MW/HesGGmvh9UJbh0DNR83dTEHCrpvPfeLdp1PwzBeKSu67BhNE0rIiK+MlUwEmuadnAu9Myibonge2cq67xr6mYcnRv9oqnDOO33+/X6QBSb7XbRtk3dXF5dWlu1TRN4z9Vqba158vSptXa1XJ1fnIvInZP7291eRIIrRhVh5sODQze63X63Wq68yt2TU1h65+tf/67v/RyAu0eHX31x5p372jfeev3h6wCstdbYQOsAaJp6u9u3bVMZYy2MMSC62u3PLy4+/trdMB+GMRrn96MfvYBZgGdnl13fhaA9AM6vLl85PfnOz3y677pXH7zy3d/1nb//jbd32827j5984qMf63Y7ALU1zPJvfv93Tg7sxz7+8aG7vHu0fueDx8O93Wa3A3C4Xr/7zjv9fvtTP/ajD9/4yDfeescYiI4//sM/8Fu//0eHx6cAvBubtnUu+Mdg53zFJjj4Wy6WgXQzbE6PT+uqXi7bQUYm2nV7NvyR19/wrjfwAJTN2W6vylzVMLU1tqrqy83uarvL7jVWqxBcaJ9XnmGu66ofhkhZKlbt4vjoaOyHy313fn7+bd9+7+GDBw8/8tqj9x7BKIB+6Ou6scYMw+i9J2uCXTkRqcQwKSJQWNJgExzJynAlM14T4ojPVFUFMSyXIh4zRaxBIIhqchltKO1lE1gAQWEMixdxop68iKg3bChd7xEvRC7cUwqV8V6i15YCasabSHnTnrifLLMAgnOOWJ0XxdjtOu+ciD578RzA5777u3/11/6zj37y0//yN379x3/kx//sT/7oS3/35XfefvvjH/uW1XJ5fn4GYBwHVa2s7fq+qWvvMTqnKszGi4TNqq1b73zfD8ysiq7b11Vtrb24PHv65AWAtl3sd31f94BZrpZNU/f9MAp2u83B4fJz3/Xdobpvv/vO6enJO2/v9/22WbavPnzw3vvvbbtdtagGPwAgmHbRGFu33Njaqoox1Wq1quvKVvmsxVTWOieq2g/D1X7jvafgDDmhweB7XNzIhrUUYgW6KvQa5B8W+sGEZ/JxarxOlfWjhGNIC1Wg+G2WGmlANZ1kz07xJfOdhbTLOSWoVGaZhHYh0K+pVdPnWtQTqXmkhbvjmOU062YiL+sgZZ2ifUuBZUmhZLIIpRJsJLVrbgAYpTZMjoc6fTOVHkamQHLpiLOgD5EWRtYbQtMoXZEL5WsMNzqZW6poNGINCAbT4Eqwpy2GJmOq2c3racVOEDf7sUR2lZZrX4xHsYwRzSSuqRW36Tbdptv0zZBsqa0XZnkzaRpT9guC/JNC8maRoUrEem1T1OJVyQ9EnbeUIwWFkTTOpCDmJwqhOlfBi3JSZbOOHEsqN/DZTxKJoEC09cu6tc4k3dSM0nCmEHd6LePUNbllhKx3JxggkemVJHmIafajdJsheiibGAZVXOvt9FbUcyD98mXt6SBt9oPyXRS3ybSIgHi9N8ZGvYZmo3vQ2F98fRwDhEgdm5kDnZWqs0HVZA8KIJjgaKReQ9iEGzp/Kj2wRZzclWY2LJqhJiU8lE8UIZFOv07fImK3YJ42wc45ko/PaoKGxQFthLXxSZ3hx2KAXor9ypezliY0WByI3xj5+EG+5QNN/qKIkl1k/DhnNss3a0YZLM8qeq04ekkFPgTPpspEDBqqBFWky+apkgRKU52IgmUKZjNrmjdRA4AiBuDOX8yg/ITTBVNBEIAZ+SLvVAOaT80bqyt9kClwlIA2kD88fRyZevEiIsImvJe8DtMJDthUIeQxkAKFpVali2CFVcTc8uXaAExzMWlEucOQdm1NK70AxEhWFXmjiLapTDbrHunO13wKUlEAkJQmAC+dJPmxjMup/KzIJX3wEuUkvtRZc/PrQgGZCRWkjKKuACA6vI90pBKZJDzKHbgQCrMZ8mEp9fZk6Hv9S0172lRlgJgMmaqqlpbrRW0ML5btOI677b6xbXgmuCMMwzoMgzGGAGJinVQqhMt36kW1staLBF/PGsK/AN67NFHDbbDs1iD4lwSAaDicJh8lvgWR8Zxml0jw4ZDOnzBNFgBhvouKOE/E1oT4cGqM8YMDEMK/9MNg2IpK3w/LaOY2tE0TPD8O+70xTKD9blfXjWEeu721VlW9d3VdAzDG7vbbwPOGCNrW2mEY23ahkHHoEeJQQ53zzKTRzJO6rquMCcF/dvs9MRtjur5T1YODg81mOwzD0dHRMI6jGwEcHRxtd1trzdHhkfPOe3+wPnTeX15eHh8fhcqcnZ2HQNiXV5fG2HF0zo+HB+3Z5uLZ8yff+vGPA/iB7/7sl7/63xvmt9//4PXXXt/t901dGeJ91weq8dV79w8PDs8uzrxzRNT1/a6rF22jCjI8diOAvh+HwTnv+2EcnAfxkxfnpycnT0df1zWb6Drgs5/4RN22KuOLJx/8yPd9z7/77X979+6d508e//XVJoxX3/d3j092/f5f/dbv/qe/9E8f3Ln35P0Pjharr7z55cePHgPY7jaWcHJ6/IPf9727zeb+6douD71zP/wD3/dnX/h/yFQAambvHRGLaN/3+91+GIZLke3u8o1v/Wi4SrDb7d76xjesNYCevnL62uuv+64/PjnZb3feD1dX5wD+0Y/
88G7fMZzr95bM4BxAfT8M49jUVZj51lbOuwo2ULfOeVVtqhorKyNQAQAAIABJREFUNFUd5kDbNNbYi/3F+cXFom0ePnjw7PnTBw9f+crX/+6kuQug4rrvR8IY+FMRL+KZ2VoLgnNjKIs6IkTHJpwCXmfkIuSJkx8NyYfKHO41pKBP4V/AeerHsNWzimYL/bBahMQ5752rbAXVYRBVsZWFU0TnEiSQhMNV4UlTbN+Af1QDYxTgT4Kx1/ZsonijO0BM3u13m82GDb/xkY8AePf9d/+H3/hvT47vvP/ovfOLy2fPnqngrbe+dvb87FOf/MRqtQLg3Wis8d4drFbeO+d9blR0C8GobO286OiMtc67vu+sscbaYRi2Fz0AlspY3jmpqur5sz2z2VxtvBvE09Hyo4/ffwTga1/7Wj/0Dx7cu3fn5IPHu6PTo8OTo94PfT+sV2tjLABmc3BwWNvaWjv6QcQpSLz3NFpjAVS27vpeHZyXvuuePH1aV7XrRy8OsOG0Mh7O5Y009s7NPX+mE9zo3RuPAz46Er0GsCJHVaaEkicdYYZp4jPlr3RCeQh+QiJ6obmEjVC3zD1/TYSZaPswiAFCNFJRKbSJSUecgE0EzSWGzvlrVDCyjKZS+E/PFxhm1g+TbH8JMs2TnBJeSRA9VauA2ZN+hGj+iOStCxnxBBpxqm6EC6nBk+oQah7iMJb4MVm3hoYx01wTm02n2R/NvQt1Gq9KUTiZJ42XWiJZitt0m27TbfpmSzGUTXFKNdPYsoHZJAcm+TLJmvBdFnmBKiNNFF5xUifiEzU0iSuACpEWFM9oxKJEOcjBtFknEUkFRnipUkiRbNGYJ+a0ZlI4Ue74ZWZS5J5lTqhKOlfTFHKHKFBbCuTLDrOq5MSco/VNiCD0x+RshTJqoORgTiezP83EnGRqMWUS+yUg0XiBeMIAs2ol8DL7MHoES2GJVeNlZ8sTop06DQXQRjZfAkL5k/qKfMAIQnJmn35W1C0acxWTbpamY0W63hZmaIw5lNyTToObZxplq0eikrJMX+aqqKqGQHmhLUUVMkGSoEjsKi46psBOimvdrHmYMPXW9WGYmjtLlCiAa58W8DQZeaVPUrM1dRrNGg0gx/pAgdn+IZ4l9z7dxIJptt54Nh1s5+5L9YX3LlFylCpLRMGlkQaaJa36ZKetkBC4JsFvmnaxIn46UvMBKEQl3rWnwF+qkgFpGay8QMSJuqWXD1BgVikB0HKVBXdCyFtmcl2YcaUm8Ii4kDU3JJdD2St9/MVUBQoBjEASrY+nL0rOcdo1tfg2K8ppcoRSAofN6cAjDmMaXp20jDyqYWbPZ+iHbMTpBeV3RFRugIhlaN5jp5+m84TJ5yHF5Zs6LM3tqfQbOkuxZeRun+1A5QBdT6mg69ra9aJ0Zss8NXFSE+c5GmNUVDJBgamBXj0TG8PHx8fHxydOfNf3ZKKfZfES7iR2fc95EqoycdiugRj6XDSF99XgWd+ralg7FNwUiGqK4+nFGzaGjXOOTHATzM45YiIhEWETLSUDZRkqnM0nQ7iPqCXFhnBok/deRdmkOMLivfPGWmM4xFyuKjuOLiiC4sVaE0Ll2MraZBPKTJWtRjd6EWN4HEdVbZt2GAdj7GKxACAiEK3rehxHFWFbifMMkGrf92F6W2Pd6FS1ska8t8butzvnXbNeuxSCZrVciYgXOTk+UdV9t18tV5Wt+r5ftAsAqtr3/Xp1UFfV87MX1tqqsmfn58671XJ5dn4OgAwvF4vtdtt1va2qcXRVbV+cv9j3u7t37oaY45/8zGeapu43/Xaze/e995z4k5NjUXn3/Q/eePUBgDvHJ6uDtbVmu9u2TTW68XC1ODo6PDw6VDgfYu+ICiAgr8HXHT17cbZcLlfLBYiDh0Q2/OD+/d12W9X26Qfvf+aTn/6OT37Hl7761ulyMQLB/tTU1cLykYxdv/v6+x+sT+6MIouD9QeP3n3vyVMAbV3v/fg9n/gktS2LGHH9bsvWLpbND37+c7/3h/8bgAcPHzqv+25om5YGbpoWDPL+anvZNm1TNQCqqur2XdtUm+3l8enRq/dffafvR++fPX3S7bf7/QbAg7uvPPng3cN1ve+G2hgo7fd7Y6txdHVlAYQ5QGAiapvae++cc94Fo8XROwCVtaMbN5urZy+en52dfeyNNx7cvfvs6vLzn/2eN9/86rMXzwCs62MiNHUDwHkPhfNOxDMba02gwsdx9M7VdaUK78U7j0AtJOnBlsiAGSoUwx8qhaWqMnm/CQ7AiVlFrWEQiCUhpWnj6PsdkwlhMQhkuAKpdxrcLIhMQirwVsGpN832+/kemOTINWQlAa+LGmPYWOf7/X5/enLyqU98CsCf/99//qd/+n+tluuT01PnhrpqxPuT1Z33H33j3t27D+8/ALDZbay1Z+fnVd0wM7zz3jEZYg7LzTmXIUmIzlTbehgGL94a09YVAO9HYjsMfVXx6PqhdyJ+dbjwo6q6t9/5OoDRja+99vBqe74+aH/o4z9obV1V1WIRHMVOd+qrqrLGXm2ujEXd1Ezm8mp32K6DbfXgRuddXTd+dPtuPw5DOHVwo0MVvf1aZjYcDk/TLlec22Xwkjfzm5Cs6PgSI1FSfZCGidN1iXwuTFT+Ngo7zHzjp8+1yDyjXyKEoEapsrHoScwShUvM8aSfZ3rIhA5pXlp6RIu8ClQ/nc7SyzSzjITmQpomiRl7+oYMj3M2qE3TcEz9Q8XDCZrP38aXmtDOh6WML8NrKiqP6HKHbupTCZqWbZNkMkKc0B8iIkhzQhOynI9TrPR0cwVJXQwPigcRwIixHAI0JiXJWtVtuk236TZ9kyU72wQnwRXBjE7SJeqKhW+4+GquNwKYORsO2+jsXCuIyXwXcKYuTyp6ELU8SXGeV7NgB6+Jzfh8MnafK9YTZVHWeKZQAmWGL1dOExerZXckoZ8P1WawYoY1NfFG+ZMo/XxygDZBifSSk/Vq4ZUkGCzmgq+pyjnY61QQqAiGm3q7tP5HUXWeMUACmaOukqqdGpI+LFhDIqSrS6H8G905XZwgaPDklxAAkyJHqRD1aQa+ZGCIUqUS3RCfEgm+4bPbnRiSiAqckHsNwNxX3fQ6Dse0ZlLbFUCOIngN0Ok011/C0eW315gZ5Kcp9+Q8h2szP0/IqdmzJBACTz/Mp+iTkdx8gcwJvVmDCrL9WklzrIaM3MOf4BA9W/gigDWV/KNQmRDfg4NHyGmqc6yITEMbO39a02EWk2p2jZqnZMKRFJcGSVh0UIWKRtRYTKCZIpH6K6smQB6N+LpcSSJlBRBWPDGZZDme1A6KVSQCIZpmJMtQQCGxa4KnsBKLpyaVHV1ubdO2V3wVxk4j3TupDJRMkpHJLGRuOHR7CMteBo9KAzZN+vlsuEbBXZvysykUicjQ5ZNxdO7AUo3Im+5k0KCJb5824VIozPO7nmZfpMLi9lOoBLn/5uLiQ3w10c0Cyy0mTV7nZTrIoTSK8cBGh77nir1zIt4S11UFaD+MCIuJqes6BVV1iPMO751yoU8lC9twYdYYFhFiUm
gMZm1MJnJDhYPzx2AwpTHMESEeoSEGYFMlVSqMucL09SLMUFFK8ZcwSQ8EKtNwDJImowdQGTuMQ1jZzrvBjdZY552IrFer0Q3Ou9Vy5b3v+w5A07ZseOjGtmlVdBxHawyIhmGs6zpEvBmdq2xNxN77qqqhcE6aph6GUURjZG3iEJ/NKQgY/Nj1fds2RNh3PYC6rpqmvrq6srZi5qfPnjHzarXcbLbe+/VqBeBqs2matq6r7W43juN6td7t97vd7ujw8PzqchhHAOvVyjm32e0I8M73fd/vx6Zu9tvtaw9ffeX0GMA49HdPTv7y6187PX3l6YsXx8dH3svRwfL88uJLf/9VAJ/9xCdGkX2/d+O4XrZt07RtC8V+v1u2Jk5QZjaW2ZAxAHvxo5d+dIP3FVOYM4tFe3R0bKw9v7jY9wN5+aV/9gv/4l/96/FqN7hhu98BaCw58ZebTVWZt77+tfsnRzS6//Dlv2ei1+7fA/CRB/cI3/btn/6UrRcLS2On8ASo9/6Hvu/7v/jmVwGcXVw27dLaKm4mBBExxiwWq3KlVRUbY+GJFMZwWzde/LJuhm7bNDWAtqlVpaqqw/Wqqev1aumcPz06fn52Hi7enq4W+35s6qX3MeBtZe2ibl6cn3svLro1IKg4N6j65bL5yMMHoqO19MrJnR/7wX/0P/4v/zMAu7S923ebTtmv2gNjjADivfMDsY1oyEAUmrn+BARENMlDjlhH81IGoAk0x62JiZSYiYUkg2H1XrIRGxSkpIbJEMM7CacFquq9j7E10sFH+BcOQqftMUFTmu9RaUcqoDSlozHxCmJjmrod+gGEr33jqwCePntuTX3//gM3CpM7WB48efpo0dTHh8fejV23B8BE3vnRuf1+v2hqKFTUw5OoiWa54p1HiEyiSkR13Tg3MvF6dWBqA0AhKjJ0w+Le3aOjo77v1uvl0cnd0bl+6FoVAPceLkWlpfVysSJWhbzz3jfqqj45vlNV9tGjDwAsl8vFot5uL0fXr9dHbM3Z2Vk4Yh/GDsA4juvlwXLRXFyc77vtcrUMkALxCF5iZdTnjTnKjyxuM1ZJ51pJ9F+XBRmKTfMhHdPEz5OPUWgOjpb+K/K6JnNKmDZ7CPEETJNAmRUWH0voWYFgS3DdeUk5WcpT9bIGkxfmorqTMAyro9QCp/9HYJ9sRTKgmbOKqd+Km9+anWsXqlUBeSLOyzmE98nAIwm5wkZh1nsJUhAIpAxg8sOQMs/wqexCmo1xht80jXqB6SWhPkW6bzh193SyO1lRUrKqyF8G58jTME4k7q3fyNt0m27TN2uyUQ3WLHXzFjjZ+1CxFeoNr8bXjqaAzEXq9MRkIpKot5BboSZOeiTydg1oumFKczETXIXk6DpFdqnMudRPTINqCudwvRElZCzlrNLMFBKTjWJK4ZNUk1nTbxSUvrwmVFOhNCUA+cIFgEDPxIucoEBOkiZhC0RwFFqiqVazQmPnTbg1dnOh2yMwNUTp4DQaJjGpeD+JfGJCtgUtSIoiu9Bp8Q4kIrGoE0bJ/EWaaIQkoacwxRwuNYU5AJlOlue9mihXSt2Q5i/FNqcbiQkKEFRhmbUAOAREPwPJuG6aDTMH5QHoRcOxPLvLKZEnZVJdYg9PEGPWjOvExcS1FwMz6RDZ8CiyXxEoE0V/9phmVgbMquHiCHGxOHJtQ7z4pN1ERKugeHHpWvUKTetGzfOHaUWgKC+OOIUiBJrAVg6qk7om1SRnmLyFazl/ONQwHEMXx+y5WzhRyZSNMsPPY8DRoEzGx9NF7pk3gACa5yTcNNkpXBLX6VbXZMen+fepxpwY64Rip1anjLMekfsKjMgKESDzri9WUqoOSqiev4pDQcnZk6YFWuzUEVZnRUKnnSqsLZ2MhKOmk1ZXMffnRyxTKgYm/jAZVee5fq3Xcycn65HpTABJYsWXSsooomtOrDIhbzIau+GGlUnopgLKp5aRzp6NLEOUmNMuHttUrlkU8+Ta6km9FWvlY98roCrBdMiLV4g1PLqxbpqz84uLy6snT58GxjjMRTbExN5L2zRhLJnIWKOqjOgrIG0+LF6mT6IZcayWqCRHk4oQjibEqGfO9lyBQCQmIH9Imjwgh2w1XevOFGcoUSTOWWsts3HjqKrMrPHGNLz3i0UDYBxdNAFSBanzo3NjVddseBgHk0w1h2EIW9m+26sKkR2HobLWMAcLShU1tRmGAaDo7NIYFYxj8IoZ+nsUgnjv1bExfhyN5dVyOYxDiJB+eHgyjsPoxsVisd/vVfTg8MA73w99UzfDMAIY+uHk9ERU+6FfLVeLtr3abNq2Xa8Pzi/Pc+s8qKmbYRjG0RGxsRZMw3738LVX7xwfAXj3nbd+6PPf+0d/+IevvfbG2fn5yfGxMcYYbpv26fMXAN56+53v//7vN5f8ZPe0H8a6bkbnxnFs6krS6Q4RS5jjzOBoxxrsRtXgzmIBwFpDbERku90tF8u3Hz169eGrv/LzP+f7/vDkDowF8PTi4sXjR/fuvdLvdtu+u9zvD/9f9t7kV7MlyRP6mbn7Oecb7hA3Il68fENmZXZVZlGVWVXdLUSrF4Bg0d1C8B+AxJ+ExAKkXrFDrBESQqJXsGnRmwayK/Nl5quXb4oXcYdvOoO7m7Hw4ZzvRryiWCB1SteliHvvN/jx2cx+/jOzTfOf/NN/cnN5/fLjDwBcr9vP/+arv/7q6+2+Jz9ERtusrp5dOWv/5//9X9w/PABwrk1aiIj6EGKMIUZrTRLIaWE0rgkx+nESjc45IiZjVNVHPwU/jAMAcs60zRBC46yPcbteN02TxF/xL9ariy2TE9ButyMmS3acxtdv3hrmFOzy//hX/3J3PO4O+xjDT3/8ox99/OEEosYejg8ff/Thi+vnAMZhuNis9YIedneRvZ+GBNGJH3qv8zWeqrM2HVyJRGx4cXcaSEUlef+nAzOliTJElGOJEBGYLBMzi5KGUFStMzBBFUyI4gnEllQ1ga1EQCjHJpOmtFRMqioQVrNQQhaKCeYT7swdBCg6J1J2wRjDs2fPd7vdcX/YXq0BtE0rQX0YrXEh6jRO600Xhrhq1s66YRhSLYOfnHUq8JMPMUKUmERikqrJBd4Hz8YSkfcB+crNDGEMgwdgLAcNBLq6vJr8pIqoIhIUoWnc/rADcHl14b1qjmYsh8NOJKxXz4L3x36fdq5qHIbj/rC7vLxkpuNh7/30wctXTePS4rTWdV07jv1+dxfD1DarGMQ5AwWbOYSjaFRVUqYUzpCWk0NAWROo0gVZ0yBUgLqqdzqbNoT5jnBGruf4Jyj6yKyBokjM/OtCia46aq0wRcVQXthZc6Eqf3K7NKkl9VQ/e2i1i+hMwNPylfyFc5eaKqnfox8uay5iuYzdwgx5VBuhBkBQzGyN6rBVlIM6TVqvFIkWz5mbdm59LV8rTTrT0JNKKlFm4ZksQ61fX24+cNI9i6J7bhug6rbz45ZeJjobGLmOpVJKVRE5f2yKJPvupD+Vp/JUnsq/9eUxT+2pPJWn8lSeylN5Kk/lqTyVp/JUnspTeSpP5ak8l
afyVP5/KhYAgWpq4Jk9Xvld5yXRMyoncXEXk5KWzp509fpsvmKCEldmV76IO3eWPSsLgny+Wl40YnnhuHgR+R1RrfdNhShT3QcWT5wv8NK9YfHELV2YPVe/p8x0UX1vJ0pvF1WoyvLiLdeC7EFZ2ZGJGlkvUlU1iohEqDKxcr2iWyTNeNQ2EaSoe8zz7WWlpeUpel/XFAIRUYBSMpzFVWa5OKRMHEujVhhiy5vVmcVQWUVLWlFm1pTpLAxYYuaU1LvMSro8JsqOve+79CWSxPyZaVhze5iZiUUkrT6VFNIop2zWwr1ECY4pKZGvaPIoqWu5XknXJZNJB8t+lAVUWF3IHsePhpkqORXvvUZ+z0vl+TZ7u6etdnZHnUmOJWc56sUwVCFpeb/jRDOzJzLrbSYZvm9Bz7vwfSsnsQwJJUQO0eJWd37cTBhIa2IRHpyYFEyZpwmllO25dDrTfmsKjtIWnXmqefAqPWDRAq4LUEEQFRFJhJ15Ms5OlbNf37PFzih+qdFMRMrzW1rv5mcPuoVfOBac6BTFSaHQnNceRIYBSIyFzqBEROnyHRwlPJ4YVLpFebWeYvkkyXfvWo7f8nhGSV6cD1yer/KjxnpmlQOhnEAqqKEKZtaGLo/wszHV8qNyI3V5rizcuCpJnzIr4hF9o1RIKa4HlQDB5+wHLWOyCE7//sO6LMMljxTLTj2mVOTXv08+nBFr3/1aXsxpPQfJaXNDCDGE0+jXF+tpmA6n08P9fbdad67RmIPQsTH9MFhjo+YoGtYZIooxGmPTQReCALDGeBHDJkQPgkqklKMDSPETYQyVkME5qqOItbZEqRNrbW4ksY8xRXINMRpjZgqkaiJUGmtDjEtufpoNY+w0Tqn+xDoUkePpuFlv0toRicbaGIKxprHNMA5N0zjnvJ9ijClRiY8y+WnVrVREVNbdCkTjMK43mxgjpQiJlp1rptGn7BlJ9Nzf34tI27RJgPjgwWSdi96P4+Cs2a7WohJjSAEomejUn9brNYBTf7q8vOza9ru3b5xrttvt7d0tyu64u7s1xlxcXJxOpyhydXk5juM0TZcXlwCYeRzGYRhy3iFNO1tvrq7+7E//9IeffArgq+++/sf//n/45//T/zhO0zCOu8PeOhtjYKKUM+ff/PY3n3z6SZA4TX6YRpOJruLg0ppNK9D7EGLyMGUFiaprHEDW2BQX8njq7+8fQhjB7Cf/xZu38fTl7e292V7wcd92KwCHhzdffvnFytlx6H/2p3/6q9/+Ztzvnv/k4yHy33z+OYDX4r99u//69v563ZJM0bWbxj8c9peXN7/67e+SL3zbtv3oVcg4S0TGGmYWlX7onWtSiEZjbQogYNk6a1WkH3oFRu9hzOgDALvqIpQMex9O03S/O6zXF8dj37WNsxbAw/7wzLrv3ry9vrreH08EsLHDMI7T9OrFizQyn3/xBVmzXne3t7fPrq+IEb1npij9zfXVP/sn/zGA/+V//Rc/+vGP/vIv/mq16d68fT2eAhsOMRwOh/1hnziAaVX7IU7T2A/9OI4qMR/IqgAGf4RgpprFHNdXIwgsiX1PppzORqFGmWMSPqTCVAKSgMQYhQobBkGiKEGjCLQxrp5IyolRoFnVoOSaWo47ZIbkrEOWDYnF38yGCKwpMFLYbreH3f5wOPz9D/8KQAz82We/to4khlXXaiCWcL/fMcgYTh2fgmfiJKFVEEIQia1rWCXmpOTRWgZgDBs2REzESdgFHwynFDQ6DlO7bl++uPn1b38jqsHHm8v17nD/8HBc2QZAZ3h3v/P96C67YfC73e7li+eb1XYYhm++/ebl8w8ArLr22B9Xq+7l85fj1PdD//zmxfNnL27vvmM2AK4urgyZ24fbcRw3q800TsxmJs3R2SAxc812uNDJFmqGLl+n4r4jM8+uaH+zCrCQFpTVFOZMbkP51plIWUid4hReUtScUfLy21kdynmgFw4lC6ULi6ctJGKVtaWDNMdofo9CipTBjDGrpKlOqrWVtVr0u8IkZSx9u6ro1cW/KsYJSUIpkhmw0CrP1f7ZQllGbS590dnuoqTl0/lmKPpD6jhQfa1yG0jTli6uP0rLUal+Ikqg5JXPxCJQmZ1n6rcX+uRC/0FRDNMjk2deXkZ1XqSQK7loyCBAWd7nw/9UnspTeSp/AMUmI5IQgcwtR0InZ5kHAAXRQDLplJTAj7SbauiZ5OeV3LVy2r6EChnJcbOT2FfV5PDFJR1ntnRF5gA9yX2LZrggW+tKYLX5+ar1R/pIRUWzWlYlTkEBaOmmmD5Y5cq5gAYwJ7Ytpnv5nYuLe9JKZ5MaXF3UZ8y1jGMWJlI8UJM9CiiDREQle2CJCBXIV6BEXPPJJNdUJpKKHeWOzxpKkmKSsSEqYweokuGKRWAGjRSAUZuSoDJBiTRPhxhjKtoQiyJRMA3iAmNVAENFo0RnTJ7OCgOV/4WNqiRlPKEqhllVazbt8slFUDmqmkdxKD4HXKrex0SaM02mXgsTkpcfAJGoCinR+gBwSj0iEUTGkEjSl3Lcq9QYZiLDKqHsjKopEYCgM/CaJl4pj0uMkvSkksNHJWadrCDCmnLj1RyXCiKTtZO68ygrlzUuYZ14qh9QAQmlNBRpCSmUiVN8Jo1RkLQW0gpkG0N5KVAaZoCYKMz5ucv6pcUL8448a0zFsGaflrlEohRgyzhuQFCNoEhsk92S9GdK3mcqBCLJlxwJHJmfE0UkGnZKohBjjQqlSohFJYcTUFKVSMpMhmGQIpECClEVJWVTkEDhR8pcOYFM8QhKACMYeRpBIUpO3MHVa1WDqBJMGTDilC0nbVpICKoqxtiFXzOHGJmrVp9ezJOqqVO0WFmpYiXROVUXGCVbN5iYDQcfULZ2ORDy16NIgnvTbYWoikqK4p9CB4I0xnQQ5awkViEiGhXpLEqZZVMcWikoL6qXWvZvUygxoeyyBBxmy5xz/9IhkKITSslWU6+Sqo1VLz/S3s8p1wFiw8XbUKHZoKe8E4GUirY+f74+qvg2qqE4n9mzNQPJ548h0nQm0zyUs3lTLS8tuatKWNJzcLS4aqfcNcRBYutsiEGhrjEANhdXDw8PbAiMYRyh2K4vRGScJmJmGABh9IZyREfOLqMKJWcbQENMGwEQ8j7kCHXJOGUiyqJHVBrrAiQlCwZSNDs0TRNjTBddzloV9dGDwCnmXRQxABEbk09yicQmtYTYWCBF8pMYFXDGAAjei0bLJvrQuAZQVWmaxjp7OhwBWOsACgoGlMk0ThlRovdT2zZpaGOcGms1SozSuJaJUz6fGDw0H+at6+7vd4aJDZUs2KQKYxxbSvEERaMlC43GcRC065Vrm8Nxv91sE9h3PJ2scY1rQvBt2xHx29tbZ13XdfcPD2nKt5vt/nAQwYsXNyHGh/3u4uLSOHu/v1uvNynDzNAPQ98DZNmE4IMftTFXJM9/+vd+8ZMffrd/ANDvT19//ut/8h/9B//Vf/vPf/bTPwl++vKrLwG6vrpsGgfgcDx+/sXnRDDMQJAQLrfr9arbPdxfbVf5XoaJLbumiyqt
a/b7kyHrbDv4abPebpwBcLlee3jYxkCV7QbYEQ8+xNvboWlTpzbr9o9/8kci/nZ/fzjcng67zWb95Tdv94fDJx9/BODB6zFMP7i5ZMP9yK2xQeDHYJy31g7DDkAMOk7BuYZUWHH0oXOOQFeby9V6E6EAxIcw+nbTMZMQa1QJAdETcWutJQDgKToYo+RDVEAU4zRut5sYg580HcWO+YcffbBebxlyGsf97tj3fdc6kXB5sQGgUbtV17bts4ur7eVVJF51rQ6Dt9gdHz788CWAf/yP/t3Xb97u776LfrN7uLvYXv78z3++Wq2h6v2YLkGDn768Ef9PAAAgAElEQVR78+bVRz/shxMRj9N0OvVE5Br37etvAVhDXdv5EIj4dOq998zmm9ffQMFsjocjADZ2HMcQwjT5cZzW3EWRcRj7fow5zWP2WRrHEymlmJgMtsaCEUPMYolTiioD1YiJwVCOMs16UPJpZgvVGL0zpAprTNqXbHLaPWPMOAUmFo0wYXuxcWv3gz/66He//V1KpH5/98YwR4921YzTqKyTx8c/+fT66qofhjFGAEokIlGCc24I5FzbD0cBFEwp2zWZID7pMz54YsPMMYiqdt06ATeT9wHh5z/7xWef/0YQ2lVzcbn97v5uGHqv8eXzSwCn4Rhj+OijV8fTaRyGi4vV5cVWVfb9PRu6vrlAUngGvry8GH2/O+43m/WzZ1c+jq/fvn5x8wJA2za3t3dvb9+4xlnnFBRDUMXxeNhsN/kYp6yxiQZVNsYSODmDG2vTWRRiYCIlSnmTjLESJYZAc2i/nO9Y6u07kYZHyTxR4T+V7yOCkIhUDQBV7qAGMFmKFiyRvSyWqkoGIAfQyLAek1HonPGSMgljkYlGy2JaKtoJpCRVrQwT0Aynpy+WjpeOzk1AtQPnvpwlHViYSkmSEixlHVYVSX/WEmNFsFAOZ5sgDx5KKBLInFrbpGw+qgCiKBGxYaVUeWCTbBDOTuxWFBSV2LBqyv2W7hGXETQpo6U8+/GLSlL8UjyrPNPVZEvPUwEx1aiiolGEi94lUUGaQ3WpIJkhGfvMGa2S6pXasshK+VSeylN5Kn8wJWfWnrM3zxYr6sVLPddT6EUUllsxjpfHX7IZoi7IiQswTiohp0bQVszEGgBzyoyF3KVlK/JjqICLS/Rwvm6arfb0sNIYpjlsFs3fpdK3s9rye6kP5dTP5LKCaZ5F/FjAobSsor71TkdqZyl1KqkCpR15ENL4xJzwmrOxrJrST5hHGsl72rNkqGVBPV/SltmYv5XenasqxCV6dGeLbL8rk0G9pKu4DJSIosQZUCMq2sVcLWXZSknpWEDgebBo7sFSeTsbSQJkoQY9XkyQjE1SRkTS6LHhOfZ2xpIAlYQ4vYu1JfUOdP7CWXfqDJfBzTCfYgmEzBM1/194phWQWjQsKRq8VLIWTViUvD7O5nsG0qumVld1+orOeGuGhHJ7dPEMorOHvY+lhvMZSV9dIMgJYaUSv1ShElPO0ZobCiSJpcUMUBQx5UHz0is530FQigBIDSkLQgpVCiVjDOUPlj6oSg52uhzzsrn+rkHAzw5E1ZJgRzOKllYpc8oBnTfv2WmS93dNgVlHhua5miMWzgDpIyWzDj+fR22qGOZi1t5zJhhzFqaDlnss1ZUj7ea1Wyyhs0ESlTwTapB14cXgzIs/D9nj1bIcmMrgSGmmygrNX8xcxXIW5Fad78NyWtQjebFg8ocWk0fLZfB9pZpzM//8++iR79RTAEkg4Zmlt5RfJEoZMY0x4BCjsSbGkG4NxnFK6ylzNxLPAixQZsoJNFmJkxWDBOaqarZrljFqKZ/c0zS6tlWVoAmPUyDBW0ogETHGxhgTQi0xVhA7xKAlOHKyL42xSrDWhRClYuVR2LA1JiXeMdaqaErqnUI6UopDJ+KcE5UYyfuw6rphGNKUBB+EYK01hmOMzGytDcEnbuYwjWlOU9ZvJnbW9kNvjLHWikjwYbVaAzideufMquuOp5Ofgogyk7VGVL0PVbCqqoiEGNbr9Xa7mfyook3TnI5H5HwXm3EaRWSz2XrvAaxXa4WGEBJKCOjp1H/00Q8IdL+/u7y4ZGNef/dd23brbp0C1Q39UEJzirWGqN1ern/3y//zP/tP/6moJK1rGIfX33z1x3/yJz/9ez+Zxul0PFlrY5TDYZemngin4fTq+c311aVlHqEp686vP//iZ3/0cds4ANPkx2kSVRVRFWtNFHjvnbXWcMJYm8ZJFOvYGaNA03XNFHwI6/VaFU3XAtiu119+/dXrN28++vADY8x2s1bRaRq3m3Wm5TJtNpsQ4hgClKFgw2ygQIwiCZ8yyswpCDXqNXc6aEus0uA9kapGMgDper0BmeN+uL65BKK1DoB1zjlnrfE5HKpaw0xkDK9WHYBwPL1+c7s/HKxrbq6vVXScxhDCpx99+Fe/+EvXNACmabp5+TJG+dVf/5IIbIyfpsa54zCGKL//3e8AfPv6tmmaL776/TD2rmvv7u9+9dlf//1f/MVf/cVfhilP9+X22hp8+GLjQwOgca1pmn4YYwh//tMfAaBmO5yOp+NRlVKkR1X97s13ll3XrRKjtnHt5dWNRP3m26/v7m+nMLTtan84bNZb55qH3V3a9cfTYdidhnHo+36axjryBBr7AACkYfKJ/Mt2TSDvg7WGiPwUAASJRCaEGGJcdatpCACNUyA1gBnDCIBhCKIcyCgTA3rz/MZZE0L4+JNPUvjOi4uL29v7r77++k//nT95+fJlOlMSIGuNTRFdRYWNUR8sm5BPcSC795RbRjbR+0gCEBOnU1yTCgABsO93n3z8yWaz/ub17zfbzXq1WnUtASc5Pn/2LB0R/TBYa66vronYj5O1dn/Y+xDuH+5/8uMfX11eA/jm9TcX23XTNOM0juP46cefNq753Re/896zMQCOx8PDw13fn7punfyNrHNEY1qcUmkKXISeisQApMxdFOPZZXDxgqkSexZ/3ysYzgRtlU6Ycb0iKWqp+sny848Ug8r1Q4bF8gNmZTSrBrNsLSrpYyVy+fTU1HcxrrOEN99THovKbOS8U1XVCN/jSpCaO1/vvWsHpkvRRRfPnpzhZVReh84dWnRdz5kcpWdnqhM9ejq0qO3paJtN46LLqGYhi/rsOv+VTlHR4zpN6Q64BAVG0QMp5dKe+1i4nQRAv0cxeSpP5ak8lT+EkuHIcormMzKrwHh8uitAXN7Vc/HxHnNzcWzWQz+hVJqv9VIN+a/vO03pnT+1vpyN/pnSUozSd33kdHnyV3SluOEmCXUu0oqeUSVaBQGoOg3S3KAFNFqUmUWrizvzUs6fXQYCBCRchrJxrksxlYIpV8u7mtiPh2vxAhsun54VCtGEYfICeM1TXnu9lMRlMaTA5DPQlZhVmRLKyVJNWoWipAoBkUQp2NE8N7NJXi3mBVZQpqZMGxUS4iJGeGlebR0IskzQXQa2ACJUM5TXagGIzmrV3LPEO5sBHqqjSMSkZxexxcO8Yge5GkJOilyub1GA7Dxt887ROvwVOiy4bXo/rZRCcdXl4+s
g1F85QRR1DRArSeINlRGpt9BFKyr6UzJ70j0BFu7LZWnk5Vgrf7TpafH//Mnq64SajyY5p2hey0w563od05yHXUsiwllDrsh4HWQFEWxifBbOKhem2zyaTEygqLGogHMw+dzKv0sg3ccqn86vP9qIizkpk52NhKKPak0xr6Jc2lx6+7fpllpPmYRgZm1Yy/pTJBORzQzSoWjMENWUTSKtzkxzZqLiyl13UR6ouSmPuBhl5XAmTsww+LykygFKTIs9mceD6oIirTc+uaqzZZYCOZTzk6oLUxnp6lN2tr2K0VFlzaLt9f/H9kVp4eLU/TuD1XN78mAtbTZafiLfaCVMTKMEkGFmHz0A772oNo1L3oUpAoMWjzFdVFesyXJmz6mS0l/lBAPYGBWJOW0IEnOWOaP/+auqKQdFEoxRE9afralUF4MITKqGKaEhaYRFxcCoIMaYoFYhQJMfeQJTOE1e2tvee2tdFEnIIwDRSNCELaqqM1YkhhC6tksfA2CNSY1pXBMLXVpEQghN06Y/Y4zr9QpEwzASSGLKaIXE8k+TYayFiqoy82azMcaMh3G1XgcfJj8BWK/WbEwY+q7tmND3R2ajpMfjgZnWqzWAcRy224019ng6AWiaJkq0xlhj7u8fEoLJxNZYVfXTyExd1+32u4urK5XQrbuv374BsFq1FKOK/y//i//8629fj+NkjD2NQ4xBJBGc4Zj6/rTdbK63m9Pp+PLFi9ZZFVl39mK7BnD/QI2zljnGOAyTxGhdM/hgjTEmOwlYa6OqsWYYRh8C9+Ph1L+9v59C6LpVctYehvGTH/zgcrsZxsFa531Q1Z/8+MfTOI3TBCBGAdE4TaP3SXoHSUKO5wx0RAqNIbi2ZeYKqqZtlah5IQYlMZZCnIyBc7ZtV6OfiJmNSQggMxtjosgwjj4EgCSFrInifQAQQmA2TdNcXFy2bbs/nlzjiHm9Wq02q6EfAMToH+7eAvB+SiECQox938fgV65LGduH4cSkIUbDZKFM2lg+He4/+/X//fuvvkyu7h9/+ApEu7vXq1U3TlPbdTc3N0QUY/QDAByH19ZaAo3jqErWunGaWtYIv+q2f/5nfwYgRN3v9l++/uKXf/2vP/rwo5vnL4MPfT/sdg8SJYP7oOijj6O12Gy7tTSJvR5DDDEQWwCJRl3S/ioRhSDGMBPFFB6HiI0RURB17Wq/2xOz9z5doaWUWSnigWFqmqZtG2v5+vqq69r9ft91665tAFxuL3f7/RT8hx/8YJyG/eFAbMZxmPxEgEjys+amcZxmTqWqQNkHBFCkSA6I0RORCAhBlQwbYlISAJvV9hd//vP7/W3bNdbws2fX6b5h1bWrrt0f9mkGf/jJJ9M0eD+BdLteB4lvbh+2283lxUW6YxvGQanFcLq7u++6br1ej+O42+2urq66tgPw9vjWh2CdW69Wxhg/TcxsnWFLxMTzkZkygZugQSQShMhgkZvrnKgIrdbAWTlXbKjIyuUniqpZHCzwbpnVbj33c5qff/7xRayWc6IB5u7NVh+K4QJKp/+syhXE7b2447uvFV1q2aaluYK5H2dfTvLwzGhbVrGwz7LeujTVio2w1EDrQzUW7ggvDKf0raIoKECyiMjEc0+WqezK2D1SJCqamAIlEM1fIgLOlNuiYqGQTgmFU7r8FDNUtFIsyl0vnQ2vno3R+yDjp/JUnspT+UMpttoz9WCbaR04P0YfH3bJOFyiQ9XIpGxqldTMtfYiI2lJpinMxIWwqE5zRchUJK8ybrL4OsNGzsoCFjn7sWz+DB8u3n30uQyaLOT2QhoW+xiqJcub5m/VrpzZp1qN+oXFvmxV+qhCUO87M5PFpNo1q5vITJZH7T0zgGkexNpprU/HYtqWg5lYZITFfeAjHGypb6TfkqnHnJhNGY5M3voJd1rUVMhhqA6bBb0tYGwZW8wg4pkukeZ/McQ1+koBawrAnVuHlOFuxj/yeMyKA87UyUUbanvTrwV1XczYUmubXwVVh3IqD5iHuKyghP1pGaWqflDGb7R8pn6paiXz8C/WbroEnmHu3P/i1lOfsVjRhUtHtbZS/2LlzLBXHZt30Zyzstwo5aWMmNU5yMvUGDOn8TVcndaXmD7PFLXcB1kq1VBKbtHpAVLvVbAAwFVEUNXtvGtR7gnKEJyBbouNvlAL63spwCghB8XN9FqRRFJDVnoLakqg5IzOlYCW6lEi1opzv08pr7bPvG+hgBIZFBC5kqyTZlxuaxZwsJYFmdeWJL567jcIKaJqHtFKWdU0dvOqJaroIUgz4/ksHmZ94OLaPi/yhX5f77NEwbPrWdmMi75rPWBLj2iuBVqjOBXrArMQKc97jwz720qVgpVsqAt6Bi0WzqMv0tkKKiSH5ZGR7RHVmjEXzBxiGIcRQIwRTApKsU0Tx5BSdmvmmGOS1qWfohpoPUWqXMnPFCiTYTMFbwwbYyXGBIUz0RQCG8PEIjGB5RIlYUAJRjRkkEAowLAlUIzCpEHEcArCm/jUTIowTQIk/mMa8BBjCvMH0DRNRCQSFXDWNc71w+CsTZE6mClFMIgSrLPGWD/21lio9v0p0eWMsd6H1LzJT41zCvV+alzbts1udwDQtZ2qHg4HqBprVQJUYwy8YAQbY9JZtFqvrLWHw0GBVde9vb3tmgbAqusedjtrrXMuIaer1WqaRu/DdrtNA9wPw8XF5eF43B8Oz66v26bZHw7r1eZ0Oh32x816C4ANjeMoosZYZoQY2qblrvvv/vv/4Z//N//156//NYBuvY0S/DQd9vuHhztn3Kptu+ZimsaHwwMAEXl+84KeXf/uiy8IMo3Drz77zYcvn3/17evnV5u0yo/H03rVRokWhght27Cx4qEqhjmBd5qDjdEwTf0wGuZh9Cp66odh9GmK9/uHD1+9dNZ++c29YbLO9v0AaAi+P50A+BB9lGGaQORco4pjPzjbCGCc67oVgG61tj6kcJlp0VJZ/wqx1gAgk+Aettal0HmbTecam3yWUwTPpmmstTEOUSRGAVREDTFVNSPK/W7/2ed/84/+4T9I+G/XNCeZgkRVTQ9aba7v7+6YaBjHVdf5EIwxo/enw7F71k7jCCD4MTrr/QSiYegvL9bR+y9+/zcf3Fz+4md/PE4jgFXrxnGapmlEBChOwzT0F9utMKfgko6EREX1dHhoXLNy2yGMhBhD+OL3v+37I4AQ5c2bt7/74ovj6XC/u/PRW9usmua7b7/56ssvE5Fqs95Yw+TQNq0xKYwJMZsYQj8MtDIArDNMBiDvfYy+aRrn2iT3yhkDH4KIMDEb8+KDF2wsEUuUtm2bpgEQggepAWKM4zQoJGocpxFMUsI+jn4E49UHL589u/nsd79O3/LeN65hWwLCaNb9fPBEpigQhBypKR+m1rgYY8JvRJSIjLUKHE47AL/4+c8vLrf/5jf/12a96rrmYrMG9PdffXF9dTWM/eG4B7BarVar1fF47PvjqmutNRrEGP7hJ59Ya7/65ksAzprNet33PTE+fPUKhLd3b7quvXl2k+J79P1JVRrnmEklqopITPc+or
H4eZ/4SzyscNmRDLNHwydlPIdVTB65fWkE9cfDisyidKO7rJGUueLvonZhkCxk2V4mY6wdSTmYmxE35/Gjto+uoQRXIyNv0iPq3SE5NwIkmOH+2PTKMaKx69ERzyIiJBdPYPMLmaoJE9j6twOlDDYo+7dNzfARCBS+HQI8L5+cVvf/uPVxdXL6+ufvPbrxnrFbQdgPcGAIXg1lrEp9yYRL4UGONBs4YoFIcAfeCXIaDBoJ9DEHTr+kDV1If7kBIZA+cJkbwjra1udXB2RgQhKE1PB48HpRBfOAEA3ekkL6SQne44Z0IIa43gwnk3KCQCgvNeKWmdqeoqyzLOhdE6Ub3CFxf89es3X//mOwDKkpknx4AbbYSQjPGgbcSzzFkLgJxzBLDWIMP5YjGfzxFQGwMAeZbWTdN1bZ7lBKC1ns1mnkh3OsuyoGZV1VVZFFIoKeQPv/iDw+Fw//AAAO/evAOlACD4yjTGAMEX79795te/S5MUKKhwuq5tAYDI50WBDFSinPda61lZAmLbNHlRSCEA4OZ2d746k0LcP6xns1maJOvNOi/yJEk2u11RFACQpund/b339Orly7v7h/l8jlw0TYsAaZYCQN1URFTX9Waz+dlPf6qUfHho5vP5arnsuq7tWgAoisJoY41NkoSItrt9lqbIWNt2SZIE+okzppTc7XaMsdcvX1V1jQhCiKqqAHBRlABASdKa7n79YJ1Nk6RpGudc3bbkqdUdAHSMfvD2zW6/2+4Or19eeecOVU0AZ6uVcSa0GhAZY0JwISUgM7ZJU9V2bZYqxsA4AwBKSgAvODNaI6G15Bw65xzHqq4+X18DQJarRCrGWFmW2/1uuZxrYxFBSrVYKgAQQgjBhZDWOiEU57LIi0bbu/XGex90AAkgUapzjnxwTUsAwAEC7autAQAW78Ccs8FPJQAYa4z1iJBnGQC8evmya2qlpO70EIHMWrvd76/OzwHgbDFXSn3x9g0idW3baW2dK2KMpsV8BgCvXr8hT/v9XhtLRLvtXiiltTbGZimPwhS1Nc56JWVVVyECTzmbVXX9/ttv//gPfwwA9/f3P/vpz+4ebr2n6+vrLMu2u/1vfvu7y8urENM5y+fW2vpwUEpxLjy5JFEAoBJVVYegQTkrZp9uPs9m8yLP67a11qZp6rwPXZSlGQBoY7qum8/nSsq7+7umbZbzOSJ0XYsM52UJANvd5lAd3rx+DQDGmDRNszRpu7Ztu8uLi9BRTVMzxCRJOGPGmDzLu05b56yzqUoAQAp5+3A/n82VlOvNrmkawUXoEClFqMzN7fX52blz7vb+1hrDGUPGjG6Nd1zyDx++BoD377/+8//hp4euBWuUSs7Pzuum8d5HjXVgjBlj37x7yxg2h7aqq7Plsm6aummyJA37AwBsdrs8z7z3D+v1rCzLIjfWbHe3l+fnh0MFAJ0295t1WZTamM12Z4wBZCGafNfpQTkaJkC9lzuIFANtG61lohxFVQsi75x3DIkBQcgEyFnvM5WZzgEJOIrMNgCQOCMHUOSJ0Dt0yNigr0AUbnt9oKH44OW6v7fD4aJ5ELfxMDESeYPaRl9+RNCjjVIUr1PYHpAeA5hUccztez44PhEEbg97TYzY/gjREMcSHwM5AOc8hrv33lMzIgMK5B1FlHKSvh9Z9se8Yyu9aeOebsLwYHx2+G4ExUN8xRE0jhD06EkYrlUBTrx2D0FKh9mBQ/Wm8HAE/5zxqO2Bw/GIiHxfWzzppPGchE91Vijt90Lz5/ScntNz+peZBPbqZ8cb3vD/9NwY+UWCeFg9IpdGRhLCzny8A0N8aFTsH94KmCD8ObGDhqP3xsxGfTrooUbPRmKvnviYGuuzHiQHQNSOpOF8i1OZ/OTLMFwnEoGPXTIYF1BQWQIApGAvGo+vUceQCIh8H6h4DLQw6Y9JhKBITtCJRITjPwatRozchA9FAPUhdidvDKxClMc4jAj1TqlBcN7THL2ROvXv4WSwx2qHYiMfFNXcpiRYnE1jS/tpFSyX6WhWwAhyHslaGsbodKhw+lmvrjShY75PQp9mQ8e/Rlpm/CYObnQRAEf4ZorYTsoZaOoJzuutjI8aOBB+0TvCsebd9NogtqAf0yOtzwnQGhDusOxGvDS0CoeGjJkche8b3AHES/AwakfmrjTmNeDl474YNOAQCJCC8mEEYbEPcERUiOipJ6cQJ93dc/zjg30lx8XTK2HCqKkJvbZg+JUG+BjGb7qvTLObjP6kMXTUdX2dJpvopA19yf2jdNTJQ23jxDoF2mOTolXY+Hgc93hnHun3flf7vdtZvH4ZhoyNZcb9cIKv+1eOlSymNX3UpqfK7ol1oMlFRAT4AAAYlME8WJhcmgxVJvAQYwqEb4bhw2HvgyghMI4qQNt2WZpYY1aX5z/84gf73ToR4vWLF02jP11/ZsgBIFHCGseQO3K94v4o4gZRF8rz/caJ6IH6gDOc6U4PHTuslrCRMs4A+4iu3nlA5JwDgvMemHDOm86azjjXK/wKhohovUPGB3X+IEGccw4IAISSQnBjNZEXIiHyznvORdu2iUqkUgBQVwdEzLOyadssSxOlrHPee8YoUADOuevra8aRc+GsJ/CEwAQyQVz0ssh7BwhCcO8859xYxxnLi8w7t14/JKkCAKWS9WatlOKc1U2dJIkQom5qqWSSJE3bAADnQklVN02HHTKmRHJzc+28k1K8/cEfAMDN7XWaZplK7x/uz5fny+W8bbpAy3rnAkAoyoJxNpsVUojNbpNlWZqk2902y7I0STbbLQDkeSYF3x8q52xZrHb7vXNusVh0WjPGzlYrAHDOW+vKsnTOtV37Iruyzjd1M+judV1XluVqubw4P2OIh2rPBSvKggve1m0fwoJIG6OkVEpt9zvBORdiu9tKKZEgmCQLITqtgzqqB/Lk8zzvtNZaM4ZhShjdOue0MWGXIwApZdgqz5ZLADC2O1Q1ETDO9ofKOccZ08Y0TQNR70ZrbZ0NQj/PsqpuEskJgTH05I3RAIBIjCNjYLwHQo7kCTx4xphUMnBn6/Xm3/zkjz9++lQ1zfl8HuZqUc7qQxt48CRNYbNLVCqTxDrPhMy5TBo9ny/SNLPaAkDbNI7IcxFO28EIlQseoEfQWRZCpGnKJXfGWWuDflCaJK0+JJI3dQ0AzlnrXKd1mibkfdN1kgtAzLOsD26elr/6x1+naXJ1fmadN8YqpRBxt9vPytnHT58AoNpVHqBtm4f1RnB+9/CAjDnnPdFivqibBgAEF4ILBC+Vso0LqFIKIYTw5HeHPQB89/lzWc6SJP2vf/3XLy5f/OTLP/k//vN/LooyaBQCQKrS/X5nrM2LUgjRdQ6AiqLUbd223eXFJQA0XWuMfnF15Zx1zhhrLs7Pt/uNc04pxYUAANM0nPMsTaumNtYkSqVpaqxx3s3yMqymsNzyNL3fPDjnLs/POefr7RYZKikAwDhrrZvPZnmWbXZbo83ZYsk53+72nHEWFJa7Tkm5XCy7tntYrwXniEjkjDFFkTdtDQCc8zzLvvnufV3XaaIYQ2OM4Hj18qW
1+le//DsA+Ku/+svZ2er6q98J58py5sm3bceQMdaHfKzr9uryQgj++ebm882NlBIAvCfv/XK5CMqG+/2hrqqyLJ3zSqkffPGubTtt9Hw2y/MiXOFstlsppPd0//Cw2x2cJylEiHnvvAuNQoYEwAUHImTIOScgzrn3PtyveE9CSI5MSM6QxXMHciG45EEGCckYU11tGPKBkYuSsf+fpkAaexMM550nElL2ttaI4MmDhz7WCkRk6Ue5ChGrjBqCELH5Iz4LIcJAHEXhI7A8TR6HEAFT+vAojTYukxPgKGX7+/C+/N4Ih+AYlTyR4qXwAKMoQsAThDBAsuPqUV/+APojZognCBg7YgAENA7TmPlQo/EHnnw6GBt9z3mhx71R5TReNuMI1WJ5PXIfUHV0RdW3IgA1RCDOeFQNHfEajWex8cxwVCMceOh4HECCqIvyVNWf03N6Ts/pX3oSMZzxxKISR5YmBKYb99b4SHyORvk6pXamFMqUgTnV16HgkxIB/aD1M+y+jzQpjxL2YjxK99NHJ+VHy4X4+YRcGJpFjzW5jksb3OUN2n7DQTXCDhjUcfq/p2IVB9FEI8Q4LQRo2tU0+fjkjWkO5H0Up/0bjLGeOZqAlcHgdDqSfcYY0dXwN/V3gH1bR4piaMnxQB4NQkBMx62A4aWpyHwMTKYE0EjIDOgj5Px0oug9BTHYg+DxhDt570nCZMxsgoIiUumrH+o3bczjtp7MJYSRN4wf9ahjLG6CohCmwG2o2uN+m4wLHHftUXsisBz4m6cw16P5P80uVmy6lmHwfDSt2xPDPPmK+rZiXAk9i8TiQh6w78ikH2HtqCMKPaaf3E0cdVGEljhUdLLApg3GsTkxexq7Gx/tWjhdTZGxw5NOpYEHRsKoLRpeIHIejk4aYwumRdP0q/Bm9Lc0FjI5xAynk7jQj5XTT5eNH7fX4Et24Lr9WO70rBPh8ThFjxbrPykh4jjno04mxvURc50Eu5yC7okS6sjHTirYdwiLTOTQh1IKYhAOtJyz8+Xy5//Pz2839+eLF5wzKRQAMM7BOILgQoAYF2OfApxcHHnvYixysN5xJE6gtQYAxhA5cs6Cx5NghRXa4qOMDVpsAOC9R2DGmLbR3jnOWODCgLxzjgnOOWcMAcOJFSdNhUQpAnLeciEAyHnHGAt2iyIX1gQHiG4+m3vvvXdKKee9tTbLMmNM8B3Ztq2zDlFY54y1aZp2Xcu5YFxSbz4GxlopBTIgT9ZZIp9kaZalxhpkuJgvAKCua+dsUeSd7qy1ZZlZa40xs7IcwvhKKeum0VqvlquqqvLz/OLiYrfbf/jw4Ysf/ggAqqrabDbzcnZ3d7/dbueLeV1fAzDGWNe2aa4AIM8zIpcmqmlbIJJcrLcba+18Nuu0DsGsV6tl23VVVV9enltnd7vdcrVkjHVdl6VZ6Mbbu7skUfNZud3ti6Jo2zZR8t2b18a6VKUAkCbpoa68d2VROGe10RcX54tFWde1MboscgBAZMaY2ax0zlnr5rPcOielTJPEGBs3Z26dzbIsy9KqqqRUZZE8rNdlUZyfnYXRVFL9+Mdv5+fnm/v7X//mqz/58stvPnzDOb+6vLi+vQUAzqA8K6y187JkyKQQeZogY8vVUms9nxUAsDvsy7Ko6waI0iy1zjJAhnCoq6IrmeAAYJ3OkwwZNHUnmEApkQtP5KwpWXF1cQEAaaK0NgDgrGOMpVle5Lkj4ILnRQEAxthiNld5CciI0DnbakMie3H5qih+rZIkzCtCZAlKLj1jgY/kjAeaP5Cws7K8/viJMQDEpmusMZzzTmvyHpl03gHAbrfLs8Rae75aWmeVlIyzLMsEZ/frTVgNN3f3QgjydH620lp3ofLOM86NNgCQX+XBgBoRZ+X8ULdt1xKBlNL7UbRY65x33Arvexrr8/XNn/7pn6ZparQGgH//V//u9u622jf/9mf/Y56Vn29uvvr66z/7058a44LDu7btOq3TJJVSevJh8SZKbjZtURRBwmw26yRJkiRpu6apq/l8Zp3R2kgp8ywfxIqQQpuuqvaIkCSqaWutdZqoWVlstxsAEEIs5rO26w5VdXF+nmXpdr9HgMV8HnKoDlWSqDRNtDG7/WE2K61z293OWpuoJE1SAPi0vl4tl864zXZ7qA6zovSOECHLUiLabrcAcHF2ttmum6bO0kQIcTjspZSrs9VyVv7iF78INxaXl5dff/6UKuXaNkuzqqo73QkuyhLw0wkAACAASURBVLIIfiGruiryvG3b9WaDCOerJSJud7sXlxdlUYT59vHzZ4YsUWq93f7g7ZtEKQB62KzPVittjHMEAMj4rJzVTVtVtTZGCMG5ACJP3ljNuQAAxsATMMacc4iIjBER53zwJWmtCSQhA+a9M9Z0XYsMiJyUve9I56AoStvuyTFPrsegwcP+5NrwBAoTkXMOwQshp+J2gjkwuH8Z7+NwPJMM9/5BzOLkIuopCu9IJMXjyxQiBrp0PIkMV+Lfx1sNt+E0QpLooeuJONmR+BtOTlMUewowJ/J8gPGPEDlOT319bYZ40oTTyKs0VA8jsp0wiRPQFiyjh34+xSoD0BnBZrzSxSP9gSnonuC5CDUAJv3Q9wsyQAT0Y2OHpsfDZA8mMUIw6KfXqOwKY0eOoz1UfFAmwnhw/yeDsef0nJ7Tc/oXlHpf9YN0DbvgI96w/zY+02+nRz5JouB6bNEXvh1/TvRWQvKBezk+8+P42mmmg+IWRifKQYlvoBRH+f1Emt7yAcDoAfh7UyzucbfgyIw8Ud5Q+xO5Br3NAQyKQsNV21AgxLodgZWhuL4XAQAZZ0emijBm2HOkEOVeBAU4IW8e3wYGl/ahAohB/VIAgnMGJ+wTwjADjnOY3PJNco9jMtIOI28S+2nsb5h213GHH0GpSbEUuaOhtEHAHynxHeV7UsjQxSMP84gUgiMoMi38n5Oi8qbHGHUlmvDEmUDTrgYAoGPCPDoSHDtu6PCp1mBYa6F+OD5MY4nxeYRxcgzkdV+Zk4Tjz+O+GDHxZIim1aY4TrHOCEDTkEb9bA3AKmjWQK9yRn1Nozq09xibCEdIbLJ3HPctxG6YVHAYunEmT44IYyec9kHId+wSnFylHH81gM/p5xSNfCcFxW+myoND/XD0/o5DDjDZ6RDi3H3iIudkgzpaDxOg3Nc2LlwcTzSTof296fsf6Ht5mE0TfdcwZ/sBYCgomjzD6NGTAeAw5x+n47UC/fKJ4951mjO+3+8c+fOzMwbYtu29v5dC6aA4ZgznAggYZ865sYvDTBhnc3/U8N4TAmMsmFsZa/oqMMY4Y5xHjU0i5MZozrkQQaWOhcmMiIyxtu10p51zONGP8ESevFSp4BxZoN2BERACY5BlGQBYZz35LE2Djl7wqKWNyfOciDrdAUCaZsiwaRqlJBdCdxoQhBT7wyGQGp3tymx2s/l2li6zNCN0aZEiofOOgCRyAFBKEpC1hjPRdW2ap4vlwnvXtE1ZFIGEbdpmPp8TkXM2OGvrui7oqdVdE3xHAoD3PnAx9w8PUsp3b9/+/K//OkmS/X
4PALrTiJhnxcUZ7Pa73X4nFAMio41KZWi1MXqxmHed3u/3i9nMWdtU9cXFORBVh8N8VgIAZ2xd10mihJTX1zdJmqZputvtORdJkmx3+zCwF+fn3vumac7OzhDROauk7Lo22Lm3XXN3f1uWBQCZTpdlUZaFtWa33+V5Fipzd38vpHDO73a7+XwGAG3bFnnOGKvrdiBipFR5llnrCCBJlHOuyHNN/sc/+tGrF1cAIMADUJJmd9aWRWmc++qbb/eHw253CJmcn63ef/ut4GK92WZ5Oi/LcjYDhLuHh6aqV6sFAFRVJeRrLri1JssSQHDOMcSmbdtOp1kGAIiMMWQM66ZJhHIWWCod0aGqjLG5UgBQFkXbtm9evrq9vwXEuq4/fL559fp1kmZchp6xQkgmWFU3joAIjSXrfNvqtmtD+KOynDe6m5+d1ftDkKasX8v9ooC4GQspnfOCCecs57xrO2stkQy+TaUU86KsD3vBwRqz2+1brdM0TZM0RAz85tvvfvSDL3b7/dXlxfly+e3HT9YYax0AdF1X5BkAJGm23x+Cd0FgxAVHwxgAAWpjhZRh2QohwaJ1jnHunNNGSyl3223bdWEBGmOqqlouz9I0M9b96h/+8dWrN3XTkoc0kwDQtjUQzRcrAjJdq5RUKtnsNt77JE13+02o1cXlhfcu2PvPyuJhveGchUbdre8BoNNdnmX7ww4ZpirJ8+xwOADCcjm31gTtyDzP8yzbbLcMMUtTY23btnmeK6WatgWAQ129efmSM7bb72Zlcb5aXd/eHapqtVgqqYKPhSzL8rwIqpGpSkIwrsDiAcDl+XnY8uq6TtNEG73ZrqWUl5fnq7Ozm48fv3v//n/9T/8JAD4/PDRVjeRfv3ixr5u6qZNESSE559vdDgCyLOWMtd4ba64uLrI03ex2aZIURWGtW6/XYdm+vLraHapEqlk52+33bddyLpFxZ/3n6xsAWK5WRICM7avKWZeohHHuvQPvRyOeYKpP6Ml77wBEcCTtveeMAwDnPHgGYIx518fmctZZZwCICw4AbdMi4pt3b7753XcRWfTakVObjKAbNxwLiAiIvAfvPQtuE3uuKtJQ2GsPBAvmQaCQJwqO9XspdirVxl/H34Ou4dH9IEQMAhOEP2YWsOs/4cSDE38tob2e6ITxjIhqCuVPcgHvKFByEDENi2Lfko/1o+N6HplRwUD2DberwcKlB2WDuJ8cGk6gzmCrgMNtN4xodwSDFCU9xoJPQtMMzZgUMDlzHR1ZR8QUjT4mKCti8qh5GhjQcH00npme6tJh4KHPKEb2oclZ5Z93EnlOz+k5Pad/CUlMlFL68z9E2uoJUnIQfTjcZB0TQv2WHBXahtenv/QxauI7vd0gjd/3rl5GbmDMHqKkIIDeCDNUtY/CManoRC6Mr4+WoEcsRBDAj8/wY61HPBI/6QUOjdpE0aHy0LDhySOqJMjjwbXM8HFgakYj4yA7j/Wo4IRYICBwMM2dMYzAZwpORqInXkDGruhv7mjawTjty/h7eGnCoNBk4kAkmgbrjP6zCaEXVdWGytMEiIzczwRQDTWI4/mkpB2epkiQ9xTG0xTS9OeRz9KhMcM6mPjzidXoaaD4wmB6Qk9VbkK6IYy9GseYhg4d3QlFvNNPmkFp7QgGjSvt1MidjpTiaADEBCPnOJmKk/z6O/m+MYRE0GsDjHkNsYofYaXJDJ/0yOlzQcuYAj0XXzlCo8Mc9KH3hwU78OkxoylrHLtsCpLjHjTdNuD707Ac48j3fw+vR227fruY4M0hiykjOWhE4rFOYL/QYgdMVlPs/6MFgXHlnmxbkZsdqzqszH6DfPKG5KTJkzyH2R4zDb090Yp4rD5Oj36Zbrix447X4eTA0LcWpwNDBH18yT7EEwAgg6juPZQX99vxA4yTa0T+fQ8whnlWkDV12+wOh26/Rw9CSALw3obacc4ZY+QIECUT1MfjOh7iOBGZ4N6RdxYoMJJMN104x3LBuWDBYDD4liQCJvgoShhjiEGPxlmnGw2EUvTBMvtSORMiEVL0ZLzvG4aIQvDo1xKjl33vvWdCOO/JO5mm2nRSSABQUnZdh4hCSGMtIQgujDVZnmb5KwBQKlnMFn9R/nnXdYyxPCsTmd493L9//3633SfFHACE4rpryZMjR0CL5SJLk111YIwBYqAb0jQRQuz3OymlUok1BhGzNG1156xN8gQAuq4jgllZVlWldeec/ftf/X1eFqEBAJAoZa0TXDjy3vvZrNRa7tbbTrfn81VoNBdcSvGwWQshVKLW602e53mWrzdrQAgsQ9d1nPMyhNU2+uWLl23bdlqvlkvnbDBbXi2Xxtrdbnd2tiKgqq6UlLv9TmvzYAwAaGPqplaSZ1laWZNnqXWmaWrvXZFnbdsCQKf1crGo6zpJEiXVbr8HhCzLDlVV5HnPYVkW4tUQ+DzPOOf77f7F1eXvrq8/XX/+4u0bAADTdF1HHL0nY0zTNP/myz86VPXLyxd3D/cA8Pnm5odvXm93u9uH9Rfpq0RJrTXydLvbi157FgAxSZQQvAv0GXnOOTIMdqxh+gnBAZExnpUz0k4bpxLPubC20d3eqAQA9of91cXFZrf77e++evGXf+mdI4B8trz79CkrZgCQZoU5HDrtPKGxrmqaumqYSvaHA2LvipEQsyzLskx3nQveDBAJSKkEAYKzy/1+BwDIkKzPslTr1nuqm1qpBKDXNQYi560U3LQNKJblGQJ0bXuo6ixLACAvitVykWfpb796v/rZ0ntKkzRNEq21FEIoCQCfrz91rbbOEVHTdtYaBCLyQMx5H1aK9z4oS4aF0DSNtfbyxeXtw8PLF1eB4Pu4/fzi4vL65vrF1evffv27m9ubd+9+6B0RMOs8ADRNXZYzxljdVIyxNM045/V9vVwsOMNNVQHAbD47X53tDjujzcvLF847xlGpNEsz61zwkJjnWV4UQdWXMaybum7qs9Uyz/Lbu9uwWNJEWWdb3c7LmXXWWssYy7OUM7auKwBYzudpmhyqumqal5eXRpv9/hAcuRLBZrsDgPlsTgSbzZY85WXWtG3TtESeIZRlGSzQ9/v9rCysMc5aztmrl1fzWflwf/ff/uYXP/nJT374ox8DwP/5X/+vTMq2acrZfN/U2ugizznn1pqmawDg6vIcGVR1lSYJ57wzum7aP/zhDwCxaerP17cAsJjPd/vDerP5i5/+VAjunP98c/PF23dJkmp9MM4BgODCObq7f2jqtr8oi1BPCDle53g/7LHee++9JXLeB11OxhhnPHxB4J2zQghjnJSJ9xSeEUI0Tf3m1dsP6jtvADDENuttpOIO3MtJIu8xhoIkBOhvAgCAcYxArsfQAaDRCF4mUgyPTiGDIdX0Nv0poX6EyIZD01P2XkSAhMR/DxKaiOJeOA+4i44eoBH/ngLtETKweHUcqUCKx49T7chH2YxIKordJyBXr4zSv40UYSJEMjFmFSBFgB0UODw2IRuPWn1cifHXUG2M//cgb7gIj1mNGC68Ev1vTYA6xMMdxpil2LPS/VsTyhhP3oztRkLCC
Az6/jlBRc/pOT2n5/SvJgmYSoF4jTUevafnY5w+1UvWo8yG4yX1OYS3prTEwAZGgqunNIniAXjC3kUtHRwCqUyrQURBjYQxHLzlxeNoJMFGiTOVaTGHWHykDiZEwGnTKMqQgVeD2Fh/1LLYYQMdixOPdQAD0dWL8gGl9BWkgccYaZQgZvwQ7y6ezMPbyIZzaRD4iOFhD2wqCUfqJtSaprWZ9mz/0iROyIRkmQxAKP3YqXJgQvovBzUInICD0ZzjOI3zBf1x508vMJ9UvR0eo9iBcdrSk/BtrMf09nPiPzXO/DARRqAUdJrw9Pl+/J6gP48m1vgtspErib9QZG4i6IkX6yOHOZ1Cfak0gMTwkR+gyVgZGkDU5JNjpDW5Wh56Y3gyKifHcDYQhjR+Ne3SJ/I/HsopvMXRinnk7qYOBkLjBu2CngHsUacfOqT/7mgODzwdjR+dTt+TWUj9NQgde/QEgImf8WNSsz8+4On4xpJPi6GRPibAaSClCKsnVQ4nk6k7zHETGzqKAAebrr6qkab63hQm5DFb/KRG5ZFoOG7gMGnH9fXPuZUPQ+mDcglO8DkREWMcCDG64B8aOIqHR+eWWDziNAIo9fQeMX95+WJ9+1k56ayrmw48JCo11llnz88uQgbbzS5qbx31xsmpjnqmNyjjk/ceGADDqeZq3xbfB3vlgjvnwrYmGAuBHbq267QWgIgcGWfIBjHBheCCEyKR74OhE3kCwVAI4XqnhJwIO90iohDMe++sVYmyTjvngps/AGDIhBTW2EDkGWuR4YvzKykVANze3W/322L2ljGoqsqYKks9In/16u1u9/96F5xd9urIztmizJUUTdtQoAu7PmZOnmX7wx4QiyJ31ndGz8oZclY3TVmUQTvSWJPItG6auq6/ePeOMb7fH169fPW7330V+Kmmaax1n28+t0273azn85knr41enS2F4I4cAKwWZ3VbI+BquXTWGGMWi3nTNlrr+XwWLNAPh8NytSSitu2WyyUitl1XFIUQ4mG9Dh4SpZTb7c4YwznfbDdCSGvtbrcTQgZaoe265aKPH5IkUnBunQWEs7OlNma/PwDA5flFZzpkeLZa7fZ7Y81ysRRcOOfLLAsqn0WeJ0myPxzSNFVSbXa7NE2k4NvtdrNeBweI3uJsPlNZussLgtu3b99+9+F9p/Vqufr24wcAOFTVertFAGOM854AVZqerZa3d3eK89Bqa6zW2rmeXi/yzHkQXKZJIrkICoNEQQcXpcqIOd9YZNIY13vm7Hdan6qEASwXy8VySd7OiuLm00dwpJIUALbbPTKO4AnZbD6vGs24cARlOVcqORwOAJDkuSevulYKabgJ5trkKU1TQFRChjnjrCWiRCVSCWtMWZTOOpaidy6ohW522zyRxhjvnVRKCGGM9c5xzoosB4A3L6+++fApTZIszV69fq1Uoq1BRN11KlFh6bx69UrrjnO+flhLKZeLedu1ndZcZN6RMUGVUud5b2HDuZgV5f363jnPkEmZtJ0GAKWUsfbzze3Fxcv33353dfWCc6F1lyTJbrcFAKVknmXGdNaaPC+AYH/YM2R5nh2qQ9ioZmXZdd1uuxVSCiG3hwMAMc699+vNOvhwWC4Wne64YORJSdG0bZ6ny8WiaZuqqc/PzwAgUep+/ZCn2XxWeu/3XZsohYid0WFPXi2XnsgYPZ+VjOFuv+eMFWnGGW/abjGbAwAyfNg83D88lEXZdZ3RRnddWRar5ZLA73b7Ycc71JWU/NWrd4uyvLm/+5uf/7zV3b/9n/7iw8f3AFAkUgkxK/PdYfvp86fFfHG2WiLCw2ZzcbYCACnYdrdtu+71y5fe+6pplot52BXv1xulJAB0Xff+m2//8Ed/MJsvttsHbfSrFy+UVHVdb7a7q4tLAEBkh3q/Xq+llF2n42ZMgKiSJIRRAkDGGSByznoTBAweUzDsnP0nQETEEYEgSVPGUArOeO/sUiVJfajvH+4uLs9uPz1MzxATSIEYYpd5Cur7iDyIZWt00H1EhsFkl3rhH4AZAg3cUwB7GFH36At8UJEYDmIQ0d1jcDGIxClCmXzX4wsC//TF+Zgm57Ojj8cz1tH3CIPZzhE+AIBwlEDGGBCBBx94wHjMiEHqBhAY4OUjIDIQbQE9RRQYvouYtEe9ESEftzE61+/9e4bqBQf7o/n3cHAb+21sSWzzIxBOOKrl4vhiPC0dWfkdNQmGwe2Bgx9IZ+wze4T8xnEckRdFFYQAimEK2p7Tc3pOz+lfTRKRQQpiMTIv4d/pqRzgSOxFsia+cXQmhqlAmHjB6BUDB8ptSukAPMlT9ap+gzphlDWI/x97b9Ijy5Klh51jg0/hHlMOd3xDvapqVlcPZDdFggIX2nCjnykI0FIrLgQJlDYCIaoBsVUssuaqN9+bU0w+uw2HCzNz98jMavVK6AbS3sPNzAh3c5vc7LPPvnOOtYaIGPBAhHqngRbIh0xFX7uRBRkffZbc8jCVl85/QuBlJjv2mToyeB0eeSd3gyUWotE52sISsdmm3W/ox2qHBps4H5oxpTABDQKYjCmcYH/skTkbA34n/xS+4OjCBsaqnDdGqO14/vwY4sw7yLPFnh05bzKvNht5v0ei0EfZBOEmANBIhYcmD9gl5H1eGAwE7wjxPMMx3TVlPbEw81xGMOL/GQl3mgoTCO9AoszH1bPp/FXAqYdgOgp32NhRbTCyMOHmcxaIztvgrAYILjjMTLoXVMwEDhSCH43wjG9Wmnefr1X41Vf9kWXv01582qowa3A21icgtTEH30WhYKNK0RcKR9NeB7ssWJpNVWeDY3r67O15BqXho3IGUnl84cLbM5FvHvuOmwN0kame6358nP34Gs7my+d6Mgzh0HazX6cmn6oFOA0IDIP/MSP5zOsW+t9N4RTufzIZjF3lR9WssGPJn41DPWbxWCA5L5QPEhA2a27HwgjCi+C3k0Bk55uW+SwQSsemLdP5Ww4Aq+Xq/uaD2/dpbYZODUojogunAADImDEGAIXA2eHRWduNo8kYbazlghPR0PWSIs65VhoAkKF1tzuCByGcbqBbDhhn1tqh7/u+18ZIIckaskTOjJYzAACGwJDIWjL+uMsCAwTknPOR0eacO1kWZ9xa7Q45rLVxHHmDOK1d9AZENijFObPWaK1u7m43qw0AnE7HpuqUtldXVwSgtUbEDzcfXr96fXl9cbjZA0DCYs6Z1ppxVhRFN3R932aLRRxFx+OxKJYAQARKqTiJBjX03SCFjKQ81RXnfLksdvs9AEgpF4usbuosy5wVZxRJzniWZb/+7a8BYFksm6YVXLx98/azzz4lsvvDnjG8uNiWVbnMCwAApLqur69eIeLxuF+vVlEUlXWVJMkiy1womyiKpJSnsmScFXm+2+2VUuv1Wmttrd2s1wDQ9X3d1BfbC0Q0xiCyoe/SNCNrnSgs43yRZcfT0Vq7Xq0GNVhrOGdJHO0Ox0WWAQDnvCv7zWbdD0PTtkmcAMJ3Hz4sFou+712woDiKgIgxlFIYssPQf/r2E2cfulgsTvsdAIDu+74FxgaN
v//qq7brrFVxHH317TfOHd6//Ou/EgB9P9zc3V9ut8tFXtf1YpG9f/t2VeQu6osFK4RgjMVxzAUv8rxsOs65FJJz6exwB6XTJAbA8lTGMmGcA2HTdm3XSeFDKq/XS0t2sVi8e/1aK9W29f54/Ot//s+NMu5Bh91D3fbHqqnaXsj4VFWRjI51XSRF27Z1WwHAlq5cPHQX5thxAC6WiLXW2SRmWYoIiMiF5MiqqoqkyLJUKQVg37x9AwBaKSEEQzRAWZpUdR1JaaxVWhsf2+S0zHMZyevLKwJUSrVtwzlvu+50KqumBoDkuOeMXb9+U1fVoLWMxDDAIsscHelmj6auF4tca+3IhSiKNpttmqZv37yNomi5XALAerm21v6TP1n//Bc/11ozxpumIWDD4L0WpmlkrBm0clJrY23TNGm2ILBleVqvV24a+Xj7kSHbrrd9P7iRwBmr26Yfhvdv3wGAsaYsyyiScRq5Q43VcklED7sHY0yeZQBQ1VXbdUWep0myOxyM1svtlnN2c3fn3Ecaa7TWWuvXF9enqjqeSiFEHMdK677vry6XADAM+ubuPk0So7XSpmkaLvh6tRKCN03v5hDBWd931pp3b97nWfaw3/385z+POLz/yY8p4l/99ksASIS0HN998ulvfvtbKcUPPvtEysgYMwy9oyOPVfnh5uPF5iJL00Gp4+l0dXHBkD0c93cPD++urwFgdzgul8svPvv8eDwcjqeqrn/ykx8fD9X9bq+Ndcce2ti7+3vOuTGWMe8X0p0JCS4cHelmSYZIwDjnjHMuOFkLHNVg/Iox4jJkiBjHkYzYMHRCcKckRsskj7755tu//Iu/vP+wGzcNRO74f1oOyGM3soCceQRllDLcieUFuRgjBG55k0y6WzwdScAYcoHu1ThbdOaeHMNi9uigcS6tnN3qDnonqOavGg2wz1Dec2l2buyAIOH4rMkyxEPNGRh4tMxbArcqh0bHoBWY70n+WCGm5LY2YXOAoViuZG7vNsHe2VZj/txpC+H2KV4jOUKb0EQw3jMh35m70BF+zD1cT4UMIcRdEc8wcMgYRrQzYikam3kqMMB0w8znlX9ScBkQtnFTX/09mvMlvaSX9JL+wSUB3vrAbcwIyB/o+a3+2a52mvHAx0uZi3domggZBGcuiMBwHmOaNJwn9xzOwoZ21AlhoC0QyU7bcu4XNH8qCox5fRIBELkwqQiAxs3RBGAdfGCICGgnQ1snRRt3rk45wLyNQ3i6XzmChset0qPZACIgirC6UIArgIwBmXBNWLeCiJGhmHEaFJqRwB0Z+q24I05DxgDImCVyRijIuD8SswTWOuMRxrhfN4kcbjFm9LzmN8PkUZF3bRNg0nMrWDjYw9C6XCBNDN3US24TPAKNya8zIjI+UTl+wXUGyAFY0KRFQh/2A9EFU/b4IESMQ0QExpAs2TEs4fQsAosBnXheIkTIhQnUuT4cQQRNNRrREgEQWUYCgAA9JTxrFG+T49WVnqthSGBDLYLmK7QveC6diAhd5HfrKTbmnPIgMGQz56dAdirmvHPmpq0zuOQ7xVoXsmDsUnSYFpEh2GB/OmHYMeCix4uOuXBiXktA6A9sGfqDZEdzEZFFZAFmjgQQQwSygGwscxhYoQVNgGjjqPMdTlOl/H/kXs6xEULbTOy7K1jAi0DgewAIiIEYeSAIZupErqPGgUNjiyKCseAPPmbMp2sjGxwyPIbx5KTZHnr6KWXGY4eLvaddIBsYxTNCeIS4yBhMb3xAu66hGBuPN6y1QIF4nFpktOoiBOQTxp6xgS7CE3gBvJspmSe5x3cqQF7fAh4kB8t9GqvvPibv8ugZHEwQ3CGRh9Tuoa48XAqyj+3QGDJDFgGCODLsyRAZm3TT4+Dx9yKANYyhs8gb9MAFZwyc9Gm7XZuhlZEoT0dA+8Of/Pjbr76uT22WR2mS/PTP/hwA7va7w+5ktelsF0lJhiEC4/4kLIwrBGRklda673smmCMHtVGSSR5xABBScsastcbYUfNCQIwxV/ah1303KKWEjBi3ZAg5KmMRMS+SsQ2tNRwsWrIWGGeElkuMYmHJG95GQg7DAICATBsTychaq402hiLJHRfW6pYzro2yRIKLYRhkFMVSHI7Hyy0HgC8+/+KXv/pFc6iGLC9PxyiJrDFk6e7unqFM8wQAIikH1ROY1WYtY1HVvSVM4uRwPHDOHH1aN5UQPF/kwzAorYo8OVXl0Pfr9bqsSmcffbHd9l3fNvVmvXGG29liocm8fvs6SxcA0Pf91eWFNtoaQ6QRWdM0V68uyqYEjs7wtqwqKaWUbH84MMbjOHaczsVmvdvv6qYBgOurK9Xr6lS9ef1aK62UvtpeRFzcfvy4WC6dTvDm5nazXkvOq9OJA6I1kZBxFCEyZya8yNKmaYhskkTGaKN113VXl1dVWYMFp91TQy84J2PrppFCpEmilIqllJxVdVM4R5aCn05lHEeIsN/v3r99s8zz33/zzSc/+AyR/+KXvwGAxbL4ncU5EwAAIABJREFUw+9/d3V1ZQZdlqUa+kWaWmt2u70TG66KTArOGJZtHcUcGCVpXOTZH/7wB0CSnAGAsSZK4jiOlNIMUQ1D3XVCcJSCM7B6AIC3b171Tffx4cg07puKAbt+u+VCCMb0oFSyAIDDqb572MeJrKoSOUvjmHP561/95je/+/0Pv/gRABAyi2gADVB5OhgiTcAQ7+8/1uVxud4AQF7kyti+7YQQBoARkQWGIIktl6s4kgBwKE+CcwDKl2mWLQCFNpVFcvFqKE4AQCtDRFywWOaEsmkb5xTVWtv2AwAIGVtLv//qu81q9cmnn4tIRlogotKqb7siXwFAU9frzYYJzoVAYxBZmi2stYDEuGBCAAAaK7iQQvZGDV2bLxZlW//+97/PFtkXn3/RtT0AnOo6iZNYxl9//d37t+8Bed+3nEtl1aB6ANBWJXFirSVrgZqqrrIsvlgvb+5vGReb9RYAjNFc8OWyYALVMBBgFMfImSWTpDHjAABt1zFkUkjGRd+1i0W2Wa2atqmb+geffuZCcnX9sMwLKeXusL97eHj96hoZfH9zq7RaFgUAGGNu7u83y2VV1YfDMYmjOI61UYfDUcrYzTNaKbA2iqK2bfuhQ7SfvnufJMnN7a1WapGnADAoXdX19dV1JJLdbv+rX/wX1Kon9lf/7K8f7nfbYgkAp6raLJffffh4f/fwgy++2G4v+r7/5W9/naWpU2xJKa8uLn/w6edd35bViQsmBO+7/ubmPhbJoawBwFj79u3rOI1O5antmqIo6qqtmlZpvVmvkzQGgP/8y1+1bbNeb/b7Q5pkkXDUnjHG7g8759HVgGGEjHOlFICTk5sg0NYA4M590HjjkkGrKIqVRRGlANhUJQAs0jxJ46atu364/vT6y99/GbMMLOMMlRp4JMdVhwm0xqNgQ4Yz5x2Ra6sBQFjpzqGQgbUUCW6tl2ei3/J4pt5qS2GLgQBBLTlbGxEYC5o8T4NO54D+ABfAfTRb8mdQxGH0afOCHr+PdNZ4YdDku7tgLtUDcijdbyU
scLA0LvFngA8ZY4FGJHJOJNkEAb3QxKEfhgREdoYE0aFr8DDEErBJ5ghh/+IBkAl+jYJHTgC/32Kzs1IGHgcSkWRihFpoCRkAggULRIh8UlGO9yIgoAV3GB4EBD7I3LQnIO87y91NBIA8tL2rC1FwiA2WBfDmxmSoLQG52E1hx+JrPW5bmHfCyQAJ0BJYQAvwd50Kv6SX9JJe0j/kJMBtGmdiG3Iz9nOxpmdKnfMN9+x78BL96VBq4iwAnu5ZZ9nQ+Q8vqgk03/mKSPSs6Obx3/5TDF9O29s/cul5rUY9k6/4Y0EZeUZtPMUKS71fyybmLlAOXgn0+AHj38Hye8xm4stGIDHjMIJolMLTRxZqxi+MF4xtRKOxtmODZvSrJ8sQYaQZ5hTE0wYL7QpnHT3SLTMq77kcnHzokUzTWdcEnsnroRwaMDZgpNnTPPFCs0wC/Tc25/QDPBE91ms2OkZkFpBOIFfnQ+jxEXVADM4y1NFEY6/70RH6ZUR4Y9f/kXZ90lB//Ktn2eTwzTQ+z1jVx2DLlRaDiXDgkAMW8s4vJ6Ht2LTnDs7nkseJOnpGmfi0qM+VHR1AnfGOGPqCRhXjdOfEqI3ulsa2cTPIU7zmNcWhOTzCf0SS/R3Fng0hGt86Ou8vmg6wx5zn3+P8ynDN+WwYjl/w/MInR/QQ3pg/WuAnQ+nxxQiBXT27hc5P+2fVfZrl7KLxZfJZ43gxWXu2iHgaH+d7q7EpvEnS+cPGYwx3DVliyAhACAkITs4GAMfTcbBqaDuy9OU3X61PRXmssu0KgSk9/O6r3wHAoDXnTDA+WOXcFwKEuTdsPAgAQMcyckVERGM144xzrrV2JwGT004vs/WvExfc0f1D1zRNjQziNBoGs7pYcSEIkHFWlpXfBBkrOHdjnXE0RstIJnHszm5c6GdtNAA5oZC11rlOs5acfMwMPQBIGZE14ORCnDHOGOKgBs5ZVdcAsNvvkzTt677pqiSNuqGPYpnnmZtwE8EBYL/f9UOfF4vNetMPPUNcr9dKqWFQi8XC7fCttXmeA1Hf9y7W9tD3lxcXTiDmwpK4uDF5ngNC2zZJmllLTdPked62DTjNqdFlVRHRarmsm+ry8iJL09PptNlsHEvYD/3FZjv0g9G6yHMCqupytVwaYwalL7cXAMAY25f7i+2WiPb7wzLP4zi+ub2JkyRfLE5lCQBJkqRpQpa6rkvT1Pk3lFJ2fe9GpCUyRsdRlKWp0bbr+7wour5ruy7Pc1frYRgWWaaU1lpfbDcEsDvsV8tlXTdpmsQyAgBtDeNMSNE0TZYkjOFuv+/6HgmMsevNFgAGa9erNRcilfF2vZaCd32vjR6UGs8GVqv1/cND2/Vt20shkeHt7f2/+z//r3/2Z3/6wx98BgBxHKtBdf0Qx7GMY+Q8llGcR4tFJgTXGgAgSTPOWJIkiEISWG2sMdYaV/00TgDg3dVFXVdZFpeLLM9zBOqHoSiKP/+zPy+KFQAcjgdtfeRiY60L3QSMSSmkFO6FH4ZBW+sOPslaIE9MdLpbrdbGWAgrBxFs1mtjaFmsy/KIyPphWOQLp4Yjay2RMVYZbSzFUcQZQ8R+0BfbGAAWeVFW9eXFdrnI1TAILtymnDNerJZ9PwBAtimssbfff6jKquk6hqxTGhGlkHm+klEEAGmaOvEsjyMWJ1XTxHH87t27d6/fRFGUMwYAcZJtNtu/+Zv/e7vdyijuB5UkqdGmquv1egMAXLCh78lSnhdd1yRxtF1v+2Ho1fDm1WsXMqsfOsZYsSjqplJKAZAUYlBqGIb1auUGeVlVQD5i1b7tLpK1MWZ3OKyWS8HZsSwBgIjiNI4iudtXyyLP0qRpO22000sCwOF03K7XeZZWdY2InPP1cnU4nQTncSSd89P7h/ssTZVWgDAM/Wq55Jzv93siWi6XSisAOJXVerVM07jt6q++/kobG0XJT3/yJ2kcf/3NwQGb66vLi836l7/9XZKmm82667ub21vO+Pt3b5wHzI+3t9vNuu1aRNDapHGilLp72LVdG0exG+NRFL15/VprZa2NoujN61dlWbVdly/yKIrca9u2bZImTtLOOUgpB6Wc+9Gu64plAQCCM2tJa620JiIpxaA0OKHiePAcZmk3uzt6cBh6RHBCV6UUWYhlcvPx5pPP3n+FXwOgEHzoB2RgtQUAJrgDVU4S55Yh53MDAMNewBIYSwQWrbHWAhvNEuZrpKOk+OPI0bMUdkIT0J4LE58gjtF+N9x8fpkHDx5DzTAGPMIP4TyfCPxpaBBJeP0BEbC5U6xHNSNfQTadsI4g7ayawfTjnIUNVKnLxobinYGkcMOILM6BlsOO1tL8uQFczHcI+HcjmfHus5Z9dMHsbgzFD7ljuATJBbdxJ/uuDjTtYCigC9+CGEbsVASkWSnwuae/pJf0kl7SP670cpzykl7SS3pJL+klvaSX9JJe0kt6SS/pJb2kl/SSXtJL+v8pCRp96o5KJxqFMFNwWBh1KeP52mPZ0Pg1zGRlBMF14zNplsHj3HB2pwXA0Sp2lML56+ZKmZm6zqtSfF7jIdJjmdxYjLFi4QhrVLg8kSk9+t1S8Do8s/CcVzlY63pDZESw1gYNGc4y9KepZ6V0FhmjRO+80qM4zJuxTtHIQ8HDhZOsahIbzjJBIJqMM3wAHDb5pAuG8ZOoL9hAB3FWEG7O+mY6mqV5p+GoFYSxmjT/Fvwp8/xkF8cfM5nsrHN8CVmw0B4/Ctq6WZPi7KwSZ2fBwYQ8GB8H8/InfiGns9mpDR8FQB8Fs+PYmtznhCwwZDVlOuvVs8PO+ftzPnbHE2UvoH1OghcEyqGovvHOdMnzMT4bk/6eML7GPKb35cxiG8ZD3VnPzBsOn55AP3uoSzAKjTnj8MhWaDpSnszCQ8HDaEEYB97MXzlao+nsNZrslya7pUeFC8fxzyb39tGsiyZ1nL9gHhHyjx2sj7nNLINhJuGFWeu5OWcSsJ4135nE4I9MWbN5PQxlPP8Sg4oBxy4lf2o/y27q/r87zbt89paY4CorWGMDjO4S8NHgxCAhGGecM8UHADjfW4wx5xHKGAPBswKiVUpxwZbL4s/+yU/X6+XP/v3fBoUH/+LzHwDAH7758qatGTAmhTWWcSBL3hErYxhmdQIy1gAiY4xxtJpFXsYIzh6QQpswZMF5PxiyMXO2arDIs3SRZlm2WGSImMSJiOJBqa7rmrZRnQIA6Sy70VeKcx5HEedC655x5jrBWgvO3QNYRMYZt9ZEURRJ6QKeAICUYhgMZxwQncWic5KQpKmLf9K2zdXV9Yfu+yxbGDJCSiJSapBRdLm9ur+7AYBBK8bY9mLLGJbVabHIJRf78igEF5zVbQMAQvBhGAY1pEkax1FZlkJKpfXpVMZJ4qLHtG3HOU+TpOt6IoqkrJvWWtt17amsACCJYmRYVqehH7RWWutlsVRa5XnOGTs0DQAkccI5u729i+NISHE6nbjgnLNTWUZSuvg8ZVUyzvJl8bDbaWOiOG77rhuGi+2FNt6udrVcGWPrps4WC86YtSSFaLvWEkRxBABOXZ
WkCRe8qhspxCJNT1WVpEkUybKq3DBEhtaaZVEUeXFzdys4dy5v4jhy7lCGvo/jSGsFQHmRG2se9vskjvcPu2HonSDxsLsngkWaLZKUCzFotT+djDFpLJ3SrVfqm++/T6Lo1eUVoOAyImOI4K//8i9eX11kWQYAdV0RgJBR2w3DoBD54VQt88XV9uJ4OnXD4AZw23RZXkiJrO/H+GSWLGfc+S1drTbLZdEPLYFdr1YfP3yo63q92dRl40IbEUEUxQClUoqsQc6d31UCiKKIce5aD5BJGSk1AJEhIxnnnKfx4nQ8yE8/BwAi1NoiYt20ggvnLMj5uOSce5PkrhVCDlqXp+MqL+qmA0BLoJRyr/bdw8PHm9vXr15laaqUslYjMi54lqVxFDtfgYhsGPrj8dQr1XSDMaZuGqVU36tXr966FyFNUwCy1lilGNAwDFEUdUPfdO2hPF1dvQIAIcTHjx8+3Nx8+snnfddrrYWItNacsyRJAECp3lgbRdJJ7S4vL/O8+Oa7r7M0jaNYGwUAfd8XeV7WpdJKCC5lQkSn06nI8zRJd4ede7VXyyVDrNsmz7MsSQ+nY9u2n7x9q7R2BeacrYqibtumaz//5BPGcOj7PMtWRREQGqZJ4hZAwflmvWYM7x8eri+vOef3u3sAqOpqWSy10n3fCSGKPN8dDlqrPFsMSrno20mUfPr+Xdd3X3799fF05Iyfjscf/ehHu4cHwXmvBgBYbzan06mpq1evXueLvOvbDzc3Lmq8U8JKKYo8BwClNBdiu9l0w3C3eyjyXBvj+vrzT95fXV4cjoeH/e7V1TUQ9H0/qGG7XiPDP3z44BYJzgUiGGsiBETgnDvBLAIKLtwTvcRcG60154Ib6yZKt5IJzhln3HAAclG2pZBkiYgYY1opAIjSWGmNTBwOhzfv3lxfvXq4PQCgjCQw7AcFANZYxtwrNK7C3u+wMdatT9qbhweHyPREAULehsyvjrN1+tG6DAH6jNZMOAPSz2GoET2MEkCPRIxb13AWg2aGGN2ew7X2CJatJZz5iiEA7vB3MPiGya5rxJEBuQS0OAdGZ7jvHCCM/nlC0V1w1Ecb0PAkBHQRqh+11Yg1ZrDKlcIDGpo2Hs4W4syUfd4L4INfu5LMn/MYLgbsNTqJwkfgOmx3AAAYBBsMAJh0kKHn3Q4FfeXBY4uxJ0fMSdOn8we9pJf0kl7SP6YkIGzu/SwajNM8a/JolXviUOPR1+7H5KWOAMDA3Oo7UGbjijJbv87y9Z4EAezoNDhsR8P8HpZxGqfvR7vUyXoZEWb2f9N0Hxa/cf10f40FnoODZ0JDk7shEDI0cVETAUd+WZq4PSCygfqYVuHwy1gwdzmbWcPSrL6ziociTYvS0x4a78GAOM7sqCdEBLMVe5bPCCEecXOPij/nWmaL5GQkMyvE/I/zXOeG2NNaHj4NiIko+KpxwSOICBmbkUpE5K3fnWOYUK4zl3M0NtzYOM7LpB2fO9JNZwQTnEEeAmKPS0wI3gXeU9pmZHuebU6AOcl3FmNpIpvO6hB+zGBlqBH68W/PRwWek1LhAbO8JzpsfAEne2T/hNASFF5tz5pR+HRGLgWIO73sFNpifsbgv3Fd6rwjTvgTAYEFO+1H88EsxnXwCu+qb8cvkfMpUo0Hub7EyPC8gSYo/xzWn1osVGJsGZh32fl1wYnk0+RayPlsOqPb5vOGr+68sdD5ghyf4q6g0FYegU/PwanRnynlrCjjDQRnk8GTqj+fif9mdlozzYGuDNbNgqGkswEP41ZrGhOPBu/ZnwT+wAotEWPM+Ggy3sGxIb1IEjX0D/cP796+e//J+6o/yT6TmWia9t/8m/8eAP7d//G//Ow//MckTi1ilmR5kTrPBeC3W6ETLGmjkTFjDSB3JqhaK8a5oyN9lGLGGKLzgQkAsZCMYZplAJBl+fF0NEaXdcUYtkOHiNYCEOWLfFffAwAyLjjX5FzkUpIkQkpjNDLGkYUAssAY85Qq54jgLGeVVpasY4WsMYwxIaQ2uuvaJEmMsYyzOIpc+xXFcr1affPVdzc3t+6WYVCMs6675yiapgYApYe3b99EUXQ6HaWQSRQPQ4+IRVF0XecoqjTPu66TUiZJ4jxaFvlCayOlWBa5C0xc1dXFejMopY3OFzkAWGsQ2W63c2Oo1qptW85FkiYAkKVJ3dSMY7HJj8ej68pisairmnG2KpZVXWmjt5t113ZKqYuLi77rAaBumqurK2tM27WrYtl2bdu2V5cXAFieTkkcu45su54xFkdRVTf5YgFgT3W1Wa2d9e7xeEqSWErRtG1d1xfbbd02RJRnWdO2LmxRkiT7w0EKmSTJ/e6BiPI8R0QheJHn+8PBvSNRJKt6SNIkkmJ/PEghN+v1d998yzhHxgGg71U/DEmapenis/efWLKXFxdZmsRR1LQ1ALx9/frDze27t28tkeAcgMlIXF5dRlEsODpvmPvD6XU/GGPvHvZSJoPSjDFDkC2y/eHoumAYlIxjNysKEUWSJWm2LFZKGcGEa+G//dn/k6Wp0oMxigMJIbIs1UohonNHkGZZlhV105ZNI0SEjGljoziKo8ha4+inOE6ElMZaQEClB0RAMGTjLL3f79qhBwDGuRAiSzOtTd+r3W7HebTIi6Yu8zxv+wYAbu4fyBJngiEflC6KggvBueBctG0HAFfbizxbSCFdzBKt+khKS0QE+/3BEWo/3PwIUfTKqrKMJJEQnPG6aZM4i+PYnioA0EYv44IvFqe2jYWIosSSjUXyg8++iNP0q2++BoA3r1/9x5/9v0mSEcGpqjhyIAWIi2yhtQIApRQCRFHU9y3nXHB+v7tru+795VtEcLF3jLVKK2fCHMeRlNHheCAiIcT+uG/bFgAWaRZFEee867r1cmXB3t7dbdfrNE0edg/Oe+ZmtbZkq7pK4xgR6qapmvrq8jKKpYuI7XjSvh/udvs311ec84+3dxebbZokSummbgBgkWVGq0EpRLy82DJEa02e5whw3J82mzUAXKzXxti7+4fvv/s+TZKqLP/Fv/hvLNFuvxeCuTDfu9PxcH/PAN6/e2uMOpVlkS8267XR6mG/cwM4SeK26b/9+GGzWld183A4tG1HBFIIZ4id54tTeerVEEVRXhRaqcPpFEdRHEe39w+73R4ANputMdaFak6SxFgLQNoaRJBSOoe5DJmxxh3kGKXROcNH1Ma4jQlKCTasx0RExDkDIME4Q+5s6oucQRQNvZYyLsvqk08+393+bdu1sYy0MYHP8uKBAO5c8DHn68XvWqyxjsYiBowBMus9a4+wwnnBcRnacbMSls8AfOwUhhuIgoPEsPb5VXAGg55N48m6B2pzwBKW3wkAjmu1y3K8kjk0SUQWPN4m9zlMLt1DacdsxzU+VGpa0keQOD7WqzVc/cbGmG0jZzhh/DleNwlRwnPGzU7Il8g5sYRRczBDzojnCGuW0bxtQ71nkGVs2/nTITi0n1d6bHQ2/Unnu8E5sJ32M6H2CNMQGHU6QZUAL+klvaSX9I8tiXHdgJHw8kvDmWBxtmY9/uzpXzRbi
MKOPtx9piGb5/c43BvB48cHisOpQoAxv7N3LlCeiI78wdNErJ2L8kbJIM7CJYQNNo0ldIWeaBN4LrkvECfJ0qyxcNppk0+Bbwjr81S7oO2blpgzhslaxJlwCB93wowjekTVTGX1h5mzMzs8JyYweIOeKEy3UjoOdVx0n1b1vEnOC/eYP3hSaBjpnwD2cPxwHEaTPjNQwQCACAzRmkcDckIlE9fqe3W6MpxiQlB/hStHhDJ1/jgwR6/hZ1UiNhX1UWOcMWpTAzwzmnD29eNGe9yV5yzT+X3jb6HvwNrAZgeZ60whO7W8H5ohHLC/ASBwM+46QMLHzT0bp2NxzxDvGET5rAoUIORZi3gV75x5HdtsZOHDYMDz5nRnxxP2xPE7F0Z5Qvvg2U0AxODBdHrWOMBgPijOfzuXztLYWs83jR+Vzivm2TVPZZPkwwfNLglvRXgziLzyNzTwIxdO55sNXyN8/khh/uzxPtcF5xAXQ73hbBPwR1KY4CZKD+YjObyPfjyCV1o/FsvTrAYwzW3zF0opJQQnAG20JYMI/TDESQwAlqwQnAxHhGHo+77tdENkmq6t67rYXgIAZ8xNvWkaMzf+/ZpC4DXWYIx1lJ8ZhmEYuOSIiAyNsUJKN2v6+N9EQejpS9h1nRMBDcPQK9W1nYzkarXq1eAaZxiGPFukWQIAqlMA5N4wKaWUkqw11kgpILQjY9xaS8GBstbadb97CkcGAP3QCyG0GpTRaZoyZAxJCGGtdXy9EKKuq9dvXhlrEUBGEQLGcRzHcd/3t10HAMtlEcdx0zRd3283G6VVVVdZlhBR09ROF8Y510bn2QIR2q6LpETEtuuyJAG/ZEOe50LKqmk4Y8aatmwRkayx1iRJCgDW2jRNoyiy1pI1RNxavVqtrbVKqXwRHFB2bZEXXPCmbeM4jqLoYb+Pk4QLUdYPALDIcyn5qTxJKbIsPRyPliiO47Kqh2G42G4B4Hg6WWuXxbLveyk546xu6kgKLrhzq2fJ5mk2qKEsqyxLAaFt2yzLBq2U1mmW+nohLBaZsabtuiSOkSEQZGla1pVzcnyx2TjeeZGmbvVcFUXbdXEcZ9lCyMgNj3WxStNF3dRCRoIzKQSQNda0Xe/G+2q5Oh5PH+/u7SUJKYzVSZp8uL3Nkvhiu3HjwRgnmmRcRISKC1ks8l98/+FQVomMAEBEkR4GGcdW9zKKORcM2SLL2rZDQKcLyxbFIonTdNurVhvz7u3brmv7tjOWuHsLtO671pIt8oIAh0G5KTWOEwgR84TgQnCryChNxlgEY40xRsRRGidKaTewF/lCCmGsPZVlXdev37zRSsVJIoTAAQHgWJbt0CNjFiDLssOhZVwQYJZljsk9nkouxOV6fXd3J2VrjZKRdBM/59wNzq5XUoqiKKqqJoKuV0IIIYS2IKQMkAktkR6GsqpMHHMhm6bJsuzr774t8uXhWALA/vCL06m6vLjuul5wyZAZY4SUcZIYrd07mCapMbrvu/V61fbt8XhcrZar5aof+q7vAGBZ5P0wLIuiH/oiz5ExpfXV5aU1tms7KSQAZFnKGKuaOo5jZNj1HSCuVsuu6/phcIGz0ySu6lprvVoulVJN2+aLRZ4vyqpy8+NquVRKd32/yNI0SbWxWpvVcqW0ut/tjDWum06nkjO+3ayFEKfTKY4Ta21V12maLoscABjn3373/ddff50viuPpsL26+NOf/uSr778XgseRePXqGgBu7277of/BZ58Iydu2LcvTm1fXZM2pKt3ZSZHnwOD27qFY5Ekcu/A4eb5I4kRp9Vd/+ecAsFwuj6fD6XS6vrpsmrqq6v3x+KMvfqi1/ua776IoBoBIRgq0iyBvyWqrgZDIuRj1QUuIgI9BHYMOjtyUZw24U5ywhLhjbc5YoAJJSglOuc8YlzzCqKzqqwvcXK4fbnYWrNY+MA7jPu7huKohEQJZa5lkITAgIGdjrDdnsRMW16cgcCKZAoT3fzrwFlQVY1A/mNbqR4Zcc7eQs7XUYgiq6P3yA4Sw2f41mJF+biEL2xfmVkEMIWistQwIXWP6LR+NjRqQET1Z9scYlTMk81zCySu+x3nB9fcZwnm80XiCJD0XGaCkl4PMQP/YZP6x53ubc43B+R7wkS4g7CXorFBTvM+phOP+y+8A/cZxHrt8BoFo9vHjx85KF3YYT7YJL+klvaSX9A8/iXG/PR7Xsclab5p7ZytH+PAxBeB+uK/sc0Ej3DpKM5nj+eQ+41LO1YlhaZ4IjmmRc2vHqFAaiStHSDx5xryo4zdPpvCzv/yhE4bn+F8eT/rjaevUEGccn29VS2SDGBHPzC38dQ5ETdTIjEuBkXiZ2AJfvlB7eFzZcO85ATBTmIEDEzTVATmfW2Z6426aTjzBLaOz9Xw+Fp6wKmNB55Tc+SI/lo3mD5nfP42YqSZjYRCRIVqLHhhR0B2O6qvxcaGv3dnwmSR0ym9CInMlXfg5jioMDNG8lvORS7MB4Tmop21zNvzm3XRuTv3kvumT6VQUZ5S4B5NOQ+oNjmliBM8xTZAg+i/CRTTG3g7vnW/MORA+L9ks69kgDfc9m2gMZjQW0CFgBAAGbH6kbG34ZW4E7cLNzzo36PH8+BghKmf8kRETgTfpZxzGKs6b5pkZZD5T+eePI+HxW4CheLN7p8BPY44zYDxuI4JOenwPp6hL4zidFXfW3P59ca0wK/7YQNM18xk+ZEDnneseRjhH7zhv1yfDYEqjuReO+5+nJ0duf8D8t/ODf982YwVmL8RYEffmWrLIOCBopS1YxhC7vE7qAAAgAElEQVQ5jhpnAhpUj4in8pjtolW20doksdxcrH/9878FgC+//TrP867plkkC1hmSUxiJ43zAGCBn2KrWWgsaAAEZikgyzub7h9CDniw21jLGHWdx7Lv1eqO1JgDGuB6UkBEgdF3PwCsou3ZvlWaSM8biOCYAbTRnHACIyO26R0qRMUZkLTAhpdYKEa31MhVEdApKIaSQous6F7Z4GAaGzrScmq5drVdaa2OsEGIYBkvaWnaqDm612W4vBtW3bZsmMefMxbYu8vx+9yCEWC6XANC2LQByKeq6QUQhRNO2RJZxtj8chBQAkGVZ0zRkbZQkZVlaaxeLRdN0WZo6WZPROo1jbbQ2OkmSrmsX2YIzdn9/F8exlAIAqqqKolhKcSrLNEkWi8X+eETENE33h73b9m9Wy7Ku6rp+dX2tjVFKrVYrpXTd1Nv1Rg0KAIZBXV9d9oMa1LBYLLRRRLRer4ZBO/XZYpEyxrq+jyJZFMXxdIqiKI7j4/EYx3GWpgBwd/+wXq8WWXY8npZFbqzt+z5LUynl/W73+voaAATnSuk8z7kQ+8Mhz3MiKsvyYrtliFEUAUDdNJvrV9ZYQGz6npFN4hg5E0wWOgeAxSI3xt7c7SMp4ijqVQ9Aauivry4RELkAgLLpmqYblJZcMuRqGOIk+fq77+u6KfJFJCQAiDi9+e7Dq7ef6E4zAmtt3/daDUorJKyVAoBFFGVp0g99Esd93+53u91+/6c//bPj/pSkKQBYAGWMi43OOAei
RZ5brYUUMpJOW2qMsQTIGAByzoWURmtCXOR52zaO17jYbu++/14Zba3mnH/z3defffaZ0jpJYmOtjGIAOJbV9x8+Xm7WbT+cyqpXum37qmyOp3K3PwBAnufrovjDl1++urwWnBuUbsIalLpeFM56t22agXPOBRFYC0M/MORSxlmUkCUXuiRJYgJijG/WWzJ6UHqRF4BQltVyuf38ky8A4H/93/+3Il8SIBcizcTQ9wwgiRPBRdvUrq+TJDkcHoTgeZ7f3N5EcfTm9VvO+ak8ub5Ok1RwrrQq8lwIeXN/J4XgXLRdhWE8WEva9E3TbNZrhnCs6ndvXgPA8XSM48ixhH3fK6UuNussy1wAnGVRWKu7romjBACQYd8Ow6C26w0i7o+nKIoYY3Xd1HX95voaAO4fHuI4SpOEM6aGwXkeqOsmTdPX19du8fxwc/Phw4c4SpRWMor+8i/+olc9GYOMXV5dno4HADgcDlEcX19fW6Cb21shRRQJbfX9bvfZ+/cAMKj+Ybev6vpyu5VS7g/H68urfJF9uLkRIvn8s88AYH/Y7/Z7rbUxpm27/fH45vpVFMlvv/uw3++vLq8BQGtjrOXWGmOGoWeMITLOuRqUVipgHuJCGqONNkIIILBOpRsC3HE2slro1ZGMKW0kF9ZYN81aa4CsJbBkylO/Ozy8enV93B+BKI6kj9DtCUdkLg42OnACiEDWQ3VjjQADDIEQx3guOMMUDj76gvk5GcZF8hGEorDoeoA7XTKdkc+Xx7ON2xliHFUabhmdyxinzGYB7UZgSRacGwrwpg9A1vNrCBOVCYHXG23FrAdXz7CwIxia1Qfcs+ZWZGEDMT+4Hav5JD8KaMn1EgtXODkB+R/zbZYv77xM5yWcnjidHs+w0rSbm+yDIGCA0GM0Pw0+23+ElmE4U7Y8c9n4b8DMc+PDvxOMvaSX9JJe0j/YJMhrnnBcEjAo0Ebi0KewcIVJdv7xM2RDcOEHECgcBHRuE8MJX/h05ipstgqHf4jcch+4TppU8zRN4nN95NlOHz3HERT743L4uMD4HF3kF8AZ/xK0PGfPChvQwI7gRBKF546HhRgab/rHrYQw8qoAMGrZ4Kxa84VvMhTG+dfjMWlwPDKxSFPNgzWrZ9VcVk7aYMjOnjdSNAELhPKGvGh0evKUZnDVn3b184rAEwg1ckSzWN/gGYr58BvrPwMlQetHZMeg8Bi0nONRZMjl2XKe12tCcrPDzOmbkTULPTcHe+OI8cNiDI43wr/Q6s7CBYKFCc4pyImFmRCn+/noXTvjpEZ45PHlk0KH3+c1cgcF7q6xbE8UfGFwAzzxpjiBsrnPT/9SjD1JI0E2RrMO35zJ7ULREQiMtf40IGTrrmDIjNEQhuC8QhRq6LIlGt8ueAwyYWRfkcj4ekwOMEeO2o+iUMepimOLTFUedz5zFIlIjkmlcTKE+ZiZN+5suPp7AQBmaD8we/7HlMu0CcFxmno2rvlIpI6Dc3rgSHqiH9aB3H00Zz4Zn8+lQEROaJ4e3xHM2CmU5/zIgcJYGO+jcZjPns4FA3SkJJIhbc1nn37qfLHd3x9d3GRAsmR3h32RF3WnrDVJnv+n//IzALi7u+eMI6A2epGkvVKu5PMxw5CYYASWIROJsMZyyQGBC2+mPVXJWuunU993iOikcMiZI7yIqKwqrbS1xDhPkxgZxnEMAFmetG2HiFwIQGfU7IhFO/bnyEUCAEPGGENEwQUXfLTm5lw415OOEGeMMcaMd2eGAGC0JgBjzaCGpmnjOBn6wVgdSdn1bbZYAICMZD/0jLMsTa21iFDkhdJKDcNqtXLv4Ol02m63ZOl4PLx7+7YfhqqqHPU2DENRFAAQSfnQPCRx4hjSJEm01pGUyNARrDJJOOe60U4jhohZlvZDb6zJM+/sUilVFIXL//XrV8iwrKrL7dYY3TSN02oZaw7Hw8X2Qgjx8fvvl8UyS9O7+/s4iuIoqeoKABZpZq1tmjpfZFywummcy8iu74TgACCE6LoOiBb5YlCDtTbL0mEYkDEpI++KEUBwfipLbc1CCDCac7ZeLR/2+zRJnNnyzd1dnmXLvCjrmjGexunxsM8XuRr6YfAOEIlIKcO5SBcF3N/32oiIFjJp2np/OgEAffV13VTXl5dlXSGD5XJprdHWCMHfv/v0sHsAgNOp1MYCMODCEFhi1hptabVclVXpiFoE2B0OQnAisJYYoLXWWDLG8MAxOC5YDS1jbLlcno7HrusvLq8ODwd/PoQM0Xr3H8Zasl3XZWkaRRFnjHmuB6y1QEQIjJAJYa1ZFsvlZtNWlfM3KqNouVwt8vRU12mS7vYPDw+3280GgIwx/p2y6ruPH4p8YbTWxjZdp605VWVV1c5j5jAM33z//aurq0EpACqbKo5kkeeCiyiSLpPBgOp7sv7ckMsIGEviJIrirh9cga21dVkSAYtiUrpTmosBCH7T/iEvNt9VHwFAa5MmuTYGGOv7VisdxzEXQhvtLHy3m0ulBmPtZrvRWimtVqsVY+z+4X6U5Q7DAEhVXSVxfPdw37T9J+/e9X3fta2QwnlZtWSdi0kpxPF0NMYsi8KN22KRR5EAgKquiGyapsaapm2KfMEFd+bPLhOtlFIqTWIAaLu+adr3b95orW/v7vphOJ5O4PjTOEaEpm0F54ssq+pGRnK73SJj39/cAMCHDx+EFEbp/f7hn/7VP33z5u1vv/wdGIMM4zS9vb8DgLqufvzDL+6P+0RKIloVBQA+7B4iKTfrNQDsj4eH/Y4IuRBt11mi5XJ5Kk/H0/HP//RPtfWycW3M9fVV3/WueKv1Umt9c3ebpplwokVjXEh3zjmXApzTc4tKKzcBQnBhYS0ZYznj1lr0CZxNPREZY4w2DiZqrb3ujyFjzLn4jDFmgut+AGajSLZts14u15vi+FCycUEGywicMZFz6OFpTotkybqTMEPOsyVDxjhnZ9GQ54B6tiieWfbOd1f+RNkDXRY2Pzi7/wmaDp8HQAtA3ju8R0ijg233nGn3dc5F2nPAhwGWOOU7PFrUnXwBJzhCMCoy55ec7V8eJX9W7CCEe6QJsHF+fUBMZ+AEZ3+7qW/mJYwQkBAeOS/ycDE0C0zwHqY7z7eWTws8ZoWju2sKWswzsOf4UZrZs5xVC8+vn1d1Bt5sGAGjc/eQ00t6SS/pJf0jSwJgtsMOFBcG516zPfEfzeI5y7+5JGcUlEH429M77HyDOxfOO7UUjY7Q8Hwle0JPTZtyv7UdD6pG6wZHWPw9Ai88qe14+xmJEgDFmeM2R+X4LfikUyMAdIuxXxUDNTTeid6olqwF9JCDzjkbBDZf0Z+eW85adtaa03LnOI6wRjMWijbRZD4bNtnxTs37XBtN2GUkNR4TamNOoz/rqfmegU80fjn7fSa5C/8gMiBvdQFkgcAJckNDhCcHzAQ0H5MjOHuWqvGXMBh5P5yL1+bsyOMGcT9GfteLEmlqrTNmEYE9coL3x9Jz7f/8he6f6bViYytYQ88jPxrpdv+yTKq8qXUoVO3sg7HaYbRMV5xxpE9q+TwIe3pFME8
CmHwHkK8ikXW+JWiihsOxAGci1Axo7vF9bpBNs0kGKESROofMgPDYdviM/Zvek/MqnoNLX5dJuIoIf78unahOvyHx5D4GMEuWHj8bpp5AX5Z5zB/Pa5JnGJ9LBDDq5Z8tJ45F+/+oBj59wcaeHF8YsFNRzpXnfjyGOlmwsxE2LwwAgLFGKRUnkTKKwP75T376s1/8JwC4u/1YliVDchTJr377q9PxxNNkGAZnCg0Afd933aC00kqZKPKjzHlL9UwGuO1mPwyAILhQoLIstUTOOVnfteAnJq/HIeKu7JGMLFmlFQAwxpRSSZIOShFRnmaGSBnNOVdaO5IxydLT8SizheBcKcU5F0Jaq92yOO7yHG2njWacM84GNUgh0L8ZFgA450qpJI4NkIsKQkD9MHAXficEJTgey7purDFDr8lS13evXl0lUZouUpeJMTrLUkQ8lWUcRW3X1k252WzSND0cjwCQpEmSxPcPD5eXV5zzpm3zPI+kNMZeXV251nvY7bMkjeOkH3ogYIydTuX7d2+runH20UWeK6WIbJplbdvkeS6E2B32b65fAcD+cA8AWbaIouh4PCyXSyHlt999F0eREOLDzceiyCMpAOBwPKVpygU/lSUiLrL0eDpZS9vt2irrlE35YrE/HJIkiZO4rEouWJYkbdcjglM+DlprY7Isi6Po7v6hKIpIRofjcVksEaFpGgBYr5bIWNt1b15dA+D+7nC5veiHoarrzXpbNzUA9P1wfXlZNe1+f/jBZ5+dyrLr+k/eX/36N78xxjhp3vXldZYk64sLawwiy9JUCtkb3Q+DC2UjhHj75hVnrK7rYpF2favV8OrV56/fvj3eHV2NAEBwbomMMcoYR/EQQVnXu8Nxu1oCwG9/9cu274WMtdZqGKSMnJSVIYujCIQEADP01trtdlueDlkar1fr/fF4f3v76u07ZAwAlHkATXmW1d3QD0opraxJIomInHEnPdPaGEtCCgbIGCBjQshiuZRRJPL8u++/A4A0iTbbNSJcpQkCa9Xw+y9/d311eSpLITgTzuurubm9+/EXXxTF8ur6um4rwcVisVjki0/fvQeAXvX//j/8zR++/va/+2//Fef48PBQFItFllqy+/3ecShcJkopwTkXIoohTlM1aIa4Wq1TpZ1XTWRsvV5bgqZXkYxSFEkSt33/L//6X65Xm//hf/ofAeCLL34oZaRMWze10YYhKqWsNVqrLFsAgDWmaZs4juMo2h93SRIvi+JwPOz2u6ura6dhTJK4bdvNetMNfdu2Rb6MZNQ0jbG0zQtttHsrOedSSs5Y07Zvrq+HYVBKJUkSx5GbIrq+L4qcc6Y6ZYniKFLDcCzLLE1pOmkGQFY3bVVXaZowzuvypLSKo+h4PALAZr2SgrddJ6UgS1VVd32/2WyGoS/L8vbmzmWjldJqWK2WP/7ii/uHhzRZdPXp8vKy7Xv35v7488+SLNXaHHb7Is+llFEkq7r5yY9+5KzC67rhjF9eXCZx/PHmLkvTqqqrqlovV3ES393eAsDX3367LHLB+cCwV+rTd+/zvPj6229Pp9N6vXUxjvpBuQYEACmFNVYKyZBzLrIsG4GuNnbUxjnukiEa6/mncUEZMSFDRCGMsVIId+xhySaR0EaTJiLWtl3dVO/evWuq3/VtH2cJAPRDDxbYyFT5zAERhGRGWwAYBg2ggZiQgnGOjPnj59mmwR96W3oCBmc0IQMY/UoTBa3fOaaeL+njIS3AU4yL0/qNFmikOAGDr/WzS4OtTMjMCSwYQyJw4ePgmZu8TU4ogANzZ/u9eZp5Zpr+nVkiuep7u/LJTmSEqXYi5UK7BJVGOCAJnrgCejI0a190iPIM6c03seM2ZmqIaQs6ln3uJH3sB0tTlB+/8fMbkEnK8bjjYVKcBEg7VsDj9KkswT6LzXcYL+klvaSX9I8qCS9THANQEHjFETirhnP+IRzm4Ljaz/bjIztF3qzMXzDt5zFwQAzDOoyWLDhuaZbcquNzZgjBl/P0LbjZeNLanLs4c5xcUIONRSGEmXdGb6Tp9/nWaMM5J7JEYYklss4EmHmvj842hMhBA0DGyJrQCmPUGWfF4JvI2fFZIkebBWaMgJAx71ZmREUW/CmiV60iI7LeJ45TegGBM8dzsi6GRCZQgeexdAAtATplaaAjyFoia4PF39hDZ8yCBUcQTHQeAHi7RxoBxsxg3i2rzjDY+NZFRh5Y43hfWEL9cGDsEdHjl2uyM3xnKQycCZnAeHQ6cp3IvIaSPLXhote4FdvaEXkGuBbQ0fTkeUAbAERryQE1IksO0wS1qW/HCXt4FsuGYRlAUUAffrBO/J6rJwaU4XHkLNoMWTs58YTzdEbXzFmd8YNwaO75Rb8xmfDSzJQEgIElO4I3GlnwsUa+w8e6EyF5cfRzbK4FgoCXPPAaR4p1CsTA0fr3E5kd+bKgpXXQkMCiQUBEFt76aSZxbyIRGUuIhIw5cydkTKMOLc88HPZta2AsEjl7LHDvEQNOo8Q61M7O2TE/aU1tjgBGuzeF+RMIALBggfhomQVTlv5AgkLdZ7MVBHp0Nr7Jyzv9JzYoGjAoDfCcdTwTSiKAt1GDwLkCAEPm4s+712Qs4TzNNzh+FwCAwObAeRz06EaWHmUWoUZhw2ORyJpxl+P/J4CZk42wSLhfCOxYIi+ED+wtZ9w66cvZs3yvkKEojREhjWRt6D9//btDfQKAJE8RiDG2ulhfvXnzwx//ybe///K+LG09KD38q3/9rwFAW/Vv/+d/y5kw1pZdl2dp3/UAJGVE1jq/tBJl23bAERD7YeActdYyks7o1dEEXLBhGKyxURy7F5szZslorRz3RNYCAUNG2tRdQ2na9T0QyUgSUQ8EbgsthIwE4yy0jEXmrdlHsoEx1nZuZy77vmeMSSH7YTDWRsFmFoApbaw1jCHnTGmVJHEcJ2VZhiGHTVsDEGPwX9l7s15Lluw8bK0Yctzzmavq3rpD357Z3aQoi7Zgg5IMPxl+8x/zkwH/BT8KhgDJMATJhiw2RVi2yCZ7uPfWfIY95pwxLD9ERmbufeo2/WKABE4AVefsfSIjY45vfbEGa1UaJ7Np0lSVMUYHDADKvBacBVxUdc0Y41ygMVEYx1FyOGRaGQA4P1vleYGAgZRlVTHEJI6VVgTWGu0mfFNX8XxhyewP+9lsVjVVnMbImTIqigIACEOZF7mUUutWCBEE4n59zzkC2P3h4JRPJ2lc1jVjmCZRU1WBEOfLRV4URuvFbLrb7wBgvdl88vxTrXXbtuerlTbaGJ2mEWf0fvswdbauqiGys+mEM2zqdj6fKq2btgnD0B10xhkRsyjPCyFEHIZ5kTPEQMpDlrllMJtMNvt9HIVa6zwvwiAga28fNmmSTuKwKnMAuDxfBVLc3d9P0qQoi7wsUMqyqh4+fBBBUFkDAKvVUnC+326SON5u10EQLeZzxlkYJ+kkBYD9Ybfdrr/68stJks4m09l0qnWbptNvf/Pbl59/UVY5AMgo0sCBh2WTAzGBYpNlD/frSZqen12KIACAv/zdq2kUIFlgFE
pWlvl0uQSrOWcWQCADgLZt0jg8u7j88P5t29SLxRyRZVleV/nVi5cAEIQBcmJcGFu1qiG0cZCEYYyMW4R9lgHA2fkFEbRtS2SJcVIqDAPBmG1bIltXOQA0LbeIWus4DOqmjoPwsNu/+/BhvpiFccyGTQ9+99vf3VxfxemUgBtCLgJAtssOABBFyS9+9otDlhFQEAQ31zfaaAKeF9X5xWXbdhcATdtiEDZNi4wdspwLEUnBiaZJ8m3dAMA2y6fI21YzLrZ5MUknMpiGSXx+ffPLP/8LJp2dddK2DVkCQGsNMoaMa6PJe7WrmhKQ5ou5Njovi89ffoYInGM6TaeTtKgKACicEwNiRlspgvPlsm3quq7SJEqiyGlZ7rNDFIahFLvDbjmfJXGoVKvbZrpYkNHvH9YAIKWYz6ZVVRVVEUehJXPIMyH4JE3q2kWOKuMoEkwo0oLLxXSqmtqxkEC0WCwAgDPcHbIwCqWUhyxrtbq4OI/DZHfIXr164w5/KWVNhpCfX54R2f1+yxlbreaXZ6tvX31bKw0Ak+m8rCvOuQWcTWeI+Or12/PVChg2SrlWX99cz9LJdreP4zBO4vVmkxX5j3/4AwIKQ+ladHF+1iqz2+dhGMsg3B+y12/fSRmScwsAkCQSEY0lpTUCCwLhzpOmrQGBcecwVwVCcC7ato3jRHCBCNooos53ZCCFtZYLIYXcZ5k2BgUHS5VqMODuoGGA2ljGuGRInGeHrNHz2WK1uFy++/Z9U7YAEIhQoSEAZFgrJWXABUfGbKtN2zoHFEEolG4toAVsVBOGUafkMb70dEieM+hPtQ6KjW7ctAvTSISADMlJFAi9MjKMxC93XNrOS/zAAfalmc6qvDvpGXNElqM7vejlwQchWUPAji8WPcpliGBZd7/pg+qRF0V82B47SBL+bLdkkRwAZAjghRtE6qAaAwRk1lhgAAjWEnMOKxlyzhzn7rCiMZZzYY2x1Omi0oDUAO0QlHLoKAAUPT71ChnecMW135mie9Gox3B983susL9d5Q71HhGMiEisf7uvQYfvTqSCrmcsGWuQnG8YQIeoR1e4LppTb/qGrqsAYYDxT+kpPaWn9PcsiWNmY2wROAiBx/IpdH7avIrZkAc7MuGUPjlNROCoPUAAS8B+/xNHCnd9Xf2xTf2Pj7zZ61mevN0zgMNHrwnkN32vNtipX4ElJwUztBY7NDA0/+g0GWGI7mPPPPTuVbwLke7N/mx71MreghYARiihr7g7yo5fP35z31ceqgCOHOkNHdLbJjwqadQ8F9DQNbyLiutV7/y987E97NGxTKMu9/Xrya7HE8D3zGkaOMAx84Kjaeu+wuNPPX3k8JFvN4476qjJjznAvlpDDcGvA0T86CO9gUff/48z9F13lD6iVfa3pNFKHk0m79F8xIN2dRmNtl/oozWFIw7rowhnmHQ4+mp4Zujkoe8eldP3JfcEqQXSHZWMCMAI7akX2o5264H2yPrZW3yPIN7QXCICYsg+2hyCnkAb6nUyoo8nZHdnMOjhHvFo35FOKNrhhY8dV47mrXvJeLumvl9HNz8njOTHmultfT4+Jweq1c/1kYbCUCj2SH3I55Dz6LXDoeCKwL6NeNzcwY9vtxMPm9R4Cn50tQ4VYJwxZG1TSc7+83/4J43Vz1ZnAPCv/89/E4Thcjp52G7W64fysN9ttkEQ5LaeTJLdw3sAYEQMMQjCQITE+eZ+G09iKYW1tixLp/1UN3WeFck8YQwBGeNILnYqWMQukILRmiETofA1JgTQWjmpDACU0QBojeGcz+czIJBBAECCC39RBmVVcsGllFobQAqDUGultA6k1Ma4pTJJpy7CdRAEzjo7DAJLZIwOw9DFkCWiIAicylWcJEZrrTVnvG1bH9WBCyEAUWsthUTEIAxU21Z1dX5+TmQAoNRmsVgYY6qqdL/ss/1yuVRKVVXl2D1EdJFetDF1XQspgaAoyjRJBOf77AAAy8VCa7Pf759dX7etskRnq1VZlojoDG+rutFaSSnKspzP587J42qxsERK68vLCwAwxlRVNUnTsqo+fLh99vxZ07ZFWZ6fn1lLTk9wtVpJwauqEkJwIYqiMMaEQbrbH+I4ckG9P9zeTiepMebt+9vZdBqF0e5w4IwncWcVXtf1Yj5vlSrKajGfHbLckk3TySHPjDWzyRQA9llWVsVivqibpiiL87OzvCyiKFzMZmVVtUoBwGw6vXt44FwEQVhWFWdMBsEhzw/7AxF5hzZktSFJPAiqpuEyWG83s+l0f9guF3MAaJWqm3a/P+yz7MWza855XrSqbbO8OOwP9w9rAFhvdq1SYRC0TROF8b26f393h1xYQC4kdyrAbSsYEkOttTaWCUEEgQyCIEiS1Gn/LibJfL7M9rvZdDqZpNYYbczy4iJb31ZFAQCci0N2WG82xpI2FgGrum6au+urmyAI2rYBgP1hP5vOrbUikGVZWoQgjlEIIWTTNO6qmAG2TSOksJbiKEEghnh/d3dzc22tCeMIAJRqrTX3681mv//s08/SyVQKYa2N4thRKnmRTydTGQT7Q7aYzydJmpe5UqpumqZVknMAkEK4hYOI2hhtNCJjyIAxLmTo9E+lBAAZBIzxIAgRQCkFxnzz7df/13/8D19+8X0AaFRjjCEi1TZOpdQYhciSJHGskDF6NpsFQn64e7+Yz6WUWqv1dnN9ddm0jVvabVOnScoYtm2TJDFDbHUrOA+DgIZbOkjTpKrr3X7/5cuXgov94SCldLp7VV0DwM3VS0uWgKQQYRjmRVHVzacvnmttHEKTXCilFem8KMIgJGvLqsqynHMmhJBCAIA1Jg7DKEnKorDWrhbLNE7ysrq/vxdSMkIAcG4ZtFE//v4PXACo9WZ9fXmWZYesKD7/9BMAsGRn0+nXr15P09R1Ud22z59d13XtlmQUhpM0rZvm3YfbZ9c3XAjG2HK+WK1WWbb/8P4eAC7OzxnnVVZsdruvvviCcfH+9tV+f8DRUUlESimttRASAMCpMgAAQG+sjYjaOCeTFhlaa5EBASilXKvJ00qWLFnjYoIZa+baFVAAACAASURBVLFzelsAQBInwIUQwpKxwBBxvV7P54skTYNANkUDAFIE3LNWCJ37DEbAEYExr6OKgQw449YY0MZygZ3KfN+mTjLwCMBJUHh0JPatBxhOYvRKh+NDcIRBPBgfY3KPeHEUK2CUhpvXExkGsSPnHFfHEJxljxOkXPlHfr0ctjiBj+SFGCdbjepLvZfn4SmH2bor+Y5j7AzVrBlieLtkO3YShwZjF1bL+JLHONU1dozmuzHprnz73vHSIow+9OiuH8euC0YDNZauThDLUcdj3+bHQojXX/E5vYyBzKsYAODRdfXHofpTekpP6Sn93U/ihPbo1B7RM3aP3FF0O/0jcfuY7YCPnHUA3igAmT9bAIG56z7z0eL8p0deHXum8Lh+R4dAf6+F/akHg+qaL+WIxLNkGTp+rTvDO6OH7h85V3TUq8ZZ293tjaGDO0nsiBPrDAQ62sT2uk/WdsyA7WDoUPvjtvpO6BvZAws6P
lGPSMlOLe8RjTI2msAhbFFfWRiQDo1ywgBs8BhrYFe7sVUzOCJiTJMN82PUrnG9jt/ku270u7Xu8vYjNEqHx3pq6mOpG6NTnnT4gY/z93YsPp/HPN7PDDrvQUC977jBUt4vhhGG7LN0oMwvtY9X+jtJLQA4JdrHTexy+Ok4zCxP7hN11cPxgPfKdke9MhSAQ+No+GL4BbFnN09tnD8yD4/aSUefxqQW4jFRd/LZV39MQPZLfpjbTrG4s2gBH/LqpJlH70bfgV4tcfTCngEczJzG28/vw4UfZZp953jN5vF6xP4nwrgbadjlTl/hLNjHc6HbO3pnsqfL8uM19a0FgJN7nTHDPyrpMfTuXAf0HgTQTwwaj+RpDU7e9R2LupcSABhn1hpSChGbtvnm1dc/+skf2KYGgM+/+N7mw/u6ruum3m3XOedatyFnggUc2S9/+UsA+PVvf8cAjW7bBtAKa4lzzhgHIhfBAwCapkkmEVnLAikEd4vYWSMCoLHOyo8YY2EQGCKlFGOcABhjjHFnrM0ZY4wbY4QQURhleZ4kXVxpMtaV5sK8AAAgMWSWrCXivCPSXagKAKrbJgpDIUTTNGEYImNt02nlOApScC4F142WQiCitkYKyTmv61r6mM4u7HgcR23bcsaVapu2ns2mAHQ4HAAgjhMppItWwRjudocwDMIguLu/m0ymjknc7vec8zAMq6oSQsZhnOVZIIMwCIuydF4UpZBlWYVRxDnPiu2zmxvOWVEWLl4NAGy3m+lkWlbVfD6Po+j27i5Jkqquy6qKwtBRCZvdDgDiKNrsdp+8eO4i2AghppPJbr939rDz2WyfZVVVr5YLrVVd15NJqrSp6+by4jLLcwCQUsxmszzPwyA8X63W221V1RfnZwCwP2QAEAZhHEW73T4KQyJq2iaOIkRQrYqTjguz1qZJ4sxdkyRpVdu07WI2q5t6u9tPJhMAKKuaLC1X87pti6I8X63qtqnqmnNelpXSBgC4DEG1xmirdV03YRBJKfKiWC3PqroAgNVi9Yuf/vSw3zdNk+XlbDpNk1QGYZKkRmvnb3S5mDdKIyFDbq1db3fWUhRG2LHkBgCKsgw4AxYwHpButNZ5ljHGhZBBEDIuAMA2VdM0jMF0Nru4uvrw5rW1dHl9Xed7xjgAlMXhsNvmRS6DGIiUVoKLQ55ZY6q6urq+BoDpdKJ0KwSvtZJRpKsySSfIeJYXiKgtAcByOrl/ePjs05dt27RtS8bc3t1e39ycLc/KslhvNwCgtYpCqZSpm/Z/+Zf/6p/9l//YGrPb7UlrgwAANzc3+32mlD5fLa21jW6V1gTIOY+jyO0bMoyrh4cojPKiiJOEOTMBzriQhigKIwDYbXdShkJwa20QSNWqLCvCOPyzP//3k+lMdOSdspYcoY+IXAil2ygQkzTd7rYAEATBZDIpiqzV7aerF2RtVVeCM85ZXhRucqZJHMfRerOx1s6n07zMW6XSNGGMtW273e8AYDGfp0my3W0vz8/jON7stpvt9uUnz6WU2/3u6uLCTeCmbbMsn89n7lLsbLnUSldN43j82WTSakOWdvvDapkorV+/exeGQRSGgguna2yJwjDYHzKt9SRNZ9NZ3bRv3r5zk1C3LTg/gwy/ePmFlKJVTErxo+9/BQgP67W11kWyMkZ/uL0DojRNkjh58/bt9z57SYBGq9/87hsA+MM/+IM4jB7uN2mSaKPzorBkf/jV93fbLYF1lVku5nlRlWV1fnZGBLd3d+8/fAiCoG0V552cQtS5hiTrFMQ6M2EiYpwb6w51QgQhHCuIQgoAMta4exdwYWrcbZIlIAik5IxZS4I7R73OyLoFzr0bShsEsiyrIs+TOJmt5vfVPQAQGXRRxBA5Q0QiqwFBSmGq7gqOMTTaaG2FlABYl3UYxQwYG91fOlmDLHlPsgMYe3QWeiTzCKGNrvaPsG1HYXpf6tT7YcdxESOU7wsgj+HQ3yiOEBlATx8CIbIev3qebnDA3ZWIHjaPZDWHRNkxMiQvpPXyTg9KEZCG8Hk9F+jFQ9YxpB1mY+iti+A70iiiuKcX+0K9W5wj1PERuXdo2yjRgIWHwv1vR+Li0L5RGASfv1NWxZEhjZtUYxEIu2El8nzxdzX3KT2lp/SU/g4nAe70cfFqveN+9AeU3+EGxutIfByLuyNy0G3lH5VyO6NlYh25h2iJ0B5nOlaj6sXeflP3ezo5ZDna3Y/Or5O7rNFW3509o1OQRibSTu29u9tEhmAJXWg8tDDcoPn+GFWyL588VwA9qTFgC0R/Dd7b5jqdQyRkyIic3dzxkQzeSVynjTXI9kfdO6ZS/C/jw74/1+Hkq1MG9JhbAQAAF4aoO7KZ15x1BgKM+boO/JTnSoZ+OGGdjt7pb02HOvX0qlPd7I59PHrevwCgM7zoVR7HOIO8SS6AN2DpxqVn5sDXuWvCd44rkA9KfsQZjTvKcy3gMZ379oiG8jedw6wcA46TAk/SMCceL7CuUr5uY5g4BFw6GoYBjGL3wWPQYakPI3H02o+s76MZ0H/p3zZyYXj0rNX93gHIOqjs4nZ4qyPW9w8O6nbjG/YBonncO0blvf9KHH01WoojZ0S9SvVIr8CLAb49fUf0/O3wPjpq89F2cbK19b3mB+WRWdXvSwOzPNqkfScQDsq/42d8/tFSO8mEo7b53sXR9ycVR++SvnenPurOkf34MTjvSqW+Ar+/xeQz9/LO4DC0m+CIwDlTRjEEwdmbd2/+63/63/yv/9u/AAAN9P7Nm3ySyDisqmKfZauzs3fv3sTx/JtvvoniCAA4otE64AKR6qpkTDLGGbKmrTnnzgwwSgJjjLXAGHLBicgYY41xlvpOjhVSIgLj3LlzACJDloiEkNYYAOBccM6dnFxWpVJtXaPW2lgjHAEKAIicc9W2nHMhRN1UggvndVEIEQQSnAYlY45b5JxHYVg3jSWbRmlR5K7bZCBbpThDznmrWrIUBIG1FhlzAVu07phKIhuGsmnaqiwnaZok0Xa3dz2bxHGWZVKKxXz+sHnggs9m07zIuRCz6aRpGldOmiRt21qyaTxRWiulbq6unZLafDYHgKIsrbXPbm4e1uvpdMo53+52aZJEUbTZbgEgjuNWqySJF7PZZrdjjKVJcsgzsna1XJZVBQD7/f7ZzU2W53Vdny0Xu/0uL4qry0tjTFlWi/kCAKSU+/3tcrkAgO12N51OJ0n67evX19fX2pgsywHg2fVVlufr9frF82etUocsP1utAhlkeV7XDQBMJ5P7h3UYRXEUllUlpYzCqGlbLoTgom4aAAiDMAxEXhRAEMfx7rCPwogx3Gz3cRQ7jbn1ZnN5caGNLYoyiRNjabfPpkmSpJOyKpXSAJAkaV2hMUZG8Xw2cxa7QoqqrhxD8eb9u8N+t5zPQhksF4tAhveHe9Uqa0lpk2UFAEghBOPWgmrVv/iX/yqQMgiizXpb1ZWx1nFhu8Pu+uq8LstWaSYkaqrquqoqF81jPlsAwF/+x9+FYXB2fsYtHnY7rXUYBFoprXWUpgDQvPtgDDHkWisuJGiNjAnO
gzAoiuLu9gN47wFBEGhrEZjk4sP7d4hMCOkcUwLAf/ff/vf/4//0PxhjlFKHw+FP/vgfrbebNE1/+/XvgiB4WK8BII4ijoCIzgnjf/rVr/7Tr/7mH/3RH+nZzIWpIWtnk5QxPknjum52+721drmMhBBaqXQ2A4D58qzIMyJijBMgY1xpXRTle/1hNl+4fS2dTKbTWdO0QgZVVYVBWNVt2yoieP7sE+chkQEZY621YRxbo4EsIk4mU21U3VQAcDm7IGvKulrMZkEQtKopiuJsdVbVtXNPBABxnJRV2bTNfD4TUtiaACCQkgu+2+9m0ykAnK2WRVGoVr24vlFaFUVxeXEeRdF6vT7k+XI1B4BDnud5TgRSBnleZHl+fXVVlGXbKudvNIwibNr79WYxnwnOD3leV9UkSRhjxhq3miaTtKxqF3VqMV8y5N98+yrbH6azuXOLCQBlWQghvvfF51meGavzIn92fbnZbvZZ9uVnnzlzeCn5Zrf73uefh1H49v277WH//PkNEO72+x999ZVb2lmWlVV1c3VVVpUx+ubyUkoRhsG7D+9fPH8GAK3SVVU9bDZffPaZJXp/e9u2arFYbnc7KaXuAoVhEEhjNDrHhc6VMHZ96HSNOWOApI0x1iCi1sp5cdHGcM4BgDFurLXWCs611oODFkRL1mnLuoFu25YjIwNCiJCoLMsoiufzWb7PAcC02tGHjNB547WGOKDkvCaLyAGAIZpOqkAANNo4GysanbPoTI7GNBwQPjqZj0F8x3jR6JT3sHOwHRnA7QiM9K/wrx+/xcPFAVl0sOwIAXhjrg5RsVHVPGvsIcEYo/XVd83sJY+TEI7jqnTKMdYrZPTGXY6Ipl6eYeA7pYccNHbw5ftt+GUsyHXympPZOkc4I2nS/+c1HWks+T3qlQ7rdALgIxg2QkH9QHQD1UPgHpt2FcOjYUIPX/28Ghf8/80Z/VN6Sk/pKf0dS8L98Hr9PnZMd0/nc41NuD3b0HMpJ6dZl8lfLh1/7dyUECIbNOrB4jhw26l4/PGzqsvbc6bH9EP3+7HGkvvFfjz+2McV7lw2ci7XgDr/cAzBK9EjOisDHJ9c7qU48uoy0u7puDwkdnRQoafKhnMeAU7aTR2R5g+eUy6SCPoActBReN4IdhiZQTVyTEmNv+kvZ0eELnijBRw9MZh9Uq8Xd0Is4LiMx8THEXM4GqoT41f0IOmIhxq1izqHpI5upM6wtVc569l18BNqeNXYi/bJHOjJtYEnIt8b3QfHYuPYpGPoz56e+jgLddRm3yOjXwdGC0Zo5TjbRwrsuLQRyePaMLoox99TqxGbNiCtAffQadbR8HpAht3k+e6mem1nP5v8btFdBKO/JKeuXMDBrw+MQkv11m1da/rr9364OjQ3etvxHTQOX/Y83CneHsa9L3vUmIHy7ZD2kRphR27268TD0L7hjzrJj48HzUNBp3sWUKc27odgGLDfM0OO06lqRF/26MdQp48+73ebx+vH98gw0EebjudRO6FjeOpREd7nRjfE412hX2akjdZGS8EY4mI2+3D3/vbuFgCiOA4ZK4r8hy8/maTp169fXd1ca2vffngndJAmEQDkWW5AC5kgMjA6nkdNU1shuBDWWrAaAIIgyLKMycDdvhCRs5d0rpscP+KU+JRqAZBxpo3mXEghtFZeX4ZZa5XSSqm6bhjnjsVDQEskGAKAc2MKCIyhJUtEnHMAMsYEQeC4MCJIk7Qoi6qqFosFEbi/ulMgCp3yI7VtE4ShMdoCCSGMNVppKYULMovI0iQpqtIYE4mQgIIgCMKwqipj9HKxBACl9Xa3ff7smdIqy7JnNzfGGK31+dkZAbgY05M0RcRDdpikE611VuSz+dwCbQ/7NEnctNwfDjc318hY1TRRHO0P+yLPu+guTQ0Aq8Vynx0my0VRlfvD/vLigoDqqp4v5k3b3j88AMCzm5tAyv3hcLZcNm27P+wvzs8ZYx9u7+bzeZokAPCw2a5Wy0ma7vYHIUQSx4c8D4IgkPLu7sEFOCaAzWa7XCzns/mHu9vpZOI6tqyq66tLAKjrxlgzCVNtLBGlSaKMNtYkcayUFlwAgOBcG2O0DcMgy/M4jKIoLorKWX0WRQkAk2QSyKCuGynkYj7fHw5AME0nZAmBOQN/bSznolGKALQxk2TChbDWBDJwYnYSxZyx6WQWJ0lVVQiAwMIonUzU+3cfwiAEAACWREkUpb/99e/+9b/53xeLuVHGxbC2PuZ7XZVWtVEUz88u2rY1NkcuCFkYxYhotAKAD/d3FxfnSRJdPrvZPdynaRqH4Zuvv9baNHUDAHVdc8a4kMoYJgTVpFoNgNPpfDqZuA3NRdkGRFs3qq2QMaM0AbRUcc7ffPMNAPzzf/4/SyHqpq7rmnGW5YdnNzd393dAMJ/NXEAhsraum1a1URjGYaSV/oe/+Pl0Ov2b3/7mJz/6AQD84Kc//fN/9++t1VxwAErjREgBREopIqrLEgAY301n87vbD2EU1XVjiYCwqdv1enPeKBfKRkjZtq1TbRZCGGOiMPrmzZuf/eznSptOZ9m562EcAQUXdVOFQSAE3263TsF5Opkc8gMQXV9dMQab3dZBHIaIyB1Xbo3Z7fdRGMyn06Ztm7p2zzob5HgRAYC19v3t7XI+M9bePzwYY2azad00WVmena3ciVEUpbV0dXGptTbGxJ2TAdRarxZLAGja5vbhwVq7mM2U0ncPD89vbpI43h0OddNM0tTtpkVZzBeL8+WKCF+/e9s0TZJOtNZaa+cVnRC++PLzOApbVT/cr2+uLgCIiMIgnE4nLgT27d3tbDrlggvO7x7Wn33yidamrKqiLD//7DMAKIvy7v4hkIHbT1qlXr789HA4bDbbpm7dUce4KMrq8vzcGFM37e3t3XK5VEZzLhjnuulUy62xqlWT6cQFiAcixpi1xLlwF0LGWsbAWmuMASRjrXPKaEznSEQIoZuWyCIyrbXz++zwhrXWFUKcPNhnLp6ZFNKFbgrCcLGaA8Dt+ztJ7q/WeX5kDBHdxRg3XYApElxQF2geOWPWGjyGyeSP6x6W9gFbBoA8MugZnnRf0dEJO04OIXzs674GYz5zBNm8skgPIB1h6n7HkbzipawOG7mSsLcDM0dV89i9L6ZvzBEqGGMyDxUJO3/2PrPXfCTvlrrTEATqBRx7HJdnuF1+pEbY0Ycj5RI6Gh/or3A7ONnl6xvm3meHmndE4+D3mobSRi/usOkpXENAZDAOSQUjrOgJ3HF2OG3SU3pKT+kp/b1Kp37ZntJTekpP6Sk9paf0lJ7SU3pKT+kpPaWn9JSe0lN6Sk/p/6ckRhpUo2DZve7fkHN0oYdeXev4VgsfZRxpx3TJEhGRc//islhLLnJF//QoUf/fOI00hgZTZjpRGju+jRqsDnplIq+HNFKO61XyRteSvZpZb3Qw2FUAdVmPrrwe63fSYFPpPamRfdRY6DUMj7vQ/5ENubq2Ow23we4CBg2u/sOJTluvavRYnakfXvAO5o4zDJqJAODVWRlDRKaJOssQr4/p70yHW1Ps6/hdHTX
Wyer/ParD2AC014od7GdPix0m8nAHTN6eBcZ2Jsd16fqu15MbyiELRNhr5g232B/RMTtp0fFfXfGIH83wEYVg6vMd34cf+wLovutCJuHQ7ycL9nEa5tzQv11ho2HwS6S/5D4t0nsGHAKmHA/6R+qAzF+JE/gI6QTY+QMaLy9/Q01EdtDz9TPT3573IaOAvPHSiXIBdWET/eLuWtE9ONIpRkA6iVI0Xu04tG+4BT8x0DlRQCSvenBU4km5fd5+uX/sz72Owzj7R/W8RxvA8LO3fDou9Pj5vuyjVeXX9aDH0G20gyE7+oU5drFANLzAuxg99R0xHqfRAoLxft9v3/4T4wwJAx4IBm1TLeJV29RREACAQGSA1zfPGmvu7+/iKNxkh+lq8UfPb3b77NWbtwBgDTFk0SRqWxVEYZLEdd00bTudTdu2NV3gMhOEATCODFycVqcr4dQ3XFR3Yw1jvK5rLqUUsjEmDAJtTNsqF+0agLQxAKSNFoIj44AggCNDa1zceWDItDVBEBhjrFZhGHLO6qZGhkDkYq0EUlpr66oOwlAKWZYF45xzVlVlFAZBEAJAVVVCCs6YNhoZQ0RrrItI5rwfTqeJNtoYk8ZJVVfW2kmaaq2bppmkaSAlAGR5Pp/POefr+4fVchlH0SHLOGfWms1241TzgkDmeSGEFELkZeGc6G13W85ZksTrzQYAkjThXLx59zZN4vls9urV67PVShv94fZ2sZgDwHa/Wy1XbdvmeZHEiRBis93KQHLGbm9vp5MJAMRRdPfwEEgZx/GrN6+SJJmkk4fNxhgzSZOqagDgcDh89uknxpiqrs6WK2ttlmUvnj1bb7fIcD6bAcBuv2OMzeez/eGw3x8+/eRFVdf7/WE6nTjNR8TW2Va3TTtJEyJwTgMBwBjj3DVqrVulAhkw5HXVLGYLxlhZVqvlSmtdVTUAPLu+NsY2TXtxfh7IYHc4nK/OOltRGTiP2VWWkTWC87v371ultSm+9/KzJE3KsoiibgiCIGScTZKUMwEAk8lECGGNmc6m09kSAJQhsmQtfPP6zXqzDoRs6iaOEyFF2zScIQDouq6rOohivd1rS0Ey4UJobQyRVrooSwD4B3/4R4vFUki2Wz8A4ny5Ouz3MgjCOBYyAIDV2WVVtx82W7IkZXB5cVWXNWOMMymEjOMEAKwxLq6O5Kw2OmDSGVwWWQaISZwAwN/89teCc63btm3jOP6z//DLyWQipazr+v7hoa4rABCcx2GolXpYr8+Xy1/95jdVXS0Xi+vLC7cVJOk0TeJf/fo3WjXz+VwG0hhT1XWnmjqdAsDbt28mk1nTKq1Nq5QDUohMyEBIia1yc0aIII6Suq45F4EMyqptqkbKUJvaue7hnCtruEBtNGeMrA2jsChzC3C5OgMAxjAvsjSJAaFumrquz8/OjNHGagIIwxQA1ps1Y7iYzznnRVnUbTOdTVvVHrLD+dlZmiYAcP/wYMHOptOiLJRS5+dniFhVVZoky8Wiqkq3E07SCQFpbYhomk6iKNwf8iRJ3OR82Gzatv3sk0/qutkf9mVRXp6f101tjE6S2Cmfbna7KIrPV2ehDL9+/frd+w/LxVJrk+cZQzxkBwB48eLFyxcvirKo6nI5nz67ucqyrFXtfDapqkorpwMInzx7zgU/ZPn52dl8Pqvr5tvXb7787LPtZgsAYRiVZXV5fvGw3vzum2+++t4XgjMgc79++Ac//8V2fwAA1ZaHLPvs5Utr6dvXb53/h90hY5x1EbcAiEhr3RvOdE54CNx+CF79nMhwwRhDIbgLR+YigDlgwxhniETEGGptpHTuMRkhMMHaugGAQATamCAIwQIAMUBlTVvWVVIGs8VytQKA9cMDswwYc+cuECEya0ErEyZh2yIAWGO11czbEjMuLTnDKBphda/edqSRhzg6+713IRidjjQGfv1pSaMiTlKHyL01xxi6Db5lkHns6W3MEeDIKPhE0BpkPARvEzPoCA4CAHkZqn8KPFzw5sg4NsoZmaZ4wy8y3nsVMXL2e53LRATsxxcQGEPqYgt1Ieb6N1Nf9NhupceCwzj0ebomDpJMVzscOnIMaPoexd4v/8dG4tETR33qXspwMJv3oLr7O3XSYlcNQB/g5ik9paf0lP5epiEA6Ogo89syneyVSEj9CTYmaMBLyx93WDZKONLy7z4ye3IU+9SfHx/d1J20awfSC7wnkZN8J/TBo/p1HxmCBYYdbcN8gBpLxLzbOtdId+oNmvPQn6BHBZN3Lj2YoXcCu7MifkwEdMaox/abpzTQaaIRKUf+9Oxb0R+q/RnrzQg+xhA86uP+wKahEwezePLuCNF5weyH0VM87sPYJP24+Ef2v57iYycZO4zTmW30+Yc+P/HL2IEWGtux4Kgk8FTICJGdNh57BHJEHiEAWY+6OqtoZB5PgmeEPJV1wqecvKQ3yQHffX9rOuKExgu2/7KjhnwO7KGLpX41dEQSjp7205lGHx5hJByhuh4Q9XiNOrZ4hEi7OXM0145a0OVi/d+7xTWiGcd1dVmIwFqyzCl3P0JhBN5BujPAIRyw/VHnHKNyN5/9Ivc5aMTOjW8pHr/yBGkf/52GFUfeTe14G/Qf+/k8gs+P+UU6hr5D/fw+NCYvx8sWsWvFMRE5Xond1YmvyFCGx+dHbXZxRbuuAzoq0Js1+T33BFKT58y9Nf4oQNZxJ+Lwz/qrBAIfiNoVqpTinCFHo1utNWP48sWnv/zlvwOAOIkbUn/8x3+8yfe/+c2voyhSqv35T39ydXnz6vW7v/zVXwLAfrO/vL6Kwigwarc7NKqJkwgqOuwPRBAnEQAYawBRBhIBjLUuhmwXVNda0UXX7b5EAPeLJTJax3HkPJflRQ4AjsVr2iYUoTHGWMMAkTHnO5LAWmuAc7ebOHdvRBAE0pIVnLseLqtSSJkmidLaWCuDwDGVQkhni01kwyB0wptzHoIMBeNK6zAMAEBwnuWZ4AEypo3p4nFbYoxFUVyUOQBoo+bzWZZnxpjFfNEqbYydTqfOktFZfWpjtNaz2cz5KJxNJ61qtFbL5cKSdZ43L87OdvsMGZvN5tv9Pklizvndw0OSdoGJEVAInucVAS0X87worLFnZyulVBCGzsjaGNO27dnVVdM0ZVleXV0iQtu2z5/dMGQPmzUAXF1cWmsPWTZJ0ySJ373/EIYhEe33h09ffOLcNd7e3Z2fnRPR3f39fDYrq9p1DgA75BkAKKUnSepizhhj94fDZDIRXJRVlUSxczBXFiUQcM6V0nGUaG3qurSG2laVVemGab3damMm6URwvtltpZAM2Wa7Y4xrpauqAoC2rZq6Pl+t8OBUQgAAIABJREFUXr99UzcNELx5/66qq9Vi7ibeP/nTf7pd39+9f/f+9jaKIs6nxtS3797/9a9//dWXnzdNDQCTdPJXf/3XjMumbafT6SHLknBSNg3VdRQEbskZawnxbr25ff++KkuttAVcbzetUq0yzrYxicJvXn3z/OaGgXn58tPbD+9fvX37h//Zn6zv73frNQA0Ta21VUqVda2snc1n6+1WCt
k2qigrF6I6imIA0NoEUnIhGqWQsbLIgzj66ssvuRAAcMiKq4uLVquyKLTRZ2fLumm0obqqhBRxHAFAVRSZauqm3mzWbdtIydMk+Yv/5//+yQ++2u73APBnf/EXf/zzn0splsslIjZ5zjknIi4453y7XoMj7o1JkmR3yDgXxtpWKc7JOGoeGAAEYRjFMQBWVQ0E8/n866+/uby6UkoTgVuVWmsCIGuRcWM0F4KI2raeT2eOAVxv74FouVgURV7V1Wq5jKPYGLPLqkmaurHOi+J8tZqkaVEWVV2dr1bTyUSploBm06lz17jd7Z7fXAdSfsiz6XQynaRN01ZNfbZaWWs7R5bIHEvetG0SJ2EYaG2MMYvZrCgKADhk2fXlZRTF+/0hr6qXL16ots2KYjGfR1HsQkIhY6v5VGl9e/dwd3sXRXHdNEAkuKjryvmgfPHsRlu9O+x3h/0f/PgHjroLgyBNE6XVt2/fAMDLF58AAmPsYb1ZLGdlWeVFfnV5MZ/N3Ybw4fZWCLk7HIwx8/n85z/96SE73D2sv3j5sqwq56vhw+3dxfkFWarqerPdLlcrxjgX0hJwzqWUAGCNIQBkaLTmnANasoTMcYvM+Zcka5lgWmtA4IJro90m2sfj5oILKR3FrFUrhQvsQ4iMcSadmwtEo7UxRnLRw3JjTFXVk1QncQoAZ5fnD+82iGC1Ua2xZASXZIhsE6R8fCnLBScCpQxZw1kfDbPjKDvXRIg0+Do8hk3dqe0x6XAG09FBPkAC9BzfKRZxdRpQgvuSjnLR8L9/Gfqz1v/Je1l0aB0RjsQWbwFOFrzNMnVYG8HfN6M9fl3XDhqHzOs6BqwdgSOPAGGUzb2RYRfDzzXv4/TcY9gEA1rvELPHoTDIOziIIUNJvWT6SHrofvj+Hv99/ADRKPNRhW1Hs3Zzb+QRx/cKdhfnfSzSj0gDT+kpPaWn9PckCXCyrfcEjMNBeMxSHAm3I7bFe9xwTp3dGdVlHGusjZKXavttdLTV93mOZdHxHwcKpWNYRkfB6DTv3VN+7P2e5ECizuXz4NONCIhs5/nxO9yyENBJ4fQ4kz+pPnKt6B0c92pDToMIGSNjh3qcvNRHlUYc94GX8rt6HRONJ/WBER3TH+2j+rsPbLjtw6PetbbHJR4YkVdV68lIGnEzbgqcctR4/MtxwKGuRcOrOwVLT36Pi+9L9c6//SXuCJX5kET9ae59tfTPjnFUx5p4NmUEzUaVJtvxiO4jdnMICU3XM+B/4qPBOCH6ei2+vy19ZIKNEWWfbUSU91+hX2gdEXX8945kxH4Exq/sSKzviPxNIx82vutG3/pQJTDsGkdvGBVIzG88DmiSd0Dp4wbREas9sIXHLRkpLXejR0DY3Rs7YEv9hOqvw63fAHxpo03P067e5XgnPZz0w4ivG7fsaOxp+OB9Co3rPszlU852XOpoTo7Kezw0JzNjrITrYfdRBY/qbcHLDMMwHrdvWJYnNTha6mOxAmi8z47L6CrXP9itPa97jI8681TKcm1lnGml0DLJBQRhVVWH7OBE9yCJifHSmK9+8KPd+mG/339yc/P67dvLmysRsPOzFQCs79ZEUDUNEVnLjNHaKM65UiVjvG1aALBkwzAQnFvnmazzAgYARNZyIQHASa1hGFqCtm0YZ1VVci6IoCxL16IgDI1WnAvBeatawYWLJ+PFDQALggulFWNMoFRaW2vCMBJSNE3dqXsQEdEkTRGwasoojIBAaTWdTojISfiCc8aY1poxJmWgjbbWMkTOmBPvq7oiAqeSppQOg8AYa6wJw9AY4zTU0sm0bdssz57d3DDOs+2OC26J9vv9bDZDzgBgfX8/m83DMCzKMopCZHg4HACByOZFEUUhABhryrJcrZZ1U6/X65vLq6qpD/vD82c3jlpaLhfW2rppppMJAeRFcbZccsYeDoc4ilz07fcfPkzSNAiCdx8+PHt2Ewj54FQvo/j24cHNhyRJ7tf3bdteX17leWG0fnFz8/rd+5vrqyRO3t/dAkAcxbPpJC8KpbSxtjzsEVmapkVZOIWvxWLeaqW1llI2bSOlTKK4aVutNE+5G8qmbQQXbaNapRbzubW2quvpZKpaZa11bI7WRgohBH93e3s4ZPPZ7P5hXRYFEeVlXlZuShBjyIWIothJoYyLH3//B2/evX395g0AfPnZZz/5oz+8e/+eMSaFqKqKEK6urn784x8FQjqCNcsLC1gcDm2rGLDrm2vk0gAQURwGCBYALm+u56v5rigZMikDo01VFFwIXTdlVXSkm2qSJM7yLNtvnz9/BkR5Xmzu7va7rRCh2xnCKJ7N5lWrldYAaJSpykpw2batC+usWuVOUU0URFHMuVLtpy8//emPfqKNftg8AMDlxZVWypbm6vKiqsokSYqqCITkL/hqsXThmDbbdV3X2/3u/OxsNp1Mk4RzrNu6Ver52QoAvnn9um7qQIr1ZjOfzTjnLhrMw3pjiRxrrMuGAKq6RkQhJFrbKgMA2pj79cbFu5gvltZYzpkxJk0neVGUTfHy08/jONF55ojauq44CywAZ+4wYW3bSBnMplOlWwDIsuz6+ppzHrEoL4tAyqatGeNRGEVheL9+cMvWRaPe7HZhGCwXi6Ztdvv9armIo+ibV98CwGq5SOK4rCql1Gw60drsDvsoigmorOssLwDAuUzNixKIwjAggs12m8QxEWRFDgBa6ygMVdver9ezSUqW8qJYLZezyfRu/aCUAoDlYjFJ0zfv7968fYvIIiHbtjXGOIr/8y9eAsBisdC6ZQyfXV9Op5OyKveH/Xw+Y4jGa3MvF3NEdn//wBgLw1Brs90ffvi9r4w1rnt3+0OaJNbSrth/+fLTosy32+23b978V//FP37//r1jCV2Qrqquv/72W631ZDIVUgACY5xxbm0D4Hhh0q3qXIv2WoYEjDHw4W4Qkbtd2lrGGHVK+qy7kXQaBsiBwFoSQgCgUooJgcgm8RQAnPa4sSaNYk2sbhpEkFIao5VWhgwAXF1fX11eMASlWmu0MTYQ0lirtRFCdoiVMSEEWVhvtuv7TUfNwUihwflNZgyREdmeGsRO+e7ohCYPjIZvxuTY0XEJTiECR9+NxZwT5HgEBxBdgMojgEZ2OOQ9XOnFkf6GtUdOx/fWw2vGSKg//z0sh+5Oc6T/gX28O4auJdZQzzm6x5Axoy1nyBg687sjNOcyntyjQucbdKjKSIo6kj5w/NAIDQ1fezm3l1+OERDZI0CHx/8fy1+dlIMIZKxzOjsMT2+FhV0UA6/W4r7ETgPmO9DjU3pKT+kp/V1OwodC8CGqx7p8iAA+wHR/aniFK2exdiSa99yY27URoXt+uBziPWPZ80Y4DjPRv9zv6tip3A9nBwEgWLJgfVSDjsgYH55A/mAjIBfkxGWzANg3yn1Dpi/COEDTh9Yh6w4y2xN1zkEydSYA47Onp/O6j8j748FVwJ2SyJkl2zVnpANFQGQMA9ap4zG01kJ3zFsAQC4QOhLTdbC7Yx3OvvH12vhwfcw/dfp7NDr/hkDPlrTjo30GYowhZ2QsEBl/A
CJzccAtETloB/24gi+VMdKGISLrYqk74NKxhg7BdGGvAXsm75hT60CPa0jXRh+ocjjNRyyNBRdbBpGQIwBYi0SE3pKVrKNAENGMpt4wx8ET3V25yEbQEZyrAdfvDBBd1Oee+ewRAhsQRg8/RuiCYLCbccN/cpmLRKbDkojMKxASwDhi4PiRTuPzhOfyvcKlsJbIWsDuun2ARP0VwugfIGD30m4a9KQiOfTfdzrAmIXi6Gaxu6awZNwKBsb9YA9IvIeKY1YYAYGceiNo19EulhQ4DTVnl4LcX8J7hb5BeRHB9j3hwlYSMobAiFm/tzCwQJYsWAQAJGTcUeuDMfEQPgV78Ix9fwAAEKLoq42j2W/JDt/33QgAPfPalXI0gIjDnf+wkjuNZ94N8UjecM8MahC+c30HdzoNFmzXJj+JOA+o2z8Iem2Grl7Dnt6V3M+hUVXBmTx2Sw+t22+xD4BOXSbys5sIiBgOk83aYb8i7PQBXDYcLI/I6yQgQ7S282Dvt5KuKGeOhUIAgiHbkBFav3v7WrsuMgyMFUYfNpsgScsPH9pX32ZN/fmnn6LVYRAAQBDFTdMyFFopMIqLkHNhjSYiKaWLS2DITKepD1FmOENENFoxxgIZaKNdRWUgGedWaykF41yjZoi9kiBDxgXngltrOJdoLGcYBEHbtr2UyDgjS4QWERDJGiMFDyRXqrVay9CpnmnGmbHaGMsZl4HQeojx7XotSRKtdavbMAiapgKANI5b1QZhoLQCgKqqJ5OUITAgjiA4a1XDGE7SeJ9l7k4uSaLb27tJmgZS3G82huxiOi/LGggCGZRdtBA2mST7bG/JpOm8quq8yM/Pzq0ha0kKCQBV1UwnaRgE9w8Pq+WScVYcitlitj3s3eYWBtF6s07TyXy+2O73QRgFUZQdMq11Eicf7u8BoGmaF89usqLQWsXJZJvlTas+ef5CW1tW9SfPX7ixaep2NptKKe8f1svVKq+qQ5alafrtm9fOhvqLzz5tmnZ/ODy7vgnD4O7hIQhChly1Ok1TAOAo7jfrxXzBgO+zw2q5BMKyqKMoRuB1XQCA4DKSkdYaBGsaVVUVQx6E4SHLhJBKGQBoWxUEsi7rtlWLyawua9O0k3j6rnlrdCs56xY+hzw7OEbYWoNoCWxZ52kcAcC//T/+LaBpTfuw213mmTVGSimEqA5Va1sRBADAAZFom2V5XX3x/a9kEMRpCsZootVy+en1NQAwKSxAtcs0DyvTWhGQbCfTKQrGJXt28wwAdL5fH/Y8ENP5PJ4t7rdbHklr2zAMQxkCQJJOZTrbZ5nlQRREHNn0e3MylnH5u9/9piwKANCtAiAwJoiCViut1XQ+/Sd/+qdKqc1m/fbdWwDIsuznP/npz37+J3l2+Ku//ismcD6bKq2f3byYTmZff/0bALh6dlOURTqb5EVxfXPzxWefXpyfZ9nhbDGfzmcAUJRVIOQXL18Ggbh/2EzSKVkMgvjHP/ghAsRxDAB5ZbUlxgQTQjWtMTqOAmtJApHp+DKBMJ1M7vdbEKwq281+JwLZtCVj1NZVmRUAIERAREopzjCMQs7ZIdtdXDxLJ+nrd68AYLqYJWnSqLauq86a3prtfrdcnimt9/sDAFxdXgRS3q8fWqWu5ud1U1V1HUXBfD7bZ/v1bgsAP/jySy74ZrtxAXw2u11VVfPpFAnaupmkEwBwkZeqqkzTRBtVlGXd1F9eXylNVaMA4Or62XSxeP/uLSJKJh+297N0MZ/M1/tNlhfXlzcAAMhv79evvn0bRrEFaIwFxsiYpihWy8VXX74EALJK64Yx+OFXX+RFcTjs67q+uLhsmmab5V9+/jkAGCDV1r99/eoH3/ue0bZtlBRBlhdE8OH2FgBm87mUYrvbCilW5+f7Q3HIq5//5A/2WYaMf/36FQBcnJ0HYbje7e8f1ulkgow1TWuU4lyQMQ5vcM6tNbVqUquVapTSnAnH4llrnXOJFlpEbJVinDu7bGVaKaRSjeM9wUW4BmASldZJknIhuNGqbZFLEUcAkOd5xERT1zRJDQPgDIxlFnSjBBOkDQD85V/9Vb7PIhkKIdJpMptN9zojIiGEapVThp0tFwR0fXXTtg2Z1lrkQmprkHPobpUsR4aAxuguLE4vnbDhNruXoBA4ECEOfFyfhrtBX0CPVXoM6v9oyIP7XtUOOkqLgJAjQ9HxgwQA1lJ/IiOCtYbc2eRveKGnGgcogh1aeWTc0VV8FC21v14GYIwZ7WhlYpyTc/CFZDq3WJ0IYD1LiQwJCThYMJbQ06/YowNSlnEXrYislzst0aNqDVi9VwYZCFzobrStsdC/pKMxfWYnUoz0SLunyPTI3z3Xx8XpkKUvy4N1sp1GNllDyAgZIqG11GnJWEuDIfiYOn4KBfGUntJT+vua+sjaACNKAP1t1/i4G22oANCrRX4s4XCFh8M26yRTcJv8mMgb8vnUKbuNVYb8meWlbncosNFBPSqip6fIdv99V1WPq+CvJL0o7KM1j2Xx05r79tHwC4AnxLrf/LFGvX7fySWbT9ZaZ3tirFMMI2st6+Ocj86+vhq9nhfRwFaQhwY0vNx1UmePih17OvyxV/l8XCv3akcLDQQqePZ6GOMxtXUEjgbCazRpyNq+QR3Y8WxDP5YdfTc4pOtoU+vngMvEvNKf5wk9wdxRNqPW95QjjJvSEZQ0tIQ+NmfcWxnAEZpxz4xAhefHBtDh9L9Gc9hXggZC+WRG+Gf9fPKWr985l0fG86PGHQHWUW+cxDyGUa8N+Ts+3d9jd1qCfa1GOHmovLEGe7537N7JDwAN5XeKrR9RZB6eGBgx6CnCo4p6SsvHXyYkZ8PVTUj3nCVE09nVw9BPOOh++uYdbUboUWc/T3zVR39/9OtRv46J1uP0sZH8KHjvNiG/r40kDzrCtieayJ2W8cgVQycp2G7pEZFDtqfPHW3cR7UbD3jPJg419SdI90132eNXK0HPcQOiHe3xfrPsSHBP3LvWERDQmKkdlpYvloxxVzhcsEBIY0zV1M5oVEqujfrNN7/72c9+YYwNgmh9++Gz731pjE3itCjfA0AYhvt1RrYRkieTJA7jOI4b3jZ1A0Bd1Gwu6rqRUcAYY4xbsOjraqxxxrkAgIwZoxGBMWGsCaTUxlhr3DbOODO2u6GxxjDGGOdKKUuWI/PW1qLVLReciNpOt1FqrYls8P+y9169tmXZedgYM6yw144nh5sqdld1qKYoyAZIyaRsmSZfBEj8Aw7wX/Bf8KMBPerFsACDfjUgw5ApBrHFbjM02WQ32ZWr7q17T9p5rzzD8MNccZ9bTcPwAxs48wL3nLP3WnPNtOb8xjeS59VaNBv4vtbaWhNFERDkRc45U1qnaTIYDADAGJNmqSc9a61zeFRaEZFzeQYAz5Oci7IotdHj8bgocinFMIqSNDVaz2ZTAEjTFBnzfG+7i7M0Oz09NdYUZTEajsqydL7n08lEa51n+WQyFoxtNpvxaMQ532w2s9nMSeZpmp4enxR5bpSKwnC5WhV5MR6OduVuejAGgF28Kco8GAS7ZHdzc/X4
8WNj9GazefL4kTHWpYe+OD9XWs8Xi/OzM2vtZr2ZzWa7JL6+vplMJ27RvLq6BgDf8796+XIYDQUXL1++evLoked5SZxenJ0BQFnqq5ubKIqQsdVmay0xZEppz/N9LwCAzXbHhQiDIMuyIAjHw/EuifOi8P0gy7I0SQFgMp4sVyuttZBSG805F1KuVmvGhRB8u90CQBCE2+1OKSWFt1wsyQLnILVdbVae9Nz6LMsyTQtPiul0+urqpe/7gov1Zj2MhoIjACRJ/LOPPj46OHx6efn00WPpedvNJkmyvCisRRUnAPDi5sZYm6bpo4tzwTkiGwxHlydnk+n0y1dfLVdLAEDOD46Or16+4ELe3FwvlnPP89589sb58XEQBC40ZOSzuMiRMTBmMhmFkh9MJmfnFy9fXQ/HEwBI4qSwQNZoVZYAWZyAFER0eHASjUYVN+pJY7Tve2WRe56XpMlkPC7KQgphtJ5NJgBweXaGiJzhfDFXWiXLdDqZGGPiZDcdjRzyipM48LxwNhtGA6VVlqW7OP7mN9/78V/+6Ksf/TkAGFUeHx7s4jgaBM5iDgC0Upyhsfr6+hoAtpkZRtF4PM7S9GB2sFqv4zRxqmEhpS4KAFjMF7Pp4RtPnkkZ/MkP/0xwKZnvqBZjjGMYPE8qXUompJTj0WgXb0aj4SAcJGnsFt7J4RHnQmultR5GkTbaWDubzhDgbj53wU+PDg6zLCuL4vL0TFvt0sSPRsOyLG/v7i7Pz91elGaZEGI2mZZaxUnsSY9zXhSFJXt8eAgAxlpntCulZ41J0/zi7AwYW29XTvs3GAz+9mcf5nk2GY52u8T3Qy75V9dXcZqeHh8H4QAA1pvtp59/KYQvhMyLgjHOGAOw6+363W+8NYwGAJDnWanZMBjkeamUyvPi6eOnpVJK6yRJ33j2FAB2u93dYnl6dCQ4N8ast9uTo8M8z/0gcHFjCSDw/SzPv/P++0KIUpVZnj9+8uSjjz9ebTYu4/ZoNIzj+ObmNoqiwPeV1lJKImDIjHHOumCNQQTOmO8HUAccdAbd2mhmEQC01gKFNVZw7g4Pzjgichf7AoCs5YI71scP/CiK3NHNGHNacwAgaxAZY6CUYkwI6XSfwJBv1htnLK8LOwyHwSBI4p0qy902lr5ItilAa3i3nK+k7639zWa1NdogcAJigMboOiCphDpWvwM76FSie2yjOxyhBT9Oj9i4ATeU2B4K2SsO5dwDW9A9wvuimPt4H5f27qyEgNb/okXUrwk006/o3pOoDdyP7s8+KOuB2+YpVeMdBKT6BqcTbf1rmuFsI9A3oL1RkXc9gbATO6sS2twc9QSZWimLHZVutYycsetrpqQHMVtA/PqraizWoi2EelRqkwZ0hio/f/ofykN5KA/l72sR0DI+0N0VcY+a+voDaZ/UQOhwYhWV0LAwPbKkwwYhq4lF6tZGlYzdexxhfWRgL+Zl54ja5zeoOoR/3lbdhFRpD5qaAGoDyOF+C2t2oDXdrE42rFWbbVwVRHRmU32k0RkaJGzNIZtHVkZaRLVBo7P2aqiVDsODXboQyXaN0XrPIndk1qcc1QcoNHPkBqIJgEl1PM2qMf0Rx3YasB4SqlHV/YGvrmQMLNVgoDP7PQKqV1x91tY2lQzBUZkMra6tE6Gyt6udkF3zsB0grGa79qKoj3nqUFHY0ntNm9rmN1G36wXZATv1FHTAU7WyqCFmmpmA14PDzu3U/dEzY2uuo3t3Yr8e6PRk/7JmkPp3EDRRwBtQidi2uAl01Akv6PrGeUMXdVvgiFdWrat7rdl/ftX7DhtXN4FqBg7b66tR7AUjdEaUjJGtaXxqqc/OW0l1N9jXtqka4zoQ/P7417917u93sbN7QCt73J/5PpVfd7t+x7tE7usa0ELnxjCis1th1UsEJKpNKrsyxH6XCSqO36k+XkcZt7RoZ6X2KmHYbJkAAGDbjabjht3zyMZqZ69volaawNpI9uuKscYaQoZAkKaZMRoA5usFEM4XK2R8vd1qY6XwBfOubu6icLBabwDAWtvoZVwOmThOtNacc22Ms470pU/WOgdeZAwBGCJjjIi0qWyAkTOy1ljLOQcgIgsoGGPMycMAiKiti+qAnDOwTvlk3W7pXJJdtZILpZW1JvA9xlhe5IjIEZwHehAEnPO8KIIgYIxtd7tBGArB0zQNgzAMQgDYbNdSSM6Z0s7DALTWvh+4yJIA4Pu+UmVe5JxzIguIvu9ro5MkGQ6HWhsAWC5X5+dnWuv1ejOZTqSU6TYdhIPJZPzFl19OJhMAiKLo9u5uMBiEQbhYrYbDaDQa7nbxdDKRgm+VAoDTk2MGbL3ZnByfMMaTJD09OVmt1+Px2E3/dhc7Oub69vbxo8ehHzx/+ZXneYh4O79zk1MqdTefc859z7uZL4Ig8D1vuVwBwGgQuah5AHR+eloqNRhEx0dHn372ue/7g3Dw4quvfD9wxmWfffH5cDAcDUdkYblczSZTS0SWokHkNrY8z6NomCSpMdYSvXx1XZalJXtzcwsELivRYrkq0gwQGWecc621JRqPx4yxRKfMhQqNkyzNpPSysjDaeNJjDLI0M2C01pxLAFAq4ZxPJwfW2iRNfE9OR8P1ZvP+N78xGg0BYHF6V5alNUYbc303Z4ytVqs/+uGfcCEsMGcUVpTlm0/f+Oe/+VsMGeMcAQxjAhjz/flqMY0iAHh1cw3APvz4422aCC4cP/iuHxBj8+Xi3/3evweAcRT+Z7/26599/tny7vbX/vE/sRbBYlmo7//J//3+O+8CwPHRCZMeInEyNs8CzgxQqVQeb+Lt2pQlAJR5hgw38Y6IhqPhxcUFZ8wotdtsEPH05AQApJBFkb/46rkx+tHFZVHkuyS5PL94+uSp7wfRKAKATz/5+KtXX2ltEOGdN95ExCTe7bYbIcXhbAoAeZ4VZamUHo+GnidKraUQIYRKlVabSvNhi8D3AIghupxOnPG8yAPhE1GWpgDged5nn31+cn5uCZJdEkQjwYQUfpokeZ65qSzK3FozCIPhaKS1zvP89PTUkr1bzCfjEQAIIRljSZoOBoOiLBDQWHM4O5gvF5bs8cEhAADi9e0tQ/R93wc/zdMwDCej8S7eIbKjgwOovMgXh5OJ58vlas25OD05ttbmZeFJz9acWp4X49GIiBarlUsihIBK6/PTUwB4+eoVAM6mB9vN1gL4XrCOd8vV6vTkTHjh3WoFAB999InnhVJyY40QQimtlVFF+eabbz86u9zGW7dLpHn29tmzXRJf39xaIkQUQuwWiyePLrebDQDEaaq1enR5qZR6dX09Gg593xNCvry6cibhSZpqraeTyfHJyfXVlVL60cXFcrVCxKIonYaALGx3SZIkw+EIAH3Pq5NKobXGbRHaGsYY5yIMQ84EImeMKaURaz07AGLluhGGoQskioIxZJxxF5pTG0NkEUAbzRhDhtpoANRKW0MusCnnAiqFKvM9P88zaw1Z4h7Psmy9XAMAZzIYeOPJKE3SLM90aU5GJwnlaZoOx6HDFb7vnZ6eEICGXAgZBKEmi4wZrbR
FAAhDjijIVuGaoGOo0coaCAQWW9Gkjoz+2pN7j5F8LdZqcML+x9SrM+yJAAAgAElEQVSl96A5a7tP6rKMnfwCr2nMvRZ2bRHs6xGgOwad5wU2DhDIulYXbQcrqF9LkzWcraMuth9USKi2XKQa8vbZUur9ROzwwnsDXsMmgt6kOaBUQU1oENS+4Nt5EDXQ9jXwvHoqtuC6BzZb+FQDywcu8qE8lIfyi1pEh3DsCrYdYRFeRw3VWzTs7bJdsqWVuOk1O26fR+tEC2lrw1pXhv2b62/rHbsWVXvPqI6bXj++XpKta6qf1Ocb+rdh/9ytWtxSsO1Q1LZ+7m9WDfHrUARVo8E4I2OdrGuNRUSOzDaaNgCoXAVao7eaRKvsp7AN0dbhL1qeozoaq7zC2CFRWvdI25y0LWeCFedTP9fpF6u+OrUcVkxg+8QGQ+xDohpm1YOGVWtbBqodopo2qs/8KkpkDz1gR+lbzYDtpEBqnRhagqZyiq/Iq9Zq8t6q7g0gQAWOaiayMa7rDPZ+N7u11tROX61dP64Z7D2CkFoKj2qVeKdF9yDh/kLvPhH20c09Dqn9jXp97xk2EnTnoKPCbmMj1Kx3e0uHMetDtH0A3PyKDDtvNwBUeuC92OndW6rHOnECEBnYipEkbE26qacmccqDSjN/z5yR6mWKbdf+rtK2uNFGtM+CvRqaGntbZJcqvM9FdtX7nbb256T6pi9MNOPUvE3dG++37ms+6ExKjat7ryQAtN7bQJWioKbxod3E+nl1Ks64uy/tL+xmG2vkE3DInyHjnFuwjDFjjEuFrIwmwizNgeDy9PzTXVoUqjRmt9pst4nvhQCglR2OIgIwSiFiWWqllLWWOVYRAACMNo7MNcYiushOlSMBY+gCiglglgxnjCEaazjnxmhEJqV0w2XIMsatNWQM55wYWGM4Y9paIlvF77OGM4YMyFpPSM6FMRoROePaaCkFAGiti7LwpPA8T6mSMRRCGKMtWc/zsjwDAKV0GITaGAQQQqRZxhl36WsC3weA0PcXyyVnbDIebbe7IPCF4OvNZjiMBmHgwjIGYcAZX2yXnPPTo6P5cpXn+enJqdY6CMNoMACAOE4445PJBAGyLJvNZmWpylKNRqPtdud42CAIXzx/6XuB53lX19fT6awolRDSEzLPNQAwFNbgfLnQyjIUd/MVQ3l8dLReb42xLpVNHMdFWV6eX+ziVCt1cXbuSblerS9Ozzjj8W4FAKEfIOByuTqYzW5v54h4MJ3ttjul9NFsuFqtAaDIy7PjU8HF3XxRpLkKVZlmjPGy1EWeA0CaZpLJzW4bDaIkzaQUSZIyZH7gaaWdIzbnjACsNoBMSs4FmFLleWFd5gUCAMiynCwq0lrpQRhqrctCcc/jwKX0tFIAQBbKUuV57vm+J+X17S0Zc3x09NkXnztnbWOMI2iW640U0vOD9TZO8kIKEw0nLou64PL5yxdk9eOLizfffOvVi+diEP35j//i8eNnnPPhYOCmYL6YMyDGeF6WAPD0yTPP9xFhOB5/8913AeDkcAZAo+HwcDyKwlCV+cFkbLQ6PZhxsgAQhgHjKAQ3ZIfREIwhYMPBkDFhtHbO2gK5MSb0w6LIPCE3m83bb711fHySpPH11dVmuwGAy7OLDz/+cDwev/vWO7d3N0VRMMS7+e2Tx48GUfiXP/4z98IfHx29ePlVlmVRFJ4eHQWeVxT5OIo2mzUAfOOdd7ab7cnJsbHWkl6tVoyxIAiyLJtOp0/feAMA8sIwxpM4Ho8ixlg0CAeDAQHEm530Q2ePzLkI/CDPildX16EfxJvYD30GrMhza63nu8RTmfRkGA644Ov1ejga+b6/3qyB7Gg4AgDHJnPOHHOklBpFQ2NMkiZBEIxHzrs8IaLZbMYYi5N4l8THh4dKqflyOZ1MXTKWxXJFREIIS1CUpcvqs93tpJAuARQA7Ha7MAyqwKaeNx1NSlUuVxvO2ctXr6p3PwzzvCyVmYwn2hjhycl0Np7MwnBITAKAF4YMpINAgvMiK4xRluy33ntnMJR5qQHg+u428IO8zMtSl0o9fvSIc7GJV2VZHhwezOcLAMjzIgxDrVRZloNBeHx0GCepNXR9c3N5fuHmMcvzd99+5/rq2ljaxcnJ6dl6vb66vomiyG162118fXsrPc9B3CzLENC9SlQTSQhojGGMCS6scd676HKLE5GttPiEgJas74dOSeDCXGqtXRBbhqgdxnZ2j5wDoNaGcd7oVq0x7nTJs1yw0Fr0/UCp0ljFkNcbms8YGmu1VQaU5AEBMIac8zTNgiAAAAS+2ex22x0n6XtB7WfsFOfWvdqMGXRRf+poOx0pocH8BO2JX33Guuish6F6xNRrtb/UnNd76A/rurpfdWjH6mHYAhzqxEhvXZgBoNL9vl7juYd4e3WzJhBMx1K0vqHrw9TeUqNNaELRtwYIzsmsETcQbFVBxzOpFqDq3tZD1+pOGzTcg0xdtFVPUl1L+/l9XN6Fuf3Lmw/qEXVP7sDDNqgo1XgUERGstXvy7kN5KA/lofyilMpZG+tkIM0+X4vOAPunW/VJo0XryrLtpt3wTB1ZsiUi6tJwZ3tMSUt11DRP52QEgMpLoTapazKfdJpZKcL6De8/aq90MmT0j+/umNzrQt+zoj00GtdRd6Z1D9S2glbF1RKGFXHUsKsNp0uEgIyxqqXY+C92uZrXda8hLKshwB551rmpZg4bENEyGbRfM0FNvbbHbY84qHuHLV/QZy+qkJxYHeSVTSQ2lElzLdajUg1wP482ABBZQzWCouZfiyrqh3c73c5Dv2Mt1uis7z53RO4/6iztlmnDZrVgbyjqG6FeH02elLoK2huffhdbpfU9PrwOpf31QISgrwjuINS+VXRzdafZbZvp3lrD/eED0EZjQ3V3iSZC20Z4qCIPVbpuu19J57b/L4UsEViDwOoInkRkLTXmdTVU7Xbb/b6XRqsFqpUjuOsX68zV31GaDtdbJu1/1dk3sP1iX05oF+K9FwQ7t7eJhLA3e71nWTKvEwQAervJPWPNTtN6Pl9UX46d9gNAJ85p7b/WBsRq35dOLQjYRnBo2tNC83ubUCVvgOOokQsXfKos1S5J3KMOD46KdbJab29u7w5mB6+CV+FgUGrn6Kxn0xkAqFIjMQAgKclCkZeMMUSmlGKMBWEAAEprqy33hLUWgIQQ5IJpMMY5p5qzdpQokXVprLXRhGTbmPhuLRo3JaXSRCQERwBriTECAIYgpSxc/gRPGmOMMb7nE1GpSmf5mKSpNWY0HJZlmeX52FlsFXkYhFprF9JxGA0ZY6YsBuFAaUWWwkEAiIJzFzEzTTOl1MHswFqrjQ6CSVmWSqmT42OllPNJfHR5ackyZLPZNM3zOIkn4wkQvXj58vDwMMlSANhutifHxwi4XK2iwWAURavVejgYGKXLsnRuqlmaFkVxfHS0Xm8Wi+XlRbjerEfD0Wa7i5MUAM5OTjfbXZpkZ6enqlB5XlyeX1irtrv48ODAWWoncToajYWQxTaOBkNj7Fd3V8ZSqfTV1Y2jjTzpX9/cCS
7H0fjz+RcnJ6eeJ+fz5fHBcVGoxXIBAEEQrtYbAIzjhCFPkjQvSktkjXYr1mpjlDXagE2BQJPmwBjjYEEw4fZMqy1ZkNLT1uZ54fsBWbXbxr7vIfAszQAgDAcGDJGNoiFjIKQgK/JMGTBaG2QulyASwXqzSdL43bffvTy/nE1G292GwAaeDwDXd7fDaHR8cnqxWPpBmOf5fL2Wnq+1LpV2ppqbOJ6OhqfHJ8PhSOvy137zt/7jH/z+v/rX//q/+mf/5W/8s9/IkxQAhBDaJGit5JwJfjCdXRwdUpENoqgs1EBwAPA9nqW7i5PDgRCkMl1k6+3a6vLtZ0/BWABQaTI6GsZp8pc//XEYDELPD4IhR+H5YZzlZ+ePAED4gUDwpByagbIGACfjyZO33n7x2ceDIHA7x2w2RYZvPn16dnpyc3s9Xy6jweCvf/LTy4uLk9PTP/z+9wFACPnLH3z37TfeuL65QYDdbouj4cHRIVl9dnIMANd3dwPPY5wLFErr9WZNAJ707haLb0jvy88/B4APfvk/ff7FZ0Hgj6IIGM/ynAtZKj2MBv5gVCoFAM+ff/Gd9//BNk3IaCl8S6ngQinF3ZtsDQBI6Q2HURgGWpWIeHR0BEBFWYwnk6BKW1QmaTybzooi54yLQETRcLFaWmPPT8+cieV6sxmPRp7nJUmyWm/Go6HnefPFQnAxjAZJkgLAZrs5nB1wzvM8D4NgOh6nWVaU5cFs6kmptAaAUuuzw2NrzHKVDgaDcDDIskxrM5tOBZcAIIU3X67StDg7uciKPC+LMBwMh95yvY7T4uz0EgDeefsbf/uzj7knG28Prcvj44PReEhoXYQKwcXjy8ssz9IsPTs9dTExX13fvPn0qSp1s1HPJlPG2PXt/OTosChKKeWru+snjx+fHh0DwO18/ujiYjKZxnF8e3eXF8Xz588tkTbm+OioLDUAbHe7PMsHg4hzzhl3BuPWWHdmOIchxlmZKwS0BMZaxhgQGK0RnfbGnQfMaKOK0vc8BLSWAEhrU5bKhbCw1mpjJTJrrBCCC2mNtUDoaMoKh4I1FgmUUTQgIEAGCMA5l8LnPAeAIs9JSj+0SBxBG2vJGgICZn0vdLt9WZTWmCIvpfS0Np7PwZJjVF3sSK0UIvM8H6F16sIKZzR4zCWXq6JpOczojPN7uKnFWjVm6VJl0IDIjiYXayqrVwVU2LQLtDvHdnVjhZ32YMTr5I+vR3KtHw22f9anfUPvQWXvUDOBzV0tEu4KXs5OsAVJHceOrqDTFUqx6dD9Ft7rVRvxvYcfW5+pFhj/nQj2NdJVk+IVe3GF+lC1RuIV8qlE5Z8n3j6Uh/JQHsrf5yK6AnArpte/tGnF7lvw33dc7O6GRA0j0RysCE3IQ9q7Y8+fuG0BYBWAGDsMXXPCVMRl3daOEN+modhjAalXe7fYuqmNmV7Tl0544p6uDhqRvTo6agxQBY6szqbanp+gla577amtGdEYyxhaS2SMy62hTRU7EqhS8DmeiypTrk6AzXZM2xnoGCj2RtaSbWORUJOSaG96GttG9/hOdBQEstQ6ezexDetILfs8cBXXsmag6msQa21w13gOu9PTcFUEgLaOA1r7wld3EhCrkzB3aLXWh7e1naQWnHVnstveBgm2gUlbLgjaj7pcW5czcUPYrAyXGam/9uqV3+3nPbal9wne/6j3hZtW+NpCDa7BtolVrzqd71JQrc67T5B1bBNr0IfUjAgitrxbVY2bprrPtTNOVRXci8TZAZpUGxd3WuhiHda7UgdqdsBzbeZITczBni1wP5L5PuascWBTZRu7tFr+7WuxT8p1a6jvbUHnHqN7b9+rJ+Pr8XtVeRMzsvei1ZOC7a/tvlAP496Kr6fn52BnBKDK87Hd79pKXwOq+1i/7WnnUZXg0bwUCNTb1+ue1iuF2iXWK02LpCeVVmVZAqvovCzPAUAEgRA+WSyL0hirjUnTfGRsGIaIrDqTkCmjnE9fkmTGkDEaGDDGPF8677+ytFIKBGzXFFX7KyK6HNNlWXLBjdXGWiEEASHjDMGFhgQAZIjGOrWSy2kmOLdgPSmttcYaAHBEJwBJ6TnHUvcEbXTz6vm+59zDy7K01ggh8jznjHPO0yx1DXZh+1xUj6IsB4OBsTbebaPBwFmgxNvt6fExAEuzbDqeGmN3cTIZTxDg9vbOpQDWRidJOh6PB2H41dVVEASDMMzyPIqi2XSapAkAMGTj8ej65na9Xh8fHd7c3jHkyOx6vZSeNNoCwHx+Mx5P8jxP0vTNN968m8+nk1kYhje3N87ykXOmjTo9PQkHwctXr6SUWpe3d3Mi0MoslisAGA7Hp8eny/Vqt4ufPX2aZZnW9vDgCIiCYFDXw2/VAgj/6ic/ZYzNF8s8z4u8KEud55nzQA8Ds9M7AJBSEqEqNWNcFyXnwpMcAEgCAohAGmMY46rQLqZzWWrOucMD1lgEJoWn8swFAC3LMoqGgKBLMx5PAECVGiyG4aDUJSnjeaJIC2KMg3x1ff2XP/6RG+HAk6v18mA2ffftdwVnoe9nmdjG2/ndHQAcHRzMZgfL9drz/Nlkep3dPDq7GA5HgR+kRbmLtwCwWq3CINjGsRTCavXV8+e/+/u/9/477wJQmsSCcQBYrZdZUYzCATCQwnv69MnFk8eqyIVgeZEwDgBA3Lu+u/vyq1fMmO995wPyok1+vdP46cuXpBQAfPtb3+FSfPBLv/S//m+/k2dZIL1CA+OcI9fGCsEBwGhdFIWSoiwy5Oz09PjDTz786NMP3/vGe2mWvby6AoCrm5tf+Uf/ycnxiVLq9OQkz/M4Tf7Jr/7KdDK1Fv77//q/BQBrzMefflTk+dX19SiKfv1Xf1V68sWL5+v16uBgBgDvv/fen//oR4R4PJ36ngQgzrnnCU9KLsUf/eCHAHB+fjEcRmmaZnk2mx3mRbFerbj0BONRFG22WwAwxqzXm+V6K4QkS74ItVJFUQAgWesyYnm+53k+AeVFNhwOjTFFkTHOhtHQvbZJmnqeB0Tj8WSxnE8n0yRN1uv1dDoWQsTxDgC01hiGRFQqFQT+dDJJkjRN06PDQyGECxwxGU9ms2mR53eLxenxMTKW5bmQUistpcyLAgCG0cAaq5RmnI+GI8b4dhePxxMpZeiHAPDli5c/+9lHjIvxeBanGeeCgKWZ0oZ0ln386ScA8PTJEyk9S8aTcrNZk1VGq3fffitO09Fo+PlnnwDAW288k1KkGcRpenJyYoze7XazyWQ4HMVxfDdfAMBkPEZk2tg65gPb7XbT6fjR5WPnzb3d7d56443Vek2AxtKjy8ssy+/md+dn5wDoXsnbu4XbKKwlABsNIq21s10lqpJJMsasNQRgtLXWIoAFcoQyY7xKmYdgicqyJEttBA4AJJLO+xsYA4aAWhvOBWciz6uYDMZoF5KIMW60QUApJJHyPG7JKoCy0GVuXV5yBiLL8+F4KIQEY8uy0MYSkbGlLhgKBIBQBl4gtdGSC11qXRaWAVlC4bkt2pA1VlsrGGOtkrHe5bsncoOiKpX9PVT/mrIHFuvjt
rZnrCSW9rLKYBCJbKW4beW+Fi22bazR1Z5r0d6Te+Byr9WVd1KLNmvjxwq8VcJAI1S10lSF/luY1cxz54aq2hZSVpanFTrseO5gPSI9k4X7kK4WRDteKXU0MYKKMa6k1SqBIXQV819bWsDUNf7E7tjWrWUNoqLWbLXu2/+bZfFQHspDeSh/74pozgLqb+4Vz2drkqp3pPR31orx6R0MleaGGpu0ZretT0RXVct9YOfm3o5c19JsszVBQFUi1+rMbAgCAABHYbXGSN14fo1M331i05yWgAGoraJar07XHeyL0dgMAAJViU4AbDsyrCO8Y31o7A1jRa/0HHE75EINByp2hqpDtTE6groNHU7YxVDD6tite+DOXTIAVZzK+tu6U9jRu3VniCwxhlCzElRPDQIz1mIVF5Ba8qA+pm1LLrSdBai8HavVZYGgyq9RBR1vB40aWOT8ERAZdpmJBlw0nayWdD03tnKc79pMEjSB7Xr8TANQ+k0G6HAp3UGGyrSTiNH+DXWdDbkF1YBUyK9leZqe9EYI6vdlrxnV4t5vWtvsTg3tZfdXPLSftMuOmg8q07Y2XY37ExGos4CrVd6MiFskiEBgqfOo+nd0Q997Uh+67hv2te9vw07WrcZ248G2DkSOUMX0rOaRIRJij6+9NwZth2j/ol5n61/257q9rC8GQN3xquqW12ucm+61qUso3i9tWINmL3L7Qr+ujglvvcXVL221MPtbYaenLYFOe8Hdod6km26RacF/M151I5qIFdXOBQBUvQF9uF8/jyGz0Ot3vedRT3bqF7ePWEtGm8FwcHBwoI1O0hQAOGfb7fZodpQXxSeff6qV1YryQjHGGTBrCFzsSCJkaLS12nrSU0qRtWEUMsGVqrJmM8HJEiAyhkRg3R6LQGTd0kJEspXbIBBpojqAqa0k6sqShwEAac1dfm6LjHFEbPzXiUhKjyFqrYmIMVaqinZshDcpZalKZBhFUVkWSuvJeGytBUDnQ+0S1wyiKM9zIYQnZZZnjDHP83ZxDABalQwxTnOl1Hg0ipNUChkNol2cCCGdl2hRlEQUBsFuFxd5cX56Zoy5vb09PT1drla7XUXq3c0Xq9VqNBoHfrBeb87Pz+fzRRgOjg8ObxcLAJDS8z0vSdMwHHDGw2AwHA7n8wVjlanm3XxelCVjLI7jJE6ePnkax4kq1enpWZ7n8TYGAE/4V9c36/VaePLVq6vNZuN53vPkRZako/HYTVOeZVqbHEAVhnG72+4QmOAy3iaMo7MltJYQUHqe0QYAjSFLBgEFE5V9i7WAzGqttZGSA4I1xAiRgdWGkAEAF7wsVZblXAgXHMCTPmPMjbb0JQCUpRJSEGCRFb4v86wIgyDLyjefvn1+el6WCgDiNF7qwpfe+dnFZr3cxXGaxIcHU9/3nl5eAsDV9ZXSereLjTbRINolMRHc3s2RobXGVeJ5QkovL0pt7e1qnf7VXy2WS0/K5XJ1NJtd394CgNLlYDAgssriy6tXi/Xi+avnCOR7MkvjCqUsblSRCYQ43n7yyd9ePH7COOa6mJ2eqiQBgG28/fL5c2thtZjPr2+n47ECBgDj8RQAeUXUKiDNkFmygfSKskTB/4tf//XxZPIXP/ozFyswSZPPv/x8Opk8efbWeDy2lj757BOG+B9/+IN/eXruZuGnP/ubzWYjOJdSvnj5an53c3Z+plUphHBe4T/44Q9PT06CICiV9gJ/MhkjECLTRhmt33z6FADKPItGozxNQ9//+OOPiHFjrae11ZaESNMYAASK7XpbFEWIAYF1/haqLAwZsiSkAADBJRHkRQYIURRleZZm8TCKPM/bxTsAKPIsioaWyJoqFGOapULw2WwGQNvdDgAGg8FoOCRLRVnOplMpZbpcBkEopZckaZwkAPDk0SUCplkWhmEQBnEcZ3k+m0y4EMaYoigA4PBgVioVJ4mUQhuzi1fGWt8PpeTPnz8HgFE09D3/5m7+8tXV8cmJL73levHl85e/9Ru/MRpP/uf/5d8AwBtP3zg5OZ3f3qnSAMFmvfzlX/5gOPSVLtNsy7kAgMlkkmVZmmcH02lR5ETw/OXLd996O0vTzXbr3jjPk1rr+XI5GY+k9IoiX60333rvvSSJ54slADy6OOdCbHfx1c3N6fGxO6a4EIAYJ8n19R0AqFIROC0bcMaV0kIIpRQRKa0ceef2aiAgIhcP0oVhEUIIISs6kqzgnGyFUN3LzpABMik8AEAGjohkzDCmtbHGGGOsLz2r3W1greHAiKwqlbWGC6EKZY21hshol7UMgYGFLMsASEpfg4l3aakMRx8BtTIAYD3IkzLZZb7nMWBGW2JE1lpLUnoAwBgna7QuhZRAzs0fGh/nWp0OBBV6aRmzypuG1ZipFZnqg/k1R2VHSqu4v6rGFmvXUlPPjKE9qxuNco3pawDWcXZwABlrK4yubLd35PdahQAATnUHtTTkoGCNlRqBqEYjNWDFjhLXOVd0+t4QepWtiUNNpunwXmMax/j+Nx22knrD3RKCleTXwdeNzztCV7DrI/AO49sZbqA2T2sXOTfjWbWyxtXUiKJ7sPShPJSH8lB+AYoAaJiRmsqqJWfsWBBVAmnXobTeE/t+g019tfna3sWda7vEQmdPr37HDk8E1WbbqYkcp+FyHO/b7AORpToBA0LnFGnl5fvFVi6YHUEbEQFtc+9+R+uaK/MY52PdOqg2o+rIkNZargIAPScCxwoyzqyxgIgcrSWGyJG3UZ+dn191BBHj3cZCjSPaP+q/+j7UWP1fY5GKr8Sa2WxNRDsMGlFFAFenNFFtGwnUzCW2SX+apjQzcn/kjbUMkXEGjsu0DMF240fujbgj9dzRTgBdtENAdVac5vjHjl6Umqtq6NIuhvukUvMcatZ3OwX9VjkI1bmxNcyrMCMwhJ73KUBjX7VHVb+Gw6pYL2oDKDZtb+6oJ/Q1NeyNYGcJUv/ive51zRjdDZVXtRv4agA7sKhRaQMoYxGAMQICa4GxFiXVEBagIab2dgWAZokBdMaxRWz1zDeoEPdnkKCObEpAYGvLauYe2gDFdruCZh+jTh0OX1bPxBb6taFIqb94evYD7SuHexfVH782n8/+imymCVnXv743XM2qr6wL68XmzIXr/bFejhVLW8sd9ZxSd396zWN67HfHk6xdfPjaFVhtxBUt2TVqdo54nSOkfvUr+/DuXKBztnc2ff1+V4Ux1FoTkJCScz4IBspql7N1PI5Kk2szWC6XhSo4CWtgtdpaoxlwF/eNgIhIa53FBVkA0JxzY4ncNsUZAPi+r7VGBpxxxrgzemDIGGNaKfeuSCm01pwxzrmp3euMdZyXi5CInhAEoLUGBMml0qoyY68lwLIsOOeDMFSlcvlzAUApJaX0PM8lqvY8z5NenMSe50khVuv1MIo457t4FwahsRYA4nh3cHBgrFVaD6PBdrczRg+jYVmWjs05mB0YY4uiGI9GDHme557n5Xlxc3M7HA6d62iSJtPJZLvb7Xa76XS63W2Xq/V0NhWcL1fLaDAEAN/3lsvV7OAg8IPtdhcNhgCY
pul4NIrT1FGWwyja7nbbzWY4Gq1WK8/zr66ut9utH/iffvYFABhtpSfW5dZReJ98+plRRnri+vqmLEu3YS4Wy81uPfAiIcuNVlLKstRFlnuet16tm/0q8ANLNhyEqlBgkRCMMZ7vAVT2PMZYAChzl2dcG22F5/m+1Nq6t6woCul54SAkKhlDstyA5oIjolIld4uZoTEmCAMpvaIoEBGRGW2DICQw8W4HAL4fFmmhtfH9oMxzz5dEYKx9evnUGLParMEFATAqOhwIzuM05YxPx0OllBDibj4HgL/56OOjo/XZyakfBPPF8m6xHARhobXv+UDaupzNlFsAACAASURBVKClXJRlESe7Z48fXd/eXJxfnBwc/skP/3RyOPs/f+/ff/bFFwAQRtHkYBZIORkOOdkXXz7/8rPP4iQeBL7L7Q4AxW6ZZeVwFJjSEtEv/8MPfvTXP93dfanJC6QHABzZYDjiweC73/teNBgwAm1NmuZxkmZ5URYKAILAzwu22+0k43mW52UxsZOPPv4wy7JX19fj0QgAAt9frte///3/8PizT621nucPB9HL6yvBxc8+/JmLbQqAx4eHRhu3dx0eHR2enH748cd/+ZOf/OZ//k8BgBCDIEiSJAN4/OzJ6eX58u7G8/3ReBL44aMnzwCgzJL59dVodjjU5sc//UmpjSW3yYAimk2mADCZTE1po8GAyJZ5iSjG06Hbe6T0uJQA4Hl+WRRZHl9eXPqen6Rx4PuTycQY7SjCwWAghSCgxWp5OJsVZYkIhweHQojNZq2NBoDD2YFgfLldDaNhGAy2u11ZlhfnZ4yx7W7rshIBYFEUaZqdnhwXeZ5muSc9xpgQ/G6xOJrNACAIgl28K8ry7OS4yFVeFE4Dsd3uXMhXPbbfeOcbpabnL16cnl/keSGFXC4W773/nVKpstAAECc7TwohebyLlSouLs+/+fZbaZ54nn99d/fdb38bALTSWpnlan16fJznhdL6YDpWWmut0zR7dHkBAGEQrDebJEnPT08Rab5YfuOdtwXni+1yG+8A4I1nz/I8W2/Wvu8LIZarVV6Ws+ksDAd38zu3yBnngRBpmvq+L4RERCmlMRYAtTZSIgAYa5t3nMhai9ZYo7VD6G7Ts9b6ng8AZK0x1hhjrUVg1lq3cyJjeZ4RhdZSUZS73U5rY621ZI017ojhXFpjEEAKb7uNR+MxQyEls7ogpDDy3dqwxBApCANrreCeKrXgwiIiGXB0F9qyLLlAKVlZKHTqJbBA2qGLIJTWklKKVSbXrHY569r3AUAdMbDGQRXp1uZ2q5XwAPt46F6pEE+VC7MlumqPshbXdEHePk6uXVT6X1D7dfP/a9uwB3+w+Rxr53QLyNH9vS899J6K+9W40NPdlnQgDdXCAbQSTrdyauNjdWC/k5UQK8q7elQPunKGtnpC47XTqON/Di6/V1xjLEDtztNgrD2g3PbRCeaWqHUReygP5aE8lF+kwv7uSx7KQ3koD+WhPJSH8lAeykN5KA/loTyUh/JQHspDeSgP5f+PIrCKbVdFXWm8ngEA9o26EKHVwFmqf2s0NNXNTk1DQM59t1Xz4evsxGolkq1thhrrJXQaImJVzBTnqeFccBkAAIPaAIqss8Z0KakBkIEx0NV6NUHfgHe9nitVFiG6eCKEAMgaGx4Dlbcjq9pWaRJre0mnwKtGqh+kzpKpDYIYAFln+8MYIIIxZMl2nSYrgz0ispUvrAsRQgRkKhutqjqqdZtQDRwYp2itrcCA0GUZtGAY9PWltYEhoCXC2sQMAZHV9o6AyOtrqbFVQ9eRrjMoc0aUYMkiY85yjPEqxwcZ43R1jKDyk63MKKmZF8acjrt2m3APQYBqMquPiRHUeSIQBbTNojZ3MIIhYo0i2WUH6GgTyRin3qxcSJAxjrVXSP38jr6WMVaNR0d/Wi08BCIkBmTdcLgwnNhxqm/6yKByzW6XNdTvWdWwdhm09rLkrMqcwRSAc3ypQxYQ8WpBAFWWolSZPja2gh2tdv2PNQFQra19busPyGJlzte0DxCpdtYG6oZMdwpihtBZDdi8/IjcrSXnos/JUn07Am/i6dRZmACRAXMGPs00dH8QOk+rynBMAHerjizwbjBDgN6gtvr+1jS2MneF5jXqZH0GIl3NT32vey8AgdWOxtXGgbaxI6iiLhBB8/rXvkv3XXDc181SaKxN66miarG0o13r5AHAWuP2nWoNd8Omd8xNuxa0FgiYMxsxLnACY4whI6COvUEV9r0ZPWQMapve/lRga6Ztq3C61hqyxDkDIGvbzR2rrbiyvXArnGqn5tYMs/Ekt9hsNcbodi47JtoucUFlh+0mpvYcByJrgcggB6WUNtKiWWwWLhYbWC2kNFbnSjEmhRAl5RN/JIgra51NTVGWHFm6SctSc+mBtcSAS6G0RoaNVSbj3GpLzqmaAUPGBQcgY7XL0C2FaBJ0cYZkSRvDOWeMcc4AwBijLTGGiMiF0KZknEkhldbWWmfO6Xkui6sxVhNZT0pjLWeMMZZlmfNbDPwwSRLBBWNis038YCD9sCgKzrgzZgaA2XQqhdjutmEYkrXWGMllmqaqVFJIANDK5FkhOCei51+9MFoPT8+SJNHajKJxUiQAMBgMuZCLxYILMR5Pbm/vAn9wNDva7naDcDiKRgCwjWMilNzfbZMsS06OT66uri/OLnzf//L5c1PFuNSbzdJoWs3XnHOVl9YSEurcWlMdlFYTMSMkJ0scBQoOloo0A0CXJN0YM40mXAhrrRd4LtJIEIRE5Ma/niZGGlTp7BkZWWDIGcOyVG73kkJqrRkDwUVZlJ70PN/XSjPOhBAAYInCMEAArZX0pAWDjDHOGGMEXm22hOPDWVmWWbwdDodKKcbAWpulZRiGbkMpihwZIFGeFpzzPCsw9I3WURRtd2mpNABYo8nS2dF5mmdc8Gg4/Pjzz61R337vvadP3gCAo4OjwgICfPzpp6+ur0utR1L4jK03m8EwHEZDABCcuQzC1pjvvvctwfif/sVfx3n+P/w3/91gNvntkyMAuDw9ffzosTF6OB6/fPHckGFCpHHMiIJg4AwS7xZ3qizzLL6+eokMD4+P/0WSHpycH85Go9EIAOL11pN+GI1Krb1gEEQjhpjF8Xy7GwxGX7x4CQB//Id/8OEXn/3xD34QGbcB26vPP/+LH/6xMYYB5GABgAFqAA5ggQSiJQqQE8FkPP63//Z/H/khAHieNzuYjUbDKBoYY/7H/+lfPX386OLk6PL46K//6q8AIByEajrzPG8TZ4W2KkmX8/mjx499IaazWZ6WALAtSmPIJLvZ8eyb7777Nz/7EBGVIsY4lEpnAABHJ8evvrojY8kwEXpnZ8fjyTSOt0wKY40nPACIBuF8OY+iQRgGlmyeZ48uLyUXq/WCrMs5PlBlmeX5aDgiwKIoAXAYjUypkjgZR0MA8KTM88wYHQReUaTL9SoMQ9/34zjOi/zxo0sAUEot1+toEDAGbmuNBqHnyfliMQgCIT0AWG92eaGPD48RpDKFFF7gh9NJ9LObV+FgAADjyfT
w8Oz51dVyszbaCOEVqgQrl4tNWRYuG3uWJYvFPAy81W2SZfH3vvcty9ggCD757NPheFyoAgB8z9vMd5PJhAtBZJNd+vYbbxR5MV+vokEQDUIA2MVxlmfPnj6yZFbLNWPs8OBosVisN5unj5+4vVspkyTxW2++eXN7p7TWpT55cppl2Wa5ubu5A4DjkxMH2QTjDFlZFokxCKhLpY1x8gHnjLQdj8ZgrTYWwIK1HBky3G63pdbuzSULlgCQl6U2lgQXSZoWRZFkGQBYYwXneV5qpT0h8zTVSgMSGMOAFVkOAEqVngwKlYFRHvN267h2DSIyUCiH6ciqasOpYxeRhhKIGEfGCQCyPGHAObJSK2JkLBksJAu1NkWRAQDjKKTgTEghDAFZC8QYRyC0xjjEIYUwtvLfpspXAKro16xBF1TD//rM7rmt9YUuBypaUNOG48EWQbTgCBlQ4xhXfWMtIQACY61bUgfcNLaT7kG2ObUbyZEQGSOygMQ4c6apjHNLtpK2nHzX8UtiDvhii0tsZeFIHFkPLbUu3i7sRuW7RjXIsmSdRNM6nDCoQkB1fAARsR24OpAUAZAlU6HgdqCM1RV0pBpn1nJBIzRhNTKVOEwuZlZj+eiGnirca8iAk2mRAQEwqFMXuQVg0cFYBwvJLYbW/+ehPJSH8lB+gYpoXQLuFWpOt44HYb17U//v3n3dUCDVRc3+Dm3wk+bsrOKkNJRZvSk3z0TC5qDD6nwDAlt7U7Zszx7Zea9dnW877Xfd49gylQ2j1LAsBEC1pX5DIzZev51qq7FibeW170Ed6JhhdY7cPzgqShG7DhNN5MoOb4a9KaC2Q0h1VBNWsyrVTdj9WQ9319e7IaOckF/TtVT/17u2rqoZ1GYC9jrVAUWdwWr5k73+11Cq85TmK+xExnRHdntVS6fUDqs9Lrpac1XHKvd0Rs3N7YUNPumMdgeaOZ6m37L6qz5334WErYdqyx71uk11pE3qP7HFd9VsWtvSVe0LyhCc641pkq23rXcELziRqbNUOkxT+9Tae78Ztqr5BL0Kab+qhgltV1hN7dWLvR0p2Lvw55Uq33FDEULrjFKFCOqi7vq5rCXsqH2nAAk7wSNr//P+Cu75wHR43T6gh84VDmo3juPuLaH9AeoPQD08TX1/l4tNvd1ArfKpX4f6Va3f7/ZdZ4yja2Fn5ql5c5pUTnt5mhq035uhOhtR05AK+2PXwv7rfLPayAW91603CFQB+P5TX7tL1udLO44IZIlxZkhLIQHJ8+Rbz97I8gwAljd3DBzTMbFE8Xrry6AWdMhlqmWMCSYSSqw1oS8JLOPofMUQqpCObp6FZEIwS2StYYyTtdZazrnWCgCMriTU0lqXOQGIqqwF1gBAEDguIxeca60BkXOujQYCwUWTjcGTnlbaWjqcHeZFYYwJ/KAoCyHEIAyh5rl834/TBNCOR8NSqbJUURRlWe74fc+LNtuttVZrrbUxljhHzwssYTSI3NClWX46O8jyHBHeePbGfLlI0nQ6nRVl4Zby4cFBWZae50dRtNlsd7vd+en5y6ur1Wp9eXlZlCUArFfrw4PDMAju5nNPelfXN2mSloXSWmltOOcAcBffCcbIOEqdEMFaAkJixCqVHlhLZOrUQIicM6sNZ8JY61z7EZm1lrRBRMc+VIvHna/VFktkEIA5p39rlLHAGBJwMowzAQBgUXCPc04EngwRsTRGeFIrpYoCABjnBsEYLX1PldrzPES0xpRaVz6OjsyNN8DAD/w8yVyC9SROUbCyKEypAOrE6NpGw1CVmnMktKePTkCQsVoGAgCKXP/SB9979viJx4lz5oWD+Xx+c3fzgx/9yOVi+uA73/3+H33/H/3SP4x3MRA8vnhUloox+Pa335QCkyQGgNAPrq5fHR0cn10++4M/+N1/8zu/A8YGQv4f/9e/++3f/pfvf+8fAIBJtmWRKq2lL/3Qv7u9deEIhee9fP65yzusLHm+HE3Gg2E4iAbD0ej21bWUnCOoIgcAKbjnSyKzvrsrjRWeb7Qmaw33SJv59UsA+L0//L3hdPb+e++FXrDbbR9fXv7jX/lVa/V2u5ZS5O6tvLsL/GCz3dwtFnGSbHe79XodJ0mRZJNwqK0BgO1ut95tNZAFsAAhwPNPPhYIQync5pWUygAgQAkQAMyi8OAgLI02TH7wwQePnjwBgDIrAJm1ejyM/uxHf/2nf/Kjo5NjIOQyZFzq8gUACE8EgyBNSgSczMZBECwWd9FwqMrC933fCwBgl+zCcHB0OAOAxXI+Ho8550orBDw8OAQAslYbLYSIBlFZlogwHo6lFM9fvBwMwjAIAUAbvUviwA8YsjjPiqK4ODvTWq826ygcuKQueVGUqjw6mGqlS5fdm/NdHHPOh1HkXhalSkQIgyAvyiRJh1FEQC++enV9c/fsyTMAGA2nf/rnfyalVyqVptlkMjGWpgeTP/gPv3tyfOI2q4PpwcH0UHD+0c9+8vjRo7PTU+eyPBmNnjx74njPV6+u8rw4OTqyRF+9uhkOBuvNBiwt1+vvfut9F4J2sVzmeX56csIZS5Lk/W++mybJzc0tAFycXwDAdrf99IvPwzDU2iJgvIvPT8/SNP30888+/+KLk9NTAAiDcL6Ycy4QscgLxpguNSIaQ0TgctqUpVVKuc1QK22dDsoYIAAoLCI0+bsYl1wAglI6VyrPc6dMAACGjHOOiNYaF2nXGsMYs4YsVXnqnbc4Q0ZkVaHIVhmfHR1Zn0LEuvkggQip0oQSb9ST1hJj5AAJq/4DZFXaQWusAQvcaqMZl8gQrFNRIzbpzo1p49XXe2NVeQdB7JcaFdDXncqvLf0Dt3NrB65QHfCl+fv+w+v2EWs+qCWjSr7oeEo3cPN1IKhBPqY2KYDaWMGJUgB1DscOmoAaIFBzcQ0YuyCpxcl90bCH/jul0cg2MKhLWfYhYC14QCPjVr/VkkhPLtsbvx7yqSFZw2NW0BqaulwttbTxUB7KQ3kov2ilsjVrituXe0aFfQYKoBIrG6biPnXUTc5RS+UdxqdmCNoP3OnbfNAcUi4qY3WUd3b9mgNw1m1YS8b99Mn1z17otgpV1JVA+7W7siLzWk6NoAYCXTG6DwK6xBO0LE77CYM6LHMtx7uBo/pcbU4qZ4+GDaVbnUPd47IznLZ/jCN2Yjfey5bbb32H5tofo95ZTA2gof1TrkuCtAxVO0wtq1mT0M2g1fRcr081zKnvqgaRmusBAMF2ztwes9FQbNSyJl1jsYY/doZsSNb2ulQTDq8bsX63m7VQA5qm5t7t7k96TU/3wx1SvRy6z6jnCGtlbRUOkcChakeaU/PKEiCwZqS7ELRKAVIvY2jmu16njnWrB9AF5uy95b3XvU2ijbA/Aa/BVntLpw1u2L6GPz+pc5s6qf6JFdRs3p3/h703+7EsSe/Dvi+Ws90tb261dvUyi2Y4HA4Xm4BgW/aLJXmRbcCA/zT7xeabAL/QkEkZsGVYkGgIMkcmKS4ip3t6erq7qrKqcrvrWSPi+/wQEeecm1k98+IHDpABVFbmvefEHt/yi2
85pFA9RHgPrRvISDxRAzo3HuzBpfh4Ow4S5uFG7Wc1Xs6PB3zng/j5gcT7TXstTvUBKcZe+GQc92385tAf5j7tUNgc/WYcycb9/u+NfAM2ODrid7QTPxtjXjEyK7jbGfLmsnig7IxmMjw4ykx0UAUOeKivEPFuRUhEKIQjCyCVEsdHR6cnJwDw9uvXEqUUssgnxtk97nSSFMUEQQhUXjuyzjVV6ZxLEp+xGhkBkH2qAOxRbAFKq0jYmJmIkQGkUL6DxKykcuTAESLaGB63a0MISGtDOEiltDEdMzCxNVZE6z8AkFKiQNO5JEkYsG6aNEmtpa41s9nMWQaA7W6/mM+7zrRNl+d5VdXGGnZU103dNFpJAGg7AwyzybQ1nTVmUkyJqKrrLMt89pjtbq+TdFeWu91uUkzqpqnrhpzL0qxpG60UADR1u91ti7zI0+xmf/vk0ZP5dN4Z8/j8caqT65sbANhuth5J2e/2zKSVYubdZi+l1EnSdR0AIEty4H0CmJgByXlPAI5HEYG9+0CvxDEDMiPTcIPJztv1o04TZnLWOSIKCrzPXS4ABDNY4pDFTAhAsI4RXdiN4eJFCIHam512xjqrtPZOCF3bms4wsDVWioSNdeT8QDy6CgCm6zSjUro0JSIKIVpjkkQ5ot1uHxIBNS1KcOze3twoIYt0Os2yx2cnu/UKRSBQVV3/yZ/8+NOf/JUScLRYZGnaGmOskUKcLuYAcP324snjx+9urnb7/dFsbo2pdjtLDoy53m19r9qmOT46UjL5gz/4X/+XP/gnRV5MZ3lnundv3v7h//ZP/+E/+i8B4PijD21TyzQHJhDi+upapWmeJNOi2NzeNk0DAPtqD0xai+OTYwnU7bfTTB+fnjlywQZwMdM6kTqry9oyqzRv6yrP8g6VVhp9AMq2WSBY69quVYl+9faiasr5fPZ08VwpobQGgO7jTwSAc26/2767upoWk9PTU2O6fVmFEwSglDLWMJPSGpg3661AYGecs+F2jtlZW7VdbYxp6q4qr6+v3r27TLN8Ol2eHj8GgPlyaY2rmur58+dHjz75D/7j/5TIOYu2a96+u7pd78HbAG6rpun+7V/++YvJC2ON1pqck0qlWebThdf7Mssy52zb1kKI05MzIWC9uVVS9HumaduT5Umi9Xqz8va8q9WtMV2i51mWAUDT1lLKSVHoRLer5mix8JgsAJwcH/sl2O3389lMK9U6Z62dTSdd11lrk0RLKZ0xfoZPT04Boa4rKYVzTgpx8fZyeXQ8mx4BwH5fFnlxcnz+J3/6b8pyL6TIsqyz9tPPf2qcPTo+AoB315cnx2daiq6uv/3JR4lWAPby8iZPtTWmrloAaJrm9PhEa/3u8upkeVQUhenMqzdvH5+dCyG9kXXXmSePHjPxxeXl8dEi0cnV9c3rN29++0c/qusKAL74+c8lisV0vttsL968Y+b1drf5+tXFm7d11ZyehAMlhXTOGWPTJLXGtV0nhUy0hoGtonOuqWqe8qSYGGPatiXnANDnxQaA2jkEKPdlohOttXNOSSUAm7rxtudSCPDReLuOiKWQCEjExljn47ECSKmAWKF25OLFlWe3YzYEBA4OhKSeeUK8PWZk9HEAg40BCgbuM8E4Q+QcK0DRpZkCBoGCmYmMEFKiAgDnrcdh4J69bQbc8ckYiyY9d/xFJhoj5h91oIHz9urFgYIwFoiCENbLONwLHr2U0VubjKRx/5PYu+kgMXHMjngoKB1I1Twa4lgcjE1H8a+X7Q4nIr4x/Ox7znFKD2Sye1Pl34wgZK/R9XfJXlyL8vHIWcfPWRyIF7j9eBHHhhMjrHNQMEa8cOQQ00tPI3sFfE+A/ofyUB7KQ/mVKCrQSjwwz+mBiHsZL6J+2sMX77cNjzjHe7TtwUIGe+xilMQt8irP2RmYpfCuAtz7MAatFoFcSGwKgWNG5sxDru27iNO9CMpRGwIiCveOEeDzSWOIOPCfEdMKt2Q09Hxo8oCN+JEgQDRXHLL7It7pHkY/gQPe38+6b3rE/A6RjjGwg6PZPawq8rKxLj8ADQzgzegiUtDbYoGAQ8Y5kmBgkNcOMKzQKzzIigRjoWOEfxx8MKAmIwu4ESse7dE7gA8CkF9oPsDFDwsDUy8vjGd7kAeGMszVCJQejcejKCNjwmGH4eESjOoZGsdRve95OO6wcBKZOSB06A0lKR5TjPZuQ/cxBkCPMtBIdoFB+hFhojwQGR2PAIQcDYMP6hx1drSWPRY+XpT+hdFkjCbgLrR0p4SO8t1T6yHDmMaq3zChpQhKwx3AS/RA41Blbz0QZWs8BJFHuOFI8YA4Y0Noc+x/fmM88bEuczAdo733TfNwj5xF2jIiruMeADkWIb7CeIujd7wa6MxAw0fj5b52X13EWg+6THdrv7uMgQUEe3N/vxTvnuKmGJ3wQIqGb+NnwfiZhz3GPQfx7yZp0nSVQGGdBQOrzSrNEusMAACBRSudT4xKPmuBlBIYkizz60eOttsdAhZ5Zp1L0gIRUICQgAKEGA+MrbNCCImCmBGEFAIAfKoKFS0ipRQ+J4/SmqwRAn2eiqZtBIosSY01wCxAOGsR0Xva9tqSM46ItUq22x05Nsbutru8yAWK3b4EgESnzpJPBZMl6Xa3S5JEpQkCpEnqsTBrrdYJMdR1g0JYcnVTM7HW+nazBgAims9mbdtOimJ5tNzt9wB4fnbOAFmaHS2OAGC320khj+ZHVVW1nZnP9OuLN9vtJs3Sq8urkB0CsKlqcixRGEtJkQGAaQ0gWGtjznHh2Sh4gNwhUwjEQTYwOMQBjWRiAHaOiLp+/1uyCCCldNahImYmZueTpQrhIV0hUEhkQv+tEEIKzQjMTovUtJ3fz13bWWOTRDOzTJUAwZaN7fwaO2v9YjjnLFmplRAoEYmIrfEbTyskR61tVaoJYFOtlVJsYb27TZOs3pcAIFEggpTyO9/57mw6+bM/+9M0XfzFX/yZEHh0vPR0dzadkrOXl1ephJdffeWcy7Ps5uoKAbzn+M3qVqb5Jx9/nOX527dvkyzTWjPDer+vtpUxHQBopde0/uN//ePtfv3h8xemM2W5S5Okrqu//pu/8fy3rcuu2rfblZLq9mb11Rc/VWmmtZ7O5oi42awAgBGJLLLomioRkE0nUiF1DSE471quk225QVnXdU0oms4JwW3XOUFH84XHapmctQYBqv22yLOmqb76/LMf/fAHk0m6329YKgCoWwOIZVnqJJlPp0KI1xevqro5Wy6FQG/X7ITojBEC8ySx1h7NprPZNFHKOTedzwDg0dNn+SRvqlowNXU5m0/rcluV5WQyLSbT5ckZAGgpus7dXl935JSFbVnOZlOhi33bfvs7lCRTAFitV//4f/7HXUvICSB0TZsXEwYqisliPu9M5/dVnudlXXZd+/TRY60VOWusmU2O6qr0myrPsjxLy6q01iyPjqUU2912PpunaerJWt00eZpNp9Pr25vOmMePzq2zu/3u7OTE50kHACY6WR5X1b6qKqUUAjKzEGI2nTJzWVUAMJkUi8Vst9t3xhZ5n
ibJbr/f7/aPTs89ePrq4lWaFjpJfvff/7unJ4+ubi+J+bd/+3fyLNnuNv/FP/yvAODJ08dffPHZ5z/9dx9+/NGLZ88b0yDSvqoenT7f7nYvL94BwKPT0zzL9vtys9u/mD/VSjvjijz/6MMPN5vNxZu3AMDER4vlzc31V199/fGHz16/vnj5+uJoPu+a9s2bdwDw8tXrjz54sVpt3l1eJkma59ntzer6+gYAlsvj9XoNAFonzrm6qq2xi/nCOkLAuqy9I7OQ/mgL25m6rve7XZdYYHbWms4G8uKNHwVaY4F4t90pqXSiIQFrrERRV7Un0dY6gehT0jvrhEAAYZ3t2qYoJgAgEKzzl9aM/ooEAqcNsiUAIjgmjALEgQwzElfYx90h75TjU7qxAHTIvnW0wATMHaJMkhSF8FRPIELIqyOj7hOVriD98giNGjV+75ODb5nF+KWRfDQWYsf1jG1Botw9XMrz4C80hAoKj0Zliu9V6zVOzxKiYieAHb5XaPeCTeTCg6gc3S58aBgvucZ+ReEgWnZAlBSZ4hj6AfqquFdlBkHqbkco3GRBL+1Fqdo5jj0K4bZGCzFqqnfkDsLg3WYOtUThDgAAIABJREFU3mKAUQinKBpFp6ORIs0hEeuDr/ZDeSgP5VeyqF4jvUs6PWm+q/2OAar3aNwHqj9HnnP43AjMiBrxPQLKwQYrcCweh9+IbYyBr0GD7bGDeyrySJU+/KL/rTfbGt/tAQcuGxnHHTgKR10b9X5or7/V4thD1Oo9bAO99h+MkPpOIwToL9o7QhBEomHUIUzDcdb5XsfCIA+8iOF9TLfX+SPUM6AW922xBvbb49NRt+wz3A14Qmi6B6LugHCjpYoiyiDlDS+9p/S76aCDAZH0geVCHJsoJoTfxzX0v/EA7B1mJT8o/dZFjIDLeFw8CBL9FPawS/9HmJs7cBvGBpiHeQuVMoeQl0OLGFxgBA9Yt38nDtcnHu/3dS/OAjBDCPMXBoIRIboDcx38jmELDYfqQDDuAwn0Axm9ywf41y/LpoUcjfgOBha+u2tEF2bWx0MYgeMC4K7PeKgmnOzRMEZ4YTj1h5cuY3y1/2Q0Afd9jgZb0t7rN0rPo57c7VckF9+gPISj9012ERgk7Pv3EvFcj9u725MDydZf4d8dU7A9HX/+/nPCw08cP9MTovtU6i5xvnN++skeBuycY2ZrTZKmKKCqawD29lytaws9cc75Tnv4xhirtZZKkbUAoJSSUrEjcqSzRKloJoKHVp+ABI6YvRs3u3BjBZE0YgjE3GsIwvPBVCdN2/gHsjSrm8Y5p5WylpjZR28kcv5iwDnnnCvySdd1TdNOiknbNjpJppNp3TQ+juV0MnXk0iw5WiyEFI6sECkRV1VVFHlV1gBQN/V8Nqu6Wkk9W0zbpsnSdHm0JHLGGgA4Pz3rum672c7ni81mW1aVFNI62u9208lks9kCwO3qVin1+uJivy/3u3Jzu9tV28V0UddbDPwKUAgpJJCzzub51BgHDIiS2DGxUgkAOOuTyWJYdY7aFANSCCnL6BgcEAgUiAJRsIjbRggA0IiAqJRCtNZaKaWSSmuJiH3qeWJu604KDQKIyHSGoQUERHKQknMAoLQSUjCwI3LOOSIRg7X5EWkhiMg5C2StM87vVWLnrAPnD5CUklAmOiOJxWx6/sGT+WymJD4+e7Q8OvVUWmulpCSyx8fHbV3+g7//97/94smjZ0+PTk+0kp1pAWCz3nzx6adJmlWdvb6+tJ15fH4+nUyByQMHXVP/X3/0r376sy+++vq1c26/26VpRs5JIdmxccZvtkQneZLkx+eb1dpHw1RKpipBLbRAALi5vExTRYCNaYwxxIggy7Jmhtl87pgBoG1ba1o5LZq6RmezRIMj1zW7pnXW+8tL51iCbo3pLFmGREmAlgCzfOrNe4mYnGNHaZq0XVtV1a7aO2e3mzVHS9dEiq5ti0QXRZGfna1vbiW5rm4SJReLuelaAJBSTPKpQNF1XZIodrBdr51zAOjTMX/+6aemM0zuWx88W+92X75+Zcn94AffN8Ycn56elRUArN69JBJN1x2fnUORPD4/2Zd7QtSQGNO9u3wNAF9+/fLP/u2/mc0efe/7323bRillujbJ0ulkqpPkdn3jSUSi9b6qirwoJlMmur65KrJcKYmYAcB6uz4/PWu7tqqr6XSapklZlU1bn5+cGGP25d4TkMl0Yqy5vLr84NkzYHh7+Y6I0jRt6vrd1RUALObzHnbSWjGwsU4rJVC0Xet9I5aLI2Zo2y5Pk9l0bqx7+fqLjz/68PGjx8Fgebd9sThSSvyj//y/SdL8T//8T4xp/97f+8+++Pxv/tW//hez6RQApMCb60tnze/86Dc700op/uazT5ezqXX06uKdj8Q9m06rqlltNrPJhByt19uLd+/OT07Wm82nP/3cU8XHZ+cXFxdffv3y/OSULX/59au6br714cfr9fblqwsA+OH3fs0Y++btKyX1YrbYl+VuX+52OyGkteQZbtd0xhghVVu3b8u3Qkitddt25f5NmuZJov3R9rLhdrOTsmEi4BAro6EGpQSANEvJklaJj7jnjOuoc5akUH5zWrLemJSJhRBERASIbI3p2jZGsUDnrEQlWDobQ+v7SCc05oqelI2laL9yNBYoIMht3hwQvIeWpyHExAzgwAFVlpOTxDMdIdA6h44AQOpkMEGIagIcttq3PchJkR72alFv0ngAGgZ5ZWD48SkeSTLeE2MsXvRv9+YsB73x3FCGiI2BTxONYMkeycORQDSSRO6MDdE7a8dL3/7/8OgouvmAP4a/gzbkhSEei3i9AnII674PiBzhlNArWH24HN9OmNoQGyfYKUSM9GDcYcR9SwdKRl9llN14GBUEKXLIHt5LeORi7Q/loTyUh/KrVlSvRw1K7CgexRjPiphSyOLwXpo3oqiepN59qteNgyYTSHPv5XBYPE7jwpu9RtjXHO3le/hoCKd4395qhMD01QdCH5Gn8GGf4ARDE4g+mDOEAHyhYfAWHwN3jwBUsFKMjrQA0SwLw2NROhgbq8WAgL0EAQcsN/QPD3lb+HzE1PpZvzP5Q0vYN+OZ1yhbin9CHE7dwU3vIYgxgpvChhkZ1gKATzE0sPpY0TfYjkHPXXnYe9HqlAFgjM0N/4U+cHw0BIlBOASx49xG5Kjn3IePMAwNjSCPuyOIcMUg7A33ur10N7zNB1AMjx4U4wQ2fQlwGoxEF+wdO/oe+WjiwOBTEwW70IMp7veTQLxnuhpOi8cHI6bGAAKAcAQ+j+XQYRZDR98nuvUHLaKb/kO8QzfuXxHcKwOc3e/PA5rUf8bDNwGI7GXwEA6CIYT+HvcybDdEIo79uyfSjY4p3+sDQn+qDsXgexMSrwIA4A4UHj8fXXgMh47HG2l4LF723GtvtDSBpg39josxPuMHiseYcsZl971zY2B0iPuEiEwj4XnczXFFIzI1YjbDK/2RjGh1v9x8UNFwWvv9yQBgnZVKdbb1Cc7attvty31ZAQADaa1AoXVdZ6x3DQOELEuB2RGD985D
lEqpRGmtGRDA74iB/CGg381KKkQZUu4wd8YopbxdmFdtAdCR9dzJWCOl7LrOe2Kmado5U9d1kiTOEYIQEoUUzjmttJ9h0xkpFQAba+ezOSLu92Y6nRJR13VSSQDY7XfWOkSsqpqIlNDAwjk3n83TJCmrEgCeP3tOzr27vpwW065pq6pyjoB4s916U813jm9urrvGrW43XlfXWq9uVmmSNGXjbzbqulZaWWMBQauECRaTJQCwQ2b2NE8COCIG1CpBxLY2SinhKQmz6TxeRkLIoA0ieJXbr7KIqekQAcVA8PwhUUoRBQxAJYnXKrXWYDoftIKsjdodA4Bz5KyjEOSFgh0KAyASOL8HrTWIaK1pmgoRO2rBhlPtHTCllAJFnhTz2UxmCQBrrfMsS9JECumtZaWUznGW5zrPjpbHAmG5WCzn8yLLprNF17UAkOeFENBUu0TrtsuY6PRseXqyqMuNiYbVuVK2a8k6cm45mYi5JGtWm9WT5y+qqgSA//2P/uU/+6f/bLvfLhfHxloQSARSpUIIrTEXBQCYrpNKaq3arkMhGKFx5ur6Mkl1a7r9dgM+J4YlSwQgs6IAkGkxW5w+Zrb7zY2xIeOQ6QzOZZoXApkRhZQshEoyb6batFapxBKvN7v9viJAx6SFcEKpJF9vtgDQtR0zGGslKiElAyzm88Xxyeb2GmK0VoXYNnXTNJMs+39//P8IlMujo4+eP7u4vrLkrBfCCIo0AUeJ0AjQUiOVSNLUEWkpAUCpKTABU6vQKf7+dz/pmvoolVby8TTT3AGAUmnX8snpWZLncNtKTKbF3BHqDKvy1vtQX7y50FJlSQrsBAqlFAMrqbRS2+2anAWAxfy46Zqua8/PzoXAzWZVt/X5ySkxeY8XKQQzbXfbuq7Pzx4RuaquZtOplIoh5IWbTycC8Xa1mk2m08m0qsqubZ49fYoAROSzXZ0eHzNT0zRaaykkMUkhZtOpI9d1Zj6f+UOx35VN0y2PjrVOL95+XVW1te765vrrV68A4OmTJ2fnj25ubhxZa7u6Kqt6L8CRdabr9tsbAJBoV7c3WZo6Z1e3q8rURTE5Plq+u7rpjH3x9CkAOEuv3ly0XffJhy/Kqnp3dY0IWuuXr14T0a9993sAcHN7e3l1czSfL2bzi7cXbWeenj969erN28urJE0B4M2bdze3K+NoPptf39zuy3K/2ymhdJKmadI2DQAkaSpQMjEoUCph5v1+XxRF20ghRM8SlJAqldY6pRQTkbMIKFD6rgJAVTZITI60kFppcs7Te++X7VmXs9ZnE2NiZy0RKaUp2M1TONrkECWCJHBREuSgYvQ4JDFjMFYYy6lIkdcPvBgOXL1ZxIvjwF2IwLhuv91P51OplCULgSGC3369gOGFFBHk219Q7twpD8w//Pk+UWVww/YoWu83A9H3DEdvD6JPCBc0XH76wJeHTY6ci6DXC5iGdCyDSN1PTBBvo52Fb2kYQugBDavCff1xuoBj/XEsd/057ig8dz/opw+8Zf/Qrajmcj8i5t4HZ7ApPaiRwxi8SMjQg6NjadartKP3or4BAC7aXkaTE4YhCtkvk6cfykN5KA/lb2WJsSPv0MP4+whI6hU/HJTXb0CVejfIAPxE3M3b/AdgpieuHo4ZgMS7rAAHFCo+0jP+gQAz8wFEcgCk4jhBwqDmH3bah3js2d8w/oCZBM4aujOAI74PPRcfmOkgK4Q41j4VtXdx9MZFsVfRmbm3dwxsTsR5AwB/K9t7J0AEA31q72EYHAWJ0RDfBxlFfT5CQn3PxR1JACH29M7i+CtT/2uAe4bN1MM8B53wamNY+5HPSWguCgzjTRlejqDdECez706ohEce4SOxqB9tP+hhAEFiPBAaxmJfX9MBYMjDhMZuMQPLARTm+FUo3lNnvBKD/ev70MjhVnRkoYbgk/GFIQQ37CGoOI1da/nuKEH00tIwYaHieHQGH5A7prXv+x3vIf5jQXUoh2D2IJ7i8ACLOwj4qJAjBJ9EOW4Ob6UQ/J76ndLflAAEH+qReBopzCCHjiCvfrbvCKn9PI17238z/Bk2NIbWwrKNWgq1H5IIGo5eX53XhfwS3kX3+nQ0g8TO/TjGPRxqg5AwHQaPckAB3kajl/GDVD2sS5jV0dHtTzZHcueni2OG9ffK7+HV4RRhT77A/zxEoxEjYRuRkbgI/ch7jnRIwIWUUoICDcEYGnb7qm1aANBCO2dVonf7vWPSiaYYma5pO5+MhUKYtqnWCSMc6IqDVsCIyIx+6NY6BJBCeqLhs9B4Ux0fWkQo4UOPIaB1rsgnACCkMMYWxUSg6Izx3IDI+f0bU9mgQNk0rXXWKdeZLs3SJE3atk10khc5AFhjjbRH86O267bb7aSYmc4iCmZ+e3kZ7ASJ67oChO12a53xFSKgcyFZ8PX1DRMokZAhb2DoOosCnXHOOR+sQUnNzmmlmUAJ6YiscUmaSEGdabVIAABYEJMQwnSGUXiwwFpHPiG49E6UCgHY2ykD+pywCNjf46DAQKPQZ2knJnbsEENITQBwwbYOEcBY628imVgIlFp5uEFKhVqbzhhrCCwTO2LHDpCcsx76FFIUySTNkqNiMZlMhBRSySLPAcNtZJ5lPi3J6fExS8jz4mhxdLJcFsVEJdpbqEkhF7OZY1BFjlI05T5Pk1mRS2ZDlGYZADCzbZrJ9EPnHAJ/+flnrq4yKXSWIGIxmQEAC/V6cZxP57Vpq7p2zr5982Yxn795+dV//z/9jwDwl3/5V+eL0+fPPlBKmc4QEVlab2+KtNg646Hn85NHSZp8/eolS3zy6LHWejE9efHi+YcfPMtS7WHjqtwlStdthwJ3u0pKJWXy8suvdrvN6cmRn+TFYrHfYlXVEiFL9fRoWeT5vqoIlF+7/b4spgqAO8tVZywjMLWInWuK69ub9QYAlE6IgRgUAALmWdpUtTNWCEnWaOlnTzx/9myz3VZ1+eKDF3kxIeeuV6t5kTG7508fA0BTVsVkut2sU6m6ts0SvdmVUgiZZNt9BQC73UYCPzo92bctMv7k858jc2vsZDrl29Vm8yUAfPd7f+f3f/8Pnzx69Fu/+3ddW0+yR5vtfj5d7jqrlV63DQBIFM45JVSidNcZBtY6SdO0rMuqKpfLYwBIEt3smyzPAaDc79ab9dPHj6UUxnbbcg8AJ8fHxnRCiOVymWVpWZVMNJlOdaK2212RZQAwn8/35a6qqw+ePwfEfblfzBd5nu92u91+/+HzDzxd2mw3DJDnmUBR7ev5fKaU2u/3xKylBoByX603W60SIL68ulpvtsvF0WyyuHj3VggFAIvZ8vXr1/uy3KzXTdu+fPnlkydPfvzH//LTTz/VQn722b8DgLqpb29XeZGS6Tbbzen5yXe+9cnt1aWx7mR57EMffP36NSJ+9PyDsqwv3r0D4CzLv359UVXV2fHJX3/6KQDUTZMoPVksfvbzL8nZoihevX5TVc1yuVzMFwBwfbOSQk2KvKwq61xdVl1nlNIoRNuZRKcAUFe1Iyel8rTLOaeUtMYlSRZTZYMQIaKFc5ZaTnQ
ihHTOWmMBkUPoWGybNssyz7oRBTmyxqZpJhR6Em2N8TyM2FnDxCRQCAQllWeNaZoiCyLWQjL2gjh764MB1xI9o4wSlGdGKKJoGoW7yK6IWQAyBdYnROBriCgQy7JEhGJSMIKQgaAZY/sgM716QsGl40BmOeCIY6l+0Iz8jyFkDYyUIwwfDWaKUTZjFHcj7ozvoTnqdHcL909HkXVohjFcPflfiJlCsp/h7bE3DwghQk97iawfowg3zb1s2v+MtYxAzEETOZyx0UzC+75CABCi1/h6Lx/fS4FD7NAAxN7zxD4I1B8Z4EhJGesAo28HGTr8HUx5HDOy6E2EHmJHPpSH8lB+ZYtir6mPcR14H7CA0fKNo2fjfcvH+D8zRddYT3LHunzUNSMUEu+tMKrId/R/74QVyL+vA8HHmBLeQ7aHc/o4edDfVo6H0o9nxBQBIOIyCESAKLxc0MNs5N2GQ9BliGFXer1aKhEVJO7N2RCQRQ/hsgMQAOg8l2eU70HbfG1C9CJGsOILtplBye7nsr/t86jMGC2CHqd4L2sK4ItHNxBGCv6ISx48HM1YPdZyiDyMOTr6aJt3ZneM6QYDvYOGDnbf6F8/nt76lAejVx53JEaTG7g9DrXHYfR4Oge4DRHZh/4Z+j8Cxf2j751CHIcJHQ+kbwKg99gI2NQBZvReGagfw2G94XT004CIPsw/APqs036/eoTcfzM2jI1yJ0Sb3FG3+sMt+mnD+CUDBHwX+zWKgjUAiF8i99yR9N4z4rs76ZtmhAAQBIveSDhYBA5E4t5BQjhcOL//PO42PDy65YZ+YHeXfCzMj8Tk4cSJOG8wzM+B8Puecfe/DkD0cB3Rd+uOm/ZAcO8YVn4THQZyxIiAyEDYU1cIQfxGE+hnIY5pLMrDMDI89GjytFcwUTRD+IbRxjAOEU+OZMQT7MGCAHpFa4RAjo8yelo33rijGUABnekYKEnSuq2JWKD04Zy00P5JY41UsijylZTE1HRt1bR10/q2kzRN05SAEAQJx8TkM6pGC1QEgcAqkc45b05L1spMKqVRoM8ew8xCCGOMUkprbSrjkxcDgyMCgLppAFhK5VwHDFmWO2cZOEtTZvb5o7NEIwpHSkpJzhHJNEvrumam6WTqnAGAuq2lkE1Xl/uy3JdNXbNjFkDOOefSLAWA6+trb5RFzgkWQqACBd5Cs+0AIJUZIYOQAE7rhJmt7RQIZ8lahxIAQEhBxJ5PO+s6Y7XUddUkSZLorDUNAACB0lpKoZPEWlcUhZSiJcqznIGcCwZHAOiTFgRuj57SIAryU+cIyBGx11EBBAgh+5za4D2+EYWU1litJAATOeNcZyzZwH89IkzEQmCikyRLkiQtJkWRZ2mWKikAYDadFkWulUrTVCkBAE4mAJAmOs9zAFgczbVSxaSYTmcfPn48WyySLGubSkpVTKc+x/R2s67b1lqzr8vN7S6RQqezqconWZ5MJx5ZTnRibWfazllAIb7z8Ue1695evjZtOy2KqmsAoHN4u74W5dYBtlUlUMyL4vVXX/0Pv/d7t+s1AJwfLcl2t6tLNk4q7YBPjo9/4+Nfv71diWwyn84A4Ld/9Ju7crcr90+fPMmyXCq5qzYK+fn5mSFz9fYNAJzM513bSqUYcDo/subr25tbFOrdu3eJlloLACj35Xa3UxLn0wkDlmUNKJqmLcutD6vXNJ3KKNFSZ7moOzYGhSBilAKkyrICAHSSNG2Xp6kj4wAYMJ9Ot/vdJMuMEB733O92N5vN+fm5BFAofvbzL4rJ5NXFm9Z2i/kMkgQAurY9y9La2k1VTScT11RFniudttahVACwmM6FACFls9klafrsxUdffPVlJ7QwdsIIjgDAIj56dP7o8WNrurquN5tV09RZNq33lTN2UkwBYFpMCdzR0dxZ543m/IbcbFZ5nnkCttvvurZ5+vRJovW7q7d5nk8mE0eu7Zo0TQEgy7L1dkPOHS+Pjem2222e50ppKZQx5uho4anQze1tnmVaqdV6bY2dTadVVd2ubtM0nUwmALAv91VVa6200tvd1rtyM1NnzHw+81aWl9c3zDCfLVabzdt3V8z85OnT1Xp7e7vK0gwAfvLZp4vF0cny9NOffvrpZz/Js3R1e/3Fz35qOzpeLtu2AYDdbltk+WxSWNPOptPnT59f39xc39xOiqLr7Jv9OwAoq/rxo7Pb9ernX3+dpdliMa/ruqrr2WTatO3V9Q0A/OB7fydL0q9evrLWniyXq/V6vytPT0+1Sn72sy8AQCqdZ0VZlgxQlWVZlnkxcd5uszONJQCwzmmthRZMHK6liJlZaiGE5IhRIkCSptIaoSQQEFGSJIDC2uBeJJVWKIQUTISAzhEzKKlFFKSlQJDcX2571JXIoRBKK38xqpVGEEwMApmZCRhBQIj/6AsiCClHKtLAjyQIz6SJicNtHwTJkAlQQEzJHW5iGBBRS2XZ7felczRfLoRAH1IDhRRjQzsIEQyZQcTw3vcZMN0NQ3/wzL0IU33n4xiCxV9/kQvR+KPXwmDgzxwkxzuKY8Ds4rMeMBxZckQxUyAwEIMMcO/drvWCyaFU0lfjn+khX46aU5iHYWCI0briUMDozWEOqr/fDRYoAPFQ6OLh6zgRvU/PqIOHj3+z+DuMcazK8KCPhKFFx55o+QFMByrvQ3koD+Wh/AoV1du1DdxloOo0WN5F3gRBc0YhQ6j4iB4OJoO9nXxgYNFWi4G99y5El1qKcAcCAPX2PzHCiMc8fIhejGQ9OkEQkI8rSQO3ilp9bCV+HBWVAJKGL/qx+u5IVtG6iQc0KdiJYT+8HhcNXbfRUQDjlSj40UREczCfDLykdz2LSE8YBAIHZ9IB+aPeLIzoQA5AEVCX3vCpR8MgIiTI47B3odN+unpzNBxsPfuPRqAVwTgsy9gsNGImfjcEs1chBA73xCgALTtkRPTOMYghXA6DN2MBZAZiIiYG8s8QmQO2yhCc8aPpCsSRjAtGJDdO9GjU7HN6jEcBPgDcAMuN0cd4u81htbmfPWAGlBym22PeoW9+A8cm4/6P1mRxXvte9BWKfjP0kR77tYb4ikDBAMzs0wdDxMF87CEEEaMQxeM3vhpmZi8kj/EghmiVhOC9Ug8nVKAIkfX7TRfn2ff1QOyJGwbCKQsdicCgN4kCRzaeYwQO2Spil0L1/VYM4rr2C0FDexjFMhZxTjnKa74H4EUzDNajsX5kwGCLFfZu2IqIIjh8HcC0keT42OcY/+7FfGZGAgq+M2MiGpQpABACidgLsj5Z1kAk+je4n6aePPbkLk4wDQjiSNRnIUU4oDgQKC+QS6040OcQRsCvDKKMGBv6MBTEHhoUAEDsvPFXIE2OGVgKCVIwUdTiQHirCSJH5PO33MMJEdCHiyJgb9wdqRZ7CzVv6EqASEBBjA/jG/ILHWCyfHjmR4vUmUYphVIRk5IaWTRN7TsmhSIgZie1SqRKdGK7ToBI00TrRIAAgKpqUEjr2zZGSsUMAqWUwlPssNwMAKATba3N0pQTbZxLlCYi17a+N0QspCSAqq6SRAspyl2b6C
S8KxV45TNRSioHTIYmk4lSqiwrP1hrXVXtuD+7AHXV+GiK6/W2LHcAIIWUQlXbhomBuOta500sSTBh5TsjmNiCBCBUWhlro3FhSFOFUjLbRGLrnLUNgsBgqolCShc2OaCQzhEDEAqJ0jmnVcoEUql5PgcAVD4EmyOGVGnvAplo5feSXz9rnXMGEaWSiMIZEogoBJOjqDcSkcdzRfR8Fx1JKff1znMslKKxdSJSEtQ1lQAlJOTp5Hh5Mp/PPeJT5PmkmFhnlNZSyrIupQqJbOfTqY8NN5/Nlkfzx2ePzp8+OV4ukyxPU9RpWu33Ki0A4Pbm+ury8vry8ub6Ynv5anF09OLFCzJmNl9smp11BABpUeQJaD3N82dSCGeds5aJpZRdZ7RWANC19Wa1NqbbbTcopNRyfXX15PlzcrDdVrTeA8Ds5JQR67ra7PZt15m6/sP/4//8y7/+66ePH//Ov/c7AHC0PJofnR3P5svlUiZp09RNWd6sbtR0Ai4ESP2j//ufT+az5cnJ7faW1+5bLz76zW99IIri0fm5dCZREgAwL6BpkUEqvduV683aEnz08bf+6//2v6v365/+zV8BwKYsP3jxoix3Kknmy6UzHXVWq0SiM8YBgFDJvm4Swt1u19RlmhX+UpZQsHWBjgkphGiaRiA5a3/+8y+/+8lHzx8/EkxVtfcY68cffohCkHOLo+Xq5jYvipPl8Xa3u3hzcduZ73/7uwCwuV399OZzBn50ft51hlBnSfbu6vJkuURnAEAqJZB/8tOfbavy2598nOXpt198sF7fWimX89kPfvO3AOCPf/wn3/v+rz/54AWB/OLnL9ebcrcvt3tHUr28+PLZs28DQNmYvDj5D/+j/+QqAi/lAAAgAElEQVRvfvJX2+1uUhQoUCo1nc2KPEvTBAD25SYv8kmeN00thTiaz3e7LTA50x3N5wBQVRUwL+ZzANjudonW02ICwDer2yxLjTEAsN3u0iRdHh3t9+Vmu51NJnmWtW27mM2PFouubQHg9uYmS5L5fFZVtbP2+ZNnzPz67SUR2dbebFcAcHO7Oj5almW9Wm2MsYv50Wq9ffnqYjad+oYmeT7J86aqVrcrySJPs/2u1ErnqbS2rarWc4w8T5tyR0ynJ8c/++ln6/V6Np+lSq+3W0+CZ9PpblvWdf38yTNEvLq5aZv27OSsqqvV7aqYFACwut2WdaWEeHz+aLXaMOGvff/Xqra5ulmdP3sGAMy43m4TgRIFM04nc2ZWShHTrChMRwCAbScEWuO0VsYYZyhRGSCxc0ToczoRO2usE8KnrfFszxgCJhToU4ohOGtAC2YA0zn0TsaEzlkd+ZQQ6LFOiAbpqETbtkrKzhgAkKjms5lpDDNYx8ECnpAcAaKQgT8jgnPMTDLK+kCMgJgoDrKT82SWmAlACJmgIOvjC0dOCooZvTIkESUIW9clwGQ2VakGAEsUVRV0TEIoIRCY2bleFKGRcM4A6DG+nusDwEheJn+HCD2SNeawHuACFKKXxIgZvTGFVwIxoH1e9mdyQoggVcUr7yCdgOTQcqiSmIkdAHjHfC+GOucAfawDBgyROgUKASJcZXkhysuA2GuMgD5VH6JzITQKA3jpxVtf+j75lWGOUSS9HeNIZmZmZETqJd1gODKInIMfC/sIomHBxyErEdkRMSOikKLXQwR6FZjD8xwnnhkFcq+C9M15QY6oB1hDvtZefWBwLl7rCSawvluo1HDX+1AeykN5KL9SRY3NTyA4aQ6lh1HGn0VdHsc3QNDzwf41QLhHHQd2eR+XGVCKqNxz5ArDi4Fq09jXEoDv9PGw8KDFB0Bw1KHhzV7THhh437MIq0QskvsBe6Y/itsxQr3iLPQtoP8ruDUPKRJGL47Ym//X9/YAnRgPjccPDJeF/Tj43msA3nU8MOq+Hr/ccsBLPNMckK1vKhw59rCbvNQQYtz4FY3XoSMTT+zfH3m7jBYoTiIffPreLvSjOejMaDYO68S7H8cu9nXFgY1e8g/cWSO8u18O6nxvS4PsETM6vGd2MYB+EQIOZwmHkYZFCV0KAgpz9KAdDzk+fWiMFqsbtd4boPaL8/5RfXMZNmpvoRzxsl/0Po6+CoTo0Jn3fb/3Vsr31nk4UyF9TB94dHRe/ImPAx71ZdyZ+PAv3n8H5bAfI6qK+J6xD0QoksfRq2M5eNyH/ur8G4lfBGH73QMAvbd+b+UwJiEDLR8PPFofv3/R8CDawJj6BbtgHJrpF6DnEpGejpPFHGhHfKAVfHO0rDRNjTWuNUIhERArKaWvqmqrLM0kyiIvyDpjrVLKEQkhBaLPiO38T3JaKRJgOqMTLQQSEzsaQ9jI3LZtDC6GWqq2bZg4y3SYYXCJSiw5YpckqbEmSVIEiFk+CFGApY46YPaa6na3YyKKug0zC0CpFDMw+JBmDOyjGgJT0D+ZHXr3ZSIAFCAR0PrwZx5/Y0qShBgsERErpT2wTtRbciOTTwmKAqVA6YBC4CsRgUQERGCJQkoppXelDvZFGMILUGN9lgklRDTKwZDPRIYFlVJIKa117AAlMKA1jtkykCD0k09ALJiYgFigkEqKIp0cTWf6yHemyPMk0YhojJlMckTMsyzL0smkmM8XRZEDgNJqcnRU5IUz7STPptMJMp2fnU7ncwCx3W0BgImqqrTW1NX6y80VIBZKeatNKTUAOBQS8PkHH8qP1em00EqxNcvl0nYdMXvTvH1Zqrwgpv1+3zVtkqbBIMtaY1p/aeSc69oORcgqzgST5bEBdAyIQhcFADSOr25vZZKenZ2V+30l5Ucvnn/y8YdSSk+IvvPtb92UjTVd2ZWJJJGIWTpfnCx+7bd+4/xokSkFAMfL5dHJcTYpvvPrP2r3e1dt8qzA6WL97uWT80eff/oZANR1BcYhyoYFo2AUaZq+ffv2X/zRP0+UmE5yAPj4xYtyXxnrvn75Wit1dn6222zzLCfmpiwBYHp04kA0TaO0ns4WKKWUChjqpsV44pq2zYoJIALbpCjOzs/SJF0uj3e7zWa7Dc80jVL67bvL09OmbdpnT5+V5f673/7Os6dPjDF10wBAVdVaJ8+ePvnrn/ykqqrl8piZFvP5u6urD548BoDXF29ePH/+wbMPTs5Pb2+u9/v9u6urxWy6mM+do3dv3wLAdrsVQq5XK51NlNYolbU2ybBpWkT9s88/B4DPPv/s448+aduurus8z72FbFntFvN5UUxW6xsAIKbH54+tNZvtejadKimbpq3q6uz0NEkSALi8uU60llLVdcVE08lEK7Xdba0xJ8ulh72MMdPJRAix2++BYVIUXdu9urg4PT5u23a1XgPAerN5fH623+8v3r07WS53u+317aozZlpMrm9XPrjE47NzR3R5deWIJ0VRVtV+v1/M5tv9zhNWpdTr16+7zkghpZL7cu+sS9PUWRsv4WA6mez2e4Hu7PS0atrNvjw5PVvM57ebVWft8fIYAKq63my2p6cnAHi72gDgyckpIddt++HHH3mU8Or6hpmXy2Xbdaazs+n86vpmt9sxg0xzACjLaq7TTbW3N
gRmNZ2ViRQsiNibJzPLosg9qXdWGeWMMShR69RaazyGlSZ5nhFR27SF1MwsIGJLiCLAkagkS4EAyMgeGGJiodgbUEohUHmnb0JEZiZySqaY+KYZAJzrnCNH7KzTWltrDbWImOiUkZkteAHeConAUgCwYyJg1CilaNomgnkC/eEHkghaiso0CApR+t4SMvWZFkkJgSAEMXSWhDGplJ4UO8coBAohceDXoeL3lUOR/67YhdHAo/+m5/+IBy/iyAH4bks8fMZMITBMTNXnbTi8+Ue4AEPgkIV7JBBHA5aRCjBuoWeIsTv+shV5kB76btyVeQ/ln7GSFHLN9XY3oX2O7A6i8jOeotDFPlHdPYEII8oY5ytOTlBQ+z/58B2APljXQXUYI433qtzBYzj0sQdo6RfrIg/loTyUh/K3tvyytLYP5aE8lIfyUB7KQ3koD+WhPJSH8lAeykN5KA/loTyUh/L/U1H9nRJH64/wzcj1N5o6wfiOqXf5HL4dLG0ObrkOPD39o/csehAHE5txxLqh4XGth1ZCBwY59y4L79o8xTsrHIY8mMRFe6nePieY9ByYRoYLufGt3GGLfVthQHgwjThceAHAN1xl9dZ9CPE+De+NrTdwes/7957E4atg0gcQvPRHU8SjR8BfaEZHVLgz1aO+jqyv4g0exqELlL0J2+COHy5IqbdcG1d6J3wfBB/2X1LCXozOuYO38v0H71jJDYPuN0V/swow+OZC/1V4cqhmiMR3aOU4WOcdtDX+lUe7ITYxvpWNJUZCYOBR4E/gcE7Yd703Pxvs0O5eGcdOYowTiaNjjIev/PJpP6h7RClGoRX6COa9fex4w9xriEMvwhT3ptbvaY77Q4TB6fp93UGG8Xe+PhH9zYMDEFD/fJylgd7EnXGfuPUDPRjaHbIDwZ3bx+iEMWUdnrgzNIBxUvIw2lE83NEUjmk0892ZBIgWCv2ng8ezp2eHwxl1wTfEgL2l+qjDnjRgtPU+ICS9UXUfwmk0hFCxd12nYAw/ZL3C0bDDrr6zBd63PX08RwB01jlmYYSU0hskJjLVWgOi1qmltqpqgaKpGyZou9ab3c1mE6WkX0BnnNba2yQKIaRUvgPeiyxJEuesN5lhZiklkVSp8oZC1lglkJitscxUV7V1RsvEWmutBQAhhNbSe2E7ImedEKLrOtN1Smk/IudclmVt2/l4AuQoBrhgYPCpKpxjYBfpjRAghUAmkkJasiEgGjOwAOY0Tdi6EJoCGYB83i0mVkoJFApYKiVQCiZAFuhzwwT7PmOtIwYLTEAAzgKi80dKxM2IzjnrDDmXaEQk51PQWGGNZeuXDUFKIQWiMcRMWqkk0QIFagEASqkkTZI0SbO0yPPpdDIpJmkKUkkplQ+jppScTCZJlmql57N5otXHH3/y7PnzRGKRaZ8uXAAQGQYwzu72u9v16ub68u273ds/f1PVVisFAKlWx0dHZ+fnRZZk2XwymSiSzz76eL/blWUJAACibtqyLAEQyLV1S8ZsEKSQTVM7JgDo2k6k2dvXL2fzRbXbb1a3UsrF0XI6nUI0hu1M521XrbGpVGmWtZtNV1ZpmjlrZYoAwNZaY4VQVVmtt7tXL7/O81wr2RnjnWHns8nv/vYPf/DD3xACSaosy9DZk0ePZZZXdZPGkBtCyc6an/y7Hz9+dD5fTNP86C/+8i+q7Xo+m9VdBwC7qs51RtYZEiw0qlQkWTGZPn/+4euLr1fbPQA8abs3b98oJZ4+Ottt94ujE2MoUWSM2azXAJDPjvL5zDperTbW2izNrLFaSkRM0ixNs37sxAzE1rr5fH58fLI8O0/z/OjoSGkJAE1VM8OHL14giu2uLMvSGOvc/umLF8DsbQmzvCDnjk/PfvTDdLVeKa3LqjJt03Wdp4Sr9frm9naxmK+26xcffDCdzay1RVGwkKi09/T8jR/+RjaZ3FxdrdY/W232z4tFkhVd15VlO5+f/f4/+T0AOFocEfGbdxeIOJ1NhJREjpwTQpTl3pgOAM5Pz/MsW60ujemSRDdNvdvv5tOpknK9XgFAWZbZ8picrasKEbTWbdusVquz0zMm2u/3ANB1Lef59fXN5fXVo7Oz7XZ3u1pZZ5umresmZBOaza112/12khfk6OXFRdt256enbdfuy3I68buLrm9vt7sy0enW7vdlpZTabUsiCqmurOu6TkmVJKm1FhDzPANAY1qlVKoVADRtM8lzpeW+rLa73XQ6yYvJ67dvnXOnJ6edsQAgpH72/HmaJPv9Pi8KKSUCXr17s5jPwblXry/8Qp8en7x5/cZae3p0hiiatj0+PgXEXVUDgMjyxfHp1edrQCGUBkCpAYQwXaeURJQAYF1bN611FgCFEFIpnaRCIhELFN7WmIl9xAki6Iz1mU98wHVitp0FL7kJ4cPAMAM7AgYmJmbUEgAY0FrXdYaJpJRSCGDZVA0KIAqews5Q8ORGh2wFilxljpyxtUDpM8xYMgKFY2bLEKyfgRmMIYUiCGnIWkqtlCPsbGNcR4BSEILwzs9EDpgFKik0AgkBiMIxMruurIUPCpEkoDUxOes8B0EhkQ68ie+XwWuhl6sPZZIRp4/h2ZERxKG0dEdWvtPEILWO/MYAQUSNjgGCr3HoLWLwIqcgfH7TEIJAN24jvO+TUUd9Iygx2I936AcAxFSpwwSEFwlQ9I0zj1Qe6KV7jo32+T29I3vUze54clCQ9EJox8NpHE1ZCKUVbE+jhDMS0Lh/EMZ636F3jB/8oBf42JG/QKd8KA/loTyUv8VF9aDIIeIy+ngMbsRP0aNOY9o39ugdnHHDNzx+BIKWyQd69bjBWHpOiKM/x8b37/Vx7cu4yt7h4BAMCVDSAfY6Yl/RWp8Hd1o+GGGMMN0DLiO+hoNKf4eL/AL/ykMx4OCp+6/weGihH/61EJzzTnjFMRQXHQMxOsnfrWp4Y1iI0czhYW/vdnSIMYeCgTkGh4+N+Gdi+mPgPhkL+KDhfUPcP/z+ObjT0/c+ce+bEVw3lrcOtm4Px4xcaSMqPUz08OwY6L7T5L2UzeO+xc0QYWK8+71fo8MdetDt8N7w3TceiyD9HGBtGAL89c7Rw2yP4KdR+sV+Wfi9cxo3H/TVHVaGETodCMD9STn8AQfjwaHKIXKPdwwe5h2Hc9C/BgH2GprpZ++9UveoSewXYry0I2E5isQ87mfo2EDoIrL9nsXx9YXTEG5IDu5g4ut8Z74GB+Z4nxJ6xIOgejA8HJbC/3aXwAJgiJ0bg2/S+OV+uAxDKnaA+6i7P+yI48qxX/o+qucoYEMc0IgmY98ww4FCdWcOnXVKKykApADrmDjRic8eI6QCFF1nAUSSpG1nEGVTN2/evpvNph486tpOSIGA1jqldUhxjkIIAcwuZjyXUhrTIWDbth4pbDsWgIZNWdUAQI79oPz8d61BhA6sNcZ7hUspO2V8dldmRqUQgJmdIw4xc4Gca9gHkB3fVeDBBvSsyIfZAkCJQghDRkqlUEspAcCRMqaTSiiNxnpaQwigtfR4Zdt0SZL4ZFE+RxQiEpCxBsFRhwDgyFnr
CEhKGeLGCvIJc5xzMaIYMQCRYwbZlFIoKVWWZnoxm06naZ4BQJqmzIiISsksS7MkTbQSAgHBOAsAPjeIlFg1lRDw5MnTb33r28fns+l0VhSFj/2bKpmnCQLkedqSqqv/j703ibUsSc/Dvj+GM9/hze/lVJWV1WR3V7GppthNiZIoQoAtW7YgGfBCgOGFp5V3BiTI8ADbkDcCDHhayLBhwDa8ESRbsEwJpCwJLTZJTc1JZHexuipryMzKN9777nDmiPi9OON9mUVCOxHIADLfveeeEyfm+P4v/iGtqnK1uPzws0+LIg/jCIBUCsR1WSVJ7IdB7HtvPzxNkvjHv/qO8CdaawBpmqbblJmFIOvs8nYpWV/+03/iBUFjT1pV9bNPPg3jmITIizSZTA4Oj0yeOuvyPBNSAUgmkzwvrUNV15Wpo8mEiJhIB0GebpshNJ3NjbWmqpTWzBwnU629Ms+VVKaqTVUDUH4YRwkTFWUWR/5P/PiPsWMthSP6iT/00wBOHzzM1y+nsR/F8TrNljcX0ygst4v1Zf7Zh59Evgfgq++/v9ysbtfrZx/+8N7JSbq8PX9++ezTj99+50ldVdc3CwCQmmAcU1lW15dXlsmUteXs9MHjo6PjH3zwWwAWt2tATJLJ589ffPtb35rM9m1tScrZ3n4YTwBoP1gvlw7i5uZmvU21508mUwEqTO1L3ZBuSmmQkJKkklqroiy2abrdpnv7BwDfXl8CCKKJ0koIaY05PT1jdpdX19aYH3z/tzebbdNN8/lcKfXdX/ru0eERg9ObmyAI4jg52N+/3WwBPHn82FkXRdFqc/v9Dz54/NZbURhrpcuiCnwTRjEAw5QXxcHp/avFyjqEyYxJpWkmBH7h7/x8E2Jea29/vl8UdRTFjp1gZmbP03Vdl1Ue+D6AOIrTdFuU+fHhoXPWmFprHUbBNt28vLgAMJ1MBNFqdZsXRRzFt8vlerux1pZlcX5x3jglDIPg6ua6ruvZZCJIXN1cO8fz6XSz2aZ5tj+bA7Bkr24WZZn7vl9WFRH5nnd5fbPZbJVWVWUB5FkuhFBSCRJgmiQTZk7rLIqilgevqoYGqk1tjCGCILLWJkmspMiyDIBW6vT0dJtuzi9Wh4eHcRStNmup9FuP3rq8us62GwC+52fGPF8uJ0myN5+XZbnebCazvdVmkxb14dEJgDCM1putHybKWitVlqee522z3NrWDbFz7jd+7XsNvSuFFEIYY6SQzjnunECS49pUQso8L5RSxtoojm3lmNlZ7qFXVdZg8n2PIWzjx6IJnNn7tAHqupK6WUhBBHS20I4cACEat4gMx5UtO8/OrnHu3GIkC0GSbe241iICsXElkdAiMFyVrgTgq7DxadgsxNw48WWAnfb8BvAaa4ytrauZLTNroUlJB2NdAZYAJCmS1Li9gLXOMaRgkLPW1LWrDQCrKz0LScgGIjvrXOME+8tEn56quxOxZvecr4erYoQJiUYmwf1Gs8us9SCyub/h9cCj3aoL9kjEJERjET9SUxghzDt1oLGI0qHbURjwHtCNUQURcRvdsa1ZL2u09wzQ4pU0QKS2xdDJcTvIqS0GdXHV2h15F0/uRLamUXNgnA+6A/DBir2FjwNt2glD3DfRgJlHxW5fQqOLvIPD3qQ36U16k36fJNWIvrsy+yiNWRXevWG8OLaicH88NXp2nFn/qlc8XLyGQBk4NeyKuq203hMn/Y72GtXIMX/Ti9I77x4fKHFPafS+9tCF9e5f3f7Y6v70lW0i0WBnF6P+kbtVe6Wcdxp29PwdamvnpjsfuT1SvMsUDd8HSNFL++N45G333aVUxvDgS04zdxBEf23nS0M97BIy3NI1Lb4YMn+l0X7vPXZ0Rz/M2mJ1G/odJqqHNu1H2unm/ta74Zb7urYfxie4PdkzsAhf1mR3qzjq8faZLtLTAP0YAHcaSYSOuO3JyIFqGs2bvjVaoLXT2z1meuX67pHxblfu1H+3RrzTibs1pxGZOdRzp+J850M3oocH+DUzqv1lDPp6wDqMOurOFTqMR0MX/a6pjzc+4PL+uZ2RPy59p3PdQc9hoW17ZfdUYmd+DqBy8KnYvo268jDfbUvuBAF0U2qYzztrHsHtjOmdHgB2VBuJbXNq0FZpvPK07pJ2euTuusPDyk4dFu9XRurew53TqXaY0/DEKONXBsfwRgY3SkCC2nLHUdx4YzSuliwBIpCnfCVLdq425uryMs/yo6NDoPHPZdmhquoiK/zAl0oQk61rdNFXCTDGWWtlI04rRQRTGyIqq0IpH4AQoqprIQQxW2uUVLWxtjboNSkc4NBEj9bKq+qawVJJKaUbHGiSNXaYGySoDQIEahz/o1GakM01IUACfZgm1bGN5ASzEUJUVa2kllKgruqqBguWDmjCx7B1bG0Nw2AppWi0hARRG4yNBZFjg9raqrDGWeMMUIZeNJlOkvkEQJTEQRjowFNSzuMQIGZHYGsMiXYACiEmcRz6/iRJDvbne3t78/35bD5PJvF8ngBQWkmtgsAL4si5+ur85eX5+e1qu9rcfLpYlkUOQGtdGyOVevDw4cHxfc/3lZSz+Xwyea8fKsbU+76GEErKMImKoqiNub68urq4sIZurq8BHJ2eBlFc5DkRWWODKJLKz7Pc3NyUZQlACJXs7TEgSDw6PSISDLaON+uVlEpKAMi229vVxg/Duqr9IJjvH0ghpJRFluV5PpnNASwXiyAMq7Iq8vzs4UMwKhLXq3W6Xjnr4ukcQJRM10VpHG9WizgI3dQ9e/bsG1//6iQJ0u01gL/+f/78T/+xn7Z1+eL5i08++eT49Ozg8CtXl9ciCHLGg/sPAGzTNIpi5XlH+weREHVV3Ts+VP43nj996p+dTpqQI6ttllWHp/fS1baoqrI2xtg8z42tJ1Hw1qO3AcSBn25Wnqdenr/crNfWGCFlGEXr9UZ5HoA8z01tdJiAhFLa8wLn2Dpjrdts1rerFQDf9401ArBsldJVVT/97PPj4+OD+Vx7apumAPb3D24vLy8uzu/fu7+/v28sHj566/T+AxUGUsgm9HO63vhh9Oidd59+9EPf86y1dW2sdb4fat0WJityoXUYRF959ytZmlbGoCyff/HFN957D0ID+PiHH91/8Ejq6PG7X3/+7Nni+ibLS+35/+AXv/P5s2f7ewcADvYPbxbXVWV933fOliYnIqVUUeQAT5JJM66KIq+qOivysizrugoC//ziMs3SRi3X03p5uyzLSimZZ6mxVik1jePFcumcm00mAJxz2zT1fa8sy4vLK2vqOEkur66aJWubZgAa5WsvnmRFHvqB1l5d19a605OTPC9ubpYArHVKe9bWWjFAgqkoyiiKmLnh02fTmed7RV6AEMdRWVUExFHjyTFtWMLJJLm4PN+m6XQ6JRIvXr7U2vO0ur652abb+/eaKDS4WdwEcTKZzbdFSUTJdMaAMQy4qrYAlrcXSinf830v3G7WUsp8u02zLYDFzQ2A1XoFYmcZxIWpFCgMQgGqTGWM6+JGsiDRUIueUrU1fNWiNClks4lYZ5XUSmp
sm/jbLUASovFMKwEICDglDDdsGEEQSTBBOGNLtF4mm1BeDDiQY2MZTEqJbgUmRVqoCiUXRZovBZHS2lpXGeP5XuNmsi7WKYQgIUiQIMGCWDRqenW1klDNuxy4OTBiwFqQLQWRJkmQQKMHbBkMRyw0swWDlJRCudo2gVbKqq5uTRiFKvCVVKYN/QUpRU/CfamS4Z1EBOax8VDHG7aPdwGCOqjPGLFk3at21VJanEODJDVIDaKF9a1swnCdQv0ORzdKoy2+MQUgcOctnV0vogzqLyB0AQBHLx9lxb0VTd9WHVLtCtfcdget0wgB0x2Wrxc+hwCW6IJltqLwDifc1/VOP90JjNAW6w6+7VDTCHH3jcdjfCzcAG3fpDfpTXqTfl8l1Yqqd5baHV6jv4ZOgKZB/B52oLvueHfppVEurYjVydK7qmU7dqPNNtk4y+8MDhnjQ75ehv0SVmEgadBKzm6sMNhtp4PE3BkujJkVAvdxfUcE1ijrXWG/y4rH3/vCN0TJLv+6o4ZPOy/goSfGtaPusTEbxD25RMOV9vbRed9uVtxxJYOMPz4V3KnD8NAdpuq1WyADgGs1NMcKtiMqZChgg37GnNI/b9qt3B2aqy/26I1jpos6mqfT+Xq1Kmh8nw8915PTO0pLr6Sdzh0q3rUpv9qx6Hqy1ygk0NC3/SBsjv6bjyCHQWF5jBjbbKlXRRtdvNtizc+vcYg9aq67pNBrqjm6OBrl6I1YdjU0h47qAd3dXPpSvkp8jf6MT/D57gPczGSQAPqQR/3EpF0t498F0rWT+DWXdwvQNKaDE6M7ulLtND1jhK536zxU9u4LmxWqUTjrKtobdI+PSYY3o2vqnUnQX+7K0B4sEZGgYWXaGTRNuxF3Q9Ddbf8uOXY0mnb9K4AmwGW7MA0iUKthMqr4XQHjla5hgEhKobUXhdEmWxtTJ0kQR7HveQCc4yYEZ20MrLXWKi2FVonWzrrFYgnA054zrsiL2hhrbF2LqnJgJgEpRRN1vbGcZiYDYmZbt6FpGHAOBgZAXddCErN0xgjR3GaYWUnVxisHrHXOui7idBOLoRMRWwaQSIhWt318gMPD1CBBxNTtyW03S6GsNUIKJtvc4wVeWVbak3VZ1lWj3Chh0XAWgmRVVFCiWSm1bWMAACAASURBVGCcc3XtrKtqWwuSTQhUXwdhFMRhFIVhFAaB70upSEFK6WnlexpAEPhRGHqeJ6WIpn6SJJM4jnxfSBlFcTKbAZjOZscH8zAOpSBnjTVVkaXr2+Vq8ezZi0ZribRWUiqGq6uyMjU7G3i+r/Xjdx43sXmiKJrv7XlKSSnIUlmVDL5erzdpmuXZerMBsHd4eF2XQoi6rleLpTHm6ORku9lkWVob18TsBkl2rIQK/MBZZypT1bmndBAEy+trAPcevbV/eHx1ceF5/jQISUgdhJv1KgnjuixR1wB8P0iSiVIK7LTnoYnu5xw7A6CxLo+SxDkH5slsVuSFret1WQgiAWKQEgqA5weSpCMXhfHp0SHBEWG2N/2d3/kAwgKI42AaBevLy7eePIazRCL29NbUx/v3nj17cfHyOYCvff2rQlCZF2m+LaosiH1Tl59+9OG9+w9JKaE0AFLaC4JtVmRpqrUniJy1Snu+51VlkWcZAMl2k279Wj168KAqy6IoijyXQhRFGUQKADvnhxGTCMOYpKe0J6U0ZSnBznFjHx34gSOhpTAGZV3Vxrz7zpPT03uffPJxXVd/8Cf+IIBktv/Jp5997zd/62/83N/2/TbWU5al84OD2pi9+QyAVlpr/c1v/PjRwQGRmM/2fN9bLBZXN9dHB4cAtOfni4XDJvJ9rZUfxgFRXhb37z2Mk5kXxAC+9v43b5fLj3/4sedHz1+8JKG0H54//eTZ8xez2dzzfQBFWRVFCVBdVc2SJaVkpqoqoyis6hLAYnnteV4SR2VVlmUZBoGzbrPdTJPJJEkA5FmeZTkAKURlrVYKjMvrG+dc4Ad5ngPIi0JK6YzLyyIMgtCfVVVVO/aaOOxVBUBJBabVZiOEsAJEDiyEoDwv16vNJJkCMNZVVaW0VkpZ68qq9rQHwBjTrOPGmLIsGfB8nRe5qU0cR865LM98z7t3dgrAVNXzF8/v37u3N9+7Xa/unZzMp7NnX7zYbLZnxydlXQPI80J5XhCGaV5kedZAg/T2Nori1WLx2dOnALTyiqJgOOssGCTAzr3z5MlkkpzdOwEwm06llJss35vOsjwry4KtTbOMGUkcB54EcLlYOOuIpHWsPc/ztJTSMiZJUpu6rmoA2vOyNMvyfD6b1eyoUV8XJADnXLOmWWvr2gGtWmXLKTEYTpIEIFsGkbWnIr/RqOcqT6WQBG7WaCXI09IaU5SFqG1ZV5WplZSB75VldbveAMjzXAhigEHOsnXsHJqoNFyIJvq2Y2vZSiFBqJ2xzjYHov0ex+yEY9ceT0WuWcmdEKSlFGzRbDGu8nJjeZvFk0QHgR94prbGGBJ6dyd8re3E7o7ZSlI9mGvIs+5X0ct2tINmdhOPoC83oRPvhptkBqSQnRl3b4nSw9gODg0IowcjhLHA1J1ACyEBviP1MBiuC87TGIh3B6QD1hiDRmoQThsNlUT7W9sj3Xku+g13qDGjA0k9YGXG4GaMB+Mb7ndqagSqXWDU7ttNyPK2BYZ+2hEme8DW8b3jTmhDgzK3QjF37fomvUlv0pv0+yypsRS8G1R7576x5D/sUd1S2D086KRRR6F09MXOSjvSfxn56Osz73dq7raO7m1DJv3e0HM2NL6lvW2o15jwG5EbXb2IRpaOPbE62kNo929nTTlm1rptcqQD2DfUUDAGxmTcHfcur76wbS90lqk0ukTUXuaOY+V+7+4zf5Ui2OEemobbGQa7dR1YiF2+aUwzjZ8f7+UAmO1IOw+Dnlk7UDqXMjsoZPz+3z1q+k4aIazOl8BuH9Kd21/hXXu+bMxtjnVsB/XTfojsGnjcmTmvkEg7IwGvIMhBt5HvPDbKqRkJoikNA2iOy10f5703Yh44pJHFC/VkXQ9feGdUfIn+YTvDxmsB96vGKF4i9wGy7p5zNAYow1owTPnd+4aWGDdOX6g7k7H7ONxKjt2AdRlu3LMdMB5WLyLqfCZ1GdyBj+Pg0cM8IIwf6UrZ8Zt3up6ZSRActwHERm1Dd6bca1t/d0lq507fC6+7beRfqb80tMDoSnsCs7NuNGt4U2BidkwQ43zcMGaG7ruzyPLQGtz3QN/nTXZ9HMpuvWa2relde11g2Jr4rveJviLGmjhOptPZzeoGYGvd9eKm8UDH5Jidsaasysrauq6TJIEQUuqyqpsSmcpkm8KxVUpZY+qqUYqEJGFqY20FQAqhtceNuouQ1prG/SUAT3uGLQALo2TQ7G9a65Z2dEIK1TSNs63Oi2MYY6WQJATAjp0QoudrlZLMzeJCvap+12+i79Zm9XSNRkVjwSfIdZZrBCIhlun1H3j8zTxNn3768SScK63AcKYEYJ3LbeosaaWVVqSEp7XvT8MomiazIPEBRGEQR0GglRCklPKVllLFk1ArlcTh/n
a24paRGS5hhLGk1EQ+p7ntVpRlmVBEGqtBFGaJWHgA7DWSilVFUVeK00VkKGFlGmSApBSrqyuOFBRWiGVELLIi7Qs8qIoiryiawSYmMui0IzV1dWt1ZXXnn++3e3AmX/3r/1K0GoD8ILIOTs4PiR2k4014xxLBeDOjU+rtCTb53aYrTElCSZwlhXkh3E8KS1bRpUBuSyMLUtP+1me18QbzrVbrTRLT/onk0kMoBX61tk4ToSQQRAsLy9de+p6Fo9v3bl7eHRYnbGurm/0+/3lpaUsz8ePHpmiePTwwauvvvbsy69UC92tGzf6g0EQtkprJ6OhMdbzPKW944N9T6vF5WUAD+/txuNRu9ezhTVl4Qf6mZdeSibj8WR0/drTx8d/CODu3TtL3YWjk75QqjR2EsdSqoqlJMuKaina2tzIi/zcxlpR5McnJ6UpVlZWHu3tbWxtrS4sAjDx5PGDB0cH+yvra4LE3Rs3sjQhojSO+ycnL33lawC09uI4sdaVRdnpdJhdlmUOVmullc+2BLCxtr6+vBpnw/3jo53tTWYkSZImydLSIoiyogCw6XkXrl175913A62XVlY/+uGP1jY3Pvjw+yvLy/F4COD85cvdVvsgG1hjuV5G2Pe8vMiKslSyyqwt263IVjQbzGurK+l4bJ3tLfZ2Vpbj8QTASb//wte//snHH22ury92u2/tHxwfHJYFbt66xcDlK1cBvPuDt770ypckwZUlWcmOe70l39dpkWuOARTD4y9fXWYmJaT2vBWpAIoCP82TuDChHwBwHnfaHWvKvYOD1eUVY6wUoiiL0hghZLvVATAajq1jJZVjdtaRrKJWYaxJM/Z9D0BZlnkhrbHnds6vrKxPJjFAznGWZV968UUAu/cfbm9vHR0fryyvOHZlUZRlWaRZ5AdLXfH4eAxAgn2JtW67FbYLk3lCgESv1YHslMY8mADAlpo8c+Hy+c1z94/ijx88JNmS2hdAkRWhhMsSAPEwWd8IZehHUXtn2R9MDtphF0CWxmG7B6AsjRLKWDCzp6TWTkkW5LQUnXarSKtMdwUck+Fnd86/evWq8Ls3DpN7h8PF7uLx4VFa5cl2eRCq1fU1EG0v965sLsUp5+XYsHHkuUoxg5NSOMFwFeU4VzS00712HkOs/543Hhp1pvlqetnRzEqq9c7TRsgXG2iVwle7UzAgCMwzQuz6VHxmodSqq5vBm80/NccPVVkLG4tlBpxOVc2aIqmGDHmuXg0VpKhA0qn12WhiDTiIpoLUqKBTGLOJ3Z4VWRUwZ2M1aiVmGuu0P6Za3/RP1JpmY5HVdWRmgmSHmhyTKzcfNETzOJMzOZMz+YmTxjtyZuXX9l4Nl8wyuNT4XfVPtSrXB1kzlImaNXW29J/C8OaAhCkcyQDcjLljjsuEp79b7Q0VOlPR7VV4qDWz35n/lVPyJNo2k7kL1QblqE6XPPXVIldRBT9ZdLONPZEKhn9MHZ54apb14v+J/Fu2mgZhnbt59omrgZqHy/BFN85KI5oRJnJj99dsKHNZYbg+JGRmRzWUNvUQdBX0R6SI2DX5Q+opM6NKme7dmPvQsEvX1QcLQACSIIVz7AgsBJFAReVi2YIMSCulAAWGY2esywozycume6ZJeedgx0pxmYHSs68a++3zs+ZzXTdz/GzeBFTnsdV5JwFwYDHH+jjDwglA5a5bnUA3cBqd+gUGRINRNjjt6RGrZ+30/ZlN4GkjmEg0qlOlSjX9MoOWG2iy0gq5yksAIiFQsoMrtSdDp7M0d0I4Bp9Cd2u/ZyGElCQlnHN166ryXXNYPZuuUwDPzK0Zp94iEg5EENwQ3DbJgWZzcTYSp8ZmiujNdZdWuj7DYMfsAEcMOBJCWnaOnSRJxJZrBFCQsM5RBeXOtOH5pWmKkVWLZ72EOnZiClMKEEgKIQSzcNJXnCeSZah0RR0YkvKlLopUKk3Ncfgc1jkP4s1g0Dob9VxXNb1a35yURgsJrbi0WmsAQgm2DnnBScZKyyB49N5H33vz1vlxfOH6uTAIAMDFaZpqRVJqC5Je+Md/9M67P7j/8z9z+XD/4Pd/900Av/lbq4uryy4bFoVRZE2WGDAx0qKouqbU2ve8jJua03RHwXwl62Wg9oysZ+Xnl6Tp8DYzYGYPoAEnDw4OEpMnJs/y3BalAJEQrh5Hss4xOyGkkFIpJYXMiyJNY6VUlTQDzGEYjCeTbjeJ41j5OvT80A80CaVVhdRkeV6AJNBdWbNCfPqDt/rjUV4UYRh+/9t/+rf+478D4OjgQAgppDKlAbPSajQcPvPCs91WkCUTACuLvQsXtydxHMfxtWvXZBSOBgMi8c5b76wsr1y+/hQAKVWe5VmS1S4gQhBJkHTWWWuErjI/ZMzuztufjSdjZttqRXk8/NrXfu7+rdsCLJUEYGzZ6XYghbOu02oZm2rlHfeN7wXsJADnaGV5QSvd8mSSJE4WfugHfsta69KJ48o1D86aMIz8IBiPx6YsbV5oqQpjLBycA/Duj95/6cXnnr56ZWNt9frltd1+tr209NWf/WmT5s9cubI3GQEgYT0ZARwFvrVWayXAhbVgfubqpWqY/vzNt6OoNZlMDrOMwFpr64qjk5Ow3Z6MxgDSrChyl8QJJ7mSFFpbGrPQ6xrrlJKf3bkDYGt93YEfP3jA4L946y1j7fra2p3796SS3XZrMhkDaHc6ztmyLPMsFVIpImsNA9ZxxWyoPS/N0jt37zKJpy7sSCXB8MOAnRuNRq0wrOaMkBLOQUih1IOHD1ut6Orly5PJuChKrRWAB48eGWO9qGWMXYBaWtsIozAvcj/xs7KoZnCn2/W01kGglM6LkfZ0r9cry8LasvL31EolkzEBokofT8QkhCClVJZm1bqUpZmZJNLzDcMYV5TlwsqmMVYqzSSsKQEkk4lhSAiRZwGwvLzsXPfk6PD3fvd3w3bnmedeALD7wQd5ka8uLgaBf9zvp/EEIAjhLIdRBMAPw/FwuHP+ApOM40R5XuEqSkBXlKbRDFCWZZbmrShyzh0dH6dpFvi+EML3fVVNTmOIyMH5vg/wvbu3RyeHa2urrSji5eUwjAAUeba+tlaV3oqi3sbGeNh3zMOTk8pv1AuCvCjSrGh1OtVhUp7naTwhYNTvB60WgCQehmGYJ3GWxFqri1cuKyX27t+/9sKLD+7cqrDyXrs9GKfWWNKCQd1er9Vqj4YnDHLO9XrdqsKB71+6dr3b7b375p8X1rRa7TXHyg+qdWZ9+9zR/uM0jbXnS8Ldm58tr210Fxbe+sM/vnn79vOv/QwA3/M97RGQJMnSwqKUYjyZRFHoaV0U5cHhEYDVxcWttfV2FD3c28/z3BgjhOwfH62vrzlnxxWYG4Rpkv7w7R+8/NLzj+7euXP79i8+93ySZud2No8P9gGcv3S91e7Yg+Nq/nierjb2VhC0w0B7CkBp8h++//7W2loritI0Ndakeb7ValnrtOfvf3YTQBAECwsLBweH169e/ejtt+48uL8UhoeHh455bXX13u49ANeuXF3b2iqSmJ0NPQKEhF5dWnq/LKo4GF
95WSGsydmBrTsZjpyzfuAJUjKgNIkBEIS19v6jB2VptNJpmgdBkKQZM+d5vtDtATg4PDRZTiTYWaVlWZrKKUxrzcxKKQBJmvq+v7a61ust7t7fTbOsPxgsLy7t7e1dv3YNwBtvvXlZX/A8j9lppYQUzFhdXTfpuNVVRgYADu702dmlTlsIbYUxpdEBa60C34tda2f9KoCTO3spu8/uH22vr9/vT0rrG0hyhZBss7y3ugbg5VdeLMaj23dv3Lx359zaxSLzJnGqpMiyQuUZgMToThQZ69I0174oJkUU6R5AziSTcRIXAAYnJ45dyi5qhyuLS4XTJ5/cs3m6sxT2D4sq4RjDCImo1bHlJNSt5e7ipQ3Oy/JwwrlzsGW12kvoApYqJblWLcTUIjgVUjYD5KaWHYCpX+Bsk57Sf89pIoRTMVNPSK0s1krB6ftoZhpVcUA0xemkkM45V53fT4NpnGM4JgliJkc1ukmEyqhs1ASe0xUAYOpqWenG1SF87eAwPWefU+3rT846JhayrnhVcrWGsJvSRTY6GBFNrdtTRlcNMD4Z1VcpjA0eefqIvRkIwFlHQrCzVX3ZOSEFM0slZwFnZ3ImZ3ImP1GinOGKs3c+kxehSsPAoBpBqA5+plAUEayzNFtyGxSBpodbp4Sac58qS0r1ANfZrwGgDt8FpmGVFTRBFdhUl1IVUjlQESy7udX8CWchanAxnv5Tt0/MXa8xkLrtLOqcwA2+U8FeAJpUITMsYCYN0jcFtWZHWHO3NlsMpuge4dTGW93rpgb8tL4zyGY+lKKBmebjiR0zuEpGBFE7UdTOFHB1aHAVVsqMBmXl2pdvbtSIG72E6mQ3gCC4KnHLHF4rapRs2qg5NLl2ZbPO8hSiE/U23Ayx5drZY9p7xACkUMYUlV7hbBEo4QkWbCQclaUSUFpLSQxnYQEQk6bQcaViFJUbnZDkbLa3twsAwgki51hAAFwlRHbsiBwzptG70+hP57jCygio81ZPpxGzZZZSEtgyE1WdAOeY6jAfgOFs3UlVpyrSQkwttekQigrP4pn+0zjs1u+SqLU6gpDEQGmttcaTjZ9gjV0JZqryeUgEVVJ3N4v0FQQFkASIYayrEpoz1Q137GQ9v8mBuM4ZyACsq+akFUr6QpZWMJPydCjFJC3AjlCTe1tjpCSttZQkBDGzMwx27JjZkiQAQgvBwlpr2TlXu8ZxpRg2XpbVB6qgV5KiRp2reJd6LRKSAEghKnyzeTfmpVKyiZndlBQcLITg0srqCIbIVemuUQjBcE4JpaRksGNDzVA7klIKa6vwdlu/qTQFfSGmWj1EVfcaYRP1OwByAtVDlquFLmcI35o8N2nthEtcWhuQR0JUk8E1yyDNgbaoopunoT/1qjX/6mJ6GYCGEySdyQmSyQJQDpEFg5zTedjGKN/bfbzSVragstVRgyMARmgFbUuSUnFmrVc4my+2vcfjTETdw+MRgINPfxQu/TTnLlQEJqHkShB9kgzUwkZaGACRI3ZWkaIm3ohmYOl0uWgsHm7Gt56Y8+tcHbxdtdcyS8A5Z60TnpJVqLsxpshGw6Gndd/YwpVSyXY7KoosCkMAxrkw0DYzmTGtIDBFqb3A116c5ZPJpAovD0O/Px4r34egMAp95Ukp/SDKsixqdZbX1wEUea60srYIw7B/0t9/vP/Ssy/u7T2UG+vDyegb//oPAEjlfe1nfuYPv/GNk4OjUGtnylavs9D2paCKzOHi+e12GLQCbYo8N6Ub25OT/vLCwtLSYj/NJsYC4OGIQJ4XGGPiyWBlY1sHUTKJHaC8oPKoLdLY7/be/9H7K0uru3dub64vDYYPr1y6lCWpioKSFABXlsyUxkMHaE+T5e2N5a995WeNNXsHewA8Dx6Rc8VgLIWQYIzigt1gZWXLlkVLKwBlHpdkyVOWZFHaLMt9P8iyvHTDeJJcv3oNwF+88cYkz19/9WUnPGOCH7z93b2Lg3/0j/7hP/wH/93JqK8UAHg6XO71wsC3tuwPTrKiYGvzIl9eXLh85QoAa+z49r2NhcXd3YdLiz0GCmuW11YF89MvvNBrtwEcPHwslA6jIC+NMRiORo5wbudcp90uC/N4/xDAxfMX904O/uTb3yosmzxndu1eRz6E9pSEf/fubQBSSufc9StXijyXSnbaK1p7ve5CMpl8+tkNAFs758dxvLi0AqLdx/vGWiXlcGCkkOe2N01RAMiZARdEYZ5nXqDC0K9cjbLC9CfjlV4HwKef3ugPh9aUuS2TdBIl49yZLMuV8rXhChvdWt887h+XAvFkNByPwPapZ58+2Hv02c3Pqujdc9ubvV43DMP+yUleFASy1oRhpJzVnh8nKYAkjleWV5T2WMjSOeX5XhB2FxcGJ4Pj41FpDIDBJIkCX2iBbndv0C9c2Qr9Trfz7/3Gr7cXltK8APDi888cHh62PG8wGm6urUTaO0wSSfJv/OZvHjx+BODN7353ZX393sO95dU1GUTsGEWRxymkBwsv8ACIQI5H48O9gyzPBBFXyYuYnbXT06Eg9LMs85TWUrBzxtjdh48gla9Er9OuOR/yrEIYO912HMeP9h51Wq3jQb/IspOTPoAsz65cvpwmCUEZY4TUyXiyd3Sy0vGFpKJMAUhjdRDYyXCUjheDhVaoDw4Plhc6kVIfPnjUXlwBgP1hWYzLLItH43anDWfHo35R5FYJy/bSxfMANtfXksk4TZPDw/20LPOyDFq89/jh8kLn5de+DOC3//n/xd57PVt23Wdi3wo775NvDp0jGgABAkwSQZCUBVHBo2DWWB6Xxy4/TJXfXU5/hZ5c9oPKuWbKMx6VTEnWSLJIgKTAhECA6Ebnvh1uPPnsvFfwww7n3NsNyo9SVa+qe8+9Z6eV9lrf7/ul/3Vte+uNN95oO9bO7j7hRre7NJjMfvD++2tLy4AEsLy6/OmdO71ub29vV1CthGKEWgaHElJkrmsDCOJgf3Dg+l4cxk/2DrTIt1eXOTeyKDRt+3QRbaBp37l3/8adh99662sffHTTMk2lQYhaW+0WaaxmYbC9vf7BRx8rQhzLMBhTSnJGG55LGZtOJgAowWQ6m06GtsEarY6VEinzld6SpCTT/O0f/RSAa9mSGnfu3v3P//l/9GhvMtofou0lieaUg+oPPvwQwB/+wR/EUdhwvcmgD9vWlFsmPXX+Yue9H9tZAsBvd3OZC6JNJiKZEMMCmEEdTbXFWTCbAfAdO4jDBzs73aVelqdxnvXc9jQIipRNo+kEgOc3CGOUUiUzBsooVUoTUBCqpCj899faLcJIr901DXMQxS2/+fEn17/+xq9ESeg1mgCU0owbly5eHgyOwjAigMhzbvCbd/Y7S0s9zwCwMxmsNe1M5ak4dAzbsh0KHcRpx09nM2xsrAHY2Wlwzj/49PrjvSHPRKgMznMQbTIv1Gk83Qdw5cxX9g6Pbt25ce9g2nsYXXDlLOMep2E0aehNAE3OFJJIM89xQGGxNM9pkEz2x3duP378q699GcAonN5/cvfxZNjxWhCm0XQZpUkcSQa3vZxNBgCS2bB35
uqGZ/d3DwRNtJwtryyNgjSID2zmxIoCCAlRWhJJNSVgoIW3UxkYp8QSWteWA6gyQxJCSC4FLc0HawGqhKZE0kXNMiq9NHTlUYdaAliUukp4UzykIPwUIXQuoMnKGqaQsLQopB1WU6IFcKAERGpBQKu0h2UzyssJtFaFY36B3wqEqWvbxlLi0QQKlBUsKaWUEqYraKm0IkXgJa2K1pYSEtUACu010aUvTYFiiutAFBgWVO8EpR2AVqoS38gJ+4NC8NGFMFlIoJUKXQIgDBoKjJSCA9VKSRBCCxT/3DzyeXlenpd/hIWjtHb8ZUtYLezXehvUnCJQMY1FmetzjlnCL9wMi6fjqf+qGzydfLkWUeeszcLlJxwLnvZkfEZd6vroSi31jKv0/I9fatO4sKHoBZK23omro5/R03oheMlnVGCxAfqzziHzllSmRsdJW0KgFFmQ+H9ZoxbdHKoGlmDixBA+q+d0eWTO4h5Lx17/ddxKD9CK0pIHpYxyRjhRvIxLqCuKqmTDiquUlgCrqLwqUV1tbVjz2zV1qwFNFFC5oNfztZrNJ+f3nCWktU0XWQzBU7PGWHjkvHdqfrdu+Nya8tgt5r9lOBjGAAAgAElEQVQYo1orpZUocRQIIYyxKp22KvGgJoTQqtUc0KhCOlacd8FWaqAylC3DENRVLe2US56tGsyC7lFSKaU0oBVKmpsu6I2LMymllDJK6kk8vxGhNcxacIIuO0WXmu6T/UgoLWLxnDAUXajdM9+kky9LYXH6DAPqY6+GKkwzKCVlWLbCprVEo5W64pj+oPzUIAurZ2nNRgBVJa+sXkStUKmu552EcpIXfUMKrnuhO8j8BfxlL+nx59fqAVRLynG9iIaG5GZ0//DJo9mNoQhAHZEYKs8tGwAXknAiZA7klIBxRigfhgmL5HLTbTcbAP70J/f+k3NXVlYcGc9AFCWas5JaL98OoktB8Sme+FkNObYsFiNQr6Rls4o1lSippNKKMpZkacFfMkKTVHh+k5oGMw1TU9d311eWozgunuU6tsE5pzTN8jTLbMsCyP7hwaltp9PpFFZsJmMHR/sm50kUM0qoYYk8j8IAGowz23EAWJadJrFQPI6iTz76kDGWpcnNWzfPnTnDCW7cvAHgrbd+88qLL//oZz8d9wdaqyCMGr3OZDoD0dcuXQBw6tT2zVs3tzfW0iQdHB1Zrvfw4aNkFkop9/f2uGECsBrk6KBvGJbtuI7fIoRqpbIso4xJKdI0AeB5rtJaKdVsdzjjbqMJLV969fNREru2WXAfm5sbnBGqtaYUKoPWUqgsnxmmPRhMAHDOxrMhYUaWqqN+f21t3XFtSjSVuaZU5DkAkWVKasa41ipLU6U1N02tAMseDIfXrl4F8POPP1pfXcmFuHf33rJnfvftH74yib/0pS/e333SbjZnwRSA7zV9xwrDiWXx4XjIKLRSnmsttRvD4RjAYDQ2OI+T1LFt27TCJKaEQCshZBSENjcACClNwwiCGbdsw7KEEJTQIAw1SBhG50+fBjCbTYMgfOsbXxeaPtl5+JWvvvG9d74npczC0GT84OgIAGe8Pxx+4fXXuWFopWeT8WQaGaZpGOb+wQEARikBcW2LGyZR4sHDh71uV2uV6UxL4TgOANf3syRN0kQpFUymShfpzrMwjtqdrsgzAFJp0zQt2zHixLQcblhSaRAym01Flkkli1Xg3oMHrmNfOL3t+55h8E8//ti2zU6nNRwVPTPyPZcZ3HYcKQQzuBRUScEsN8tzXehrtX6yu9vtLdmuB8rSJDXtRhFTQ0hVpNZtt7uUEIOzyWyq8xRZfHdwdOXSxfF4dP36DcfzARApJKWXLl+5/+ABp3RpdS1L0yCYeb6fhAGAGzeuv8zYysaGyFLTMA77h9QwpVKGwWzHLILhijz3XQ/LK67jFCZshXxeiPRSSgBxkhBCChtP23Z834uiMBe5QQ1U2qkHDx4kSbq5uaGUkkot97rFHuW6bkGoSSFGw2Ge567TzGUmpQiCoNloci6n42lnZRmA43pBHAXT2dnz58PZJM3zBzdufPXrbw6G41uf3ljZvgig1Wg+2nlomVaeiyRJbdeO48j13Nl0EkbR5156BcBX3nzznb/5S8dxb9+4zky+srJCCHb39j738ksFR3Pn/oNrr3zO89zJLHh8//7G1vbqxsY7P/y7PBcrvSUtJYDLV17c2XlysLfvuK5j24QWYVV0kqa2aVw6dxbA2urK/Qf3t+2tay9c+cXNmw8fPpyMx2cunGOGqZKkAD/c4PFosr62unHq1He/997Fy5cf37/baTVNy8qzDABotnXqNKWglDDOlVauaUPLNMukLDfiXMpWw9cQtsENgyspkjTVhFJuEsoePHgAYLnbGRwd5HnW6nQ+uX7XcR2lVC7E2XPnBv3+p3fuALB9Xyg9mYyVVrkQmRJC087S0mqnLccJgKvbK0k0FlmW5InbdJEpwyB5njw+3A/SdKW3DKDZaMRZ5nkuo6w/GPiNllRSSgmNtZVVx3YA5ELmU7G+tpZm2XQyCYKQEGI7rm1ZQZhHcQTg2tUXbt67JaWwbYcx49v/5Pf/+z/+H3b39i+cP1cMU6vZnEwn51dXojDodDpZkh72j8JgurW5YXt+HgUANlZWVn0jiCa5zohQ43TWMK1ZLsPZuNHZ3t9/DOBwNP7Zx+wr164No/eeTKbMIyLlJs+kzFuOOR4NAfz1uz+0HX7h4vmdJ3sXp3G21FVpathuLrQWOQDierNwTKnFGUtykYqQcaPlusvt7SAh00kKIIxmD/aOOt2Gy+k4Ti6vnfvw9g6RCSTrNOwoJAAOI3l2ZTnLhcktTqljNRjIetc/GE/DRBpUAbCA1OCJLjx8T+y/FThQCoRQUgSnJaVyXSt6PDqMrgUnXf8mdbx8UoPOSqQiCz8Lj6xC05ShWj5DaJvjgGdZXBIUALh87ELQxsWLi4dXl5emiQuxHueCAq1km+omcwFQ1+i/EhAWg2FVpCqpgVelB6/lm/oxWkNpaErYM/DQgllCBQDJ/H+U7onACahYxz76pVDxeXlenpfn5R9k4cDfI+kWpSJj5skaqsi9OL6g1yeWMuaxCBx64X6Vvqj0jl6Q86s97TMrc5yZeDa991n06qIdVcmNLS7fJYF1wtJy8ZvP4AHrdtZk7LNOK7bOY/tdZSpKFihWffK6Y3WbHyLzIye+LMy05lvY/GQCgFKK2hy1IhrJyaZUvFpNiJwYkSq4y2fymieZpDknVfM6eoETK7ZcSqBUXqYrATjRHJpoQYnUSoCCUkJBtIJWtQ+FUloVZnRKg1Y9VXsSF120yNJooDDP1VX31RFsjsd0qfFKjao0BSuIzgJ/VQat9fDNsUf9/+L4VDfVFUtZ9838uYsDqzUUoBSqKD9UVSRNwTdrQlFGKtSktMgjcxBWDT0rgh+qYtgIraBi0YbqVSbVhCzmCQFAJIRSGlAlI0q1qkCoLowcwcscIWVkzGJmEBCALUxKorXUKLP3oLTArkBV3WJCCkPawnD2xByqC6kmzbxbTxzEfNzmY1EZNtah
JwBNIQmIUpIQRgkooeW8UoU3ekVYlw1ZePGIVuVA1ni7nOT120Uq9/iaFlTQhekm0YpVw6SJVgRFQAo9h6InRIACns5zDWEBeRMset+XI1s4+JPqNSPlDRTP8j/5k383HuetXtux7aVWRyhBVQyAwAE0pVA6A2GgxDCswSya7AyPGknTYgAO+qN+mC1bbY0MyKG1ZVAKBV3Z3kJrLQuqdlHJcnwpqPtz/q0uN405yi9MHQr5gTJNGBMEpmXHoSBFND3KTZfYnDLOMimYwV3XXV5ZCYJZEW2AEwKCw/7RLAyZaVKp8jxb6vYuXboCJX/47vcBQOvTW1uu6ygpGu0OJVQpKWXpAZDGMQDLshhjiujhYLjz+NHa2sZgNBiMR29ufXUyGVFWWDDIMBgvddtHTW9weLS0vLSxvS1F+sVXXtraXAcghFhZXjZN07RMwzQpIYTSdqsJbjwZjrMkAbC6th6HsWnbtuspiWAWaEKVVNyw5l1I6WQ0Ho3HSZq//uqrWRIfHe1+6eyV3/rNX//On35ndW0dgOVYlFOT0lwpDmUZrD8cfXzrw9//jW+trvYAKM1m8XipsxxF8aPdB5RpDUUIGx31FSW8eJ0JMUwjSxOtSZ7nhNAsy0zTevjwQZbl26fOAPiPv/1tKUUqs+FgsHX+hVcvX3v16rXZNAE1HMuGEABc006TxLbMXqvVazaOhkNovdJt7x72FT0C0PQbtmU2Ws0z25uPdvcIJZRRRgghJIlj4XsAXM/dfbJHKQUhwWya52I8mZw/d/YnP3t/Y31ja30DwCwM90f9je3T+/v7URJprWWeUxDTtpI4LjrPYswwDBDiNRpxGLa7vUePd6UUBjd7vS4A07QY557fsGxbyRzQrXYnTWK/0TA4GxwdAkiiOIljDS2kpJQudbu5VLMg3Hn0+NqLXcpNAJrQKE5H/UEUhFmW5VLZvsdNi/KkYTuqsv8ZT6ZpmvzZzZu/9tWvZEqNhsPtrXUp5KmNdQCu7x3u7ydp6tq267lCyDRPkjhJ05Qy3mo2USToME3HthzXTXJhOjbjhlRg3OCOJaczAFJEhm17rdZ0NrUs23FMCuX4DbfVZqZT2Ph0O23FOKX09LlzURQVsVGSNNt7uKOkBPC7f/AHhDJKmeO6SinPc8Mk9/2m2+oqQpIkBiBz0Wy1O50ly7Jsy07SNIriwsyqyFAEoNVc2j88NLjBKA2CWZ5njmNDa2jl+Y0oDAGcPXOGMpal6XQ2azabRCONo5zEhJDlXg/Av/ebvxVMp7evf5JmanVtff/gqNft5nkmCaRSpmUC+Npbv/mDv/yzMJjZpmk0W/fu72xsbORCRlFkGKYSAsDmxvr1658YpsESmiSx49kEEFm2f7D/4tUrhav7/uOHe7u7f/fO9zrdjlTy9u1bju1srK8vLS//9Xf+bwD/9A//6dUXXxru7zdXloej0eWr1/r7+2EY+p7v+Y2Cn/KbrbOnzo4HwzzLOOcN34uTUAihlcpFVqlRVbvZOjzq/2f/4l+k/8f/TpSKwjBLUtu2MyHGoykAwvjg6Oj0mdNJmos8e+HVV//HP/qjTm9N5aKwL4ui6PLVay9cuvTO/qFhmY5jM0Zcx4cS49m04dgo3FCkiJPUMvhkFkDLLBe9pZ7tOHEc37x9G8C13/5WEseu67gNfzKbNhu+kmmWodvpfvTxR9ubmwCU1vfu3m55rmGYRAWKUCp1nqeMMd9zAYymgWdSm9lhLjW4kirPsiRNKGFZkhdcuWkauZKcc25wzo1upzMeT0Senzl1Skp9++4dAOfOnhNCvPuTdz//yuellJZlGoYplUrSJMuzixsXAOzu706n02++8c13f/zDD3/+wVvf+MZ/91/+V//q3/7rU6e2x4M+gKVe99Hjx2cvnA/DkDEq8iwKQ3t7O00SAjRabQDf+o232vmU5YHU6TSiYRK3DUo0VSoDc/7n/+VvAWTxkE8nHZ51LHs227Opk2dZohII6fvtPNUAHt+/c3azE3m967cHS5Z56dSqgYRxI5M6FxIAUWQ6S1ptn2n16OGdvfG42VxScAh0kk4e7IUA2g222nWDRI6z0DRME8Q3yQypBdO30PFMAH2Gnu/E4azZ9FKZu5abR8O2ay413VwmWicANDECJRVKJ6UTWzKwGHK79gKppL0K0VQK5ZIp01qjdAlRJxLh/H2iHAUATerMASXaO2nctyAMVcLlcQvAggSkhJBj0KzGfqSy/KxZxMJPXJVtBABdqfgLKrB2x6mDXJVMaB3bvZKRarBV85MFzKOogFido2+hQQRFpMyTI1CJJyhS98xBcd3r1YMWjX2Ol8+Wm5+X5+V5eV7+4Rb+9xyv2YCFAHtlqRM3FCdUy3YhUZ7YVqr95sT+tKDwKYmT2hhI1884aYe3sBRXrM4zKl5J9E+t1jX/Q46f+oyr51VdEP+PV0Av0HgnbBs/Q0n1WbuFrmmE4yZ9zzhxkZE8fh6pk9WWW/dC/WoYUQXCn7dyQXv3GdWdczi1JrSuy2J4l/qKOZ93ktmr6lWdqxdOp+WIS8YK1wNwojnRSuaEaqVywkxKKCWQSimlanJJa1AUISp1nXCaonAogdLVhCTl0zWgUWZSXhzMqj/Iie8WSWxGi/QmZb4YpbUG2MKkPdabhIBAKVVFMagOapR56581I4qrZe09ToguXeiJKloGoPSFoYUfNoDyYP3U4qSqj5QG0QUxRcpalsLPvOoVbVg2g9KipiVrX4RJkFIW8kPRmIKqo5Ryg1FahpZHTfOWr1D5iHl7qre9iBquj9ebkCpxzNxg+OT0LBeUxcVgYX6RY9+R+UeJFAFCivgDRb1QmiXqekpj/kLPJ24NBJ/WV1Snz6E+WThc8rPlS6OrTtVFgNIynVEdS4jUnXRsMVxo0zFG8njrSf3nXLaY92IZM4A4XtszTduMwYU0JfeJ6XPCAWhIEKIUoCjRDJSblpkp5fqdmcwtagDwfU8xTpQmUgiZgsA2CS0iRRQRAKAVVGV4v7gELPQaKVJbLiRNr5YRXS2C9ftZHtKaMuo3G1GagRX8H7RW1DCUzIkC48w0uWUak8lY5JlpGAAyIQ3DSNI0yzKDUodzzvj21rbMUtv1ikQKk+lEK0VAODcc2zYtOxdyMuwXplvFwCkltJa2641u3240WnGSxElyems7zdInu0+uXr4AQGvBmbm5vvLRxx8Roi3b8DzbMX0N7Dx+DCDN4nbT395aJ5Qahtnqdp2Hj6I4ufTihWEYF3zZ4OhQSiVyIfMsSXLKqNLUa/pJFKdZVrTaa7UO92/HSToYDr/2K18uZKD9Bzd/+5/8zv/1b/7tSrcNwPKcPE/zLM0I9Vw7F5nr2EmWdlrthu8BGAwTk5mdVifPMiGyg6OjYDxqrW8bhi2UUCIHYNkOoUxrIqWmjJkmVVIZnrX7cCfNs/39fQBK44NPPmm3GnESz5I0iqK/+H/+8r+4eMF1rTSLpcwBBPHMsJgQxmh8RADD5G3PHU7Gh4M+sxwA7WZrfXXV9/zBaJRlqQQoQZQmWZZnaeK3WwAOnuwqrUSeE61
txxYqPDg6GgeB4zi5EA+fPAHw+MmTME3iMAqm0zAI4mC61Gnv7DwgUhqcF6+V1Nr3vCgMm+0ON0yppGk7Wisp87WNLQDDowNI4jq2llJrTCbTx48eQus8S1ut1nQ6BSDzXGtJGYfWIDQMw6PRiFKWC5FEiS7CwtpOYEZ+o0XAaBTlWa7jZH9vb3dvb2VpiWoFwLSMjbVV0+SthrextXn79h3GqG3ZeZ4Px2MAmRSu61imQSmJgijLM8MwbLtFCUlyUcQJNUyTaBLMgiSXR4NRs91ZWbM0GAyDgZq2CyA+OFBSzSYTpfJYSU51MJvduXWr3W6fv3ptf/cJgA/ff/+Lb37DtswPv//O5sYGGCWUNtvtu598lCYJgDzLlML22XOmwfMkdUwr08xyHEKpVrrUTpkGpzyN0yAI81yYhsEYk1IyRimlxfYxCwJKSJKlk+nkzJnTgMsoIdBZlkRhUAxTkqQg5MmTJ57nxVEkoujSC9eUlDv376WjEYD9x4/yNPN9PxvNhMjjJE6SpNFs6WRqmvboqA/g7/72b0ZHR4eD/iuvvtRst/SDnRdfvHb3/s7hYT+K4nXPBeA2uobBBYSQUinJCCHQYRSuraycP3O2eP3Ho2EQBVcvX7z2+dc+/fjDWz/68e/93u8ND/Y0cPb8eQCK8evv/ezqC5eTLCeMNhv+nd2DzvJyt3vUXV375KOfAyDcESKfziaGYUiRz8KZELnrtKRi7abPOAWwf3C4ub42nIze/ou/2Nvf31hbbfpup7cEQqM4WV5eAmA7bjCbXXrpcwf7g063G4yHN27e+da3LqZJYjg+AEjZ6bVfevHFf/kv/1X7ymXfc7IssyyTgXNOLIMDSJOYEG74PgAhZLvhag3G+Hg4DuPDO/fuA9jY3JhNxwQkS5IkTZMo6w+Puu3V0WgkhNg7OADATXOpsZZFIWUMUH6jpdPMJfblc6c7sgvg/oNbmiDPdcNxmGFYhuSUGoax1O01/KbtOUARaRq5EE3D8FzHtmxCJq7jCiH2D49mkwmA+w/unz51+sWr12bTKaVEQxNGKKFRHHU67W63B2A6G//am792+/bNx0+e9Lq9jz75xYvXrp07c2YwGM5mMwBb26du3r5NCeWUJFGYponvOZSoXIiGY3/u9S8A2G65ev++nh5CJpq4hAkdRlpQ2m4PR+OebwO49tLnLl24YjNMB+Obj45uPDlotJyzZy7Yln3v4w8ppwBgsLW1lddf/+YXLr/88+s/vfdoZ7Pj+46IMhklOQDLI5qwZtMjCZbalm23p7mjlHEwPrq+89A2bACXt9cl2ErXHQdRu9k8HI2mQcgJXNfkebpxdh3AS0vNV85uapWqQsmu8vF4nCur2zCDJM3DEACDpaRWoJwZeBpRoUQ+c9eiGkgQWobrKr6qIE4JVsgCPjlBmz1TqKhdZAqkt4jwShruabA3x6f6xCcATWhdnxpBVXxeKQ3MjRorD626qQBKV+65J81T8u783mUXzAMFVZ1H6xrpGl8X/kkaC5i0hnXlz1Mmn1prDUpZFcAIVX1K3pdSWqZxLZ5TtWHRFf55eV6el+flH1fhxeI8l3mPk0rzMmfmyoB4lflKZZCO+oOUqbifXhYJKTJBkEUW78QzyCJ1eaLUDyEl6aQW7CPJfJ0HCosanFyfn7rlMd5gzrEdO1yzbs++y5zUW6DXjj/2l+0Q893sJJ93kpRcICGrjaz8r779MQ9cPT93/kfNDiyo3k5WiSz8Xnz+olvq/Ia6RCRVtU4QeHPwMh8N8sxOJtCUaErAODGqvMQG0xQKWqK0bKMFq6WU0FC6zG5EtKalorZwN6E1/wWgTMandQ2ZtCKksDqsgrxAE1SJrAuG7GSv1IVqKF3G29GlrzEU5omfFodswa3imKlb1WRUwUwr89hFbFaReoXWWtVGnfUIVdrsEvNooiEWCaiCfFwAllBzFrX8Q2uqIVHNuJoU0yi96hRAKQOhUFpKKaTK0hyUQ2tKaJGDnjJahgCqqE6tahqumnIKdfzTchC1UloVQJdULCEhhFY5dyr3n88YjOODdJz2Oonx5mh3fkbxkGLUFC15Y0F0rStnhdZdHWesF97yObSs37tyEZkrtLUuwhXN2cYqwKQuXZsrQ4T5DCHVW1N/s8A0H7epPb7gPLVoHF9eCAUFoWSiVSIpgUltB0rf3Ln/zfhlMBeAliElDGAaDITlEqbBTE4E5RI6UQCQxGmYS26ZEJybNkAsg7Ia/he+6gtsbl2lxfVWYXFs51LCgoQy7/Wi+lmeZVItraxPDvf1fMSVJKBQSipNVKfRpdB5EispLcsCIBj1XL/dbJqWxU1DZzmhRAghpNTQnXYHAGfMtGyli5xFVBNiu040MzlnlmUVjrdaUsu24yzf3d9vNpsHR0cgOHPq1N7uk/5g4PufA5AnkRL55vpqEM2YwYIonEXBlfMvTYOpa5sAmr43Gk+SJD1/+XIwCz742Xuu46ZZniYptGbcQJkGJLYdL0mSPJOGZZumySiTQog8L4IpR7Owv39wcHDgODY3LSlkb2W9f7D/xlu/8fu/+zsPHzwA4Hm2ziGFMg1zpdMezfoGZ5fPnotjYTs5gGkQJJlmjI6nMyHkZDqLosiyLBWGeZZYlgMgSxLKTcNyKdMiF4QyQinjBggZjsdv//D7xXvCGN3c3Lp+48Y733/nRx9+LPPs9s8/brlenoZLvS6ATMO1rWkY2KYRhEGLNyjjjm2d2tw0LAvALAhG44mUKggjx3YO+n3bMgjnk8k0ieMnDx8C2Huy5/penITctNI0B2WM0R+99/6pze0wihnjAKazYGllSQvx8NGjR7u7UmTc4I5lmo7DGa3VaUmaDgbDNEk831dKnTp//vDJ4zgIinTMlm3NRqM8TW3H8Tz/wrlzrusmcXx4cAClWo0GAKmkFJIQyjk3HSdKU0JoLmWr2UqzvNtuAWg0mtPZzDZtq2djmd27+elgOn60u5vE8SwMZZYCIERvb25qyDu377zypa94vv+9t9+5c+/e5sYqYzYALUWuJGM0TRMhBWVMKglowhihVAgBoOG5Dx/tNput7up6e2nN8jzHbWW5kGDcdnWcoJLQGTeaXsM2GKNwHItTGkdR/2C/2W4DWFtZeXjn1vLySq/b1QDjnHMDQKPVWt/aAhDFmevYUihoUMaElLkQSmtCGbQsHPzTJKE2Y4wBZDKZ+A2/YCGFpAY3ivAIlFHbtkaj0Xg6oZRaJo/CUCnZbPjtXq8ImhkFYRLHF8+fBxAniWeao/7R0spqt9MZj8cAnuzsBEHQ8DyZSymEZZpFlEwArucRSAD7+7vBaOi6ruf7MstMy2KGEQbhwWF/FoacUgC5yDrN5kG/Twhp+D4hJMvSIAg+d+2FVqs5m00AtFuNL77++vJS7yc/eHs8nS73elqKRrNx4cqVw/09AO+88/aLZ05vnzv3N3/7t5ZpxWkqtQqDmd/w0jhutxoArt++320t53kOrQgQhLNet+25TpKh4Xvnz54BsH+wzxjd3tpqNBsAHMc+Go6vMhYnGaWs3WkDSNIkmA
Ury+29R3utduvh7dvNZtPzPMooZQwA1wgHBxsba4bJDctUWtuWZZnmYDTI06QKoKMt0wCQpilhnBuGZVlpkiqlwiAo4nKcPn/hxz/44fraSm95KU2z4WB44/adz7/UGE8mDd9//8MPAZw5f5FoOekfObYpcnDDdJrN6XjYani//vqrAD54Fz/95BMlklkUsywPY9IfHi4j9/wmT1iuFIA0yxijWinLsnIzJZSkaeZ53nQ6G/T7RdyYKAiHw+HpU6fv3r9j2y4IieNIAd1u+/LFK2mWArBtJ4qinUcPsiwzDWM6C67fuHH2zPnhqD8YDQFsnz2bpGmWprZtBWEwDWa2ZY1GA4AZjNmWBeDhnTvNZMTDscEkZyJMxgaoFjRLQ0nRbHAAd/cOdu7tnN1Yaa20vvWlF9mHn2SpyJNJ222cvbC+NxgDOLt16vLpSx7Bl3/ttS9cab/zvf+3ZZuMmwokiDMATUWFxngyM5LYNBumZobEwXg2CuOVbsPmHgDOzKbTnsZh0+t85Yu/EoWzwSTstjtBFGX9Ydf1AVw5vX16Zbk/HWulKMhgMp7NogzCsnjTpeMgAyBFzJnJwDCn20pkUvN6z7KiKI/p6oOgUISX4K5AKhXIrLAmmVs5nNTto74RqSnBIt+LLvw5njq5gpF45p2KCqHAWnXtFu1YTiiIj+GLOeKqBaba9HPhmuPPWxCD5sG7i5CUVdrusoWaVLkTiybUTtWFEcNiIp+54UjJn85taio1ftlzi9ITqaRuXWW6fF6el+flefnHVypn7VrMroXtY5wUSuv1UqBfEC1PEAXlKTjmo1dbPM53tDJS3dMVWjCzW3z84rnVPleTNQVNhWffcL4llSv6s4jWkw+rGkGi1wcAACAASURBVKarH3L8BH3yr4XEcgu9uaBN04vP/gzDyWN3Jic+F3jLxXrqRXfIhZCRJV2Gapc+9kSldD14TzM3ZL4tPlU5vbhJ66rzF2bPCchRbd16sd3V86po0iVHQ4kmUBRgUESD0yKRitCQSkulia6dowurOlXm59GEFsETSw6IlN7H9eZceHvWziS6hD4KoNVOD1WQjNAocrjPc4sXjZv3kNCqjG+t5k1TKDI1L5JnBfLQAFGV7/y8AvVHNS0XaeLiF6XQCkoXdGohZRAQejzCd/UmlCabhXspq9xFSKVXrfi+BWu16m3UavGbMjmNVlVmba0JoxSESpULofJcilwwg4KAkMqhG1pD1VkX66EBmQPcarRQuMkrVWa60ZirqQlI7adddUOVyerpsjBrq4E/frSMkbkAkudkPkEZEaAOs4ki944qPP5RzElOTngBleC3nMq0esMWqlQj9eqr4/QaKXI9F3wcpUDJnpeDVJrRHuM4T7Szvu9nwc/SsYpUcsJihilCwJgjUs+mirH7O7thmJ06uwYwWaQGYkyBgRFKoDVVUjFGLMOQuWQGVUICoEpTg0LmMheKaxOEURDMrX116d61EFkJxz/rrjzRPgB12M35ilMG8KSUU8PwG81GHAXRvsE5ACFypbXDDQ3NKLUMI05iJYvAqw4Aw3YYI+sry67rxSK/dfOW1lorqbSeTafFUmi77ubpM4P+URKFURR5jItcZFlmmp7ne4WmIYliJcXB7v7e3m67u7R/eLDU6/UHgw8/+vDcmVOddhtAHAVS5K986fXG/+b3D4/e+JWvrGxtTWezVrMxC2cACFFRHKdp9oO333nh2jXTNM9dvGhSalim7dhKKgDcMBzX5ZzluTBMUwppWCyJY6k1YzwIIwA8Cj/5xccHR/3z584k4WwsEsLZH/9Pf/xHL3+RGdaj3V0AYZ7kSfDW195cXV+fDsebG22lNKFq/6C/d5gDEIL0Wo00jYiG77hgRsNvEKK1EhQo7PsopYyx0vZVa865ECJJkjiOXn3l8/fv3Aagpbhw/vzK2uqg34+S7JVXXv3yy9fOnb8UTP/P8XT4wuUrAM5fvBBPJt//6Xucm+dOn9lYXXEdJ4nT7Q2HmQaAOE4fPtmz7ZFUstHw4iR2LUMA7upyOJsVpnlCSq/pU84JJYxyUCaU7LZa0yAghC51ugCUUg3Pa7Q6nU7Hd931ra3b9+6ahmlbllSyyH9FKKWU3r5/j4KkWfrNr39dpsl0PPJ8PwimAESWM8NQWuV5lqWJ1lopdeHiRcMwhMiHhwcAoJVpW1Jpy3G4aRmG2bHdG7duplm+tFRarA8mk1wIrRUBvGYniqMwigmhzWbLsW3JKIA4idIsG40GIHj3u3+bZenlC+earUYUhUUcwDTPWo2GEIJS0vB9rVSW55ZpmqZhg2ZpBoBRdvWFq+vbpz/5+JNMKL/VWtmwkzjJFeFCjoZjAGEwg1K9peX1jY0kCqfDo73dx4zRyy9cS+Ko8LO+ePGi4kYchWvrG4Rzz/Nt27ZNM4ym01mZlPze3ccbp87anCdxxA0DJIuiWDOTspK75AYnhHDTYJRKKaQQ1DQd29IaUipGGYDN9Y0gmM2CWRwnYTBTtnX1pZcJQTAZuZ4ncwHgu9/97hdeew2KEsaCIHzYfzAeT7RWlNLidWs1m5zzLMu0pq985Uvvv/9BkiTD/tGpjZW9x7vbZ08DGA0O+3neabcs2771i1+snto+PDgyLWscBFtbmyJLAFBmra6u3HnwwHVt2zQ77dZoMjY4X15aklI6tg3Adp0wmD24e+ftd3/04OGjb7zxq1ka+74fh9HNjz4CYBmG5dhHR4cfvPf+1QsXxqPR7t7ek729JNe2Y65ubgK4fuvuxctX/uzP/5QQrbTwXMd17SiJADUNgiLB1NJSzzaM0Wz66c2bQsrdvYM7D3Ze+8JrluNqSgsLysl4kqRpFk4PDo4cg5uW6zgOJWg2m1kkAHDKZqOj5ZXuF7/wGmNsFgQGZ61WwzLMNImjKATg2pbB+SwMDUaYwYejSaPhc84d30d/tLa6CmBza0tp0ml1DG5nUvU6vd94661Ll1/+q7/4jhDywrlzACzbWllZOXvmFFXy7p2dPMviMNAie/j4yV8N9wBYJDYYH076rs2lVkSzQkHreJ5rkeFsUiz2jPEsy/I8i+LITRNKaZ5lo/HIcz1ZpNUjJAxmjx7vaKWDMHRdx3Ud23GajebuwW4xgc+dPf/w0YMojJZ6PUZIs+Hdunc3TMJWqxkEQbGtmIbBGWOcJUk8C8IkjofDQbfdcW2bMwKg3WngcGyaTEuZxYlrmIoYIIoRJommVKHQ5UTJ9E5yOupsr2999ZVX9/dGTw4Pk/1fML97utsEQLPkz//q7elk8u//2ktXrr6wttLtT4LzkiRCH04CANtn7Ezo0WTWonQ0Prz+eLSyvHY4Hd0/HJ/baLdcH4DMWdNrNT3ryuUXljbP5bP+m192O8trj+/dWestmQCAdtOHJMvtllIkyUWaxgqaQEqhHEYszgHMcmka3JBMVJZ9c9GsxBu6UOYXkSOB0llJKU1RGhvOkWLJSJIqx+DC4af39adkplI9X+R/WRT65hrZuc/MMdzz7LLA0RXuOTUgXFRAV2ecRE0VoVc0mWhSJyhcuBxzdfaik
FffuJC4FNFEFyC6BIclfqzBWpGQcN7sShwuK1XY0ajSNWreBaRyLdeVdKNJRYYSXdui/n0d9bw8L8/L8/IPsXCcXKer3/rYAf2sVY4AoMf3mePMJupdpzj/eMyP4tDxsBhzJrC648kHVk85zu+Va/pCIMNaoVY36jh9Wm0rJ+5xbEnX1Z5Up/l4ap9d2MPmVdYV11Hd6//XFnH8xHmgyJMC/InnE32s0ytLxTJBbcFvKDK3mywooLk1V7WfksWbAphrKecIRFeKuvIaraEpoc9wr8BxHqJsh6546pKBrLxJC44Guoh4Twtqj2gAUuYKigJSgzOuoYTUDBqaaFUattGSudOVzSJqX9aiqAojFNu6gqZVihK9MNiFzSMIpNKoE9w8xTcpJWiRtkNroIxtrTSe7oR6wtcOvvUpqjSjmzOoi71d3koVXuakjIwDaE1qo98K1OlqrDRIwaMCJTFUTIeSlFOyTGhTgBqlUdW8mmoVJ6Z14YGui9SGhDCASKXzXOS50JpSQouaUVpXTGmlNa3ZTw1V2MfRupJKVy+QgtJKqJLVpYRW1BqqNItF0wiUWoixc2xaAaj8rn8ZVl1kLIu3tWL6CFTxslBdZiSUxTTUWlXxQLVmBUm4GAFzMVpqmYWmqkZVu5pQPTEZigdITSi0hgLRpRqFEg1akBbH9Ti1foZUmHneAeXrUxuE18+rvyDVfNGqWgQ1pYQxkxmCG5oSEadSqtcuXNp9MFPBBMD22S4BGNGApEQTqgg0hY6yhGpimQSAZxm2ZaII7a5kkfmDAqQISVqMJCVaLbrNH4skPx+RBTxfbAdFxIVad1GicqKJhtLSdRqUs3anc3B0yCkDILWgQCpyz/d63Y7QyjCN4kKpNIDxcJjmucXYKc/3PT9NU6X0g0ePGqOxUMr3PAD90eijj37eH/Y5N84zFoYB5wbnzPF8rVURYM6yTL/VPHz/g0kQDKezTOScc9OyOq3Wcm+p1e0C8Dw3ioLl1ZV//h/+B3/+7/5meamX5bmUIkriwkfV4LTZaEyn08Fw9PDBDjR27t2zGEvSTNtuZSqiAMRRBEIc2wVRlBJAiTwTQhR5daIwOjg8OrV9qtvpHO7vddvNztLy9tZ2t9k6HIxc3wPACYmzLMmzPEmCeIpYWyY1DcYNcGIBmGUzQDPevHBmK0lmu/2xVno6PKKZYLZVDIBhmrbr5UJJkWRZygxTCMEMYzSZrp8+W0Qb6B8cPNnbYwbvLi39s//0n/23//V/w1vOpVde5JzatjkJZgDiJAqTsN1uSCnCNH5ycHDh7GkBrQkpqKVGQ+VKDccTrTQYcW0zihPOWbfTSZNkbXsLwLA/YIbBOeOcKw3TtpI0BdD0/SwXxVTptDtaE9fzN9fW7t29TblhcM4ZU1prpQqLbykEpTRNU8dx4mliWnY8mw77R65jiyQB8PjxI6316toGGNs/2DcMo9vtGablOE4wnRS8Rp4LLWWeZhJAHBNCwigKo9gwTdu2ivgd0zAkhAiRe64bB9PxdBKGYZzEKYiSoiB8PceyLWtrY52S9f5gwBh1HWs4HEklXNsu3l8pBCWk0WrlWVZk/DANI0mSXJYLxCwI1za2HNfLsyyTWuS5FJnt2DqTUuSGaQDwG40oDKIo+MkP3hlPRq++9trOo0eUko2t7e+/8/blCxcAXHrjze7m5s7dO3/yne+sb2x0PI8RwgjpLS0/3nkAQDgu51xJMRoObNuJozCOoygKueWCCCEzAEpKRaVjOyDEth3LsiovAqKUHI6GAEaTgWkaruv2Op1eb0lkyfWffwCtGw0vTRpFEmQh5fff/bvLFy60mk1o7fq+63mbGxtZlkVxDMB1HE5pGEWM0uHRUbvd6nU7Dx8+NHVuO24UBAA6vd54OlnpdcIwBMFyr3fr9r2l5RUF2ukueZ4HAIRvbmworTgv9BDpeDx+9eWXz54+E0azW3duAQjDmWFyqtSpzQ3Lsihjk/FYpInIszzPALiO3Vtb23vyJArDy1dfuHP7ztF4POz3e2sbSolmqwVgMh1Nxv1zp0+998EHK3zFdZ2G50VJmGYpIbrwfXYdO2fcc13bNGdR5JjmN7c3kzj5zr/5151u73d+9zcA3N95qAk1TD4ajpfPn1k/tU0Z8zzHbzQDFQFwLFuTvNFbf/HKlYPBwLYszhijUFp5jus6DgCTUSHyTrttcxrlqmBmNaVCqDCMChJWCh3HSafTGR/1bdtbaXRWzp25+Ytf9JZX8jzbXF8D0PA8rmXT81SacMOM4tizbYik126N+7sATJa1Gv7PPtpdXWqvrjU925FCTcKko6GlLBStju0QztIslVIqDUYZpSQIwjSODdN2XRdALkSaZWoy7XY7/dFQa9u2rTiOHMfpdbs/+dlPAFy8eHk2naZpCq0919FaCSkOjo5e/fxr9+9cL7Yiy7IMzhmlUimildY4fWqr5fmMm5NhH4Ac7BujfjMPbJpHYSSlTJnl+Q0ljZFkhukBsCw5CxJqs1euvNrwjScPHvPpwbe/9ppodX/603f3BhEAUBOW6bXt6zt7rVb7YG9iuwBIKtRsHBYTbzKLW55pNHzTJCajnmturniRyIJEnNtwAUymoUwjKBVOh3c+fn86CZI8u//okc/YctejMgfg+dziSlGVSmpw4hrS9yyARFnqGHy13QEQEcRJiSrK7Rf1ZyV9lBhnURCps2MXAl+lH5xTc/QpHTepTp/v/qS+b7mxFzhRl2FroEl9vxoOYB4Cv/o4LsWVIgUqz4riqwpN6+rfyqMPFb7FcckUFYYspEgNuhCBai4+atQeVJWAViZ3BMrMh2TxZhoahBKty2jpxaXz8FYnJLu5FWlJUB6naVFZsWolCSF6/kVdvRpWPS/Py/PyvPwjKxzz5f6pskBhHfsWACrrsWfomcqNrVp9f1lZ3PYWFnKC+eI910KdqMPT3q+6uh4oDY4WBPQ6HNlCEOcFu6dqNf/spVxXDr+Le1iVLuVZrunPuMXCs54+tHhKXZ667cKOQyqKcN6giqQqfhNakGWELpgpaRTmR3rxnp9d+7nzaQEJdN3eml6s8Q0We6b+Q5PF2VBGcSSoQuYVuzMFCIrQhrpQ0hYR2ZXWBJoySikxDCPLM62L7C1Ea1okFdWsjKmoF6hhXbo7FF2hQGg1diXZsdhvqOCYQkEGaFozX7WVX9VVdf4BpTWIpig0x085eNdXfCZbtsA84uRQayBXkoJSwhmllDBAK62E0rxiSIs66xLO6SKzCimndQHedGUcqUpKtYQ6ZN4orXXR3JrGgq6s0ghQRpxUUolcCCEpJZQyoRUhtM63WNiqlvMRpe0DBbRWqPTnGoqgiE5YRGpUpcM4WeheUpj0zReD48EHq85ZgKbP7Nhn9XgJkRdmIinp2gq8aujad7u4i1IKkJSy+XCefOgCTVlj+hLRllWtTF9JhZqLjEOkYMTLu5ZvUzkXK4pO1xQnrXMtPSM5TL2wFWEwajqy7L9qfkODEEbAmXAJN8z+QaK0IkR897s/mPzl27/+1WsATl3YpJCaCKlzqiBlrpWW
SrMsV0K6jgcgCqMwjojdQ8pyAqNaeWjd9QUOP6GBIoug+XgLcOKsWn4p6ejiO0qJ49iWbQmtm36jyDBjcK6U5o7V7nQUJVGWGpyuLvV29/aiNAXw+ZdeNk3z6OhQSFXEFuCMKaUc3we063oAVrSK4jgIQsu2HNcLRkPDEG6v57hOnmZFLgvDMCiwe3CwurImpIjTdBbMmr7b67Q91x31hwA8x3r04MF4NHrjm9/4/g9/vHtwYDWaMk1WlrpntrcASJkRooUQpzY3l1dWCMF4MiWEzIJQZXnxFiVx4jhunuWEIAkDDZrlwrRdrRQhtHBHzbNsFsx+9c1vMqZ/+PZf/+G3v2063uPdvSSMLNspiLksSQnR4EwpxS3imH6ShO9+8JFnmy9evApg9+AwyaadTv/ahSuuZQPEbzYo057vxpmQqrCgTLQmhBnhLBgcHrqNhHEDZNQfDnb7gw8//ADAcrvt+V6r0+aURINJlOTDKJ3GwjRtS+aEmgCUQLPZvnTOun7r9rlTp23TmsySLBPrq51i3PMsX19bm8yC0XR6EEdaSpMzEHI0GNiOvbSyDCCKIqUktLZsK04ENwzTME3TTLJMKW1bDoB+f+j7XjCdRnEMjXA6BjCdzUzhGCZvN5sAOu12nCRCCIPz5V7v0c6DdqPZabWH/X7hbmwaRhCGRwf73DKlUnmeT6ZjJXLbsoLZtLTV0loKwRhTQoDSXrd7eOsWZdQ0zTxLPd8t2sUYU1JIIWZBcP7SpTiVj3buRVHUa7XSJALQ8B1KiW1yrUS33TIt0zANQhCFQRhF5atM4Pme67rjJGGEmKYpi3paZlEZ33Om08l4PO0uLfvtLjUMy3ZMxxOTWRYmxeqRJLFlWY5lUeUzzhzX/erX3rx/7w6gNzc2LNsG0PSd6WjgOs6Xv/D6wdERhTI4FUm8s3OXcw6Act5ZWZ0Fs97yihQySVOttZAyS9NciiLcMONcK5mlSRiEa6ur7VbraHCUpImSsjCwBaCJzLIsTZKXf/ulSy++fO/TT4JglmdZlqWz6VTjCYBzZ8+sLi93Ot0kSQBdmGdKrblhIEmKYXVs27JtosjtT28kSZokydbmpmlQkaajNAYwGevd/f0zayuUUsfzOu12mgu/2ej2en6rFQchgGF/ePHqyytL78RRLHK5d3BgGMb2xmYYBpunTq9vbAC4efOTZrMZzWa9bpcyFgTBw0ePV5eXPn7/vS/8ylcB/OzHPxQiHwyHm2srBmdJlhmW9fjJ7oWrV23HKgDzeDI+PNx/6eq1nUc7gLp66XKv275x83q72TBNXiyDh/1B0/MGkzEnxPW8L3/xddt2kjwTSbK0vm57HoD9x48J0Go3sly22+0oCD3Pb3W7S2trvR4pl2KkjeXNTredSnnpwqU0iR892smFVCI3DFastBqEgRimaUBQmIPxWClNCLJcFFGAXa+plN7YPhMG0XA0ErP40dH+e++9f/nipaWlpaODfQBZMMuh4DrxdByHgRQyiMJ2u2kZBuEMQJoKKfOlpkWpNJjOosx17G6n5TU8nclGqwHANszBZKSUopQmSUwIMQxjMBhwxqF1EVSXUFrY23LO2622bdtHg0EURXsH+7/51m+99uprAEajYZbnaZaKPKcEWZ4JmXd77UG/X+RQYpQQQiijtuN0O22DwrFtzrnnN7745tcf3rsLANOJHcyalsqTuB+HbaUnMsuUpDnNiXPvyWMAMs2cVjsJw/tHh1c7Lxzo232V/c0HH77xxZcvbm5xYgJ4PA2iPElzZYWp3+ic/erZn/7s7TjNs1zNkrTYJ6Mk21hu+d2uyE9dPO2mWuR5ennr1OEwmwUpgB99+ossyrQgYRYsr66T1OCmfnB42LNbZEZHowGAi1srndVV4pha0jxP0yxpeh4jmoV5TEzbbQOYEHW0J7RStfvESdQOFDhTa63LSFikSrFdWzAQXYFKQkEoVaqy0lvAkHP/lv+PvTfrsSRLzsQ+O4uvd4s9MzJyq7Vrr+qVzWF3sznkkNRgMNKLHvRAAaMfMdKLAL3pRQ8cSC+CIOhBwACjmeEIHEEUqUGTvZNdvVbXnpmVmZF7bHf35Zxjpgf3e+NGVhU1AvQwBMKQiIy41/24+3H3Y2affWZGK2H4M/WeGstmEX1sfLSzsefGXD+1IU65Jkt7ZvmLLI2GxUWtkiSIVjyg9uBYOAkr4yxipiQL42m5f1t/ns+e3xLqbHcmnDGlCSCRJjlKLcZpjtxSOdpjP2VWyrLe1IpJuSxC1RiBjZ1Ei3JQZ+f/XM7lXM7l75ao//dNzuVczuVczuVczuVczuVczuVczuVczuVczuVczuVc/v8QAzC1OZWNnDYXa/9ciqzSIZdRpOWXDV+JlFJt8Kz5gk4HEYFStML4WRHhthCvauJqJFgW52NgtcweTpuiLQh67Zl+Drux6W/WNj47zacE2qTeNo8ocFjwclZYle2vLYtrUYFyMRlCTcrrCtWuPQNeSYhehqwWJYpxepglswoQEUX66Wk/+2eT9orVa12G55Y0zUWZTCGElsp3egYEUQrUNJ6QM/G0NpqoZHlGi3tNALEwLTITlvTDJbexqem8mjje/ENoiGIkK+yzhukVgjZWKSXMAcFrgtUwisRX1mjmEgApr0gbFQHinXPeRTpSWoswKWVIA2AJLjhlYvIRGWc8OePhSZH1AgBxMgizE29SYyqQ5hlRnFrjaseBhCQ0EycKIGIhFjSEl8ja4Hzp6qZ7qdakIUpZafvAMAs8iEBGaSd+MYuiSWvSgTmwJ8WkLTdly5rKZdRM6uL+hwARaAWt2ueSAwdmbZRWHELNKkvjOrjgOYZ1i5tDYDQpv6QEWkCGHSkSoHIOgNJagwCqK1aRZgTDVIikxmhCxQFAZOLAjoM06bZ1YAYUkZdAsABq9h2rlEkn0xlrqwCpK0ozV7m0F/lZBcAjKCdioEBMTQomSCkBQmBpKiRqWxcOi2dfUctADcwsRWQb5oJlZmMjHwII3nutyOi4crW11LxWiiwRXKhESCtaPHfaeZ8kMQBIqOpKIeIATcaQCeKJWKmmh5ECINyIB6CIhEQTBWYBaWOa6mbMIsSKELxjYWO1VhasRNoW5lpTQ7hr2I5LRqnQghu5YAQuYvACQJT2LMZoKT1pA4C1FoEoSxKopTNrhihSDG6XCCaBaFJKKebALFopSJP1TwCUUpAmfh8WLEpWYiDKaNLUUnorHXGobcTO8bvv7h+XE6PjgorLe2vjEAD8xb/76Df/3l6/ExN3YAs/PWE/kYqr8lHU0eM7RwA89Foec2R9UEaLpERRnwuqpa49AFglUEYqL0atrD9tUfcF2WFBWV+pNE+0YB8vFvtFUhgJYAz6g95kNo2i2MbRdDYGoAk11xv9zV63My9mHIQ9Z3Gap1kUWQAMBmTuaqujUIUkjrp5TFoPR6O1fj+JYgD5zsWmi0UnyyMbBahet9/N84RExDeNFMqiPJ4WhyejLEuzNLfWVFX5/ocfJHE8mc0fPHwAYPfixR/+5BeXr+z1uv1bD+5t1Rvx8Kis6u2NwVa
vA4CiaC1NOI6Uq4lkqzeI4/TR4eHazjaXlXUFmlWvLpRNOIQ4ioyNoXRde2WtIrVmFYCPhvd7m2vdbodd/cprb93Yv3fw+OG/+Dd/+u2v/c+bW4Nf/WQIoLdzCcSz8ZFkMaoKpClUCq6oee4mADr9bPJ4Pp+7W/t3h9PJeHy8f/fua6+/Oj4ZRdaWLgBQoJPDg7TTdexUZLM8i4wKUg2n826abm+uA+h2eqk1zD7tdB4+eTA6Prr57s36t47JUsJGUQCQRIolDHr5xtogTZIojgJzJ0+jiOZVACCkyLlQV7PZ1GoVp0knS7NOdzafHx4cDra3ARwdHU6n0wApypKUCSEIqfm8yvoDcs5LAMDESWxv37l5++6d+08OHty7RyFM57M0ONXrNF2JLl3aLeZF8L7hWx0eHRVV5SDKRvOqBrC2tm6jeD6bswtxHHnnNamT4YkwCzNzAOCc90EqDux9pMy4KKIkAUsQSbKsKYWhSYnw/v1HWZpARGmVp3FEknbSiMKF3W0Awr4q5nCV7WTMMp7PeeyNNQpoijz3er3ZZFoUXjBh6CiN5kVlrRUSxQqBAfTXNot5oWx05fr1/dt3uv3+17/yW8eHR5GiE2NrFgBprDc2NorpRGXpsy88f/fWjdHw6Jvf/p2vfetbwbtifALgg4/vvP+Ln3X7g63ti1bbNOtURaXSbHtjrTHHOklEJsqzzoO7d8bD4cbOxfHJaD6bx1HKCEIKQHCkrQ3kr12/wsy9Xl9pfePWzbqu4yhuLDFhUVY753713vsVcxZHWbcH5rv372lSWZwASHVy+dL1F19/48YHv/7Rj36wvbEeG2OAsqr6eQpAIwNBKzWpXBbZ0WRExnBghj2ejnudDoCLG900Ta+88OInn9zeu3jhzr3HIfDQhUSb3NpHRwcAdJZfv7j5hZdf/O53/rLTH2j4rY31rJseHB70B/m77/0awCe37+xsb167em334sXau7oqO3mqSb77ve9de/YZABJ4Z71z91D3B/1I65OD404/e+7ZK/0s66Tpz3/6SwDD4Xirlz7zymufPNknCYNedvv2zfF4vLWxVsxml/d2AXzw8cexNXVZnVR1t6p//e57+/cf/N7v/W53fa3f66ngAQyLemvQJyZDdZJAoowEBLiHOQAAIABJREFUJCrMJ7NaAHTS3DkvxxNHWiSMhkNj1Hgymk7H3TyfFyWAEqhr1x/052WVRmlva/OqVQp1P9bOIY0tAHBdBZ4XU7YXRyfjay/vjqbTQa9fO8ciDVPbRFHw1Wh07BWev7JtbDwP0s+y4Scf7R/eAVAWJ7Ghq9f2qjkZ2/MyzPLu3uZWJ+vMpSrrEsDcz4MPmkxZVEYZZo7iuL+xPp3O4jhdX1sHcHB4GEVxp9PRxlqR2XQqnrVQWVS3bt28dv0ZAKPJaGd7d3NzezIdXd698OjgEYlsrK2NhkPPVXMLlFIh1CHUJ8OTqqpY60E3I6mJXWozAGVQYNLaOk9hLpLYJLFWR/0sChPnJx5AoTzmLpD9k//77V/8+p03n3+xtv1bt/eJf/7Gy8/vbW8D+MlHtzLbUd3e/emDf/5n3/tP/6M/SCkqhGrvrMQAXFGMJtM5b1O/X+//ajgaJp3BcDoaP5n0u+v9fB3ApY2NSXbSiXPnZXeweXHnUpwkybu/PhwfuJLqegrg5ESmG+vr8Q65clYcx2nsOZDS3TyLspTjNQDz/ftVrQMCdNSkHRFoUSZy4QcRU0vNI2qyngXUcBgXClpBtZ0jAWFuiIdEoLY/ddtiUpEKHGhR8lDarKQFq29R37rNu6JlC5hV32fBDTx1vYCnU9QaK0svd3g630jQkBAbP6khGHIIpIwATbHmRVa0ai5YmAkg6KV/1JaSFCiFxfWQsLRc0eY4ikHUlK1X0C33U/wi22dxfY3VT1AgcOsMMLOIXzTkUYqUCCutSSkAIQRuyZmC9hJEWuXQlOBsLqH54pwgeS7nci5/98SczdP9fMb331Ke7ekNP5W1euZrtD7m526yOPwy6+9T359qo08PcvbzRbvilSv6VKLjAtSDUkvMcfX7JT3+bxM6A4UuAYjPkqUvjjP46Sm2uPzgbB79asHHM6KWtV3w1M0TWcDGK4hwM3ab4Itlwc2VfjctsCxnJ6iFFNUiiXI12/s0AfNTMHOThn16UivQqTYCcgImYm2gSUhCCF4pxdR2+WBG1EBqInUISltRmqGaGthK6WYCPIuwKBN8HWoPI8JG6TC/euVZABRFN997bFVMZJWO2NZeQl0Eo0hr01ZbbIHCRRFKavPwBaSUacwdEQSIcG20dszWGAN4DlprL64pgl770EBRdQiKKIoUIKQ1WIUQAlqY/bSnDRlljCEKQGgL5UFASpMmZg6RsXEUz8vSGIqMEc8IvplBJmnKN4qAOXBgARkibTQ1bUlIPNgoZRMlFCHUWsMASpNWoFoAzKsqtToIh8BESiulm5QaZq0JgA80r+o0iVh4e2dnNBzVVZUZYWhfzSUIABMpiDCBBezIRpFnD6jauSRJ2tzGAK1IREJghgijyftWRKSyBeBr4AMW5TKtBiQQiVGquSISIS0CmLbOIGutwdCkANPABIJgdGaMUcTtTSMhiGfWyrQVR1mYw6Jeg4iIF1ZaAfDBk+L2ZNpOKWyNjpRiYR88C5oHz+iIhYnahjSLPB4hWs05On1/Tl/tZeEhTWjKD7XW+uletLIjAGgmgIhBQsS6Qc6FmVczr1qHYREICIGZQM4FMgBQOycgkMY0fO03vsTrF7u7F7PuRni4/+HPf/7m1RcAvP32e38+fvQH/+QPU1/qsj44nr35/OX/9n/9b46H83yzv95bB1A/ejJ5cJPv3acA28uroB+PJifi+zaK2/ZowkK1IvuZ2UPLxU/OfHI6YZ+xkxDAoDqE8fCgk3UubG0VsymAyWS0tXtx98LF4+FxYFZEzjut9W999avD8RhAHEUsUjt3YWOnrj0z7+/fT5OkqCoOQesmnbDSWonIcHhyfHKcdzre+8louNbNdRRVZQWgv75+cHwSONy4dbOT52uDfhxZpVS30wneec8Adi9fffnFF3YvX3Z1SJMkSdNQVcxsjG6u9dbt28/vXepEa3mnO53PqqqaT6fWGqW184GUBpClae2dViqOk9jGPgRNShsDEe+dRCmANMv7vb4P/u2f/Ohrv/H1v/7r72/0uv/jP/vj11599cN/fX8+nzfPWWTMO+9/9NFHt0W5Zy9fHU1nz169trW+3u+tAVAUv3D9xcl4yMICzvOs1++LiLJaKWVjAwDO9XpdJqWUzjp5CN4z9TbWtVaBg28zx10WWaU0h3D31ifPXb+Wd/L++locxdV80sxwUZYcfEVq0OsZrefzoiirwSBO0oRUBaAsax9YhIlUEiXOOefYGDMry/Fk/PE7vwIwLwpmKecz6mhxAWRCCMxNT6y2+zwzQ+RkNLr/8NHVS7v9jc17d+9OZjOjVVlWG+vrAIiQpCkzl0XhvU/iGEQf37hxYWdnc2MTgAgH5izPty9cPD485BDyXnd4fKyElda+rgAE76
qyoDix1jYeLxFprYnIeSeSALDWeldPp+MksgIJwRsRRaS0IqVmRQGgrgprTZokdVkB6OZ5VRSz2SzN0qZW2dHhkdG6qdZqjCGiSEfe+8hagmpeotFwODwZJVn+8luD3mDovffF1HtHStnITodDANZGSZK6snjy5PHe1au7V65sba0rpX75Nz8+efywaZsjSR6CPz482Nq+KAylNSko4v7aWlN2M8nyn/3oh+s7lzq9Xt7pZZ0u0u54NM7zrhBzEAB1zT4EiIwnk5OT4Xg80VpxaBZnNto2q5MxBpCT0Sg4P66rZ9/64nwymhTzqiiTJAWQ9Pofvv/eD3/0/VlR9Pu9b3zzW3Ga/eT73y3n80orAGmavv7FLx4dHEz37xkTEak8y15+/Q0up4z7z734IgATZut7uYniJw8fPnf9+o2PPzZR9PG7740nk1hRdzAAkA/WSanLu5dGo3GcZlVZvvbyK9ZG/V7vvfff271wEcD6YJBEdlbVf/WDH1y7sre+1q+KQtLkH//H//j2rRsAimJeO1eVZZqlSqtOp/O1b/7WF9764sGDA23U1voagP/ij/7ohZe+MNi59Nyzz+1d3PZV9fDRozdfffX+wwd3Hzx69aUXASRR3O91XZptKFW4ejyeGKVmk1Gk9WQ80lEMgFmiOIHAWpv3eo8PxmVViTSQkAfgK1f7Wse+Mb+quipLZuYkSa0xjZlZ1XW/321D3CJVXRPp/tqgaRvV3O5yPqvr2jvPIVy+fPni7t5f/ct/4V1wITz//HOhLgDYKNpY6x09eWiTNO8P2IuqXZRkxtrRdA5gVpSbO2vzk/rVa7szij+8fxgCj8Zjr01Zukb1ex8aU7QpR02kyrJM4mStP9ja3EmTDICx9vDw0Lm608mNMQQqioKZB2trURRZYwFkWeff/egvXnvlla99+atPDh5+7ctf/d4Pvve9H/7wG1//zTg1ALxzDdZTlRWHUNeOMfdrvbKsbJI2utwzk3dVTVFkI4uaWINr50qllKDfHwB4cPtBEU9CCCRyUNQzuzYP48B8dDQ+enQP+Q4AA5pW4oOiKhnWJ//d//TP//Are69oRUoVzgHQimoXHh6cPL87DZ6yJLqwc/GTo8NL2xeJaDIbAdjqp9e3t6CconhjsNHp9KezIxfq+ZzITpUKAELwTTBUG5tGnQubF4aTsdGKCAWnEzEAnPci2hjjz6Yq4ymz/TQ2uOqFLJWzoHFTFmVoGCxt+LktAd7WkQYp4mXxoGbVpacO8+8lZyyFJXL6maN8mkkjWDW8FrDoYoTTYWTZ9/H06hep0G0BnJVE87aCjCxaZp6e25KI0ezfZFmv9qxc+GnN4Zdm4dlGp9Q0jmvWf1p+e0oJWgxDtPxAtc7cv6+rfi7nci7n8h+OmKYGhYA/69uWxfjU+iZYglztnytfPeWK/39ROs3mLRwjZxTQZ53bYvQzqrI93CIYRXha4S43WTR1FlromtXKJIsf8tTQS3JjOwit1HGjRWnJM6G8py7v9L/TgFl74UuEcHG0RY3CU4252PnMpH7+Hwv19Rn+/YIa2bK5GgNwVYOvdJg4/Z/oaXS33ZKW57jccrEjrZ4xAadXSUqYPRCsVpFWWkQCiwSIEnCD7rGYxEYiQYRqj1SpwCwSmAOEoQwAIvG+NsoEFbS2No6LcmqiyNdl1skAbO1e+9XPfrIecekUB2+MJw5pmpbVVJFtoCUXWJEiKAJpQiAtgsoheGEWq4GWHUmBjQ+sQD6w0Uor7UPgwMoaAFor571WlEQxM5dVKfDGQBEpIt0AVxQamDwED4IoJQ2oLNK2v1ZNWRoKgcmSNbqeu4bsKAStAMCxCEhBMYsiKKKmOKNnCYuuDopIQEEkMFsCC1xVB61FQFY398Qoql2QRWQ2sDRWEkE15clI6dqFNBX2vt/ruxD8fEqiFcl8XlgYAJpBpDTEOa+taUp3C4SF6+AWBF1SLTVaWLhhFJIiUjqK7OIBIgF88BxC09gkIu0DE7Wo+yImL1ZbFs1gG0XeeQQo0o0HpawlYhdKQIzRIsKetdJEioV1E5dXREJBwqI/NTVNlEQgYEUKgFLN086JNU03iab/T1P7DI0PACFqaEO0KPQjOK0h9NRrufI/ASJRU45NN+9fY9sux0G7kLQ7LVtQEQs3LMKmf9AiarAarWi6WDWEcBXYN5F0H4SZlKhpUT/z4u4LX3oOqSbbu/NL98qFr39wYwrg//yrn/yjP/wyf3Sbnt3xaxvjX9+11YPNjXTveipptn/vMYCDWx9fy6tHT0rV3dzN+2k8IBCsqeoQmradilKbBLVsqfQZa5C0ZaFWYx6LO7O6TUs6FQDKRiBltK7KwhjV0Go2trZeeeX1g4cPmEMcRaPxUISt0QACBwCHx8dr/cG1K1cGeb+uvSJi4VdffePmzY+9d80zY6OoKgvvXCfPrTFr2zvMAXXBAg5cVRWA9e3syZMPCHj1pVfm82m30/HeXb60d+fO7eD9zs4OAEDfunPv9r0Hv/873y7Lejia9LNsOh0ScOHSLoDDyXjvyhUxWrxL8yzrdo8n470rV+bz8tF0v6prAJ1eN7WmrL2EEGpnbGwsVdOinM9skroQAByfHJs4Lsvy8PAwttEf/ef/ZGdn6zvf+c7rX/v6v/q3f7q1udXMH3OIYmtAKlZVXc1m88ksv7hlTKQA9LppJ0/Yp+PJdFaUxugkTU+OjjfX15VAq5a6q0TmRQmItZaddxxms7n3npibR995JwKlVL/ff+XNL777wYdJ3nHel3U96PaGozGAX4wnwr7X7Txz5XLt6rqu284hIRgTA4gjUkpFUcyBh5NxVVVJw88SvPrGG80D8L2//O5bX3pLWKjRFW3TqcbLo2WSwXQ2e+ONN4+PjyeTydbOhVlRRNY2XbyaFXg8HNkoIpCrawHyPE+yfHN94/KlPV6S1pmDiDbGu5oBY4zWKo0zG9lyPgfgnYuSpHCOa5f3+nEcaW20Ulpp71uYO4njqauNNnGcgFDXVRTFa4O1opzlnfzo8ADAk8ODKDLD0ejZK5fqqtZGE+j4+IQUdfIUQBLFgYMLIUmSEHzrOQeOo8iYSFsBMJvOszyzcXz85PHRo0cbOzsMsdZYrSKj237ikBAcgE6nc/j4UV3Ou73uj7/3XaNpbW3QLB9VODgZjnqdXl1U4/GkKioiRLFxRfno/j0AeXfSGaxtbG/bKLZRRKR0bq9de8ZEkVLU0OWOD4cnx0frG5tG6bIsQvBaa4Ekcby8BazUdDrrdDtvvPbGxubWydGT8fBkNh7FcZzGyWw6B1DXM1izt3f58eHB9WeuH4/HOXOcZTg5yZIUgIQQG7ve77/9s1/lV/YgCN7nea5jGh2nvSwBUMzKuqyGozGzbO5s/+znP7v8zLPf//O/6PU2+ptbTSsk9q4q53uXLqVJFBv90ptvbG9tVVWRxrHR5uYntwB0O/nWtWtXnt22VldlOZuXWisbRWmWfvzhAYAXv/Di+vaW/+X7MVEQGWyu52n8+MlBp9d94ytffeGl1wFMJvNI07Ry6/1uksTJ2trg/
j1Santr01ozns0BrA0GWZIMfRFl2fiocCFUdT0fTfZeelEBjVJ2zps4Zg4AKRuNR6PJZEJKRXEScQ0gOGYWpbQ1xlqbZXkxm8yKMk1TIuGqSVKh2nljlNbWe3d8XIxn43/4n/yjNE9IU55lALzzaZpqpbzzIYT7+3dHo9F6fz0wBx8ePXkCII6TYl5knV7NoXJBfGBfh7oE87N7uwAODurj8cwFt3/0OO9tbK91kkcnt+/t94uq3+1BxQCoCfWxWGubNmIAjk+OLu1eOjh8MpnOAPR7faXV/v5+3slD4NlsJowkSYno9p3bNooB7FzYTeJoe3P7R3/9wycHT7701hvf+q1v/m//+l8enhx9Yes5AFESG22IyFizs7N9fHw8K+vpvBCR2oemRZJnibWpXRV8lVgq2ac1e6vmBTTi33zzCwBG8xEgymajw+nFS9dgosMnj7SJKY5e/MLXlQQAP/nVOw/uHXcGzPCTMe3kQRnFhMhGkQEABmmtE2PhQ97JKqnG40Jzvt2/cDg6ajJCLnV2n5w8Gc2nmvRsPhoMOpPp2Gi91e8N62kSaQBWG60N16VzLIxO0p3NZ4FBJAqqdh5AUdbem6CWlcNPFfISCXvK4aEFeXHxGRZ24hKVXHRGlMaFOt1UREiplcyHT8kSh5NPeRrA2ejs0nPAKkUDZ+2KszwYWjbOYeHG0gTaTp7tt6dg4gqhorGo6PTv5iSx7FVwWuP7lLMBQISXntvizBrU9iyXk9qjycLlWs76SuC65WxCwqKCZ+OxCdpMvRXbcuG3nQVYz+VczuVc/i6JeSowBpxB0k7RrLP+9BKcE3p6/ZOzWuSp8Z920J+KeTVDnqqmFbf8rDwNRn76Ik43PIOMrWwrC1WwoHMu1MZpiOkzVvbPWexbGHRF7zw1DyvI42nk8alRlx8Qzh7oM6bt9FBP9xtq1dPiys9OHi3g5EXC/BmbZOXsTz9c+HgLoPlU+z/VAofO7tseXOEUxV0m8be5+EEkaBKrYGjRaFuRUkZQh3ZL1OwD1wCUUqXn1MYCCQFa2ea+heAjExGj9kFb7YiNUbYokiT51Q9/BuALX/bQgGbypESUIiiqXVAqCkINUhMEkKZKN1ujBYohPnAIAiiGBiBQAiiL4AMp5Tkww2jlhbU2QhpA1bCkQE1+HBnjPNOis8wiT00p1ZhqygqJVrUPmrRS1IRDhZRjtiRaKc/BBR8ZLexLDrE2aOrQB9bSQpaioDRJZOAFEB+kcYbBMFpzYM1USxFHdl6FOOsapauqaOEGbjwTrZVqWgIxC5FS0LWvACRZQsoE9sHXpQ9xmjEYWnnvtTFaWwAgFhFjdFXXSliCREo55yLSXPsFssxBtDQ3UgikSGmQElLC1IAspIiglIjWavGOi/deK720XQMkSIhVpGC8BGYEFvGiW6MYJCQsHISIRNpS60pZYVbqlJcKIqMMKSUizKy1CoFFoLRpZ08ELEYpAUOUQEIIpAh68Vg35h8rELf1LmhprT61Kp5SA2T5FpHEkQFgjBYwQbevRvv6numtJQJmUdSk81ObPUWaiJcugCxN9vYNbDLiSSvdBJyM1oHBTLEod3SEclrMxvlg7ULHoHMBcAAGa/lPfnbryjPre2p++aWXn3/5uem775ijfcZVV02//50fA9iI8cW3Xk8K/8tf3BkdvvfiK+njB4cgHRarXFHW1pGOVmvBn10eF9Zzm6S1KAsBtK7AgoWxuqRIlqZWmzzL8jT1wObWFoDnrz/jqqJ2pTA7V0XWhuCLsiyqMo4sgCRJJ9OZKBoNJ77ys3kBwk9/9vbm+npZuulkDKCqK2vt1tZWrz+YT8ejk2Pv3VqeTSeTKMt8YACz6fT+/XtEtLO9/YMf30iTeH1t7cLW1v7+flGWx8cnAP6Hf/bH3/nhD421l3d2XnzuuX6/l1r797765U6eHx0eAbj5ye0rFy+sbW0+efhIa6UFH9+4+eGtTwZra8F5MgZA8N5oDUjwHMQJs9KmKuZVWdg4rpkBTKbTsq4//OD94Wh0+OjBy2+8BmYXWBQppbMsbWaYmZlJWxPb6MLm5lfeeGMym2ijJ9Nx89oenRw6x7OyDIE1IUoTN5kEDibJbRQBQBwXoyERlNYg2MhwJVop5x0p3aAGPviGPwUiCE2nMyewWsVRlMamrD2APE+9Q1FWRmsfwlqvW9ZuNpvVda1gAOR53ut0NOmyqoqyiK2+++DB0XSWpvHx0aGyDW80mU+naZ6LCIfAvFgkiIha11eEBDSdTMaT6WQ6vX3jxsbGBjOHELSippl1VRVFUUTW5p1ulCTdXh9E83Le39hoHtLjgwNmzvI8eG+jqCwrrY0xNk5TImpmxsZxMZ/XVQVmCLwPVVWxiNZ6+fqXZVlWVZQkKrLgEMVxlCSkaDweh+CaXLxLuxeT2F67dg1VcX80Wev3rj1z3VVlFCdVWQAYj8ZFXUFEaX18fNzvD9IkJkUQFEXZcs+JrLWzyeTOzRtFUV2MY1dXztXBe+HQPA+K4FwdxVEnz11d3X/w4Lq93BsMYqN2r1xpXoSLa4M9xzc/vJF2Ovr4SCCz6Xg2ycfHo+HJEECSdfJur6FSMgetjWhSioJzoilNMwBpWh55p4AGcY7jmAMrpZRStavboJHCfDYjpfIsh0hV1R9/+EGWJv2NTVfX6xsbAB48fLjW77/zzi9v3bnz3R/9qBqPut3O733zG3uXLzfE0vGk+LM//7NLu5fyrNMfbBBRHEWhLge9PLa630kAZN2LNz74YD6fr29txkna7a9de+YZay0RCfNkeAyg0+mdPHmYaf2Nr31NafP8C8/PZrM8S4piLuCDg0MAa/3eYH0tjaNXX3nlL//yLztZur42SJKorqs8SwGsrw/S9S3v3Ppav3kYNeHJo0dbWxfcbCiVByDVLNtYL+pSKVSzsSvrKLIfffxxnqcb6xsPHj0G0O92j4ej40lxJc+3d3am00ldVRtbm3EURUbPxhMAs/lcKaVIe++1NtRmSDDphnsKYY+gtLFaG2OjwGE6n58MhxtrgySycRwDiKwpytIYVValIsMkj54cKK2TPAu+qXOD4EMTE6vr+uDg4PH9u/1+fzafdXu9TrfTxO1IKYDrytXeR3ksEkDkXTUaDeuqajYQoTyNXTWbFvH2ei/PkqlzsTFL4Md7D5FOJ0/iOIqsUrqq60u7l0IITw4Op5MZgNlstr21HUXRZDKuSleWZYOzK60rVz8+eAwgTtKd7e1HTx6eDIdRFP34b36Sp9mX3nzz4Pgo7+QAur2etVqYtdGbW5tFUc7mBYvUIdzbv3d8fAygowiaWMRq3R/049pZoaBiozSptJfFAH77zRc+fvBIbHahN3jmytXb9x9U00pn/UfT4v/64V/9g2/+LoAvfvmLg/7dmTOVm83V0XNfeHa9awUSGau0AzArK2ui9V5HBJNxXdUA151ONp6Nj05GF3c2AMxm5aCjtwY7WsXHx0fD0bQsfBSZfseEeceyAtBLunmWT2cTpRVU7UNN8D4wi7CKFqk/pLUJbfuUM7L0p3DGUTrVvnQG
oVua+E2KNkIggAmnuVMNuWKZBt4q+hXuYmvaPNWNdFWIVnyo1jH7DGhz6bx96pro1C5qGtM0WSgtvtrGdpdXspyHMwDnosjXwidsk8kEsmyVI7QyXafYYOtktSf+WW1XAVJYONC0TAxf4KRN0bMWjFxYlk85iu2pAUJEi9ygczmXczmXv3tiBIxTTfM0EPlpLGvxdcN0OotFtiDasrDiyk6fFyFb6AOi073OdDQ+Cyye7tgqgrOaaBUzPSUqnqrXxWm11ya0QA1anbBIXP4MpPBT2opOh1iBGRdK9iz8uJiy1Yt4CmUVfNaEn72uZsgF3X+5A52ZgEV/aWBF1585mebsT2/csprJ4hwWGQinGO2yKflTc7MceyXtY7lRgwyh5V6eDkFLKi57rSTSZJWAHQdPCEaLD6GpMQogs7EPzvvgvEt0GlnFHAARCbWrjTIArFbMwStJJZaq9oTU2Bu3H/5n//Sf9l0A8L/893986fKFIEFpMZA6CEVaG21gZpVrigl6IWbDnkm0pihwTUorRdR0AQUBCMyB2TAUELxXDRrv2QJgdl4AGFJGG60MC4LAkLZR4t08BIZSTYdT7z0RrDGaiFm0tgQmCII413K1FIQDK00h8LQojVY+SAhBk2qK6xltECAsSikGO++1VUF8kwHX0DmrqlakwWxJF9o5kaD48t4lV7qb+7djZQGQUQ3LyHtP1ABt1FiyVetLxGRM5TwHFpEkjiNt3GzuA0fWoO2ozYGbBH/NKigTaRsHZSCQugIHAOw9G9O+NI2ophaiggQfAgANbRSBpelg3m6pQIqaNEAipRQJVGDRKhCJdxUEWpEi2Cannrhpxg6QBDBzU7yUxYmolh1JBCFmIWl6FOo2bC6iSC3zuduEpGYbooZfqYQX7wzUaSBikfhNLYFz+RI+9TYLRIGUIoakcQTAaIUm/+f0xV68QIsXikiBoKjpWg4CsQiJKN2+5bSInzfOgwgFdkoZCCvTvk3WWhYAGgha63IsGt1iHhXlrL/b0+kRgJ2t/rMvPffF33xL9GT+5Pjdn32ymcaDpBee3Jc4fn43AvDGGy9UgZRdc/P7P37n/fGBuvfgKElzKF1UJQATG8/CiuiU4b30LtoZWRCtSRY/TtfRxSKj2lpM7aycHB13Ot0kTT64dePFl15+6823APz07b8BYTqbvvbSy/cfPYisuXr9Sr/XVUTTsgTgQxCh0XgUm6R2zlqTpdmjxw/zLPPeH5+cAMiydGfngtZahLMsnx0dGmN6a+vGqKYqBIDpZHp4dKy1Ojh4MpvPImOC9+Px6PD4+OreXjfvAMD29n/9X/5XSZZp9u9++KEyJrDsXdrt9Pp3PrkFYGt9bfvSpcNHD30IFy7tKqJvffvbN2/cGM/nDw8O0BBvFdVVFae9yJq6quuyqsuCCEriygt6AAAgAElEQVSpuq5lMW9xHJ88eVLWNYegiA4ePri3vz8eHiml6toBGGx0SgrW6jiOpvMJkc6SzBjtA8/LOQAWRDa2itHDZD6bKqWMEYgIa62oAeWhtLWJscrzeDQypEhRt9exxmpFTclXFmRZ2ul0hPHk4cM79+4leR6YrTa+dlXtAQT2ipBEkXNuVhSDQX+QZYdHx6PJ1BAB+NX7791/+Pi5K1e3Nja9c9P5bH3QE01bG2vPv/KytgbAN37nt48Pj44PjvJez3uvg28Ditz8aD3cJE7e/vnPi7KyNnr84EFDKw4ss/n88PgYQFFWRVk+98wz1y7s1LX3zu3fvRMCe+/MokRD7VzPWOZgo3g8Gs2n0xBC7Rwxa2MADDY2jw7ec8xa6bqunXPeuQaUdC4UZQnAeZ/EcXCumIwn46EIr21szWdTY8x0Nut2OgBC8O+8+8GlSxf2b98hpb7xzW8oRb3B1ccPHjSw0Qsvv7J//96N99/v5J1ut6eNcT5Ya+fzmXchyzIA2hgOnHU6zOy9j6Im0BKMVuJdYluMKngfvHOuTpJkY32tLCuj1drm1mQ8crUD8GB/fzScfPjRxxd39/bv7z/30vOkTQg8WN8wNgaQ5HmWd7NOZ3QyNNZqY5S2wTllLIfQNLsnwGjjnXPOFfO51lorbbSJ40hr3Sz1Rqlut2ej6Natm6+99mqSxHsXd72rprNJUdYPxg8b8yG5cOH1N96MoiiK7KDXmU6nZO3jw8O4QYSJkqwzfPzkk/2H/W4eR7EI3v35z7761bfW1tcPnzwBIDbS2igbXbx0eXx0TEQb2xe/9NXfuPnRzc2dbW0UgM2NDa5cf239d/7B74v3OrLDo8N79/f7nfza3l5dlQCGo+Hbb7/d73WNtZ08HfR6s2LeH/Q2t7dmkxEAV5e+mAUfOp18XsyKYmqNTrKctK7rUmoPIIstSxARY20caQ/q9wdbW1siAZAkTgAkaXJ4dBxCOBqOjFHz2ezR4yc++CiyVutmOfQ+hMDKmCZBO81ygKrKibQ1RoIKTSFyAZRSxhhSutftlVUNkSSOADRtrAFU3neTOEqiLE1ds8IQGkJinKZ1XWtjOARjjFbZhjLdve7axnqT1gBg/5ObnSQ6OTqqOfQ6PU2qrItpZMfDkQkMoJfnO/2+d3L74W3niQgKlKVJURaEtr65EkxnM+ec0Vop1ZTNrWs/Ho9lkR9dVuV0Nut08rKq57ORVppIE0Er4ymURQmgk3e6nR6AyMZJEqdpOi8KFlzY3pmMxwvNIyJstA4+hOCIqL+xOTw6+OmPfmhVBOC1565FzqKai/C8oMQYR0R1nXY7h4H/5Lt/DeDqTu8Pf/O1o1n07jvvPH58R0Ks4iyYOFTF7ftPfvA33wfw0htfWct7N27cStK16mKWmKryGiIQVdQOQOV8EHKe4erdixd0HD0+cQ9ObjBGaa5G00cA5mUpjCyhqzvrEF04P5qf6JI7HZPZNM4NgG7S0aQ4OBvHsdJaY1ZNQyBmBG1rnwBN4gstvKYVJXzqQ9DC71t6MQu/YJlKjDYbagXtk0UeXctoINWyPM46CwQSWvonq3l0tAIMrspqovgZaoUsfbnTIO+ndl5uTGc+XKY3y3LI046u7cWgpT0uR1/kcTTIoCw2XOSyE5ZB9uVBqJ2ntp74cvaWV9TYPUv66cJba3JBVAt6Lr8EZNW5kvZeLVQeLyf28wHeczmXczmX/2DFnA3pAGcBxkZWYkUAzhR+lJWN6FOfnf65+PG3BMNWNl+GgFY3/pwdP4VFntnzDIHvKb21omWF26wD+pxroc+al9VN5PSPpVpZOXc5syUJhM4kei83JLSdaWipFU9Pf6kcV+j/QurMidEidXolaHgaeTtz0qszRrKad0DtQM3BWoOFzk5CC/U2H6gFdLkcu02GgCJZNNtohudFVUZoRUaT1VANmY0DKQaRoAYWHS0ouFC5UDdNbFiahh4AUIfQJDcTKRZxbnoyLC5e3jTE86oILIk1v/3KCwD+1FobZWUx0UqVdR0Uhdpvrve9M66YNHUhA4kHfAgKIl6MMlpr9o2fGxzXAEgJCSuT1K6GiFaURlFkdFXXzjlRhKamIYcggaCC92WotUYWWa9
CnqadNAEwr6pZWXBgES6rwgQLpYJHcKGhugQEbXTwICgi5YJnVsF7RXA+tHUhoRnCiozR8Bw8q8baBAsF1yCA5HyT18ggYUYwCEeHh/NZFWtlRQGoaweCVkqBGugnSFO0W2tjAVRlaZOEjFFKJ0YHYQ5MRltjFr0WoBCgSaSKIhWJ48AeQZuo9METN3BkFEVOVGNTNQabIWgFpWBEeSIA1hpNqqoKAEHEJBEEBqRJsW6IhBbEDPZemIPWpAGttRehFuyF1uSCD8xN5pcmBObgnWqBzdMXCu0+xBAXgm4ARsGim0oDhzYJw0xERKIaG5tD85ATqQXQJkrpRYUD+bRRePrqEABRBEbrFhqjQkPqalvgLMC7hU0MoKm8BhJhjiObZWkIXNVV7fyKjSpNPKIhArjaQbMIEmWbGvOaSLjt1xNm8yjqITDNpz/+xfvf3PyqBwPYefb5/LVX4iSTYqi6sSJ767i+MLY66xTDoze+sAtA5aYeF3G/v7Pb/f6fPd774pd3X3juZPKOh/jaN7egYianrTaLa14xyBcLyBLV/Sxnol1fVon5KmA0HJL0kijKsrSh+E3GI2vsfD7f3dn5h7//B4HDjY8/3H9478Hjh2+8/AqAbqd3dHJy+/7dTpL3u2vGmLIqrbVFUXTy/Hh4AqA/6OV5PhmP8jybjEdlWXY6neYsmdslWZSalWXw/sMbH1dVlaWJ0ebJ4SERPffMs00K/7e+/fdv3rqZdTrXnn3h8dHRpchCayJ1786d4WQC4Iuvvnrr5q3d7c2Hjx9fuX797ie3RuNRnCZdpfYu7V7Y2wOggneTsSglAqVIG90ACtbapjQhAKtNwzyKrN1/cH8yHt147917d++SuOBd89AM+j0XW1LKUCAT7z+69/GdW0kSPX/t2Y2Nhncz855ns1kcW2aJI6sISZqUReHKSmc5gCxOtNHE8KRIUfBei9R1ba1FCEVZAEiTLIqSKIqyLP+Tf/O/X9u71F3f8HWttSEJSRwDALGGGKPrqhKWcl4o66q6Vgu9+/qLLzyzd7mb9+7cu3dczCNrsjRKO508y/ob666uABSzaZKl3UE/eI7iOIriyNiGosUsy74Eo/FkZ2sry+YifPna9Xd++fPIGIFM5/MmHT62o6Kqr16+XM5no+GwOYGLF3ZODg8aJJeUytJ0PB5WVWltPJ5OjDEs7FwdnGs4oXmna6ypq1oRJWkaRTEzQgje+9pVDx48ADCeTCJrqrqKKi0iVVV7V8dxxGDMQ2wtANbY272wPlgbvJKPxqMH9+4rRQ/vP/CurSQwmU6KsuwN+lGSHD98lGWZVurg4DCyttvtVlUNIG7LVsLVdRTZYj4fnhzNZxVpC2mRqTRJ8m53Mh4O1teZw7MXXwb70fHReDy21vT6AwAYj5KdbDqZjYaHV6/uRXHkndM2Kebz2WwKoHbu8PHjvNPburhbV5W1kQizcGwtI/CiDZUQnHdaqSRNrLFlWSii9cEFIrpz9w4A0TpN09F4vH//3tUrlzfW1r/27d+bnhy+8/aP57MnWZoCiKO4LMr1jfW1fn9nZ/vLb76+sb1z99at9957dzqbAcizjInSJHm9vxlHmkWKsrz+hZfXt3eSnhNfATgeDTe3tw+Go6zT5RBMnEpVDU+Oszh6+a0vHex/AoCrsopcGlsXXDfPOfg4sp00nU4ns/m8eYB73Xyt3x+Ox3mWRoP+w4cPh+PJ5vrgo1/9sigKANeuXnLstTZgmU0nRKjns+HJMWkbJ2k5PwFAyrhqTtZoY5hDWdU3bt6w1gjTZDpt4n+9bndtMOj4kOedyXTSyfML21tZlgtgo6jB5Vmkrqrgg7QdHxULK6VMFAk3/f2ISTeFaJoC2f3BYO/S5UePHgQOVfOQS4gjG0LoZDlEsUBr02SExHE6m80bhcssSZJEcdzpdC9urE/LspzNBGDvGmR5eHjosmRelUrr6WxmoSpfJsgA5HEMQLielG690/GhqoVczSejcRr1IpHheKzLCkCkTVlVzIxWSYkPwTkXR3EIMp8XADqdrla6rl2apHGcuLoGyNoIgNK60SJ52hHBZDJO4rQsCgEPx5Otzc2NtbXZ7AjAfDpu5jmO48l4NJ3NQ2D2PrJRVVbZoIMGWmLWiiJlisJB6XnluloTwWvK0wTA0Xjyb7/zy9/5za9cu7770f7j2o+d8zYxjlEr9XjqANz/d/9H5eVLr7/1u1968+7dO9/9yU+5njlXG7LcdFDUSpSuPAvzcPTYxB1jusZGZVVbq0bTKQBhvz4YPD46NpourF9KokioAPFm79J45lMDAJraiiJNFyCtbOWDJaOImgIvAIIP3JjMn0qbWuhm+ZTzI0tbZgVSO6OjSdoV/JTRcMqJWOVGLMY8+9vnuFXLY9Dqr0s3Sz1lXi1QuU8PsHDzqL2WxeYLV2/larBom9f6XmeoHss5Oc2NXn6wNFFkac20KCSeqvjfsCwXsCG3SO5phH5xkOW0rzhhK+6gnI62PJWmp805Fnku53IufyelyZU4VQrLoNjir+U6135wWkV3IWdWbIHgaV76qsiK2mh0nDzd1kY+9fNzZalUngLbTnelT//yeQeTFWXydOXHz72ep75oIlry9JenVNNFQGuBzi1iX6AWEJRTQOMU7Gtui1od6BS2PY3stdud5kQsldcKGrqIDbbaGcvU0WUM8dPXtLxvTyEHzV4kIqIWnXakzb6kxk5plfqyITkRSLjZxkAUBCGIMESsUaRAirMsIuiGJxiEtTFJFGuyeZIMZ1OrjQ9OWXOp320yZMdFGZyk+dbVS50ns6mW2kSyMYhv/fQH/+q9XwI4Edezhqd+XsparzcY9O8fnjhOHOrCO6s1gKAIAhgjLIXziQ0kEUsAWClSygCIrbGEkjk2SS9Lq7qqqpo9B3C3lxudAJhVhQ+BSEHIxDZSaWAfd7qGeWN9bW9nE4Dz/mg8Ds5rpR4+vFuUldGmdsGkaWRjAEVZKSjAs8Bak+hoXhQhsIlMlsSRMgBmZckQ0hTYRdYkka2LGsQiiOPEswew2e8aRbHVzvtyVtaM6fhkOjkJNSeJnpcOgA9eG6VUm8kSgveeQ4DRtmmT6urCxNaaNE1SzYEUKaE4Ukqr0dFhmucAQjlNkgQSIGDP2mpLkQQvQQjgBi8QRcz/D3tv1mRZdl6HrW+PZ7hD3pyqMmvuru5GAw2gSXEASUgETEGUFLQUIYcdfvCT3/1T/Op3hYNhmwxHSA6KlGgOFgeQoAiguzH0UI3uriGrKqc7n2HPfjj3Zt4sNMkH+sGMqK+iKirznnume8/e+1vf+tbq5M551w1OkVEkEAtMdSYeOmPEYusIlGe6N+hXy4Wplkix1xsA0Eo11dKnRIKrLA/WpJAQwYXQSnU0FoQoBddM+k5nTTCeWIcbpmDj6hlMCeCcM/CEmFJUnCUkxhhnvNMc9M4zTkpK50NMnlIURAmMQlh3uLMkOFagJUdaSUZelQtarXsvCwgXndspaskAcEYhJVrLRHZPcKK1vC0IXV+kEEAKwfd7xY29vda5k/Nza2yidZ94V1
BnHcMVlGL0KQHeQ/D1Jikl50G6nh2XAzFvjRL0jV/7WvR8qxgB+MVf+/pZdVZJHp4sBtlgePO6/8nxyXRxc/9A25axHAB3vCzyEN3tV27+t//mW7PDm6NJmkz+NNNbHWvJeZcYK4Xya3+f9ZC/OVRvCLx3g+8Vanq6HItWL4MLkWKED5JAwXvbAhCc53l2Ph1zzrI8X84ne7s7i3rRtI2UAkBeFGI+39vZXc6XMUbBeQpp0B+MhltpPZSyzjKTERGaahljNKYNwcuy8EDyEYC1dr5Y3Lt9J6a4XC60zu7cuvXoyeOqro+ePn39jdcAZEVumsqY3mw2lZ12gRTDnR1OmFZLAFs7O4ZSVvb29/eOHj369JPPDm/cOLx58+T0dFk3daeGOdotBD89m7gEwYUQkgnOg5BSkuAdIbcsC9M0xpp+r9dae3Zy/MqbXzw6Oc+1Epx3AIpxVnBembbUQnKxqJfGto1lRHeVFABawayzUkrng3XOWDc+Oz/Y36lni3q+5EUDQB1cj9ZGwNlg21Zy4Zy11iqlkvedlU3HsXLOG2N+/Vv/7E/+/P+RgulcM8ZAzHsHQAgiorY1j54+B9Girq/t7XHOrfOZygHs7uwN+mE0Gs7m87qtnp08393uJcB576xVSgEIIbRNa9pWSt3hs0IIxlhMHcM7AWBEVV33+4Oqrowxuuz54KWUxpgi08QZgEzr1tgUw9OjI+99lmUppZTSYGurAzfPTs9AnWxo60Ny1lnnpBApJiZWEjfO+7LXhzCZyoajkZTSeQcirTPnunZ7jLa2YgjBB++cUkpIEYOPMdq20VJ2/dHX9/fu3b07Pjsdm/bg4HC+XBhjQ/Baqe6BOJ9MYgyj0ZZUqmpqRsQYa1sz6A9ms1m/3wcQY+Scp5QYYyGEplqatu3KHlpK41daDUiBcVbNZ0LK/+03/9df/fqvdMiLEL33fvAeAMHkq6+/fvvOja3R6Pz05Pz0uKmb5aKanjzXeQFASlWUPam0UrozIjPen52d9fs2IRaDIQBwxoUMKTZta43VUkspQ4iT6YRz0T3fMSaKaTgY3Dy8+YMf/uD2zRt3P/zRtevXv/bL//gP/uPv1q0BkGfZp5999vToESMShMePH3/vu9/lQoQYOREA27aj0aiqqscnT/dGg16vTASh9PHJ6WS2nJw8A1D2ioO33no+niitvbUq07Ztx6enhdS+rUS3kmKETLm2bawps8x7lxVF29bjyaQo8mfHxwCk4L2yMMZovRtiuHF4YIxhhMFo1B/0Adi2YUJwITKdOQTGqdcv66qWpUlgXaFRSm6Dr2prXcgyNdodLRbLw8MDIrK27aaV1ph+rzccZMbaTGezxdQ639T1gw8+ePvnf64Tg0kpxRCDD13h1xgjhVRa67xggQMIYC7VKaUOZKzbRgoxGAysNYvZ1AcHoKttcCGkYHVlhBQhBM5EjJA6c84BaOraOhdj1FnunCuLQub549ns7OnRG2+8wVnXjkBCqpwopsiYiiGUvd69u3eOfvA9ZscAAN4vJOPxeDrpb43GS2dsO1ACCZlSJiUAZZEbaxeLBVJq2rZuKmdda2xTN0joWNhta5zzW8Nh25pu2EZMzrqUIgnWjULj6fjk+DjE2CtLzkWM4cnRs+D9jYPDtuUAUgwxhJiilFIIvrM9YowLxgf9fsjTjZu3AOxsjzRb6iraxcyZZhk4xSSyHhEJsFfuHAJ498c/Ws7sH37nr9/64hduHBy89+MfcUrBWpH1W1+lKAEwXuZC/OVfffezBx/+xje+8er9a8dHJ9Z6yUSnAB4AEoJzYax3wc6nJyTbEKLWMgTXEV1z3WdJ7m1t729tS6Y5o9u715USuR417WlKDgBRAKLgghKLnqz1272hJB4AyBHcEECWZcxyLmQI8UoX2Eah9II4cIlJXgJi62RrjTamFUuBCHz1lpiw0moEEcWYLqb8K1BfN+mveQ5X+S1X8o9LGsPla1e2WZ3T34LCJWIdCXGDh4iYVj40lzdgZV1Ja80frJMX4KIxGxesyNWxE1bWSyuDykt5mS4N63iOm5d34Wme0jojvGgaWcujr3mlq/VfWvEPLg/7+TDuSyzyZbyMl/EPNgTAVhx6rIbFFwow6//TFRhylSpGemH6+LurXZ8TL4yiazjtb4M1L2HIz4MiiajrQV+dONZ6HFez3Uty/OaBCWuNkvXvsNJI+VyC5JXDr3PrjcP8zZFeoFBt3rsXbv4Lm6TNjdPVqfiKusiFiuTVoyKBsTXogUv0dQ1NXgCTl7P/i4uXdSWPUtrAMdNPVRNxQTHrNiZKRKnr1yYKKXofHSFpIbTiYCmkgGZCSfEQAVjfpBR8YIhsngCCEzwTPME/nVhOHIASklIMUX46e5QoLwVsQD5Q3/sv39m79QqArcP9oj88ff4cIj86mz2bLna3R0rni4ltbepUyQJAIC5AKVrvFvM6L1MMIXkfow/JA6gFBGGr37fWnbdL7wzFCKTgfTWFyjIA3lvvA2NM67wTIRSCBA1sCMa01hoANgTvg+CsX2Tzfjmv65RgQxyU5Wg4BPD0+VnwsdTapKCVHvX6J85XznIutvv9Tp9r+eQpISlC1ZjeQG8N+s/jzDVWKbUzGNiUANw7uCakuLm37UIE/HS6/MO//HYkn2XSwxWDbQBZkc3PzxkjH2OnaskYCz545yEZAEGkpNBaRZ01bZtnOviYlX3GWd20N2/fAiCSu314fSeXISVe8OTc8cn59z74xFSNzLaKrRGA1qOannLOJBeSE2dEiCnGGLrmNQFAa00gsJYY05nc2pLRwy7qBJT5FoAiF+2yZSnKrLh5fff09LSuXEIse+Wg1x+PAwBb1zortweFCWY8n4FQZnlMtFg2SGHQKwHkWWadm9dNCC7Psu08W9aNc54xyrTwgQBU3jGGPJM9ypb10jqnperpghGv2gZAa1rG+GrYYIlt6kluDiFrJvGLQRCdNTxRurqiXi+TL/nGUkgueIyxNdaFKLiMrZnOF5TAXuBHd35MjKTgwXtGbA1lwiMyIDg/mx31izCfjntb2w9/8pC4mgYYKwBkkqmnPxE74dnRWKvslVdfq8+m/8dv/cebb9792V/64ps7uwDsZCZFHZuTLNs7Upi9/6CaWBmTNa23BCDraaZlchej42rt3SGQF41fF2WptHHRL9ypNWOdgORjZMQeP3k8nowra27dvAFASgmi3Z3ts/Oz8elxa1ulVEpx0OtVdQ1AqeWiqoio3+/vjEac88l0IoQA0WKx6J4mUzeL6cSa5ujxQwbK+0NrGmuss7Z2rhvjvI+tsVLI4+OTzx4+KvPi8Nr1P/6zPw/Bn43Hr8YAYDY5/+zRw2W1/OIX3xRchBi11jHGLC9u3LwJoCjLzLTBmqwoGGcJ6fa9u2cnJ94HH7xrDYCVyZcPWZZnWWbbNoaos8x6v6yandEIgG2auq5ynbMYtdIRiAnVYpmVZZEX669PSoyKokB0SFmhZK/QPjguxHg8A8AEeqXeGo7mi+WnT54wRrZtsrJn6qapK8EYgKzIZ9USxJ2zxrS97
Z3GGK2Vs5YTViqrHXM+xul0cu/O3afPj6fLqmlaIvIhdFCCcU2/36vqpmnboiiqummNzTMdYozRAphMx95HLYWxZjQYnpydhciIKHWcxODRMaytbeuGSh6cZ0yFGDqBTKDTsAPnXGn9+OhJDD6m+J1v/3lKkEJwzqxrRafQCnSuVoJzKeViNocQX/ryV2KKXUbdNHWK2Y0790zb+JB6/X6eZcQoxqiUiisQlvqDYRI1A/M+pJSEEJ3hr+CiY9fGEEPwi+VCcZJKMs6k1svj5zHG/nBYLeYAvvKzPzsY9D/54IN/95/+77u3bzHGbh5cW06rkFKH5C6X9bW9bcZ50es557u+bKkUYywviu7RXoNHjZBCSskZWdNyob1zSunIHADGyTpblOXR2blp2y+9+YUHH/9ECP6tf/Evf/zO9z7+yacAtnqD0dbw7a/9whe++uVqsXjw/gdVVVnr+1ujwdYWAJ3lTV1fv3GjqdvoXSA8e/b0o48+GG1vSymE6FCYLKXEIyMiKQWQOGNEtKwqzoVSGoBgvGmboiysNe+8917b1vs721vf+Obo+vVvfOuf/dHv/z7WbuxMZjeu70/G47Pj50Q06PellJ3RymePHp1NpzcODvb3djLBz6Zz79xHP/7h9rA8G0+D9wCm47OvvP12BHEubF17548efvb8+OSNO3dMU3e6xikylZW51Fn50DiLGJRWB4eHVV1P5/Ovf+0XAczmM8bIORdimM6mSvCd0XA8Hofgx5MpgF/5lZ+fnk+EEEIK7yNjDDFKpXReyCzvwNz5fJFtDYX0KsuNa3ta+eCttf1eWVWu+xBPltXp2dm9W7d98DrLtFJWqclk8tGDj1/9wpvDXg6AcyGUIsY4FykmZ61UmnPetXoAiCnFGIxZKXV6760xgolBf9DUdVfAEpycc5xxY11X9uBcMCE5V96FLMsAKK2R4KxjnBljnA/PTo6btokxVHXdORdVs9nWoJ+QQkrB+1yptqmOj46ctXlX6w1IKdlGKKEZKe/D4f7ezt7urA6Ccx8SAKV0nuchRGLMeR9CaEwbfQzeK513KrFtXWV5XtV1WZbEmPfeNG2vVxZFUZu2owk3TW2tc94750ejrRBovlw8fX58++w80xxAV3QKoXOHiju7u0VZVssqOscl29ndBZBlQilFVQrem7aOUh4M+4wTpESdns8qAMZ4UvL4+JzJj6/v72rZB/PBLbJ82yaqjAHQH17zbroM9qOj8b//kz/+9a9/bXl+RikiUScB3LS2U3hcVo3kg7k9BjPBe48IMvvDAkBPHwoVQVqAG1cRBa1kr+w5a6TI2roBwDPSuY42McaF4i64vh6miCb4IusF3QeghFoNlZsdB2tyHnCR6lzOwrSRclyyHdbq1h0docPZiV1NH4FOm/UCe9xY3qz3/HehZ3/DFhsJysW/fxseuTp+jBHo2lkuwNWN/uZNZcsEXGS26TKFSkhI8UpW1dFCCeh0M+mSedGdFFvdiAuU8uJ+dx6yKwJmSqB4IT6WUoxXEtSUVuvDtS7P56aXKcW/+56+jJfxMl7G/y9DgFIidFatxBhSSisHV9rwqkTXlbkWUCN0rZ9sg4t/MWKvRvZNKt2lu9pVdbRuR91rl9be1FWzwP7G/mhc7ni1k83DAEhp3ZPZ5UndDJFSt/PVYdZ7WSXF3dx6McyvzphhPXth83U83vsAACAASURBVAVs/LgG8bp5Zt1k/WKXAtbKxumyXzStAcMXdrrhRncVbN0AF68WFS9fvsBCKdEVFuZqk9X9DxtrAlofkq0KnKvKXpeYrW8vceIXAMm6F6EzEIAn94JOJ9YeNgjBcWgugveMIXjTy2TXVaty7RdGKeldG9rzuvHB+cNRL9sZBtd0jjBwmVaSMRKdRUDgiNY6n7gSMfNtBAAFkpkiG/MdIkSv6pCdzdvZ/Fk4mwPQpD59dHK6JGJNpOStT8I8Gz9etG64tUVCA7CNBaWcCylpqLUsDgqt4cyzh09CiNvXDgFcv77ftG0zPdkabIEL7z0LMQVHLGotZJDdrY3BG9e2beuiJ+KcC8u88U5oPasqAHlGWvkYWWvcfHreLpec0DahHk+fpwQgAlqJNsH6oLU6sjbPdIzhfO7Pnx9lmQTQKzMfo20TMTarfGsXLgTilDhJyepqCYBr6WdPp5nuo21jLHO2tzV6+myiRkVKcM0MAHzoIEjEBDAllBTwoWmqWpEC4OA5S1syzfy8mgUZ+olXi3lIk9nB/S+8eusugGdPfnRvd7Q7TGQjROtTdvvwK+998OFAs6B1M5sBELrQPuR5FkPdEywXWWst5xapiR1TFgiTafR+VyoTkKbLh0ePVRazBCBVzz4CUCERSACweDI9SujoiDD1ycn6S86ITHM+ayTjJJ03xrtcA4y3Rkrhlw2AumYA5QQQuLMhLH1TC8Z5YohMAgAGHCklv6iy3paMJnjDWGAAA+PUAkix6Xu/c+PGkydPQVIoZmwqsrzykYGltLI/AkdESDFxxpQUziXJCIKxYLqyN0nFKXZ6vCtFSMY5E84HrH0zWtckHyXjo7Iw1fKDjz9iUhBDWBHUOhod69zhU4Kz1qfogxFZmRh51wBg2tpPP3u+Z9J8unV9++nRx2c//OArP/O2T/mP//qd8bMjAPuvHB70t8ykHvRzEWXVmA8/ezgYDf7lv/61kIXkWgCkeGB5lHTq06N33jPl4fjpWUuZDkl3ZkUUZGAtSDKxZlavOHQxrMAjLlk3SsSULqQ2CQBjIQa64DF0ZIwUfQhIsN5u7e4N9/dNCk+eHgMQQjDB+rr/6OjJd997N9Py7bfe2t3ZHQx3usctsul0MV3OlyrLn58+XbTtjRs3Hz7+LNNyMht3REKi9OFPHmRa101dZjm1dVGUddvGBCFFN+gZ5/qF+u4773z22aNc50rlz8/Og3N5ni2biqdO85699sr9rm9KcJYp1S/Lk2cn3pknR08AjMosE9z5yECPHj65eetQZao1TTDt/vYoYwSAB7s0Vkka7e+cn5568NOzs/e++93W+Z1r13/xq18FkKQoM103zeOjJ6/ef/WN198cT8639vZOT2dMate1LUOkGHdG/f3hEEJzniRPy+VitLVV111dxEfSj4+epZSG/d58PmOEZj41y4XW8truNoDQNFJqIXWWlcvhwnsUWzsxCcHRGNu5JCtG1rbNsgrB/5+//VvVbDmZLx99cjQs8+fVQnXgnVDBRymE854hSSnbutofbVGERwIwni8ScDIZN21T1dXOaLislkvTFHn2+MGDm9f3AfAQd/Z269bUbeM88v42F9wGD+8oxK7YE2IIPnzy8NH21pAzmk7ne9vbKZEQioE6uUYuxMF+FlLqDwc6z0NwnKQS3LvoYwBw4+B6VVUMvsiz8/E4RscFK3u96WQitToenwO4fuuWzEtlfFPXoQdKEFzGBO+dEMK0LQBiLHoIKQc7u860Ssq2rnOdPT4/Z0J0eqN/8Ad/POgVQsrD/f3RoD/o97dGW72isNYWZQHg7PRMcCY4O3169PbbX7Zt27WpVlXlXTw5
PQWQZ/n+/n7R65W9Puc8xLhoAhcugnGJ2XQBYDFdCpXxgR4Oh6Esrh8ePvnssz/7i28/+fSTlPDlL3wBQIju2sH+F7/6dtu0vd6ol/d4jEqwsiyCtwDm04aIvHdNvZBStaZp6+b09Hh7tO1snM9nAFRuGGcB2bDfPzs97RYgWmUpIq5XU8tmJoQUTD55dBR9eued9/71b/yr3/rff/O/+e//hxvXtt98/Q6AH/74wSv3X2uXC6112RtYbzKle0Uxm8877EkQN97fuXXnSz/zj/70j//IPTrqlYWLyTs3KLOQCMB8Mi57uWsbaasni5n3UZdltWykYLlk8zoCkEUO5xqu2/F4dHjTNbVIKPPCG/MLP/N209QAPvroQVHktw+uvffuu7cOrwndcy4cHuzt7G3fu/8qAFUUg4xLCiG20TfLZlF5ozisqZxxxjkAuj/i4NxUmqIse81iubuz0x8MjWmF1Lt7fQBNtRyPxyfn5wcH16u65kJsDQeM8V/4+Z8NvmsageC8XlYRfDab8wRv3Xg8SYSQ0NYVgGrZaiGbYJVg43k73BrM61lSsWlbxigGAAjOn52PGUeulBRcZqxflovJ3FsoiXnVAjC2VoISCSV5odXSWmPc995595vf+Ga1mKfoAZSDst/vWWcDKFOUEvWLPQ2/d32n/eQUwLQ1TT1rTbO/vVUUuwuzvD3YcyKPVLUpdnxk71yvLJSWUirBeAIhkTVWChX8yrSq1xtY64zxSifinCtVZhkRtSGqLO/1+gCIuMrLnlScM0JkTOyN9hK84iDG0WlYM6alquoliBBCtVhUi/p4fLa/u9/vlwDq2WlVV9r7ZlkzEp5lxqZSsLpta0uz8xkAoizrq1xmr9+8bZOwNJe5FCz3HponzzQAkxhQiKxE5k/b9j/8+V9sS10WvYUZ9zMJQKgs58I3bsnjACHr9S1ledlELxLx2hKAEOcD0q/fuT+bLTik8+1k2lIm8iKOysz2SwAkiOU9ksYZh5SWi3lZ6MiUIuGMsckD8NEnEIMPUWw2Xqw75EAg30l0X9BPViYqSPEihaBuiXXBT1mL72DlQ9Dhb6AYiBGIGGMsphhC6Ljb3cfNGNLKRTR1XdgrjcqNpHLFI+wyQlCKAUSrvGaVQq4lFMHpkqVxKQoJrApmKUWApdSNQAAH69LXVb4U06rzj0BEvNMRj5uMxe7aGbGVgAx1MCNid90A47zjX9LKtScQEiMWYiRamfx0+R9iQmKUEGlt8tmJdKVuRSQ4SR86cZROkTyuyJ0dXskuJb0u+rMTwHC1Lv0yXsbLeBn/cEIAL0KKl+gXXRSdLlntG7jcpZxwt3W6BKM2q2+XsemQcvna36OcQ1cZi5eGb5+z6cVpbVbANs8NLzKUPmerzzuHCxSTLn/V/dk4Ei5oi7jw7LkEZi+2utxLujAtX6OOay3HC0pR6jTtLlsONu4G4cr1XJzahYw1pYgrp7zxf6LOY3q90qB1P3/YxDWx+lpclhE3r/ZybwKKo20rrSVSlAKULJwHwMl6O28qP8hVVDHP8i/dvVYq9JXkKBNLANwyKKGEYpqBC7432M1EmreLZ+eLUb+3098DYOs6hMXcuKUNssi12nro5B/85Y+awJcnDQCiSJEx4iwx4kREk1mTEolMJcZWFt6cAIpEoYMeJrMlAQSXa9kvgxAAauONSyezZocXeSmZ0MQClzHXcqtfQBYArDXOuYFkvTwnxsbz5dls/umDd8ve1t5r921ncCzJVg1Fb5wrRtfzretKqn5eZCoD5wCmdUvEWPSS8fFsQYBWqjLN9nBQW8OcBcA5b6xNIXKibq3YGht8q7SYLhchOgA5Yx+eTr/81nA+nuckEsky00rFYOYprUxmq2oJrogRZyKEWDmzNRjujvon8IpLANvDm/1SE0+3b97rFXreLF+5/4ZvqgVw62D3/PwJgOAW/R6zfqaSLbmsbZIi3tgdPjubOh51TwLoD3on1PYz2ZPXShU0n/eZINI8HzLb8ZkRfTANVEG1T1Iw1/q0KRC+/nqu1sJdOaNbyCFduhcSwUcw8iFqKQKBJ5bAYwDj5HwA4IJPMXUiWy546/1OLrpFdkwrBU8fUggxphSWT3eUFJrH2LTVLMRYMgZgKxfWV5NHZ32fmEh24hnPvcl3t/YrroztjA6JiU5oHIwoRM85Q+qE7pJgCeuaCRfkVzwvQoohxQ6170AWxbng3FjbXWrTGumDN1ZK0Ql4+WCDS0BiXDCiiMiDg5TBGc4ZcQfABHz84Tv/43/3S+GYQ8svfvXLZ88neQhh+uzeYbl7+DqAu6/ee/p4rN54a8Cb9uHHffS/+tbd9Mp+r7f18L3/MvrCPQBsMJRlGV3sHY2fPJvfvVU+fviJlGCZ6K5aEU8uxZiiDEgXq+ZERJyzrsbjg7+gANDluNelOpcESr5Sf2AJCMHnWTYc9LOiKLeGs+kYwMnJcylljMEHP+iVi+XCWdMvy6Pj51xqAIvFosyLUueLunE+FHneNK2SMgFv3H9tNp93TxNnLAHXdve2RtuLxbypq8FgwASXUtVNAyClyIhprZRSRCSEoE6wjxHnfJX2AIxxqcRsMiHGBGdn4/Hx2XdGw0FnaHp2cnr/jdcXy4WrjWDstTfeqOr61p27f/2X37l9726zXAJ4Vte51jGEej4zdTPYvT4+n0ymk7ZpEePWN/8rAPTZI+scY0xrzQV//713P/nkAWn1wXvfC8nxlYN9aqqqbqUtlGAieCdL2SuK4aDsHqfYNN67XlmEGPVSKil1li/ni7wsemVpWgtA5Y5zHmOw1gXvGZcxRqk0AM5YhwqFMqaU5svFwbVrn3zyyf1793au729tb3sfnPNZngEY9kvyrpeVPgTBmY9R6+zw4KBprI0BQHB+sazmMbXUlnk+W8xzrXWua9MWRaG1AuCWNVIKISilfHTeuZgS54xztu5oAwAp1Vff+vKzZ097Zf7G669PxufHZ+fDXqm1vHPzJoBev1fVlVJquVh0hEfbetO0QimlBYCmaWIIy/kcxIJzKcS2bWMI1lrTtnzVnO5QV9474tw0TfA+xtAZNDhnpZBYzcnRGDMdn1eLORFlWs2Xy8l02rTNK3fuAPjoJw/u3b7VL0vO2GAwYERHT46Gg8F4Ojk9PweQUhr0cgKm0+n7Hz5IKd08uDYYDISQUrDO7ubZ8fH5eLy7u3vy/JnSWa8/EDozTUtCTo5PprM5gOlywXlTt01Z6t5g+P6PftTvld/8J//k8aNHWqlX778G4LNPH7z77rtgfDGbhpi2trdb55x3TRPzogRw/PQpETu4eVtn2rTWWTOfTbkQrbWZzrvnmAGUkmmaqlrGGIWUvbLs9wZNa6fTWcfX01mmhEwpzhaV4Lxy/uz0hHE+Pjs72Bvu7O0B2N4ek8wlZyF46z1nDEjj8ZjxTj4ah9evMc4/eP/9Rw8floNhNzVYa00bldaTs3MASCEBnIu8zHGWYohKSSI6Pjlrje2mgxDaMst0L9+/tu+9k0qVg/54fJaIvvv9d+q2AXD//it13Uzny69+6c1HT58
uW5Mizs7OTk6Pv/zVLwFgfJBSiik6Yzr4wFlb9krKdNErU9sAaIwjEnlRdm3Ouda//Etf/53f/Z1evyc4f/b8OYBbNw7vv/LKfLGo67puGin52dnZG6/dv3Hnjmnb7sLzLOOMMcaMtW1TD/qD7dGIMRJS9gZDAEIVkrFSyt39a95x69rnx8+cs4wSgu9MeHMlR1tD6wwhOec6fcbgnVLa2JVuqdJaMA4k07RFWcYUm6bmnFdVleXKOQ8gLwqdZ3lZeLBgayF0VhTcWSICIgBjzXi+ODp9riTtbVOm1A8fPto7lIrzQNQhK5zxiBRDiCEwzgXnKaWO74yEjnXrnAdRTEkpvayWXd2fGE+UQoydJU73hDZ1nWWacyYYiwRj6rRmxhEx7zxxUfYHqOtMqflyeToeex9aY44ePwZQxlZWNVqTl/myXsRgGuuVwNnEHy/82XiOzqVKsrZdfPr89K3XXt3bHj48eqw1pcRMcsX2NgAmxdmsNVFLkWc6f/3uXjM5RUrWOeM9gBhjCEFyvrs9CJMngLe+Hi/OG4Myz7aHQwBtW3MuhcL2di9YlVhfZ0Zw5UOjStmtt32MSKvOb8F5kevWVmAhIYGpsNLCJiSGxFctHRsrqnVSQWxT/5BWwvG0SgqAC7ixm6w3ySCfE4kRT0jdF4kzvqIXrpjsfDP36WDGTZGsdT/FBdEDWJ/DJi1jY/MrqehF9hNiVwFZZ0YX643NXPbymtAhfQkXzeTr9K07ye7YaYVFXl4AsOGCk7But15f0maqmS6YJrgg1CS6vDoAlLrabXfmnZQ5MbbmfKxytxfT1UuO68t4GS/jZfwDC/Z3b/IyXsbLeBkv42W8jJfxMl7Gy3gZL+NlvIyX8TJexst4Gf9fxLqTbrOusqHqsSH40ZFyumr/59dfLqmPlw3DGy/i6k4vil9/j6DNU1y7xOBFkue6akR48Yib9MWrhaZVA3aiS/Lf1TfR1R+7q72gkIKIiF0Uza6U5tY38AVu6U/fi5VoySYfceMOJlDXTX1RVbziv3PxEVy9tLWZLS7E7dbVzcs3E3Gs9N262mdaf7LhQkfl4puS0kZdcfNQ6/1F4iIhkzJ5y2EVrHBtbBsASdP2QI3K3v6onxWyr6WOcbdH2wc7r97e6e1uA6iP56ZFVmR5mUOKtqpUwVrnfz7PsWrnhvE+6/Wi9c2yLkav/l//+fs/+M/fnsyXkeVMCAAgmSJ814QesWo0AcXWAVXnBOJDICLPGSdiBDtZWmeCSP2drUGZTZ6fAnj04ccppUygmS7aEKz3ijgAGz2w0gGMMXQssIhAQMZ5mWe15ToXLvKZsQDqQHVgWqhEzC+ef/zoEYFtj3Y4F52ZdSJ2bWfHJEhGJhBnTCROTINJLhgTGsDecBhTmteNC0FJIbmojUkhON9++JMP9vslgGllQ+TDfrGcw7pmd3R9e3t/0rqitx2obFoLwAdL0QnOQ0pIpJRMIOv8/rU8HxQAyqzwzgiKmmTdLJ2x22Wf790Ybe/vX7/x7ve/B0CkeG3vxumJ6yvM6qCUVkK/enCwbFMryrITC9P5jf09X1fXRzLP+a0bb45ynfyyKKJIWcYAwHs3b61HSjHF1nNGLnYOTz/FqSaw6Fek4dQ9CRdfYOKI3kEpKaWIIbiQBNdK5nXkojNA7wg80RMi55CMLaol5yQ4X4nMbzw0jQtKCEIKPngXLkrmKaZ507jGRhZqF3UkNTp4WvGTBZRJ00UDwPiQYgoxdJ03PoSIQDGUSjnGpZQAwBjAQoyMs0QrWxWEyDgDMW8tgATuQmBCeB+IOKXYGsOJvGu7/iqizkcHITifkg9Oap4QJUQ01igASI25/ebbKStCNuTkeT4srvV4NH/97b/88ePpL//CVwE8PZP/7k+++z997X7Roz997/1/enjn7r3XzONPuWsWjj859QAOb9198MOPqgeP3vzFr5wH99F33g2OMa58SqrIASipzKICKEYwWolAxE55ac3FEIJ30iDdcNGpO3VzTFwz3ajTgSdKiAkpxJCQlk3deJv1yzzPAMQYm7bJlEwxcQatZJHpne3h+WTcWRJJpauqMk3LuIhIMSVG1CvyXGdVVXfKht67RV03bTMTYr6Yx4Sdne2yKELwzhkuOABnAhGLMUkhQowhBmct55wRSymdnZ8D2NkazRbzfr+3tb3DiIWYXGtciIxIMgB48vTpjVuH5ycnxNjp+eT5o4fTybQYDPav7QtiZZED6PcH1WzWHwxAtL2z42OUjN2+cePk5LTM8/HpMVad74lzkVIajyfl1tb9L711dnb2K9/653/1/R+0nQZlSoKzarmcS2EXjeIkeJ8RNa2RigPoka6N4ZzHFEOMKaUQQuQMxFSWiUgAlM6Y1ME5Y2xKkRhijCkG6xwRS6Ez37AhRmttfzj8r3/jN/7w9/8TQjy4fedkPGFEvvs0jfv5t95Uks+Wy6Y1i2VlQzQu1sZ0puRSKkY1EdNKZUrNl1WutZDi+XklhOgkPq2PzgXOOBEFH4J3MYQUUwgRcTXPeh+sd1W1fHZy3C+Lzx4+fPLkifd+UdfThSvLAsDB4aG1Jqa4XCyOj59LKRnYyfOnjPPOqLdtmrppWmOsc4xLLkQI/mw+DyFIKTpCYts0tm1TAmPCGuO8Syl2/BXnXEdv6SYaJSUBMUYQGGMhhjzT/X4/BQ9g2O8VRT5dzKUUTdMcPXv+6r27jTE3Dg+m0xkAxriWzFqb5/n1vV0hZZ5n3vvW2hSxWCy6u9fJDpS9/hff/pnz07OT45O8KJVUtbGtcwCIC6G08T6Pkgv1ymuvB++ePXr09j/6Oe+cMwbA3fv3l7PZs6MnH//kE+fd66+9JoWo6irPtmbjMYDgnfehWi7yoqx9xTnnSg2HWyEGImRKA0gheBeyNY+4yPOd0bZSmVbBGttZNhMlIvI+dEunLMs/e/jw3r27P/7Bu2+//WWd5QDyovjuuz9ACHs7WzHGjr53eO3aYj5fVBWAflmORiPBeYxhe2eHiIwxrTF6d9Asl5wRACb0Yjqx1nDOUgqcMyFYptXHnz2aTmdZpgBQisGZ6BrBQTEmxp8/PTo7P9/b33989MT7AOD0fPz46KkA5svFH/3Zn/V6/Xu3b9+5det8ct6NM/Vy6YyhlDoOLCdyphGCMylB6NzYfd0wJhIoxqiLkhH1C72/u7Oom53tnU47cjKd9m7cSDGkJKSQWgnOmRQ8xiil7CzmU0rOtETgjCElqSQxssZ471NiAKSUFKOSvOz1er0WVBDQto1WQjBGq/5SUlIJwVOK3pqbhwfGWi4kSam1yrOsG2e01sa0Qiqk1LZmsVhyIWfzeVbst9ZirS8ZQvTRO5+4YDFCdDqbIQAQnEnBbu5tn0zGFIOW7OT09PDWaySzRd3mRQ6AC+GdSSl6H2JKMSUfovMOidjaXE4qZZ3zIThnU0odIZ0LAUoxroTznHM+BK20VBrRg0hwVodwdn6+td3vtudCeGtt25ydjyWj8XzBuFCcGWs//ugBgJHECJWP9Ugyrdl4XM2bdppllcOyoWrZAqCcc5
ABAQQpHSggOIpRZaJhzKoIl8MJQmAIbHd0TwjbzdyJVRMhgujMkTrbSiFMgSAFJo562SOkuy4MN4MvHBJ1oSfOQAQIgsOKifDKbALBJRmbLdbLUaKYD5tHJVlUglpTBAliSCpPfO+NKHMqY98iSHC8YBQQYOQtXtz61mk93MI6pVhlAFsMzSPNW5VmnlSgumyNAIWIRUa3a+AlEISaIDVBWc9dzc3jLzAsDv/dZn9m/s/zf/7d/irEA5cP0R7x+a8VgIzzKwJwDMkiXpxM+Lucqaada0PggphdIkFYtYMqOFUiSklSSlJCW9XNTN1llpGOu1IthKCu0bPWNLISQFKCzU5oXwnqVKvPUyFgISmmli2VMi2HoAOuNShK3uzs6GbuycPv04vfpnL9pZkSAdDKadbQ0g02am1AMXz5fjoR8P3XrruWe//PUvPP+X//1P/t7v/CGArLs2t/6Xf/3/OvO5r62ttT7+N/+qvT27/eoLTz+z+d/9yj//mZ/8YQDv/7GPp3pa9osHLp//1T/69LPPfu19H/nQ3s6FR558cvTcVz54eQ9AfmXtO9dubOx2n/qe7WpymK5nOsk++/kv/tQnWvD+sY98AsDa+7qFnb3vTLb3xEe+9H9/5do7Bz/9Yz85MNPf+p0vPnLhNIDHzp79zapIdAeATGTly1ymZVlBQ8s0Pm2mLLO13nQ8YpAEhCBL0vuQagXvIzOjZU8qkVo7U3lrnWcGsiQ31jrrOLaXSs3el5UhQlHaNEsrY4wxJKI6JbI8qQq/ub42n04aeXMs+uCgVRICWeMERRaLqtft3bp9M0tTIcS8KPNmLqQMHASJRGkAtjSzybTZaqkkBWArI5qstWYOBN5Y6wIYDJok07X1DVOVG5ubR0cHIYQf/eFPcLDBGwCmLJw1o9F4Y31tY3PrrTfeyBvN8w8+OBpNqqqaThwAyKyb06TkajypitJUVZo3jPOD8ZiFWMtaAFzlyAf2XBjjGK1Wu9Ns7m1sHuwfeQ9BkSxMBcZoNEpkFmzFFEiIZp5naeyKQBBSyMTaoiiqVCeG2TErrYpiYoVsJCmAeVHNAqdSSiVH86LhWUh19fr16XSqSCw6x1GUJRPaUpSmSKtsOp+/9sa1VKHb68Tb/aH3f7CdZ5WppvOZkmI0HZNWOk/YmMcfeQrAOzdu3byzv7u9pVzQUmZpNpvPK2eVovFoeHDYB+A7bZUnJJAE4ZmVlIEDQiAOlSkbzRxAlmepVlKKVqNxcLCvpNxcW+u0Gt45mam4f12+9FBRFI6dtbaRZYfHh96zc67X7cY2gu3tLWsdh0GepgHodTt7p043mq2jo0NrXTPPACQ6ZTAJIXRCIA8y3pOAJCgty3IKoDJzBeuLuSdRVTZJkuFkkmdZZaqskb927U0Ar775RqORVWXxxOOPKCmtc1mzAca0KLrdLoB2u229a+RZmqRHB3c4BEGkBJXFfH/flmUFIMvzVKp2s6UTPS3MdF6Ss2KtV1S+9CFJcwDOOWuKtbXemZ2dqqrmxt14/dpXvvCF3dOnpdQVKgAfePqx07u7HCwTiRCOhpNbN64XRfGDH3l/0mgCgBQ7W1s6y/IkYRdSnVRCltN52kvL2cR5C4ChIIRxprKFc4aAWBcWmKWQ8an0DO8jH4wPnrMs9d4IwBjjg+t2ugBmE55OJkpmWkpTlVNiKWW72RpPJ4PhEMCjjz66e/r0dDxa39q21g0GA+998Oj0eqW1xjGABy+cA3uRprM5KuOmRVEy9QfD/Vu3VJpRlgIwk2F/f2R1s6VUWQUB433QaR7c/M5B/9bBIYDK2kYjP7u7q4hGx32m0Om2vTUHR/O4WL3nyoMS5JimZZUw27IyznsfqiAgwdYBUF5WHDLFLJWtJgH+W9957q23337g/LlutxNdCCl1mjQqW3V7a41mU1krlCpLs761ubG9KVtdADrcUkyVcaV1zofS+GaWEaRUWggFoDI202mSNUhKlWhrbGXMcDzstpux5QVAp9Nt5E2llQQfDlyai+Zat5xNi2IuJHwxAzAYjIyxs0npzcwUc1sWLNOsJTOpmBALEte3drudrmR4VwlNRmktabvVLNvpcd8BGI6PvXUEfvD8mdsjcxyaB2YwKorZbMJSaaGi5yQFsixb7/aOj/reeSmJ2XoHJRPE7cCWgiSYhVRSQUrhQ1BaEoFZSJkAUEoTBHsOzjMFpaR3Pk2y4INxHoDlUFhrvCcl0jyTShWVabdaa1tbg/4o8rSkSvaaecozdsayc4UQbmY8Wqle63akzgCQY9YieGI/zxq9RKlAUhI7azmQZgKQqBQknbVBkgxGauhGNioqrUTk/CEiUxWtXFeylVZzTRLejKdTDhpevXnjVQBZIuez4xffurXZa7/3wQemY+62smF1pCfq1EYrIPLnkibNzgXv4Y1zYau7sdXdY4EMyVxkACpnBQcJspKEEGAKwXOAEDXD0gqfI9fdHbE9YYWfKNKxx5LASDa+/Oxqv1rs34lR46KecMXPRpSuqmv4BRZVklSrtcTjq1xIzGAIQMSOtXr0lAAQQhBKxNbzyMgFH0gsCyAFAgIxVijBSVAdhy0buutuORIL9vuldg0Q/V9mQJAIIQjJQlIInpmX1JBct/ssumgQgwKKcterDGZUj3X8E5aRYFi2CC3aRwixeYhXWwmXdJF1I07s0QtMoSbaxn27b/ftvv27Zgspm9W20JVN5btsZVVd1oXfU/XOd7+mlYMr779HRGTJt3FCIhw7ald4TE4gzcWLxVZR90OuUiefHDy5pHerZF85tljyeaXzgJYo4j1fvvJpXrnAlQ13+ZLrvyyuZaFwc1dtf73jRlCVgRACLahOavySgbrBlRfbZKRAWUprew4+xphKKi1VwHLzq52B2AXJCN4jVcpa47zPtSKwCy6SOjnvo0SbJKGkUpJcsJWxsa0J7ONgEQIIkEIIFXtbQEComeG8sxWzDb6bNYJIFIVU0mxeVuW8nanf/e1/CWA0Nw9dPnPwzmGSdeRaXpnZ4Y0bEH5zcyNRqtVMAUyKaQgiT7UWWN/YFM6D2QevJVwIWggANgTrfaSg49UWaVoA6rRwO6i+HbToN7l77tZYIdPdh5e3mzlOSV4AzNEHqo+d3Hle8A4uSWFWH4PFly0aUBbcMPUsWM6t6IsACx8Lsbc7LNBUXjRBR7ybwSSVNLZUgSPxk1aypbNh5ayzlasES2LvgpUMIWrxFmKR6dQHP52PK2OYWYoIcaolLQ4JCLAgiJgmYKKy0hwmg1HwCYBupyvaLes8KS2qAoCxlXcWFKQQMQXACEKyrBW6A4fAgKBA0HneigrdguJfYnrDW1d99xrCJ8kLWOtIQBAF5sIFKRWA7lr75a89/9d//h/8nV/6xQ9c3CxevS2zUqSBXBAqYakBgKUNPgidJlq4YIYTJRIID+Eh5IKKgbyQIBKSIISvb+NC0QcAIJiDFBDBeCavMZuJ4LkqnXMxVCNJHCBYwnPUevKB19rpwfEAkqZMAHKLPMtOP3iJyHpUjzx06uEz66WU1XTUO7PjVQbglW89v/P
glbU8H964bfQk2+o89chDn/2Tb/7JZ7+imk0Ah9dvffWFm+d3OhfPX9zuTSzsogAAIABJREFUNp/7N3/29S98/Sd//Jnk1Okf+blfHMgMgLzyHnv1zfngrfmQC1N9/frRzq3bl86d+6OvfbnHdy4lTwCQ0j7xM9+P/YPB4WB0XMn96zddeHtI5z76FP3mF48rAYB2L6pb13X6njde+sqEi7/1yR9/9Ae+byaqm68f/P7nPwdg7/wZKyiRWpBwhW20mpX3QepEKedCXElSIcrZXCkppTJFKXViKpdoZZ0V4EhcqNNkbpwWorI+zxphPs/zHDrd2twkBBc8gLIyQkhmv725sbG+XlYFA0onpixVksQHzZpqZ2tHnDl3PB7nWW69m81nb159fTKdbK1vABiNhvPZpNfrDUeDLMun8+K0OtVstYJzQoiyLAAorRuNBoDKRF1XIaX03oUQkiRZkPOytdVkPIohRyPLh+PRbDop5tNWuwGAEJxzW5vr4/H4FPP6xsbNW7cevHxZKt0/7o8GQwDdbsdUtpobArz3cRtKEp3nOUkZaR91kgTvo4ZMmqSNLM3StAy8u7EphG+1OwDyZuPw4ODWjetPPfq4EJJAWmolZQgsJABoRYI4S5scPE+mFDU9okaWt9OpAcAQzvpzFy4eH/fzZjPVyajfT9OEa4bduAQJIjKV6Ru7trauk3QtSdrNxnh45Jxz3gHw3s3LsiiLwCylaGZ5VZmrb1/P02w6qwAMJ2OlVVlWRVmWVVUaq6Vc63bLcjaczkxVARCqF8DGVK7ySZb5ACkECVJKhkDVQue6cE4pUZXlbD5vNjIids6VVWl9nRrZ3t4GMJtPQvDWud3tneN+v7G+trW1PZ8XcbnJskwpJYSorD21s9tut8uycN63ms3oKcyLedbIpBCruUuKSs9ABGqn06mQSmnNAYGJAyupJtOpFDQv5tG/UFoaY5hoo7emtRqOxrFjNAS+dWcfQFVVjWZ+dm+vLIvnnr+xu71pjWk18sq5Xpa1Wi0AZVmVZbm2LiaT6cHt20TUbjXHo/G8dOW8YFIAitm02ciIRNpopK22mk2PjvY3NzfyZkMn6cHhPoDJdA4hB/2jRqspQhgMh5PJZGszOzo8ypolgNK4h598cjgYjkfjROoAssaMJmMIsba+Fkemqkom0Ww1mo2mc04pDcA5J4QgQVVVAZBaRy/Ge8/BKyWJKIQQOFhjOLj4w/NGHthPJmPv2VTl+tp6fzRsNZvbW1sArl29ZivLQN5eOzg67PePldY72zvTyfTbX3t2e3MTwFqvN5tOi6IsyzLLGn48KWZFkiRrva4xNraxT8fTYf/4j//4s5/61F93zlHw7U5vPi+llA9eOH/l0kUAWZbkeXbxysN3brxz7fp1KejxtUdev/r29374/Y89/TSAo3feWl9bs9YZY4yx83kx6A+IhDUmVJU1BgBLGQQzB++9s9YH8/73vvfR91y5detWqvWLL78CACGs9XrXb948Ou7v7e2dOXNm/2B/vddpNJvOWVuVAKypYs87mIMPQkqpVERGhCQA0kdXL3AIzrlmoykEaaUOjo+7rdbB4TGAoih8l818ZsuCmWezws+mD+zt9jY29kdjEhKAqkVyqDIm3keArbXCeSJy3sfbbYzxgYXSVVlKlcIrRpIkTaU1ACGYZCBunNs91ciLysvnX3un093Y6K31p8Wq21yWhXPeOWesieTWSioQidhTzyL4EPGrEAIkpJRpkhlbhuAjdySDI0wZQtCJIpD1FQFRtAeAqQwi4aZOyqLM8zxNs7KsOPCN69cvnL8IIJF5Rzi4iSid814SeeeDR+UZ2mc68qe7PANAggQRGCGEIJa9vqvhSr1IEhEnghpZUprKuQAgBC8lKSnzVAOdIPJzew8ZMexPJ1WY72w1AXzztf0MB900W8/lO3duVNae226/Y91kOt9Ze1zLeJskrGVnIYlD8C5cOnU2zddG8+FwNDowFYB5NSUJkoCLrb816rVK8lijW2Ehk72AxL4rzKuRxhXnFovYZNERzUtKE9QnuBvvjA5vPTZ3dUivDN3K5Ig84JHfnRfCpMuYsqbAWoZczN8dmfLibkRZp9VwLX5jdMZPPHnCkqgLC87GxXAsz363V/9dP+PuwHMZDVLdgL2YGyefIWaOTj1hEW7QyeWDxF3y43UoGGKo8Oc3gd+3+3bf7ttfYFvCkXwvNkP03cDkctt5F+K8JZyHu2Q47jpHDSScVIjRyavF6ZcqxHSS6bmHl2QB3Jxk25jvWomZVz5yQuFY/8q77K7/1qI097yH7hJQvnvvqTNn9envzkytgFOLvb7e94m+63RLSR2xRBqj+huisxMvj1GTFXMAAnMIhOBciBAJe2ZPAENAKBJEcLQQDI85PClEPLsPTpAMzNa7jEMUSYkE9iEEz4GZA3spSBBrQUhUYOlDqPnavA3egYhIk9BCihooZR8zftZ6WAtDtuLdzbYpxpNQkSChRLPZiijXVktZlWPTjq0IkhppIr1rd5udZrvRah3euQMgOGdDaOS618wTKYgciGzlAhNDlIYBTMpAKqk56ejEx1m533Wm82TAaYXVZQUAj3eFFiw1y6NLZ42jV4QAFgut7DjCxL7W64i+aUSVowsTNe/AS8j7ZM7XXDyRXq9G+6KJEyie6tQxnWDSiysPYGKSgRDljIIkCtHZApRAopMwnxI8+0i9F4I3znpBJIOMIyy1CMEbZ3xwAgQi5z0zKBYJgklACiYGeTjP5OGEV0I6Z6LKZAzE2TEgtVbeR2q7SkhBVE9gISCkVyRBzM4HHwIHQRBey7QVg3olAQ7BR0kD72wlVLIC4cdbWpPrBA6R0IoBsCBCWIiBJpu98XDyy//glz/wvivf8+ErT62dNpMDKcvKJkmUC3FBBSmCnguhycNWlLYgCRHdXC5KQoCIreEQeQ+5rv2O6xhRpZLMh3lAQoT+0ZoMx42E2CeZFIvZxcQC3hNC8IFhTEi1KEaFOz5qdFoAKMxDJr797deefORBVNNyPPA6azRTP52+emP/0jPvB2CN6DUTM5nnnQ5arbmdqu76hz/24XaJAgrA86PZT/3lT7z4ne8c9wdPXr7wJ//Hb3dOr79w4/a1f/Zbcv38y89+GcA3P/v5Ry4/8BOP9JCUP/cf/IdWfXY2nL5evfw9j5/u7jx1aacF4LmrV198Nn/wfR9xR9+68/IbpuyGoTPTsH+1/OiP//QX/vTLAN774XbnAfmN3/yNX/vVL/39//LvPbTb/dqffFpT8xd/6T///c9/CcA3n/1cr5EVJKCZBDwHnSfBOGeDD4FIAXAS3nnSQiAQwTmXMHFgUpBS+hCLBLwW5EyVSOHKMhbU+xCEoMrYREdRUemsK8tiOk0vnDuXZZlONAOjyTjRCQAPDiEMxuNiPk2SRErRzJsXzl3IW9nh0UEjzwEcHO0bG5548umrb7zaarWF1P3+kVZSa62ULssSQJpkSZJE6RJEFRlmH4J33lpXzGsd6iRJkyQ1VemsNdZmaRqBqsl4DKDbbUmlsiS/ffvO66
++lid6Mp7eeOd6t9fz3k8mk/jkEzOJlAhaSg9C4BA8Agd4LVMA1tqiKENg731VleBweHT8zRdfuLx3XkovtQIgpHRlWc3nAS7TqXHeB3bOS1mXNXlvZ1XZyBvWe+d9WZnJcNSMaR2iukDV+ts3rm/01pIklUp4Z1HTDQvvnTEOQJIkDHLOSSlbjWxeVo0sGwwHxWxK7CNH7XQy7na7lbXWVtZK5iCdnM3mSiqtA4AszfqDMZiNcULKuB+Z0gbPly6ci3SNzU6LBA9G426naTxASJKEmb0Ps6KczuYAfAgCsfAnZKkCeDgeH3vbzPMsV7H4UUjpvU8SXVUeICGF1no0Gq2vb0SmwnI+N8ZMptNmI/chOGfnRSGFTCNcCwBI81wrDSKSMvg6NRk4GGfrsBmoqkpISYD1BqCyqmbzGRFppaxzce4JwDnrvS/L6s7+wdpaz/uQp0mr3Yrr/7m904PRaDweZ2myu7PVyNIZ82F/kOfZt1985dyZPQDnzp4xxvT7g16vd7h/R5AYHqu02RpPZoPhOJLhDvr9CxcvcvB/9tWvdFotCiyAH/j4j3prtzY2drotAH/8//zRK9/+dqvTAvtUkswaqRbOlfPKsLYAtvfOZq0OS3Xn1u1KyiTJ+oeHUTDbOhsvOCy4Ta1zSimttbXWewa4qqq4lKdSIkBKF8WgpRBCUAgcwB6s0sgKXeZZOp3N2+0WM/d669PZzHk/HA6j9PZsPt8/PHzoocuHR30hRJKmkkTeaOyeOTOaTF59400A3U4bQiY6IRLee+98s9Np5vn25uYrL73y2MOXAXjrHv3gh1985Y00y5qdbkMrQEip9cZGs9m0NpZ8hryR375xYzaedtvt06d20jRd63aJpHcOQFmUxljvPZEw1lXGFEWZpJl3HoSYrlBSefZEQkgplWy0Wt1et5zPD27fvnnrlnUWwM1bt6SU2xsbRALMh0fHO1tb3htm5hCUTgBIpZVSOs1AZK2JBbMheHBwxgJwXooQrDHBu+Dc3Ntze2ejh6aV2t7cAtBudU1lQrCVMTEFzoJ9CKYyUipTM/MiMJzz1hgSxGBjjfd+bsuyLKJ4VGDWWWaMA7sQwJDW2DlbH0RReQDjWTGfz85urUlB/aPDt95+Zzwess58rGUFA0jTVCnlfUhSHdMAQgglpVKysp5C7NTg4L1QUgrlRdBJauezEKJOY+3zRPkPIWRc4kAkpRQiVrExAOsMEfngk0Q3GvnFiw/pJCH2R4dH0+ns7WvXAFw+s+N0yJXMmrmzMk1yKTh3VDqXtrrddhOAh2OOqo9CUJRh8kJKgjjx6+keJ58kUaaVcS4eTaTIMh04QArv/bwaWQ6dLHvr4PCFazevXNgE8CNPP12EZKvXWmup8eT4oD88s7MRnGUyIXDpHABD0I2EF3V+SaIP+tPtjQazk9J3GhJAs5EK6ZjESeR00om1DMAARlgwhROiK/kuMNdJZLaMvlZ/Ku41qjPJJ/+lZWQVv3zpaS995CV5ZfzIIl5cdYCXl7o8QichGS8ucfHNzIvvoWXd4/JqF5EnU63GySen4pXRqcO8E7jzXWkoV0ZhFWusCeDp3d6+vNT6Q4uYcDESKz+7JqdcHuDlT//za4nu2327b/ftL67VcOSCHvgeHA73VomdbK68WFfvQe/eZWFmrCy/qxvSKk55suUs/l1uXguU6S5I8K6VG3i3LaFOkS2vne/GGnkBjK5k4+6GRxd/WsopLHbNuzaGemdY/fpF1qveahejeLJlLjyBe0f87oup/w2L/TMQwLW4sQcHZg8GgguIeXIbotoDJwBxDEHqCtO6rFUIhABmL4isdxHFsc4RkVYy5rpFgBIUArsQnKtApJXIlLRQ3gV2BojYmweUkFJIvXBLAkQNoobgOXj2HMrRzWouJe1sbqxtrvUa6Xgwimq262vrRX88LWbQaRbI2qrdaTYzudnO19d6b8SSikxZ5/Mk6eSqKOYISBIlEmnZQarBeAygYtaJDg4L3G4xS2mJJt41wXgpgL7U28YKii745J7VXsBdUP1iNFd8Kcbyfq+4N1gq5dzjz9XdvjGAZdTsAqvu2snJFucjBvPCCVn+KIq+Uw3ks2AOWhJroRQtv4m9beTaFxEIFCJ2r7BlYQEgqOA5hCBIJkp7X09O64xUEQ+PUthRfpsB9gAFhGAVoZUlALQUSrCSqQnwZayfjPidj1zwAASxlIEEASIEb52Fj4wQSWDmsKjEBIMhSCghtdQu3oG7Fqd62JWQjoP3rKUSUnofatVjkCepGymn6qvffOkb337t7/ztv/Lo5YdpPMBsFO+sp4qVECEoy6qRBUmBNCKt+2KViCWwRERJCmYgEDM4xjv1tSYu2ECaglDErviPfvaHRDMDFAshAwNgYyRgi0qnqQvBWC/zzAi2/eH86DhPHAC0BYmy16CiP5GbPbm+Y2/deOOVqyDq7Gym7AA8+sGnyfVLN1PbO5zr3A3s5Oh73//k8MbhnaMJgHPveejj//HPieOjX/knv3qw339x/+YuZpfOnTvVXfvil/7wb/7SfwHgW996/V/8j/9L+xc+OenfpN72p/7Gp/75P/un7zlz5uL57VOn1jQJAJ1Wy7/wnWG+prOdCxfpH/1X/1NpePf8pX/89//x3uVHbr30TQCn85++9a1XOlZ84ANP/+mXv9b8vkd2m8mzf/adO89+rbGxAaCXd6/bEVpKQxpYtkZKl8vIPhCCnQKQQiUERpCksmY+nk5FEA5QMq2qKsoxe2cTrRAcsRdSOATrbBDsvPPeeRkrgDxz0FoXVXnU72d5Ni/nSikGxYIvF0KeJGVlvDUmsPOewdNi1ug0Hnzw8mTUB0AkfHBK6UaecwhCUeQTIaKqLOJSapwz1kSZprj/hRoz5RB8o9ECkCbpYDAASAqazqbHg36a6M998YtpoqLkyNpaRwm6cHYvzTIiwcxSydFwZIwlovWNDQDlbNZqNSsDItJaSREghPMxQ8TDWQlgOp0orYILwfvZbJZoff7c+cA4HA44FBHWbLZyQWScmU0nzUwnSgSGsY58UCHmTqTz4fqtOwKojBtNZy+/9FJy+cFGq63TPD5wOtEh+MP9O6fOnPPOJWm6vrV1+85BCB7gKCSdaO1DEFqToCTJGITgR6Mhe9NuZY1GAqAo5ru7O87ZqirLyuxsrmdZaqxb73bSrAkgTfTt/cMQONG63WwkSSKI3FG/qopetxPLNvMs82ylgPfOOiahYjdhohUj63Y6qDW+ZaKUEHj19dfKqlICWivjTMoq4ol37tyZzKZKIkkTJfXx8XFl3VvXrwshN9bWAczmcx98xEeKohiNx+3KCCGTJCFQt9sBEEIAkRAySrTipJaICLTI7TGAsphbY1WS+cDWuhCCESZN9GQyAhBxydl8fnA4lVKe2kl3NjYG47EADYYjAHu7u1mWjUbDRpYf9fvPfuP5Z55+opFl3U6nkTcuX7kC4Mz5C1dff+P577zU6XTb7U67262sI6V7WfPO4XEs1dRpPhwOBdGbb7896PcH/cFWr/tDn/j41tm9cjrZ2
dkCkF5rD8ajm/u39g8OPv5DH+tu9FzwJFBUZlYeA7jw6BMgwaDxaNjb2CqqQmmVJDpJdJKkkT4lsDHOp2lirQFRs9msSjOZztIkaTZbUVEEoCgOzgwlZYSzy6p0IWxubvrgABhnp8cTAJ12x1g3nkwaea6U6nY6p85eAJClr1rrXnrxxfF8vnf6TKK1c244GHzjS18UoKcffxzA+QsPbGzttHuHrW7Pe5fljcl4wiCdZmVltdIAhFQ3r721d+6ckCpNUqWkUtpU1jkffOisrcd59eK3nrt5cPjQpQc9450bt+avvV6ZajaZvvDc8wD2dre881KqLM2cddZ55723DhBCpUmaAZBIKzuPZ0uSFN57Y5vN9lvvXJ8V81h2F5zf6q1du3n9PQ9dOh6Mjgb9vVM7BwfHRJQ18qjDI5WUUupEg9ka46yt+YLYu6hQx/BgBBuRvU6rvbmxyRyCs2KR4fbezWazN958+/bt23kzT1Od5Zkgmk1n1pjomvhQewBKKSI08twdHlPtmlCaJgCMsZPxZDqZpllirJdaJ2kqpYQQzSwHcG73tC2L0ojxqCgMZ1lza3PnuHBvvH1dqOTMmVMA2s1m3HaTJJFS5lkW5dpJCBAba2qPiiTq+jEhhIiLsJARyw4AhJTg6FCp4IMQECSYDRHO7u0B6HU7CK7TbnPwxlowj4eDw4N97yxI3Tk4APDg7galstNuKlsWHLQUEJyqJDgJIeqGYhLRtZMiOjjMITBLcQ94FwmBajSNgcDeC4KS8cfKNFGzyTT4AFsdDo+u7vff88DFc9u9wXR663AMoD8sFfKd5u61t/qA3+m0vJn3h+OzOy1jA4UAwEyLzlpLahXAKknTNH3l2hsuGJ2xADWyyN2ReD/zxsdKeSI6KdugGsmN61VcxBaxw6rXdeJ9LQHJEAKWYNky+7/SV7ba4rbi4i4jvaW2HYiW/csLR/7usG5Zklj79fVI1+xdVGOEghFLaFcve/GS7zrVamgWnfJ3hQrrAJnqAoS7nfllZcE9v/CuL16CmfWw0EmLz8pV1cNKcVTFahh6EpbGe1SLhy9vEi90c/5ccPS+3bf7dt/+4tpKdWT9L929eN61tK3wgKwcvXdZ/7eXIC5pHFcrLBf16Fhmzeqdpl63l6jkyfniCsz3rOd3VWKu5rRoeal345qrbsNJvu+ebWplC6XvxmzvaQpYoJzLpBivjulSlfldhgYAcyCQoBPfBQtWShHBJwQiZjhwIPYMVrIuxOAQBLGQkLJuGKOaXAQMLFg6BXPwIcjYaEMUC5GCDySIFxCoiL4WkxCIrIUAk9CxEghAsBUAJmbSkWCROCAK9gkBgKVkAnsvkwRMzrrRZKYz5Yrx7Zs3N1oNAKd2L2+m+dW33u5sp8IWzUSrRJZF1Z/PGs0mBwvAW5fnWTMXppoJCiC2LkglRRBe6PF4CCBVLePo3ml597Ce/K0WqKvB4Loxon7XvQD8cjJw7XAwAAixnDgrcCZO4GrGXY/Awnm/64Zz7T7wggnm3WZDvMAIf4koqE5cC1CttNEAgqPrIiV5a1ItI5ojSWRZ4l2VJErMTZxKkhQRueAkMQCtQVJaF2UiqaZLZZJSrly0ECBE7T5mkqGkEIJNhURwAI4P77hqlmd5YKSNJsBAEIIEicCIUGNwTohAgYgCC88iwDMhIHhjK5BH1LJmEIQk4kDMAhR4OapYALjRLeNAQAjBshNCSCEjBMABLCRnrbm1utu2s+k/+ie/9jN/6WM//LEPt3Y6vn8AQHhrybpUJlLPZ2Xaa4qg6a7abUTG2oAgF5SR9ZxY3lwCFCkbDISW0s+nSahYSF+ZJIkhN7wvfeDJdLKeryetPFGKi5n3vrOG2ZxVKgGgkxWFv/jE5d/7l8/6abjyoUcHt26spXLn0kNNa994/gUAZy6eS5uttk5kmtjR8XA0Wtvce/n557768mu/8KmfAbB7M/3Cr//G93/yxzrbvWlpH3vmyS9/7msffOzownuf+t9f+BVx8x0AP/zoXu/v/mef+e3f5uLg0R/6xNWrL/rB9WvvuL/y4+9XYXr75nUA3TNn9q48/N//w396+qHLmwlde/PwwUceuHPjdR6U208//ULlAVx9/fbaVnvYFXtnOze+9fwXPj/f3ejenEzfeOP6v/eTDwNY2774Z9/4X7ut0vhA8E2d2KJoNHKZJf1ZIZkAaCUFgUVw3nuPXCJVauoMnGkmqmYb0NpHhiYpA4g9lJJQuihLQTUjwXQ2r8ry3Nmzly9dHo4HztlYMaeVbjaaACpnz++dfenVl4PzOssBVJXh4KuyMFVZFDMAo/Eo0Qpg77111Xg6Ozg82NnelkpXVVWHATEiEeRDiLMwch8TkVLqzv4+gNt3br9943q71dnZ2mLm0WTSbOTXb832dnebjRyAsy4QlNaNPNdJsrbWZVBhzGg02j11KskyAI1WU4JKM4vytUzEga21ZVF4Dt4TAG9tlmaD2QCBq8pMp9PKVGI2u12W1s6DdQCKWZk1VKfbvfb29RsHd86dPr3RWxMQzoeqcgCEpCzNTu20qrKsnAWgpDx38eJkNC6rUiIWiVCW55PRIEmzspixNdPxZDafBw5KSuc8gKIs8jzvdTqT2VRKqZU4OjhEsIlWe7u7Wxs9AFqKfn/gOeztbk3n86IoirIcT6f67J71HkAC3et2JtOpVto6L2Rg5l6va2wxmxex2Nh7nzZzISh4L4SK1MbWu8BBCrr0wEUAeZZlaa6U9M6MRgNrzcZGr9duee8C+7yRAxhPp0oKF5yZTtM0z5J8MplppY/6x/3BAIBzbr23trG+XlbV3traaDwuy3Jzc1NJdTwYREhoa3NTSgmSgoQPPpa5x5If65zWGQBjbVVVs8mEQTLJAgfrbJ7lSsksTefFDIB1LoSw3u12m43BePzsc89funCuPxy956EH4wx/9rlvPvzQg2VlstT2B8NT21sEmhelse7K5Yfi6nrrnXecMefOnC7mM52kpjLzsmx115NMC6nYBQDtTjs4lzYan/yxHxsN+mxNljeqqvzm888Vs+lkPgegk7Sl86yROWeyZlPqJJFCMuZlFZGCl7/5De/DfDpLEl1VJRE1252d7Z0sbwiCFAJAs9HIAidpEksjE52ARVmaZrPVabfvHOwDqEyVJRlRpGATwccqOSFl0h+Nt09tAdg/OPjBH/qBP/yDP+q0Oxy4qsxsNptMp6d2TpWVBXDtrbc6nW6j2RzP55PpZDKdWmcDYKrq8fe+fzKZAmAhyqKYjifW1hhhq902xpaVYZpHfo/e+saXPveVZ37k48ZaQeSZ2YXSVDunzsznZWxbrsry+u390piqNNaH6WTSbGQAQOLOnX0AVy5fYhBJ6RnBB+fDvCgn4wkJAnMk73YBPvjolzFiEo48h9v7B81mg6UEMBiNbu3fuXHrVp6lw/E0zbLjwSDRCTNXZTmbTuP85BDiOYN3zExCKK0hVFyUONRazkmSKim8t6iZ/gigWO4N5kaeP3D+3OHhYTPPH7xwbjSfWWO1Us55rWugNkkzKVXayImo0+74
4IkhCNZUsZS4KObj0WA0GDdbebPdc9YJxXku8zw1MUnTEKPhbFre/vrLL6XN1kMPne/LRjops2a7rMq4nzby3DiXpkmSpFKS1io+TtY7ElJFfkniEOC90yr13hljSAgi4ZwFRBQBJ0AnqbVWCKG0JBII3tkqy9JOqwXg7IUH0lTrRDnv9k6dKorii1/+ytm90xyc89W5vTNxSS8mkzkVKRtBmE/no7IkkTjvrayGowkALSQWtQlEolZvjq7dvZgQ1aCkICIOPpjKxadJSiqrwnrnrbXgC2cvcNIz3DieTB+58EgkP9XaGuP3J8PCuMcvXtxqd4aTUZJL+T5WAAAgAElEQVQ0O52uD2hmKYByPgtgSSSVojRjpncOxsbz1mZjt9MjlQJIVZqopFridAsH8qTBusa8YhpdYiUaPGlNxkpUswJl3v1zT46826uFLSsAThrQ/lwojUA+hHuPxsBk4cefHFu9ivCu2tZ1VLMSiC3KDIAQXH2COn4CgBgLOfZ11xLXKgQnEcNqI9U9P+aknuEEDY1tVryIIE+CTEE1zZNfNLXhriFFXZ+wrHY4Oe+/NQq6b/ftvt23v7imeAUI4dXljFcX8fo9vDz4Xajkvdjeqn13FmgZ0tOylfkENoyQXyxcq/sAVt6wesKThBGwSqRMCyDpBKNcIoCrkOYqxLk8wvcc57u+jpejs/prTvbm+sPES/BqdYQX17b6jfe8YCKIZUUec6z9AVA3EMCDw4JAMBBAFOo9XRDqNjoI8mAfW2gBLAmWhRAicKyzEVIwiEgIEiEwB45IYqKEsdb6wAhCqNhr65m1FF5QrVwgBIAQd05ijlQ+HJbTRkjJRIBgqTl4JWU5Gx3zDGy9MYUlAKnSfWuDZw1SabLezo3zQblmknWbbapLPqt23mb2lXFSJFqQDeyt8yxlKmPnOGnhglWksWr1/KlRx9VbH/G8FYSpHnpa1riuZHdX7j8W+GXs2V18zwnGvQDUlx8D4j0K7OtpeAKKIlJcR7GaOjt77xPEiClfWpmQVNN4C6oha4pEM3XLDbFw1poszQA00kwpIeGD91r4wApBSqHiF9eJaIngKVWpcZXxRgnFIGaR6tyiqn9VCKBY+MosQEALqAL7YBAUgCzVTuRpI/cgeIrt1gRWUjHDWAvA2kpqCQ6CghKAIgqBGQwvRK1+RQQBIpAg4YkjdHyCD969llRVlecNLURlrXVOJjEkgGMnkQQfvNTSsmx2QrC//unPfe4bL//tn/3RC2fWAZhxyMiS1CyokaZuMvN5TdFKgijUjjbAoa5HjSg9CyFOXEtmESypRBKZshCSg6+EJVWGg2s31te7AGSWsxStVtuxcLPKhaohSRWl9ZXuJsQBQJgXst8f28EPfvTpa2+PfuN/+PQzH3vgTnDnzzKfOyW+cwvAlz/z1Y984qMqSc3+O2E2a527gkbrygfw0Pc+8ubNMYDbr735oUef0sevPfrUA7/2P/+ro8Hwb/y1T/7ch9/7e1/76s/9138XZQHg07/1r7NWO+Ty+luj8Rc+d+nC6e3TFz//9W//q9/5w5//T3/h+a98CcATFy6N79w+fPXazuYDD334yXav/eSV83LQz55oNaXdOHcGAGXpv/jl/+2dm+Wn/pOf/8W/93d4Nv7D3/pdL+itw2l/bAGcu9BtaSFTm1ah1Ujs9NjN3J1+/8xW2vKimlYAMBXOhkTJubNZI9UsBoVJtUga7fnU+HgztM4bTUcIgPFBSjmZzkTilZSJkvOyBFCWJQceTSdEZK1lFsYYItrd7rSbLQD98UgwSynLYt7urRVV2W4333z76qlyezQaRjrIp554cjgafv5znxGC2s3WxsbWm/O5lMqY6v9l7z2DLMmy87DvXJPuufJV7X3P9LidnVm/WJhdmN0VqV0AIgABAkEGxFiAIUWAASoYIUFiQBGiHEgZEKIUhCgCEIzEICEC5MKtN5idxWLH7PieNtXd1dXln013zdGPm+9V9eyCEfglItQnoruq8r2XeTNf5r3nfOd836GpluhUHBBCCB9mzFArS4IZeT4BsLi45JyP4piIIq07rVaDUQlU4SnIK+fsa29cO766nGbZ6rGVTq8ri6Kq63a7VdcVgCybq4syLwqtNXu2phYyyotiMBiwIGcYwLjId3Z3bG3jKBpPJq0sm+vNWWNW144NhrtCSADOOojEWaMJB4PB8ZVlKcg6NtM64lYku612qP0IEFvWas0vLgz2DzyzEBpA2u6snTyBqi6LvCoLFpUQJKWQAp5deGyFIGvtwaA/GA5vrK97U/Vaaa+TeWYpZaCpVqbOkkhJUZRVVVWtNI0iPVfWWiqBwBz3rTStjdFSOu+ds+NJTkJUVdXrtYMAZV3XMpLWuEgI64yH8M4pKcDee7+6vARASCmIiiKvTfXwxQuj8XB5eanXaUmCkLCOAXhHnXa2vbv9xWeePb629sQjjy4szJ84caKqqqIsAVRlBYLWuqyqJEmPr62NxhNnHXvutTvhWpVVlaQJCeUhjGXnXGgNwQKe2TsPIC/y8XA4GY+FkAwxmeS37mxcOHsmy9JxPglqd/O9ubzI19aOJZFstdL9/YMXXnrl1LE1Zr5y+dJ0PUI7y1pZdvYDZ+5t3avKam6up5TqH/TX128BWJiff/ypp1vd3quvvGKdc2VJJKqyHI33vLVB1iCA2oOD/fU3X4+j6CN/6S8nsa7K4urLL61vbHTn5gB0Wr3hoOz22nPz81995pnjp04lrU5tzCQvTFkAGA/67H0+Hre6c1lbe8BZm2Wp0rqsqoAs67hl2ZVV6TwHfQNjTJqlURwTiXarDWBvfy9kTL33xlohSGtBRGVZkcRwOAagtL7y5JPPf/25wXBEpKRSUmml48FoVFQVgIWFxTzPx+Nxt9sxxiRxpLVO4nhxZS3r9V547vcAfP2FF//aj/5gWdWDg76U6u69e/u7+3EUeRAz6rIGkLU7r715/eTDt0+946n+7p8mnZYxTgi1s7M7HA2HoyGAwaBfGbO0uHj95s0sTXQUBY8h0lGn3Qk3sHOePY9GYyGUEKIoyrqqpJRgmLoBy4QWcZJIpcA8HE929/ba7fbDly4tLy6mWQJg4+5dkvLyhXNhrcvzCYGXluZCbXejvBlUSjhoO0pBQgrp2YO9MTUAZu0d26pk76wxAshrk8YxSeU8whkpJee6XQlopay1+/sHpbMkhNRRkqZxnADQWisdESGKIimEiGVw9QURMwc4EiRUnJIsLBOYpRQklNTS2KqqagCtVHfTdtXJrHdJHAkZecj5hWWSUaRjHUUA4jgu6oqIqqqsjbXeGWuU1mASQoqQaGwYQqykJqoBttZaa0IzyQBHSikDQB80HESjxO69d3GkASRJZI3pzveUUndu3bq7dWd5eWlubm48GtX1ZG5+IcwhxagwUZ1o8kTGuElZAs57X4EneRGuHshzI7NIBCmlCkI9bwkqMBXiIUALISV514Bk1tnBaBQgxUjFJ4+dB2098+qLm/392vdisQjA2mqucxyw77h8+bGHnpCkNm5dNVWpZQIyHDr
JCFJaQojgmSdJfGwxnutkaRRLqZoSATjvrNBwDDAT8ayesHFnD+lUR9Gv+yo9eFpeMas8EY0AVHj1yMeORJBH4sW3bJvGlP9GNPIwemscepoCvyECoqPA6f3DPtSLvG9/h/8fCQinsOys/vFwOLO/qHH2eSr9xDNA9pB4fgQSPWyGMBvNkXiQ0IS4R6FRbqDRo0E14b4fNAtEw0VpMmH4Js21B/bAHtgD+4thoTnJYYpn2jM2GM16WcymacwyQPcXtfOfsZ68NWMFZkBMZUD4vmm4CflmOa+ACjF7OgrHHHnj4TrJs8bgU4iSp6f1LVajtwxwiho26+LRtWoKH4YkKB2iVI2KXzgGHQ6kWZT4UNmDmkzYdJW772yP1taBAEVNTxQKpGw4AoddEXsQgx2RRyNNCIA9cwA1CJBSaSWVhBReEBOkgADYT78tAXLkg1YkJAEkhNBCGmuNZx1phG40IAgSLGSQdCIS3ntvvbdBQD2cuGcP77yzJAV7651jbhJ6JAQxgQTqUug0jlPmLqQjlysNUGiJKwrjQATnkyTNy9qZSginhCzKOlISQJpFsVKV5VhH1kkPS0IYZ4lEUVn2BMBYyNCfZLagH3VxjvoJR6jZPPvz6D3aNN/7Jp2C6YsEeG7cEN8AiBz8OBb+vqNwgy0D8HDhBphpezPP+hKCQw9m4plADPFUlIem3kr4BcRgwQoAySA1KcKLFLKqnpXkuuZ2mgKQJGtntPRSiNr5IANunAlqUA3nyDulFKmm4yk32DKRhGgYnczwTde/kNElyeSzdlwVpZ9SuoyxorY0jRsaSNZz4G0jTBjOhwdbkhBaEIQxJgj7Aw4AC/IQgoQHM9iya1zDGRloykchIqlUuPkjKT3YOWdn6v4lREQOXGsdWxYQ6Wq8tb/zX/zC//njP/ZRAO/99qcSX/LmHaTkhNPdNlvb+JaYeuMkAEgEynnotDM7NQ+AvRexNB7w0EJQHNfeK1MrGWetLnQGwFi/vbV34vQx570mxIqK2qYsLUnjjYozAG4wGo5GCwsdudJ6pJt85Iee/tpXr93e61989xOP99LlixcAfP3NPRFluS37m/2XX9/64OknyDtGafZv3fmT1wAcn+vV1egrn3npfR//0c8cm7v15uZ7Hr/8T7/8pRuvXn3y+OV/8ulPAnjnhdMXji/f2d66HalyMrLt1UfOX+qPBievvEsk7eHBGMD61Y3Vi+f/05/7xLXbo9//5Kd/7G/+VISqGKwvx/qzX37+a1+8CuA73v/kx//2f37v2Rf+4S/9+mJ3/syp41ev3eREPfzQ3Ge//IcAVq+dTFu6k6qCqwTcXV386Ec+lray1158ZnfiL50+AaC7mPYHEwyHi6dPbe0Pv/bsi0+vrD738uvF8CCN9IUTpwCMyto63htOjCWVZIK0UgpChFA8tN6SrXZVlFEU16aOoqgoJkVZMnN/MAjPWqvVNkVurXWelZRlVYbbuK5rEEJ/BkEiieI4ihbmemVV3b232et201brYH9XChm0X4VQzNOq5tk0wuzZO++CTqW1TqmImbXWi4vLdV1WVfXOJx+b73WtrQBoLeJIe+fzorizselMZZ0z1lrnd3Z2Q2S+v7ffbrVGo1EUx7aurbFZu2vZO7BSUahu0VoHBm6sI0FUlmWs1GOPPLazu/Pm1TdV6FIllDOshGIBKYRnBEVaKYKeIZh9XkySpOVDXTSJ+bm5OE2jOCqLKjxxxtRlUWZK6zjSkWZjkywtTRWmuQBraqWtNdZieXEh0nL9xrWIXBYrHccqioIErVA8KQstZVkVzvuiLLXSWqnNnV0Tnlrv86I01pSMyWTSabf6w6EnhneTshiORgBOHFudjEbe+dFkYqQgYYkoiiIlpbW+1coAFEUBwv7BnpQUJ7F1WVmWWqk0iaVgN0sSMjvnhpPJCaJjq2uOKUnTqKq6c/MA7t65c/fevU67Y6yxdW1rMxqPtdYK5L3LiwJAksREspGyCJgHQ0ihlJqtHy6oQjrHjDzPnfNpkrDnqqy2trZDi6TV5aVet9vtdiVYquj02fNXr9/Iy9J5H8iwg+Ho/e9999qJE9dee81Zk0Txzs5et9t553ve88bLr9zauBsWqNFoVFdlr9uVQlTOg11ZHkRJFiA/AN5aeI6jyOt4t9/f2trSQLudJUqfOX48rP7sKVvoVd61Wulcp6WjZHNrd77TddbmoxGA4f7BQ488cvnhKztbW9a5gLuaum7OPcga1Kasax0prXVd10TS1NY5V5RlAEYBxHHiA72V4Z0jiojEZJJPinJhfi7gXFnWurexMTc3NxiMicmDnPMBBQuPbRInWkeTPLfGpknqPY/H40YNdnMzeGtpmhhj86I82N/vdrsAlUUJISdFeeWhKwvLKwC29nbLqr5z+1bxyMOT0ai2Zu9g4JxTUmftThAKWDlxcmF1bXSwOxwOldJKibout3Z3L5tzS8vLAOqq1lJa54ejsWcWJIqirGpjrQeJpqkLRV5Caa201lFMZf3q1Zfe9tjjFy5darcydhbA4sKitUZpURZVb6742nPPG2uqqnjyySeSOAmTT5wkUkkpKIqigIIRoSzyYjKu8gIAKSZji1yZqqqrsijG29s7tbFFXgpCQO6iSEkhxuNhJ8ucd3fv3YOS3jmlpDU2YO7M8MxlVReTiZCiqq1ScjyayEhHceRHDGAyHldlZZ3XECB2zhN7kGy1O/sEAKWp9oejm/fuWVO/cP1mLVpj0V47u3hna7euq1a3C8A4671PkhiEJI6UlEIKzz6KUmbp2AHw3ikVK5LeeyIhpWLvpVJxpGvjgvMlpWR4pZRv/CqE1Lv3PjB7kiRN0wTeW2P2Dw7eeOPqXK+7urISx7Gxvi4rABwnWmulXJHnLOAJPpDGmUMvRQDeOhF77z1xEOuWUkghgjMzdRnRBA4zUEkKCNBCr50oAsDeWWe1SqUg522WKemrYjKW8Plk3OosAygre1C/ORjVvYunbZmPC1vWTsdplLbYjgJ1N00TmSS+Lr0HF2UcJ089dE6r1qCejIqCeBQmRmdtnOrc2DAeERoTNoKTEAxBKvhbswGHUM3TfV5yM4fSVEI7LImzXHHwt+k+ePKt8lqMKXTWeFRTiSsc+UjYZbNRCBGw38MgMVSz0GGTnEOq3uEqfZR/fSS4vb+E84j2ZUMKQ7NKNGc8uyLhbHm6iz8L/Dui6ENHrwJmMGYzVDG9goehATMEidmZM6Y0bD8LWBpmNh1ebD4MiR/YA3tgD+wvmqkpKnMf+sKH0/Z9m3m2zKBB3GYT/RQ++fMZTSWQjyh9HMlL0WzTTGPk/iHNlqFGVG+GSR7O+kfQv/vSVPfZFEqd9syYfYimM/8RZOu+JXO2rrwl48ZHVpf7j3J/Cu/IpQBCPw4Cew/2zJ7AJBqnyrGjBgb1M6wLID+tfJRCKim1VLGiSDhN3voQKKFBdA+PzoAI0KEUkog8w09X3KKqiShSSggSSoTNJIRgbtJvAAXyLh9NFnrvnGcGNywPwYAHC5kkEQkiU3tTKphJZeZXFgCoKO1226nmLF
Jwloi1kkLS3mh/ZzDqthIAympjnVKS4QQRW/KhTELF28NxqMVgwVILZ/3s9JqrTffdxYc3Q/CcDqt9efq5xq+6vzKYZhKk044yfERKYHbrNMjj9OhHgGaGhxcQb1Gl8cyevSDROCXkj9zkQaJAzERmwH6KXYtpdSSTIG6a5Uy9Xg8vXBw1FUm1cVR7Zpcl0R4XWgkiqSJFoCRWSgkAkiTIG2Oq2lkLQdBKKEnG1aJpxM4Mz01HaQkogjBcpCqWSk77b0rWXsnEgZyzgoSSynnjQpd2ZgBRpI018AzBQf+dGdZZD2brmUJFAwRBSA4OoVTC+/syIkchZx0AIOectVJJrXV4zTgrpGOWWmiCN9KAoCxzNpel7n/+5X8N4IUXb/2tn/6h+PRFu30rgilMnWrZSCPAk599b+w9yzQAUrPnfCrnCq7qSuhMQLIXqKpkLmIF5FXa65GUAPr90d3NgxMXzot8Auu9s2lEsE5UkzhS5CMA0fxqN+t6m9rJIPLV40+fffjbP1C+efvLX3n20bUkWlgGoOJ5UxRJR7WXT182PZoU+9dvzx+f6++PEzIA2kmv2Lt38dwpd2/jysXjp1YW1WL8ge99t27NvfpH/+LDH/9hAFdf/NOFSZXGc53V4z/4Pd/3qa987sU/eSXiyac/+9k333jpg2/7PgBDV6xfvV61E1GXX/3cV06cv/jhn/jO4syHYy63Zes3P/c6gGe//voHf+QHVp566AcmH3vxc5/r6Pak2Lly5u37VTHauwNge3PjyiMPHVtae/3Nb1y/sXPh8RNZu3swKva3dq6cOvex730ngPa5NTOp/PoNc+Ls9p17/evrly+fuf7Gtfd917c/+ciVF67fAbCxs7+zu99heeL4yau37o7zOtKRJRlpnUSRQ+jrapzn4Wh0e+N2t9Mxzq2urKwur9y8davf7wOI42Q4HPS63W6r/fq1q1JIIcXq6jGC11qdOH4cgDF1FEdz3W6SJvNS/enzz8dRNB4OCFRVVQCmlYqU1uxcVddgllIqpZxn55xSam9/DwBIpGkWIvk8nwgSaZJIKRm8vLIMoMgnUor5hc7g+g2l5OLCvHF+/6Afx1RXVZh4Wq2Wt1bGsYriuqqtc6EPFIQQSodawtoardR4OLLOLrZak6LI5uY3t7dfe+WlWzfXl7s9ACyVEiJS2gp0dCeNkyiKvPceDfrvvemPK5nXzhlrnXPOe5+PRnGa+kkZegF7RlUWJGrjfF1VcK6qqjzPvWclpfMM4OzpUw9fuhzSKL1W+snf+1evvfZKkefWM5Fomm9oNR70WZAQotPKvPeTojC1nUwKpRMAzH6u153kRV1Xw/F4rttJksTDwdEkz0OxYavdrm3d7nRkS06cMdYnSRxpHSZBSQTA1LXWopUl7Xbb1tVkMm63WlprAtWmSuIMgCWujWm3svc+9WSr1RZSCSGrsiQhAphVVmVZle12WxaCmUMXdTB0pE1tlWwmq0baIuScfBC7Y4Bn8nxSKq0jqyOQICFbrWyu10uzTGvV7XbAgepeLi0ujMdjtmZxcXF4cNDrdLz3IIzG4/CGVqdblaVzjoTQkdZaCcLO1r2qrtdWlsPkuH79uqlNq9UCIKXwzoNZKi1JeO/ClUniGITRYDDsD9j5Vq/75msvf/7Lz/Qn4zNnTgNIhIiIjp0+Ocrz69duXDh/vtXptTvdOIoqpQEsLS89/Nhji8srZVFs3r2btNpCQCtNUjJ8eFJISwBRGqdpWptaSi2EcN4555wQSZICILJVVTlrlVJxpDudjpTknNu4u3nq1EkhNYBOp/PM579w8fLDy3ldFjVJurm+HmtdlGWgG3vnamNDP+4ojpj9YDAEcPLs+bsbtwO0dPH8hbnFxSSO290e2EdxHMWxsbYoqyRJAo1gaXW1Pxxub+9Y65TWFlhaWblz507W7mbtbugw0+r0RqNBkmadXm9vd08qMT/XiaOoqqoTZ84AMPnEM4QQVVVZa421vqqKvKjKEt7HSQKAWOW2tMYQURRHUZbVxnhw1mpXVRGETSf5RCtdV9UkH1vHcRwJEqPx2HrviiKA+8F7sdaSECQb9ZaqLMuiCNqRUkSKKBBWvPNCkmefTyYApNKL80sAijIfjobs2VkntWy1WqWp9/f393YP6qqqagugqKoiLybj8WQ89p7zPEfIWHtWUjfZiCjqLS4LnSgliLyOYm8NkfYsxnkOYGL6BwfDLE0r4pPz7as7eeWpzoul+aXhaBTY5WBIKSOtiShNkjiOszQF+s457zkJShfWWmOFJEHknCVQHKdzvQUS3o0mwRXh4FMJaU0dx7EgqkwpBIgQuO3OuzTLpJKTSa6USpIkTdPaWAFMJuOwytfGOGsrth2thnWlhYQg9rDOs2jmGWedAoPZO2YPghBCAoanLVQaO0SMDgUVjy0vZrEGICVFkQRBSMnCMcrC+E5rmWQtpFic6wF48eZNZ+pO0ptbWKhMIaS3ruTKz/VgWCaJBBC1NQh1Vcs4LsrKOjcpvJDGkpXkLFsAQdDG22nL5rBcUWDuEDAVMJqW2xHAh6hYA+/dF+Y14N8R93p6puFHEAc/tKOBJTc05TCNHsKIbzE67CEQdKAYTIfq3bPawrcoMc6KF5vA6pvCvhl+icMfYTQMcUh9mgZ0dHQnf444d5rfxn1HmwKtoRMPgYIOKU8xVgReWghOGqB3CgED3vMh4W8W2Inpjr9VgPvAHtgDe2D/lpsSJOhQwHg6qU1n+UMYLiAldJjgapaS6YR9mAr8FimaIymxWflXg2tx0+Ls6EdmZYgND3aWm5oehA5nYqIgicNSiAZgAoQPRU2zoYSCQyYEZuuRI02PNju72U5mF4JmQ202TWEvNKzqZnvjUxEAyEDSDaPlKY7VXGRxXxPwo+shnCmDLHZoMSyImE04WCyVZ+fZe289O4IQkIJEpKUxFQBTuyiiNLJaiEhJ4jhS7Kyz1oEx6zMohEoiUbMngjeGJRXsiSgS0k4qAEppktKyTSM5znMtZBZF1hkJNa6q2YC1TpK4ReytR+VYCOEESaiQ4ZewSolJXaXZimBZGUNwSqE0PkrixCkA/f3Nfn9/eW0lIqeFKWvjBQkh8sGeUjKoUKVa1WSdt1IIqch4G0F4nTpSeX9UKAGgFSmqwcJ76zUgU52bioQQzstQdyOooUuTDN+38+x8neiYfdOPwrMPlaAAO2FBgG+6xjTFpMHLIdbTXL0SgqZ3ixSw9ojvJTwHoXAvwCKJpLXsUTvb3BVKSSWUdQYgFkxwNBMpmPqDBKF1zOyNM5JUVdWRVhZVpGMAQpI1hlERgR07L+C9oFxJREAiFQBHk3k1N1SxVqWUbKWXRAwrCMbBOwVA+pqFg+dYUSxjgJiED01d3PR5JAI0QTZZB7aZIoJNM92EEp51nEAICZZCe+8JQgjtvRe+YYY667WOPEnH5MESXkhOsphJxVk28wuJGIygGBipqDI1Na48TZ/+6e/kwwQhlGIi6zgENkrF1jqAualT0CBvyUJYI9vHzgDAy6+99Ld+5tbP/p2/dv6JC7xxIx7Zy
ltHnEWKGS5Uw0nh7Wh7887q+cuTvGrHwpalTtquJlsTgCjuSWa2hiXLJLbDA7PxyqiEQ9ZbuvzG9dcB1Mny179yo5fMX3rHKk1KJ2MpYiSwXkg79mYI4PXXN08dP16NLU+MWV4+6Ed5/dLWrfW769tXX7lz5vgAwMXWZO/GwerZU3NPvm1j69V/8Av/7Gf+w++y7WR16fFWfAzAC1/9U0p78xObqL3v/LanTCa3+6NzIj39HU/96t29dpYAuLO9+aef/uN3f+jbj7X1b/1fv/o3fvZnWmcu/+6v/KPRxtYXPnXjM5/6OoD/4Cd+eGGu9Y1nPvP0ez7w8Z/8gZVzc0x7GvN7125fvnzif/qVXwTwv/13v3jvG1f/xW/9RsTinY+cO31q6UufFH/8hWc//Jf+siw7APYH649dvHz55EWN8Wgw+us/+lf/ya/9tjODf//7f+Q/+bt//+RjZwF8x/m56sZLvHkrYriJteUgr5PtO5vnV+aW3v++raufBHBnb7u9cnbOJvtbN1B5IuuZYiGYOTd1GikASuve3FxdlzfXbz3x2KNJHEspR3kOIVScACjL/M7m3fmqVFKmrcw7qwT197Zb3TMOXhIAACAASURBVF6WpqHuNU3SvMjLqnaeCeh1ezfWb+b5RAohCaFWiz2bqoqUEp5JyspYTwIe3nhiURkP4OzpU3Gke53O9etvbG5uRJF0zlRV0e1kd+5sAOj1unlekBDWuSSOy7LqLsxb5zbubj36yMP797YAdDuduiwzJZ2pIq0ClGOqio3JqwGpCIBwjr118EmWaikjpSDl1ddfe+2N1xbn52zo6uDN7sGo08mefuwRqUSSRJUpi7Jy3k712hKtCAyOoqKS7P3zz3/joUuXRoORNV7pGEBVmljIvMx1FCu2pKRUurY+TaNJPqyrEsDtOzfe/463Ze12u90xefGxj37UMkexjrSSEuNxH4CSEiAhFayP46zbbk3yItZ676AfRrs0v5ilWVXVpefFhXmlZVelgqg2pjSmrGoA/eFwbmmprjeEIGctHASRtdY5NtZI4QG0U1WU9drystZyNOIsjaVApMk5k+jYWwtAEjnnpYpPHT81N7dQlEWSpK1223sOfPnjx08opQfjcRLHdzfvbW5vddod7701No6iibVonBlFQpV5URkjpFRKMVBb56yvyQJYWlzeOxgW+UQKamUZszfe7/f7tq60kloqAOt3bu/s7p4/d5ace+Pajc3t7bIqn3z0ytLiQrg5r6/f+uX/45/O93pve/QKCeGdW1tbM8bubu8WdRUEc2/d2Syq28fXVllQURY6ii2cVlFeln6aHYujiL2XhHPH13B8bTIZW2/zun780SvGmG63E9bKPC88iJw/trrSbbdarTSOpI7iuq4BtNrt9Zvrzz77VZKynkza3TmwSLNkUhkSSoa1CcREC/NLp065pYVXnWOCSOK4LKrhcDgZTwBYz0op50yWJcxMgpZW1hz70Xi4fnP99OnvADDsj7707BfjqPXo295WV8V4OIG3w0FOUtZ1gJY4SbMiz23tDwb5qDRz8ys7+/3tvcH6jfVvvPwKABVH7/y2b2em/eEoSzPNYlyKVCoF6pd1zQqAGw1Kh9s311cX5kWW2Ikxtm7F6eigPxmMWlkG4Ob+vjEmrwyJWKiY4Pv9MUFooSUpADvjMoOWxkjQ177+qvPCV9XvfebTp89cIhUPxgWAWGVJnFSDkSurqrK2LMqicMamaVoVXBR546Qpubd7wESOfavd6nXbdVlUk0mrlQXBSltVQkhX5e00qcu6k0W2qq2xmRI7EADa3jmQsbVQwhNr0sWk0ErVpkqTVhSFRtWRFL2ynBBJSEgtYysG4xzksiipbQ0ADpnWggCKbdMoRBIzSZJRElJuSwuLqwtLvipqZ9l7ncaWLWmfpJ1AV3jlzdcXsla3O1eSa3VaF9PjX7m+UVVFErc92f5wAMA5o4SAkorIeg/vsyT1jpIkrqt6+tgKFooBUiLSkTFGK727sxOCBCkFAGccpCQgidO6KJl9miSOi6Kuh3kOwNV1UVVkZRrHYFpcXHS1iSXtHYyc8/l4BGCEKGNiIcamNjU7Z7tR7K1IRFyJOG5nACiREohS7a33sMxGUC1CkxpmV1sApq6kZOucQOwtrOOyhqmqvb39srIAnDHeoq5rL1jKzNqapVtZWty+sV4U46VuBuChY5f2x/1WFMetrhK2zo0pVdYVrgZ5vzsoAGDkznVP6TQBox1HmdKbTmRESuus00FrEYCUI8+OlBAsMK3rB4hkkylngmAfxID8LBoCM8PxVEoolPEdRlEUxOWZAW66Kk6rVKYp9JnK+QzUREMqmyFxIE8spmFaKGho5MobXUgmCH+Y7Q9NC0kIQUFwPxxCNbR0CuwjAGj63R2WnDQj8eRnZaFiCowCJMjPKmKOgq8NiKtC6UGTn24IRuFAR/oIHCE68RTLbK4EGuaQZ6+kRkhuc6jtba4WQbFvwsZGZ5bDlxM4Os3XF2oqwvGmx+I/B1b6wB7YA3tg/9aYwjflpN5S+zfd8i0gRvqmN4U/Q6niYbH60WrAIzVjR7YFCuhRxPFI2u1b2Z+RADoC7c12Ni11mxGj3zLeox88PCgdec/9lOp/4wCOvuO+wd+nxTLVlqEj+6Fp6RtNc5cNhSFwgoHK1CAWgoUUclqVSPDOwnFgzEnvvRAqi2PnrBDkPRORlMI7DogzBASR0DqGYR9QNyvgHQtJwksDwDprCjc/38uLSRJJQTIvq3aWOMdpEnHtwimws8yum6XjsrbW11UlFYlIWgcAxljPutWd00SmqrM4ctZH3kPrWAoPC+Da1VcIHAsjiaJIZe1UCFUH8u4Uw2LnIhnWbseuJlOXlS/FpD+o5MSuxASgmBSV5ShrkychpSsnCRvLgEMkNQAHBMdLEBEEEalwWesKDNE4LUSehIeHB3mCBARYEIjEtJmyAMDO1xLMM4EAbgRwROAkNvUyDRcbwgsprLckZSQFN23HPdF0182uptlPniZIibWS3ljvfBJJzyZWTICoaFzvAcjiJNLaMsN7kuRtBYJlpwm1cYPxGMBiRxjvamMTrSLNIMPelWNLBKlTggbg6opUcGZmPGXR3F1SNWMKqqCByc0MeNIiOGueHQBiB2ZyDLDjWa6bQARJoqF9sWIFIRmEUMIQlOk96rqmwyfOMzvvQ+94n0bto8/PkewveRgSaNQmG9eMAXh2kZJNEsLTdBoQgPD1uI5bAORcsr+78/P/wz/+7u96//d/6KnuSi4ODhSzZW8doDWAmEiU6J04bSq0ZCQgdCxcZeBdJAlANdpVcSSEhMVwNEyiJMpW52JpVfa1l25/6YvPAWglcnN99Nnk+Qvv+CESNiIFNpatSpK6P46FBHBq7Vga9yb9IXfaz3/t1u7+aPPOTtrVsSS0V/vGA9Dx3HPPvz75wvOdsyuCWg9debRavhgNXn/9ua1nvvQqgEeefiIi+au//ofvfpd57O1nO+fOtiPrrWudlhfffo4mfQBns+ihj3z4ze17j1y8dO7Jp3/nN//l
T/23P/eJT/z1zRefXzlz9jd+/1kAv//FL5xcWPzED/8Id+VpuvXo9z71x3/w6Xz49Vee+erDp5e/88c/AeBL73/fzZvbu4Phmy+/cf7Hf/CZV/7kXR/9AHv5znc/dGlQAfgf/+uvtlfWD4Z3otL+lz/zN9YuXf7QB5741Kf+aKM/eOThuTde+AaA91xZdXk+9Elv505pW2cunO/v38w9tibjf/47/7JWCoAoq3pzu4gUapdGVE4ce2dIeO+VVqGm3rOtihLwUkpTVadOnhxNxmmcbO9sH187DiDPcyFEp93udrq9haXnn/taXhSf/dIXL1y4cPHc+dFoGGbldqttrSUgy7KTx08QIYkTIpJCBmaoc14K4ZmjSBvPWmlrnZRB0YKmnWoM2L969/bN9RtVVXQ6C8NB0crSpaWlqq4ACCGUFGVZOmsZ7Lyvy0oKubF579SJ45NJDiCNk1a77SaV9Dw5OGAQC2WNkTpiY1hrAEmv54m6nc5+f7CzvVUbs7N1b39nN1IqTFsAlBDtLO2kaVlWpMSkKKwzzvt2lmRpAoBAtbFgFHVtnUviaG//IJ9MhCDvfJPMIbDnLGsxUZpl1jgdx0DOnv206r6qq3a3myRpkiSdVqaT6NGHLgkpmD3AIUGlpEgiZmYjyFrHQK/TYeZup73fHwDQSjGwtDAfabWxVSZxFGQcpFLWu7Ac3Ly5/nArA9iGkjg0E0uoRgyNX4y1s9VWCCGVIsA7L6WsjTGmkfCLo5gIjnk0GVtjqro+tnoMQGCFF3U9KUvjfa7UzsGeENJa67y31u3tb4XBrCwvj4cDSFkbUxmDIHIBAuC8j4MQcxwrKb1zxATmSEdZmklBJXsCv/PppwC8L3r31tbWxr17rSR5+PTpc6dPbW5vO+e2d3YbhcT5+ZPH1wCE22NpeSmKdFHWo9EoiZMQox5fWxFSegYJWdf14KAft7pKR1EUCyG9NwDyfLK0tFgXudbae3/j2ptRpKVSyyur3tmgJ+gsLyws5Hm+tLxU10YKORwMvLU9IToLiwAs+1u3b9+7feehhx+mJJ3URgslhGSuZ6xwE5JvZVnkRRzHUqiDg35trJKq3W43ySQhrbWWaDjoR5H2YGONVIqZX3j5pVCjeunSRaX0l555VknZW1yYDIa1MVVdnzhxMnSOstaWeV7V1Vy7y1IJElmaEvCNr/3J+vr1MJg8n4wO9nZ2trM0SZJ0a3fXmhpK5pOxYFhTARgPhmB21u9s7+7s7B30x8bUO7t7Qoi5ufl5IaYXn/uD4fxc78TJk3furJvajEYjmSTD8RiAM1Wk5gXT9s6ufPXVMi/6w+FgONJxuyyKRtXEmtLX7SRx3o+Hg6ou3/X0O8qy3D/Y39ndDo/teDw5eeL4aDhKszRK4l6345xVUimtjTGh8rrT7RChruoojgFyzkVxTEIIqQIqF3AJdqFcF1tb9/Ky6LbbzHo4Go0nYwCtNDHWKaW8g/e+ro0kkkoWee4ZSikAZVEorYUQVVUqpRksBEVxrJQKcgcA6rouitxZUxsjwUJbax0xWWuCfMpcK4uV6o/GsZZMLJWYTPLXrl07c05tbG2FbMSJ1dVOK7PWCiGMtWVVKaWNNUE6qKpKIGjySO8dg4syj6PEee+ck0II2SgYllUlQHVtpOQoTsDeeWuskVKEAlUAUkqpVfDEpJKRUkRicXExjrPBaAQgFVl3pUW1gScphCQSChWDiLTWkVJoUsehluJINWTQvCAX0rSCSBBN/U4IospYUOCgAECkRCuJGZY9c+RdbZzVSewWeq2dA7fdPwCghWrFHZB8+eVry/Mps+31lIcB6Uh5E1BjIdg6ApxjQawiFWkSAsZ761xwcZv23+yPKEYd9asOSys4AGDBb5op7dwXP4XXgKC13eTRZ4Ba2OMUkZsd4uh1EjStacHRiE0IMZPTCN7c7LI2ComYEsOB+8KqWdXKUejvPruv6zRPv0JMXdXZYcO5NKTso2WNBN907JyOgKcVlYCA5BC4zU6Amlfp/ssAHPbfaXbORy5woxcUvgTfRMJBvP2bw84jhD4iAfA3nfUDe2AP7IH9BTDx//UAHtgDe2AP7IE9sAf2wB7YA3tgD+yBPbAH9sAe2AN7YP9/MRVI2QDQyFzgsPCR3pqM4fs73XyrMsNmU8g0HabeDrU+prIZzStTRd5Z/f8088XT/FjQ1jtaYRne9RbS9ZEmMc1IEcja0/+a8dxXy35YxAUcUsMPS/Tpmy8C4605ubec+jePbjbkw+TjdCc8U0EJowySemCI0CgGABqJbg8rqGnuGjJ4zMzsjfFZlgBw1lpbGSOt8pGSSkgtdG1dVRnX1EYiMCSYGVIZL6Ai5wtnfT4xOopCGxLrTZolVW3gkUbRpKylUjqKTVHEkSbSAOqqIiHnWpmURIKSSLCB964uS6EiAK20DUfKSygHwVCcxroqy8VWEpMjLgFILTxDCZlEMlGRIh1JLbQcTAaWfW0cAFtb4xx7EJGAiNNeZz46cWbNFuXVq+uhmqD2Pne2Gtal9dbxpDaLrUhrspYneQ4gSpRnDspesy+DAVvDT78USUSAbYoK4OSMG4xDbgqFBCnRtJ6RmvYmYM+hm+1UbpHYCwaIGCIQLiSOqJuCGBDOe5JNFrbhZUyPIwTBQIGJSbIcl3UaEShWTsTaAvDFkG0kPYyzcayVJmNrDcOVbWdxNwlsLGe8q8FRoqXXDIY3xJBak5CSCUDS6jqEPsZBvhHMTQEvcSNAwJBEnqjRrAGLJhF8VLmmYcl4kORQ1OuZuNFSCPtxwgatg6ZaAEwCUgjpJkFMIPQHD+KojKDYaKeP3fTn9EFi8qEpliBiIp5+v+y90TEBxI2cgoAgUgQyUmQuPE3UXlvMiH7n//nDOy+/+rf/zsf9wmK2s+NqxLLpH18Y88Jz159+4oKKDZiYRG3Bxk3291rtGMDVl24sHV9bW15miCRpQ0iudX9o4rXu9VtfSSIB4MzJ0/3xrTib06nI94tkXqMSAiSk0llnUnoA0VxqkXSOdWLZe+Jt3cld88bCrSgVSkjh7VL7GIC2XPrNf/2bVZ5HL9593wcfYx1t7uydX7l49nE/2c8BtBcWNjd26rH9h7/x+x/fffsP/dW1Z/7g97/t+z/ylS+8liO5/J4LAAab19/7oe/6xvrG5pu3/p1ve/p/37j7v/783/vJ/+gnL1685MzO3/zpfw/Af/Nf/fIXP/PF7/vA+69+7rnN4b2FR9/eGqaTWwfXnt0kxrmXPg9g58Yrv/Arv97tqGFeL198yOnqqe9734nVOSTJnVs7AB5/95lrb7xyzZr3vvup+bmFl/7ot/sHGxeOL37uD377zPnzVx57AsBrNzbPnTw/7+/s3Vp/7dbNW3d27uZYWkyZxfr61aSdAlhcONbK0p3h/kExVlHWUVoJBEVDdlzXTbflqiyjSGklDwb9ylTOOzBrIVtJCqDX7Zmqmptb2N/bPRiNdnZ3kySdX1lZW109der07du3AJRVeeni5X6/b60py/Lq9TdPnjjJzHmRg33olKWEVlqVRemcl0o
WZbF3sGedlVKuLi3JRihAFMVk4+6doshJwFn31BOPT/L86889d2x1BUD/4KCu67m5nnU+S5M3rt3sdtorKytLiwuD4TCKIwBRkuRFmbTaZVFGcVyWVSgSVMxRkllrAXSSJB+PrGNmf+PWrSRNAXLWtNIE3gcCrxCwtZNCeOeUFmAoqZJYRFpb2xS5W+cJlBeVcSb0OK6rSghhTO28BxDFERGqsnTMpjZVbUbjCSCYvXd+KkpoO92uM8a5uipMXU7u3rs3P9cDIY3jMDEGrUnvPHve7/eruk7jeDAcZVkaVqe9fn+hN1fXpigrAewdDMqqWuh1qrpmcBrHAHqdzng4mkxypbV1XkkZqtAVNd29AVR1LYUUgpjZWCuliCLN7L1HpLUSEkBeVq0scx69NJVC7O7tD0djoi0CGWsBOEYaJ8xsrVNCpVniPGutpZR5kYdJzxirVOSdC2U7Qa4XjWSkCMIRUkrnnLWOpQjSEp49HNrtjq2rQNZWUkY6ev6ll594+KHl1VWp1LVbtwDO0qQ2BsBkUtSmzuJ4fmFBChknaavdfv7FL6+uLNd1Mxuu39kkws7+weMPPXTsxMmqqoSupNJKaTCH/RD44pVHyTv4ejwa/u7v/M75c2fPnj//+c9/Li+KkyeOA+h2Opubm6dPn6nKImu1Wu3OvY0NEvLM8vLdgwMAW3t7qytrD58+ffrUyc39/vZwNBnl4VJTUEMD6rqOdBQ0Ruu6VqEdmLWmrtvtdq/bBQCh8sn43r1NEhQlcaQjY41xRsEXRfGHn/tMuPe01sPR6NXXr/Z6nUleLi4uLi2uLCyvhr7Pr77+upLq7NlzF86c3djZWb+1bq21zm3v7KStlgcBONgf+NrCMTyXRbG7f3DQ7+souXNnw5syFBsmrZYUcjSalKXd3RtEUukkGw6v397YzLLssStXADjvB4OBUtLWRZ6m/YO+s+ZgcDDqDwKVeGFu4WBr+5VXX+vvHxzs9bf39iaTyWg0KoqChAh9t1xlISltt1vtThTtqShaXF5+87VXbt2+tb27e/7MqXDWN2/d6rbbxUGZV+XS4oL3bjIeEYkoiSeTCQB4NtYy+4YZELipjXQpIfgPEDqJkySNk3hhYV5pReCyLK01oYeSdU4rCXZSq73B/v6g347iK8mVheXlvf1Rmk5rCbVO0jRJU6mktTaOE2NrIcRUKRXWuuZ3Qhq3SSodpXlRDseDwHs17Ne67Wow3huPe+0s0qLbyZB0Tx4/cfHSBQcGsLO9FcrbrHOR1lLK8SQHsbHVTAYntFS01sZJTCSEkJ45juMgzttoYUvpGCQkA8654LQQHfr+1rmwKYqT5cUlT1SOc+f9ve3ddqvdH40BrPWy0XiSwnRSzQzJVHvrPHsQiYbtAbAImpTUuC3M7FzTaXtKcwlqQI7IgZ0H1dZDCCJIQhhtt50NJ0MPVjAMlyWdnVG/lcRr5y69uXENwMbO1uPn5sbV4E9e3WplUStqP3rutPdmbXUxI9HtJgBYeFcZSFJaM4R1ltlKqYOXFeY0670/0tr5CHttRqaeRS1HIq5ZSNS8PYRHhyJTBMngwFQ4JJId4TjP/jXtSRv+mWgcuemuwptm4Rnj6GbMClGnO5gdCE3f8pmf2RyH6b6Rz9z5+/ZPRzccfT18n0dI5ocDpCOXhGgWP95/7Y6cwdGAd/rxEAr4pgcpHS2OnOp4NgOfndS3rI2clpWCAfae7hvIA3tgD+yB/YUxNZ3mpitUM3HO1hpgtuEoMPcWdPDwE6AjNejMEPdRtZtJdorKzerlp+qKwFsakjFNGZhHZ9npMna4lr1FsPLoMtAU0M8444dw55FVhN/yyUP48ijnmvm+lw43fgtWwH3L05Hxz5aa0B17ujY3jA8puan2J48AirALn5RCBHnB2XUigCRFkZyuu76Tpr0sIYDYR5KEFL6ua2u88ypEPkLY0HtBCiUFSVHnjq2Xnsm60oTer9pUVkLEShzsDZRSHq7W0aQo4X0cpwAigUTFS73u5t5eovXe3l1JFMlEqYR0BEBJIuEUudrWEbnEOa6KGHUCrPRaQVfTWxKCnbMCPnJOsPd5OaysBbGirBUDOHZq7tiJ3skzq8dOr3QWukvtYZRkTkXCR9uvbuzd6APQ8XySdX/3c3/cnVt86NzZa3fuvnz9WhaJdz1yqbIagEKjM2U9Ezeek5Ciqms3hSjZozY2r8rKGGbPXDLIM3kP5711DoDznuE9gwDv2TjvvAfDeW+sa3TiBUVKxioWQtfW5VVZ1oVpuPJT5osgpSSBamvKslShzS2JWQMEAimlqtJWdeWZwX4hi5Ti0XjimHtZBEBp5b2pa2ccC2GFgIFVUVIU3Et1Grp81JWQtXC+l4qlKMpHJXshBJxx1o+Dqo5Mta8dTanjwXEXAZOddlFnDp6TA3lmAgsbEgjTXuEkBFEg4EDAH+l3JIjEkbwEB+jVu6AGFHIgxMIH1SEBIYhIixkyW/vQFn7mqzU7IsB6bsSFmueCxdQJlmYUQF1MFYqYmYkTjpwZA5Cxrmv23q+dmL8zPPjPfv63fvqnvv/8+fOyv2dGwyiyACjRj7/nEQ1199qbK2fWyCntyGo9yM3mrV0AlfftpfMVnPe+tby4ff36Gy9+vV/xt31InTj9tteu/h6Ax6So8mIyqLbu7nY9D/e3st4JomQyHDhD7cVVAGZ3fP363VHuBnnr7Zd7v/GHf/Td3/P+9Y07N+9tDO9urr772wGka/5n/+MfWd8c/dIv/mb3xMXVJPm1f/zPfvIH3nHqwulTF88DKPZ30sngo9975e3E73zv2+Kk/8QH3qHmT/za//KLncXjj5ztAHjHx/7KK9+49vqLbwy2N7+YRL4l3/mOD//9n/ulS+fb3/GuJxcrCeATP/Nj3/3B7/nnv/oPtm4UH/2Jj1x79uXrz73aWTm3XQz/ynd+rPXY2wD8QHnq733hhe78skq3/9X//SsfevLxlz77zGip1z59/t7dAwCduLO/V52/eNw7+Xf/+3/0ve+9+Pi7vud4JjZGtWJzY6wBkMQnf/sPHj574oU3dl65ut4fVTZOy9reu7Nd1tWCZwAf/p4PWvhPffHzD18+VSN76eUbZVkYxeTJVHWeN7GucNZJqqpykk/Ksjhx8sTa8opW6uatmwDWjp0wzt3dvAvAAYuLixs31//d975vYWlp9/9l781/LcvO67D17ekMd35jvVdz9cxu9sAmWyTFQRI1UIws2RocUUiMREhgwYIBQ8lPzmA4CAI4QRLkByeOYxuCbSnWQNEWJXEQyeYgdYvNbrKbzR6qa+ia68333fGcs6cvP5x773vV5B8QAb1RePXevefuM96997e+9a21t3f27DkARps0zY2ZeO/OnDnbbLbKqhiOhkZrIWTto+KiCz4IIYQgH4LWutVo7G1vt1qtLEtHo0MAN29dO+j3BdBuNYAoCJsb69euXw8h1J7CMQYfwpWrb585tSmlfOTBB3f393b29jdOrO/v9yfjKYBeL3Lkcjq11tao1kzfNgRlEs0SgLc2xvixD31waWV9d3
e31W73+4ef+YPfH45GaaqZTT31hOhJCpMmUokQg5SUJgbMk7ICkCdJt9OOLtgQuODKuhNrq0rKsqxC8PWuy2KqjQY4SRJm1lIRxHA0mQWHcTYnrq6fGBzsAdzMzEG0SaJByNMsz9IaASRASBCo1+uCOdF6Z39/a3f3kQfuq7/TSqm7O7t7B30iIgEOwQc/mIw5stKqthPZOHXKB5emKYCy8lKCa883QYJmtlfOeU+OqKWUcs557ylLgw8Uggf3uksAJtO7WptUa++jkmp9dTVLsztb2977dqsFQJBQWnnnY4zNZoNIGBJpkiilT6ytz7J7BCEkAyKyqp2AheAYMbtlXPcjhGw0G1oppYwQFsxJkmZJotOkLr6WJSVGP/Hoe5569NHxeMwxSiGu3bz1wIULtXznza3tdjMXRHmztXlytb+3bysbGZV1WZpMphMA1tr7zp27cO68Umpw0M/yZgghOBe8J6K6oJvBwTspKDUmP7H5zPve99k//bM//uKXfv0/+fR9j7zn5ttXAAz7/VIqnSSNdqfRbK6urm/dum2tI6Xb7Q6A1y++pZiXzt83KSshpCsrbYyUUkrJNHOk1cooZRhCCnHQPwQjhKiVEQLW2hpQYxIxeKnkWnf9xPp6q9kiQUKKUE2Xl5fH4zGAu1t3nQu97tKZs+dGw5HWcW11XZtk9cRmnjcBvPHmRSKxvLIewVoq7zyAw8FAq+XBcHj1xi0AInhfuTzJYoSUyalTp7sr6/t7e5Px2Ft7sLcPgJ1HxGg0JpXHKGwMuTIba5tKmvFkMpNQSLPVlbWqKiaTcWJMr9MZjAaryyuDwWh/exdAq9H83J984Xf/4LN5o+F8aHc6ACOS8yF4X09NSqsY/Xh41vyfVQAAIABJREFUOBoOptNJDKHGUQjotlvOOQDFZCoFKalijEvdjrU2BN9qtYrpREjRaOWLmZAESSVi4FowAgQpZe2QriBBMs2yJMuSJO00mp1OhwDEsLO7s7cfAEymRVVVeaqbjZYxxmhTeV9WVkh1eHBQr1W1MaPxWAihjGbAOstgsZB2JwIQOVbO+Ri99xM3tiHkeSOGgsBpogEMJ3ZbjExmzjTTSKKMAsyJ0UabvNGsq013trfSJBFEkZmJKutqq7r5t7tWc1ZCyiTJIMg5q5SKMaZpKqTkGKSSAKSUMYJIYu4vA3Cdxy9tVa/iiCjEaKtyOB5du37j8KD/4H3nD4fDVqvdPzwEYB44V1UjpGQrJ6QMLtjgAwQDAqzrZA+JEKKYw0BivgRiUGSuJeCFICFmWteCSAoikJQKdISWtfJ8MB5IoslwnLaaaTL1nvcOitBM23kHQLeZjKY+TZr5UqvXSrYP9t68+VY7bZ1Y3cxbph48hZAcg1ASkUGRBI3KiZFSK5ZgN5s+5rbUPAtbji+sFj+ABVY3/wNY6D/WsOAx05UF6jYvTsY8wDqKjHgethzBZ3Vie47aAgQiYpr7tKAW8Jo9ZfM91pvTIig8EuGfH9ICi8Q8NqSZV+RRsDc7pnfCe7QAPWdSYwub1LrDY9HfUU/3RoezTRjHYta5N+XiSiyuyDzefidoWUs1LSQAjgXhR50eXWfMsU3UT927xdrvtnfbu+2vY6tN+o68hBfpnvkf92BqRxjbD8PgcPS5hcMNFpK+iw1mnjIzimD94j2iHvOXanMZBugec5l3tGOZr3swxcUcOj/uGbXznqzdfFK491zeAVAeS4XNN6DjG/8AifQ4vPsDb8xfn4Ekc3ZkrB1viJkoEkMQM1gARxqcNdNr3jHVVEkQKQqhAhC9G4/LZiLW263AltkzQxC0JH/M1UcJCBKx8sqQJFGxKkMkKQMJZxWAyJE9pcZEcGQBoSIQwNFHJajTagEgcGW9Uir4sNJppec2R8PJeOKzLK+pmrYcOzcBRx0jKFrPUmFjeelkt4EYh5MSwPSwciFA0vJy2jvR7K21N8+cWDt1or2Wp6lqNDUAkxILy1wiloRDFKNYbFElKeZJiy9evQxgrbdclGhpe/ny5dPLqSv2+wf7S5td6YcySAA6TqSo7WtijZZHhmaZagKJmtYkSXgfShdciARIuUREQkgp5czSb3azEURSZ7YjMzMLIh+i86EIHkCIUQrRSJtZ0vQh7g76h5MhRyRGGZXUz0oElBRaIcQ4nhaZ0fWKVUu5WHAkRluHcTkyRnkniVV/MhwMB8vdxvm1UwBaDb13eDiYTpJE13KgvVb7YDQ4GIwH48nVrS0ArDggANzR+h/8xqfbKsIWg9FIZyZrZjWZSGnTXuoGUCQE5hBi9B4hIMSyGAGwjqqKS+sqZ63zPlAIwrkQQ3DeeWsBWFs4W3rnQvTWViFyCNH76Fz0PobZleFy4pQRgogDk6B6WU/g0vnZ1yBwrPFXIAQOgSVm6ft71vcCRJRos0iIz6DcOZjL9Y0mnoOhNX9BGIGDUgDIRZKXhVJyHEVDmuLg8H/8x7/9yV/++C//4seTNDnY3QLQ1QQpwKG32d25efPkw4+UUo+2t7O00blvCcCrr1yksnz7zvUTm6t237z46s2f/MDZSufts2f1tUvnNpYB7FSjZp5dv3On0+xKEVUQ+9feXl3pJd5t748uvr4FQHmdZd3T50/Ei686Xv7RD77nc599di+Gx86vnT577rlXXwZw+MLgyftPX9muOk1BXjekagSe7m29NKqWG8sAmnG61ELapKceu6CToth6a+XxHzOu+I1f/9V/8j/9q28+/waAldv2X/zj/1W2zdPvfeA7f/jvJwfVL/zEJ4uf+tCf/dt/Z0ejn/5bvwTg9rU3nvuzz51/8iM373zuwoc+tNagkyb7t7/9e8n5jR/5+FPk9gC8Nt7Zc9UHNzb/45/8ha2KrLN//vt/lPe0hvz5n/0ggGkx9oidE+e0at/e/fZXvlU9+YEP50ttGgzWu91/9YefBaCiHva390f9ldXNpdXpjYMrWbPz1MPvMU3T6S5NhxMAz7/04m/+2q9997sv37dy6vpw6LzXzFopCuSDreFyqWSi0sgzoBAzsUiVpen59Q0AJst77c6oKLa27yopNWEwGCiper0VW1mtNIAYvK1Ko82h7UshNjdP3rx13fuQGCOlTGrSYpIZo4sixBhJiKIoYqdNggbDwSvff3U0HQOIPrTb7eXlpRhjDK7VWFZK9TotEL3+1iUAidHnzpzKEpMk6f7+gV5SGyc2mDnJMu+89Q5AaZ2s8SwS49HI+QDhp9PCxzAajVkbAAej4WAyfviRRy48/Mizn//T//Of/dP/9r/6r3/1l3/pK9/8WiNN8jQFoAVKVxijdJJmiaysBc0MH7SsqdyCma33IUQi8t6vLPeIwByTNGk0WgBICqnEYDBghtYqBBTlxPvAs6g+AEhNmufZ8AAcvVJZYnQjy2o/a0GEmVcYGlkWYiASZVnUgSYJauRZM88BSCGtdc08CzFKKRKj6gqAylkGNxsNAEoroaQQUkrBXMV6COaZuUJ990lQ8N46p7QmYu+9sy7GoLRWStVzcFnZoizXOh1AeOdsZYtpEbxvt9qnT50CMC2K8bQIP
iopbLBlVXbanRCiFHFjfaPOTsUYqqqEEKWzUup6KAoxSinnGrvQWmdZbkxCYA6BI7Ks0UgSLWh7d3vv4ADAtJgWZbG5ufH5L38VwAMXzkmlnn7iiTeuXKmTPasrq6lRSZJYF65eeduHkCXm1ObG7v4+Mx/0hwDOnjp15ty5ne0drczaiU2p9Xg8dtaNR8PI0RgNgJlvXL1MMSRaamNW1k/8l7/+n+9s33XOV8U0MQYAkWg0mtt3t1ZX10aHg3Ja6iQT2mxNi+HBAYDNkyeNSS7fvv3mtevtTtc6lySZtZYjM2JNSGQSZVl5Pymr0lkrpAohEHlBVBSFcxZAjCyESJI0SdMTJzaXel3rLDNL9Ebj0eryKoBOu3356qX+oB8CTaZFkiSHw9HOztUXXvh2fXnX19ZHo9H1t6/CVXm3F2L0LvQ6y5sbZ27een5/7wDAwdb2f/OP/nGr3Y4C/+mvfvru1s5nfu/3rt+6CRK372zvHwwAtJptJdR4Unz9i39elK7daW8f9MelXV7dOHHS1Kqv4+lEuBDLSQxRsJiWTpLOGsmHP/aJU2fPAvg3//yf/9GffBEkkyT3fupdlEoSibIsvfMzGwqpiVgIoZRM0tQYQyRWVtceRhwOB2VRAMiSRGvNzHmWDCYTH3xRlkqKJM1i5LzZBFCbJpVFwTE656ythJSChNBJDUeKcAT7CKopw7XBOldVJaUCkKVJr9vm4EOIidbrayvjwTjWlEula0pdZDba1AzfJDFCkHPWOicFWedoNmsHV1VVVZW20kmDEff29jqaqmpaOgdgY2UZga1nBLd1MOj21o2WVVWFECaTSazF74i0MSSlMQmRCMxZmmhjiAQYJknrda91wejEB8cMkyQh+Fq6l5SuSYuRQUTeB0Gcpg2t5Hg8dD6QkLWlu5BKGV2XyxBRq9nIlHY+CCmSJF1aXgawsra24sx6Fib9PZWku8P969tblWVmanaWa2o5CUg184ckIeoUNUKc6wDWxIPa7XDOqiNKEyGlCDGGQAA4RiVV5CAEZNaSLG7euXHf5olTKxtv3txeaZwAALL7ceR5cunO3dVOa6XZ0kIJZYpqFPKOFDVVE0QCtZcMwXMoqmIshI6UqlxnAkCSaClFiKGuZCHMRR7fWfHGi0K1WeQ0D8hwrM2htLkYJb/zfWauC2COI2eLnTDPS4UWbI17OIJHiNu84xkiSTTzksFxTDMuoLp63YeFpufRWc1DRV6EhYsT5+MB7T3I4VH6m+qyoXjsHO6JOzkuSKaL06ivHN8DXC5wUwbR3NP1GLBbY7eRI+as3rrLI0LqDyCbmO1jVsqFd9u77d32bvvr1mqriuMj2LG5495EEN87qP8gHjkfuBdg4A9ri4QXHfmv0XxWOdpzPdccG8d/YDJkrq3X7p0qZ7TDyGKeEzs6KRwJEx8762O4YT03L2atYzu7F3E8ujp075bH0MaF0cnRzhfbHNl80wyOnOVKiRHC/AjqavMZSonZaqPuKc7t7BBAQoKjByAQtSSEgBgaiXAhFs5KgcxISyitB+CDF1IqKWWSVsF55/Isj0FMXFFVhdYpgKIYCqGVFp6dVKZyrIwcF1PiKCBqv0UOsagqa8uGFpPpuHKVYqSatLBwHoAvDyVCmhsdQqaV98GVHKeWui2R67ObbQDtU2fPnuwtraZZO+ZLqnuqhUwMp5X0+6FypZsZKRgGWc+2YutiENKFEHTw7NmMxg6AwqCyIUtEiNWLr7+5Pypc8Lv90bPfudjMUgDLLWWkZHCIDIYPMTBrKSBJUG3JAu+5sr5yruZLploe1WQcb8RUl+gQIs9YvSFwiDFNDYDC+tIFJVWWJESiqOy4rBDRa6W9ZlPMgmEfGVLNDIiU1FISGILJhQjAh0hOjSsIIqPTwejA2ipwuG+9/d77zoVxH8DFa1vjcpqnlEClxqScdmLV6qVnlzpv3+VX3xoBSHM99WNX2d29wf0npC4GeT5Rq6Askp4geACgyuvpjB0pF0lwBjObWhSfmBUhEVKQEIBiFpZt/cBS7TMfAsUADoiBZBORI7OPzBA+cu1uxMzTqlAUET1ATNJaH1xF0TvRrKvcY0TwiCBAOs/WBWsjx+jjDNP0PvgQQoyRY4yV87Fywbrg45y5Wn+/rOOAGBB8dN4HH6PnyCglVpdWAQBiOvAeLlPKFUXSS8Ju9cf/8ouXv3/lP/v596/1MtTZZuIqTfKkZ8fD6zfunnv44Wp31zRa+fI6gNevvaiTN0n6B1bO39kan9pcGU65u956642b/+GPP3/qZAaAmhurJ1d3Lt4m7bXSMcjuKQrT8uqN2xceejjvRgCvfefGH339pU//jU/s9of9/W89dOrcP/jvf3N/XL747At/8o2X73/ySQBd8HOvvKmz9lOPb55Z4UuXX/7Iz3x4/ckL51eXvvYvPwvgkafuu+/j9ynXn3B2++r3H/nR91+6Wkwu9c987ImPfOeRm9+/DODFv3z2Y7/4s+Fgf3f7+uOPPbg9OfjqF76shXrsfadf+s71H/v1FQAnlvRLb14qMf6b/8Xf//znvvPJn/vwex6//1O/8vPy5NLl/cPmZAvAmQdOpUQNaZ995ZWP/u1Pb71+tZBycPXgZ/7Gx6/seQDDSWj20tde+E7x6MONtbU3rm1/6a/e2Li76W5eudo5sXXlMoBKoNk9+9b1OyTkz/34jyih05MP3re8+rmvfOFD7336r158EQBp+fbO3q2dvfc/88zr+30vJCAkRODgY6grQxHJhqCN4hDH02ldNziZjn0ItXeBi6yV6nY6zUazCv7yW29E5sra7e27l65cevqppwG0ur0QfD0r7O3vt1qtTqudJGY8HkWpahtfZmKGlFIqBUFE1Gw0nnjPex5/9FESIs9zAI1m8/KVy3/+tWeVICF4Mp0U08np06eqyrabTQA3b982Sq2eO1dOJqsry2tra5PRuLB2ejjI80btmQPmoiqt9TEwM0ya6iRj0N7ebjEtmp0ugMO9PVdVa5sbeSN/+dXv/eSPfyzJzImN9ZXl3rmTG/NpLUjJlbNCKkKY1yyzkqKuxFRSeh9DiCEErVSaJFIIH4JSCkrPCrGrEmCjtQuRiMAxy/LJtARzjIFnHEBw9N5Zjk6qplTKe2+MIhJKSlXDBCEOx+M0MVmqKxJE1G212o3m1s6+FARAKWWtCzF6760N04LzNJmURVGWJLJutwPAVpYXxWwkYoh12FZHqjWynJgERk+nUyWFrLEKQRwZYGvd/v4BACKMxmOtTZ7l+wcHo9HIuWCMSYzxvvZzC8H7Wf4pxjowrKz13qdZXpUlgBBjq9EUQggSUql6XwBLIYxW8/SJ8CG4qrTWBueqqmLm4WjUbbU2Vte3d7cBtJtNZvbOt1utEAORXF9ZC8D9585v7+4CGAyHZWKWlHQhTMeTVrMBkJS6kTc67dZStwdgeXVtfeOULR1J0VlaHg0HNcAdvBNESmsARquqLBItDw9HUkqttNRieWXVJGb7xo16psvyRvBh/+DgrUuXV5aWesurk6Lwo9G0KkJlAaxvbLz00kur
a+vdpSUbQpZnMSICUqlIciYww6isHY9Gk8mk1WqFEL0MMUYpZZoko8kIgBJKKelCKKtyMDpsNnNwdM71R4dJko4m4/omG5NMit3xeGJdnJbDw8EwTbPrt27Vtc9pmu/u7ZRVFaxNRpPxtDgcDMvCvvzdl7/4pS/XjKxWu7u1tXvj5m0b/I0bt3u9bqvZ1Four65pk1HaAvDcXzx3eDi6e2f7t/7hP1zdPNVpt5aXlgCUVZVlabPRAgBEH+JoOmk1GoVn76yScru/86df+EqdrfzKV7/ZaHa6PcMgZTIhhPOuLIuiLEOINQYUY1RaN9utZqs9Ho2FkCE4CRlC2N/fH43GALrdztbOjnNOCLG8vEREiTEhcmFdM1fBWwCuqrx3kbksi+AjwFVVhuAR/REIc7RiJqM1cyxLy9EXZVkDrGVZTItpaoy1XhkZYhiNx5NpYasqa+R1EYlzXhkdGTF4pbSUMsRordVKxuDrBXcIoc5U+hCYg5C0c/dOqWW1v7c/GAE4u7auWQgjDwZ9H3jq/GQyncLvH+xlndbimyKETJM0yxvamG6nF5m0NgKSiGtCNDP5MJVaMYIUUgjJYCElwPOMMaQQzKy1rvPFACmlfFBgqurUqXeRYZ1rZ9nJM2eg1K1r14uytM7v7e+fPrEGQEpNjqz1teaDD8HXdo8kQHP75ogQYo3J1bxImv0nQvBz8gIxwJEDMUUwI0skMweOPnD9PITApbXRucSk3nJialZ6kCI2khzAxBJh2m2sX1hXPrjC2cQwEPuHRa/ZyLUEwD7oVDMzkSApnLOIrqyqiYutRCx1CUCeJlIrF4MQcqaONYswjth8dYVy/RWu5aSOgZKLWGlB6WBimhMTf4CWdxxp+4F2RGfEMToJQZBYRJqxZrPMQTnmOPtbYDbO0Ezu64hjQjzHNe8N7RamMfNl/azPo23mJzTbKc8D0jnuuYAyZ+ydBZZKCwIMz0Deo8DhCOd8ZzBKMzzy6AotcNfjpjrHmKDz8yQ5D7IX1YyzJNBRDf277d32bnu3/fVqtbP2vaqLR7/eQ/yjBYEc70Aw63ff8fssNXsvYHfP4DmbG94xi/F8eKc6DcSzZNg94ywfU6OcTxf3+Jfx3Av5yLEY98KKdOzn0R+L85qDkrOsH71zlD+W7bqnj8VZHM84vvPizFBIpnmx9gKXZIR6ApzVYoMECZolM5lryTSOYBYkBEkS5KNrZQmARKYaUXCMwedaD1yVGs2RAxGYBBSAEGe1+daXCgLBa81FeRA9h8InsABSUIw2Fb6wtiiDkkaBK1+EEHUifE0CCiE4d7C/29ByVJbBxyRRmQBQShcApKEMkUnRynJmlFjq5jo3jzxy+tH3P7R0qmu6KYA++lkGQTaGMoTJ1BXCk7Qx1Y5liMED4IDSARGCNaSMPspEw0ZSGl756AHsHBbSEHN+bmPltes7npEk2f7UC/aTKgIIIsvMTChAgHxgH6JR5AKUnE3hlY2VDbFWTuRoq3mND9WLpEUyVpgo6oVWHXz6EENEZDEcTAD4wD6AhBsVJQMusPMsiWXBQqhEzaxgGbBVVVhvBEWfaCUJEJLKygOoXDBaTksOHkV10J+OfQgPbC5fWF2Nlb94YwvAta1BkqCZJs76qgr9WL19qz8qXNZQHqZtNAAvxdRWUOLuwaAAddopRn3nvDYxeFdXHJqkwcWYEMVseVajLgwCxg0AgBOIERxQrxRD5KBEHV/V8o51urpeTnHgQf3lkCAhKBE0I2GDO3nOrgjVFCRU2iSh2HKsSkpzYTQACELwzJ5EZIEYI4tG/fDPqE8UjxLWSad23yYSJCUWGQiOYGIWzJI5RvaREaMASyPlFBqAVq1vP/v9K3v7n/yZ96MYe51fu3LtW1/67o9+8JFmu6PLCQAoU4xHOovIGzcOy7u3+6vddU7Uzu3+fekygA88ef+3v3v5Fz/1fpWvAbdWVzvXbt++f/XEibPdv/NLP/2nX3oWwJ2da6WvWvmS6S6Fg74g1o20HO86SlWv125JAJsj/vTZs0unTr/0/Deeed8T5PezRlwT/LFPPjn+o4G1UwBvbt/98FNPPfb0+7736gsvv/Fd4e1P/0c/5u1BsX+wcmYJwNJ7z1MquQyN06c3lfT79OwfPAfHv/bRJ06ee+DnPvE4gFfevLh64f5vfvNrDzz0wKmlpbCrbLt5+fW3P/HBZ3qbD33x3/0HAB/4wNNKqeapx1576+Lzf/blC6vdS6L4q2c//yu/8lPq5Fq7uwogt6lS6oXvvfUTn/rY6sn1x3/0mbW0eOH5b7146dK1K7cAvP/pD719fXuUxFfffCtpJ2m79Tu//ydra+bxp545vPrCNMsBlNHF8XD95Mkbd7ZffvnVD33go195+Xv3nXvk6pU7nxfPOjcG8OB99335pW8btpmhycGo2VuRUkfvQojKKCkUACJZOWvS5D3nH5ECkUNp7eFo1MgbO/t7AJI0J+bKOWttu9djRqvZ3N7d2T88GAyHd+7eBSC3t4jIaE2g/uHh3a07d7e3Hrr//kaeRxVHowmApR6Ukt55pdRwNGq0WkVZ3N7f+43f/PuVrWqLXqXN2okTV65eslWVpabXbe/u7ZXl1GhTo3udZvPy1Wv3E4UQbVUKoRhg5qosQ4i14MPB3q6rqrLyQqq81Uqy3GQNH0IMIYZoiAA08iwxuiimr3zreSJkefrlb3ztR554KtHKORt9ABC81xqeo2ZiBaO1D25SlMxcozlaGSmFt66yzhhV1yHEEIgEc7SVBxBjqGlHNZdNKVjrkySJMdbG4vWAJoiSRIOFkIpIJMZ4F7JUxRh4Bjcgz/LIbJ13zscQvPdZlhBxTfAholYjbzYa+wcHQlKWJiTIBbe5vialrCt8q7JotNramPF4IoSI9UA1CxCp5oVlaSolHfQPYgzGGCGEEEIpLUgYo2vIstPpWOuKonzr8pXRePzAhQubJ1ddZXf39y9ffRtAkmZgVlrHyMYknlxZllIqrfV4NKqcA6CVMklCUkEqkipJkpm2sxQ1hAEghlhrbAbvQ4zGGO3spCg2N0+ePX8+hlrSMY7H40sX3zRStVut67dvB+bH3/f0aDBYXl4GcPX69YOD/fd+9KOT8fiv3r7uQui1WyEEW5UhNK/dvgFgdX1zOBhMJpMkNYODgzp/E2PU2khlKfj6DsYQHKILITALrX1Znb2w0sizpW7nq1/8IoBppI319Var3Wl3IvPqxubr33+VgNNra4PBAEBiko9/9OPnLlzYuXO73+8nSo8mUzCstSxUnfUMgWMIRVEKqbIs7/f7AGltup220bo/OKyXOlJK56wy+uCw3+60l7vdRjOXgpTW1loAB4f9aVFE5p29/V63d+furV5v6fy5+8bj8XA4BPDGxTcffvChhx586OVXvjuaTEOEde5bL3374uuvDweDpaUl1LXbJu10e0w0Ho/2d/erSfHIIw9DsIvy2a98DcA3v/l1KZV3YePkqUhy5/ZWNSmFoFqpsK6h9sHnjUa0jmMsptMsz2IIRPQXX/16rcOwvL65tn5KCFFZq7R
yzkUO3aVl51z0fi4rBO+ryWg06B/s7mzlaR5jyLKsmeVSzNQG8jSNnc7V69eXet0Qw3A8TpJkWkybrZYC1amRcjploNFqEpFUstXtxBCCDyGEhQZRraUiZni5qEp74+bNypYcYz2ZpmkChgCyNCVFEbHb6TSbjfFwNB6N66oREGLk4K33rrRljIFjkFIKosUi1ztXVYVzLsboXAklhCAIoY3utJoAlJYJxDT4brPdTLPbB6V3furCeDLJOq2z587Wg5Wz7tr1681mMzGJ0sb5wDEmeTPGmVqFlMqYhIh9sJj7HhMhREYMM/lCIRkxSXMOLgQfpdDKhGhiRK1Oo7Spgw6pVJpKIcRgOMybjc3NTQT2IQIoi2l94aKgyvla56emUEgpa1Ea74LJTKzl2IkIFBkcgwAECUkSgBRCkqwLOiQJQcJo6X2EiDXsF0KoqjAtq2JSJEQR5vTG2W+/9v2t/mClt2bDFMDhZDitCqkY0k2KItVKy0ZumqNicDhpDKYFgODduQub4GhLa/JEaSkoSBEjuPJuUloANd1VKomIo5KRY6wMxjG+xkI+C5gTOd4Rgs2wuDn9b3YRsMDJhPgBnsuxX2sc95jWZCRGZBJqju4tgjHimoExU3qvQ6f5Qc6wQjFfVuMo4FrUxs32UH94rtw1xzgXB8xiFvDRoq8js4QZcVPM+63DzSOMs97v7Gho0SUwR04XvfDimOie0PDo18gAkagvepyjkAttBDoWhR+pF83vGB/f37vt3fZue7f9NWkznGAxJ+AdyawfVLzAPOPzwzQqjuayRYH1PZv9kI/8kLdm2hnzH4t3+J0b/pDU27HZlBbYKh1NOu/EIxc9Lyade3q89yPzgziO0f5g8u/eS/aDQC0WNEc6Xn3N838LhUEiIgjMJnUOLrIP0TGCIKGUSZTRymRZczIdAHA2TIqJmzq2ITfNZiJVlkymbuy9ICRGAAgRk9IVlRVS5GlrMpmC/KmNpQ8/9sj23uG127cB2CgeOLP25q1tw0YXPBpVZDSxXGma4Wh4MCkANBO11GwOxsNo5P5our68HCgoSRzZCALQbuebJzrPPHX/yaefaLfEUpd04mIsZSNyfjCthgB6kPDsEJQhKYQPEFGQJ2eFICmgUcNT5IjDbHEcOCAQg7hZAAAgAElEQVQW3uay0kK4UQTghM+TpvORY5AyOs9FFTrNPPiqVgsqKyUXKkKAc8GG4IMIYCNljcoVpa+cr1cqzFxRnN0g4mMKiUREUzAzhCCjBBFCjDEyQBIKgJSkJUL0niMzFAltdOXLSeVCmLQyA6DTTFKjx2VZjv04MiQnRilBMoqydmPwPkAFhCqGqfdS87jwHJWQuHT7ysiWAJ55bDU3MoSQGakEaaMbebo/KG7t9q8ehNVuDuDGeOp8bOZGFdQfFCdWhE4avqpsQVKQVgkAV8qY5kBEDFSD3cSRwIAKAQDgIbyAYJIEQRwJAZwCAiTnhT+1AiyDo3CmfozrJSofW+0655QS00G1tz9JsunK2rKbTne29lfXykYjAUA6Ye8rF4QQSmsSQmiLYykBRpynkdkd7hJYEEiAhAR4pvEXgkkiYJglBAsKRJDQzMZHSJUB8GWS6smjD3RPLNvq1lZEejg5+NVf+9iJTu+Vl15fPd0DkNoildJ0eqH0clA99tDpwWS699aV8w/e/8atPQBfff7Sex9qvnpj+8Rjj9lgr1y6df/pM5nssaXG+tL9Z88AeO3u+OzG+u7hAevWZ/7o93/yR5/J15chWo8+sV5WYTKeALh49cqn/tanbt/dffPS9kd+rLmytOLfvvzijTvp6mnRdONbVwCcuf+xpz/809cu/9X5dhb0audcF6ObipKGDo9/4hkAgga6SjjNpFbVQH/ud/70Ix/5lEnTF1547Sc+9jP/1z/93wFUWfP7f/iHDzx08tGVR/JW4yMrvWs2p8f1nSJqan/7+S8AeOnlFyuHV1562VdTqemzn/m//8n/8T8//YEHltuTRjvP1x8C0N8a/L1/9N999l/864//7C+9/o3nf+ef/fbutYubm8tPP/jEmdPvASCaPdN8iyYWuQ6RYrPXyBtVql567XuDvTGSBgBIXRX76vTS3/nVX3nr4sXH3rP2pVd88NGzv3vnxtLmCQDPfu0vELC52Xj7Zv/6jati46FpWTrrSKms1dRaYRZ/QivdzBvj6ajZaitbamu7nd7O3i6ArLLB+9F0am311rWr1XRalpV19qmnnr5y5dLdrTsA1tfWp9PJ2srqE0++bzoZX7l6+dLVK81ms9ftVVVNboJJUu+9EFSWZZqmzjvrXAjh7P0P7e5sjQ73ARTj8YlTpz/w1PsQg1S0tNSpqnJw2K+dJQAIpRi4ffuu1mr/oE9M07IcjMZ5I+91O8F7AMPhsNPpJLkWUk6nxXRa+LA7HAystSTE/nAAoAqhfzjw1m6c3PzMv/9jcPytv/d3R8NBphVCMFICCBzBQQKS4L3XSiillJTWufqMiEgp2W41D0YjBocY0jQVUgQfK+dazRaAYjo1iSEhEm0O9g4ACUaepsyIMdTLAOtcMZ0E72P009GknBaHg6HUOsQoCbZm+3ivlNZKS0nOuyrGsixjZCI6vblR38fg/WhcK/BKjrE/GA7Go+Wl3mg83tndA9DpdKQ2u3v7UkoSSQx+pi/NEUD9PBijvatIUJ43yrII3ocQtNZG6yxNirICwAwpRNbMASQmOXnypLO+2esprcuyBGDSdDKeGJMIIarKjSfj8WSy3FtqtVqD4UiSAGC0iTGCQj1TSymZY+1tMZkW9YQVOaZZo9FsuaoK1imts8ZwmudVUXz7ub9IEwPAWns4GhRVNZlMCmu1Mc1mM3g/Gg4nRVFfmcQkVWWlUhsbJzqNxu27dzbW15aWlqWUZ06eAjAZDSfjUaPZkFIODg/TLPMxxBAAEYKvbzfHCMFlWR2ORm9dvnx4OHjPA/c9/aEPKUJ0dqnXA+AG42/85XOrK8srKyuf+8KXfqXfryU4H3zgwdcuvgXgd//wD372Jz7RbTZ7zdZoND4YjzWEVDLEGKKvqgoAR2iTCKkUQlmVkdloXTNAOYZGngPgEOvnLUlMlqVKKal1nmdJkt6+fatGwPO8qZUeDAbD0SjPG3nekFK+8ebr06JsNFsAnPfnz57bOHnm+o2b2/2+8yHGuLd/wAySophMATQa7dKV1nnrbLfTHY/Hg8Hg4psXLzz84OFo/JWvfx2AUaqYTDmy1sYG7i2vpFnGMWqtrbNKqfohl0IEZiLqEEkpg/daqd7aiXqC8wHWR1Bk0KQoQfDe5XlmjJFKz1ATKb2rnLMheCIBZu/9/v7eoH9gjFlbXalPKkmTDzzx+LQsbfBZlq2uLg8Gw+Hh4dLycs2dU1rXmAkJgQgSJJSUUkqd6EQDEC5y4Bg5hBBC9NaOx+PpdGKdEzT7vjQb2Xg8cdZpre3UtzutZiP3PkwmE611Dbp1eksHw4HzXioVQhBSBo41US1EH0IAYJ0ti8I5FwVi8EVVKWM4Rhd8jf4LghDGWZ8mSX8wSrRaWVrKoJ
rD40K21bitcgDSLl37s9e+v13/+I89XhSh046Go+G0Oto9Hiy3u49vXQaw2u3/399+8bs37l5YXd7aurQ/2JvcG9Rsjvd3NpK2KQjAUj/vrZy7f1g0yxSdp63V9dnoTuhu3t6/9dhTPwzgo0+vHM8Gzz33oc995tOijPvp8nIqRgc7QzsdjCsAlUcvjnfvHGw+e46kCN6zhGSSLsD5mgSAWGUS3gVTG5cl2pCDDJGUFAlUrqnuudpCZGJgJ5XrLkkwkQRIgAj+dDs2LN6+EOADasel4b2jeTny+KiAYEmIFedJq53KELwDN7x/55sPgPQMzyRI7hyNZoN7Kk1kHCkSIrh+LwHQSjOCMN6KZkU+l5YJDISAg+EYgGUpVRIrxa5GsEvtPM/Smp0kIpqbGwb2xKyVFFJJ4cdDczgs4iQVJHYOawDb9XClo5653HEexjjH3jI8RK+VVMxH4zGA23ujoynnnj7+ifcur222tS9ndUsXiuP9F96gTAFgeBCSJK69q0t1PDCD4fioMOfyLMTdO+MSQMfJ4chG3iytpexg4BFcEguVRHUxAhDFXSnJuzJJ0xASobQ1U4coynISEVczANXxHoHTpH3z7YMb98snL/WllDt705W1LkXxlc0ugLvDsqhDP1K398qtjXNLmX7tzdv9TtZrrQilbD0E4DwiTaPxTEhYx+vLkaLgvLm3U2wsZwAurbXqYT0qyjxXBYo4jVWisfjizWEsIXhXO8hIa1N7pZRQ0tuqWdU1dDDnWAvyHmxMDBtURpGgJPrxn/rAxdW3004KAEHu3L8PghLdnTt33nzr3nKakRRZmkwHAwBPP3n14z/+s9/6zhvb11/+0isvPf7444Pjw8lkd+Nqfmdv//h7NwG8/7lHvv/27jd//497S/1iOP5v/tt/WI7MD3/gctLqXb38eKQTANtvv/mrv/p3lTrefCw3sp+5wbNPXP3CV7+1Fujrf/5CGkkAl/trN6/f6S4n9w/2X/3W67otb9ygm3evHx3OfvOffgbAj33sIx969ona4ne/+LYTTkSZsxSYhQwUwrQsAJBQNqigYy1CUME4a61UsRCx1DppoPIO5FQEQQmhKoxQrbjfJg+XUsk+lB4AxXGA8CxI5bWxUURCyuADgZWOAJS1iaPIeV8WMw5BCsneBQ5KKSLyCACkyp2TR5NZlPYLZ7/23Z0kz0nGh1UIgaWoAdzYuW28y3W0vfe9nzx3dWxqR1ILFcBCyTiOAOR5Vpi6pSIiKspCSxXHsdKyLEuZJgAqz6Y27VZLUJBSVlXNHHrdpaPjoyzNnn3X0wD2D/e10mura+1Oz1qjo8iYWirFzhtXKzXH3TCzMRYBQggO3rNvcIvBTKSKAdR1Hcha50OA9zwrZqPRKInjc+vrh8cDAM+sPzaaTFqtjNkrIaytd/b3+/2l97/v3ePxtN1fAhAp/da16xcvXdFRVDaGzklL6qgsS2dtFOUALm5d8pV9/c1rjz16RQrhnd9c6b918/rxYLC1fj5PUwCOnRZkbFBaq1hbawNzt5U3nl0AlBRHwzFrg+DZmrKu4ySJ02xalnGanWu1AEym072qWl/fCEIEDiSVZw6A854WwtFKiKqsHPs4TZytZvWMlIjThEBxHB8PRwCWu+3RcDocHUpJFzfOc5AGE+dcpFRVjwHcnxZHx0NJMM4eDYazosiztC5rKYSQsrGPsLXJO3kaxb2VtbqyVVlRs4oQQivVlBKMtcycpTFAkY467TYFaK3TJC2KoqmpWWvrqu73llqtVrs1i6JIRbF3jr1vRGxDCFLKNE2zrD0YHDXbTGkUW2OkmO/BBO+EjNl5IDA754z3vtmeEkSNJU7jciaEmown48n48PBoPJnWxkU6mc1mDSTcNmYppARxHKW18cPB+NErj43Ho5e+8x0AtanzpLWzv9PrdPMsPzw8nkynq8vLrbzVaXeKqgLw9q3t0ppZUVw8f96xk1qWtQlCT4qSFwOjsSaNo+Ph8MaNGySwtrp2dHhw6eLFO3fvdTud5pjxeFzVpqrq+7u7N97efvappx0HqaMf+tDHzl24CODrz39+OBi2O51ef7ksq7qqWKjj46PjwSjL2lnWAuCMOT4+TrPclCaKI2OtVopAzKwjXTfMcaUmk8nR0dFBEl+84NbXNqrKjCejwXBMhEbS0Vouq6ExTkolhGLjQGDnhRBKqOZ9dN4pqTw7Zi+F8p4bIBazn5ccCFKQc66oqjzPNzbOVcZ0e/0sz2pjrakABJBzjgM4MIegtHbOSaWc93EUz4oZAEoSAkkpmxpDFMXT6TRJ4hDgmQGogBCCFKIoSqVlnMTVqAAQOJAQDeSK2SIEY+oQ2NRVEKooijiKiCiOksViE089/pi1zjMnmXYTV9d1YCeV1lo2RTellJSSOZCU3ld+7qnLTSEeADPmaulESqiyLI0xw9E4zxoLbw8gieI8z42pdaq0UkrrcjZ13iulhJAnXF72ASF47621gojZN+WSKIoaWfN+f3llbd1bB4X+0nKsNBidLNqdDWo7BYBOy3F/uZN867Vrk8qotO2D894LwSv9boM1noxHWRITkTGWgCRO4iTxnu/evcWBHHsAHEiQpIDmtu8f7HrmPMsbAKyUEsDysiQhjgZHWirn7XgCKZT3NkkXYvSelVSBvTG1jqIojo4OD4mE9y7NsqPjYwDL7Q61ZGAIIYx3kVRaCgHoOGp3Whv9Hk5qURRCQ7udYw4pUNBq3hnVaMQrSRAcIAQaBo913i2qeFrJJNI6iSmkV89dBNFyNx0WrteOt3d3ABwMRxvLndHElIaUCm/dutk+uHPn/s6jWxuXN85ZawAE5nklzAcC2nl6NJBpGmc+sft+WJUAatukWw8wrxvW9sky+gTi11TA3lElw+lZCwHFMyniWahfeBDqgZMU8h1FwQUfbfHHAwTmE2o15nSoBzEyZ16NIEJTAj7JQU+NCk4gKv8vcZJuiga68pcsqk8xJ2dRLCfV2kUNms70vHHCPKnsnkFJnsqcLaqgi5Ll/F42GeWC/3f2uhv7hlOK/Rk85b/hBj2Mh/EwHsa/5SH+v+7Aw3gYD+NhPIyH8TAexsN4GA/jYTyMh/EwHsbDeBgP4/8voRY7R+/cOjqz8dL8eea/zcMHRBZPtmfegbc/c+78rJNnT5CIBELgM/DIk82s0y280y0zccIxCAAFopOtqtMWiLAwlKHTFudEhNOXOdPl0442ZICTDodFg/TAfQqn23wLIkCDDT2R7xBigTwFB/Zz/bwQJEFHkIKUECIENhaANaaylfNmpZcur+Tn1ttbW+1Lj/YuPb7eP98ViQLgBsN7d+8TRklgVxXkfRJkYjyPhZ4Dc6p3Pbf5xOOZWrn66//8s6N7e+MC7K0QxtrDvaN9AOOx3x+NtFTjYsCCirIOLbW1vPTGjd3JYPKRp58DMJy2j6wVMNK4ltJ1WTNj9+5xHvXLkl+9uQ2gDStDubt9L8+S/oWNshoVOgu1t6PZ7rED8O/9/Mc31+VkdhtZC7CkEooyyEgqEUhyg45k4UNghrZK1nHsjSOPo
ISxFOkiGAD58tL2N3f/9F9957EPrv7wLzwZT2swhASxwMK+qNmIZGbPDCIS0kNYhgtJozEfBHk4z1Vw0gfJQbAP3rOGPpEfJdEYSZMQcjybgWLnEs+BvBVcdNopACk1Atu5b3xj0LfA1xJGYwdgVBReVEoob6tIsFZxJ5dKShGC59AozXPw1Jh1Qkwm5u7B+K1bI8hRK08EAoCqqK5utn/k2ceOx0PrZgwOkLPKL3XION4bDwEsdZKs1/n611+/8/m/CCHOpB1OypVe+1Of/CkiVbpGzQpSkiYSgS6ur/7Yxz4wO9r94svbX3rhrUcrLOUZgOGMfF3feHvv4+8/d6WXpt1OcLWpHckwp5b4AGbjvRJq7/asv7QcZavTIGdDY00xm00A7N47Wm7LrY3lifFv7NTvfaZXW+fYd/LWG9sHjQzohXOrN+/uX764NjkeFkVwIt4+9FsrYjSZJklkawugk7WSJNUqn5XTonTnV1sq6x+Py0Ty1asXAAwHx7v3945GRTaic30VRQpKI4Tg7YnCl/cuipagksCwbhZnWfAVs9CRgBSVa778Dbvb1ZV9+dqNp59+JGn32E/r0fHaZvz8C98CECnRzbV1NC6PBmPz/mfe0+v13rr+yvHx8YW1LQD981cpo53D7ZHB97+/P5hs65j+1t/+VD0uvvC5z3eX2wD27t978uIjX/jTbx6PzbnH+51i1svUlQsXdsazT3/2Tw6OKwBx7cbOffg9F9G73FZmdmOcLS9dvLhVM6ej4ytPPguge2VLjdz2l14YHN1Pk/DjH/6AOyiO9g5+6See+q0/fgnAP/rKi1efXO9srLci1e1dGUy4nEycC1VtpY5Es/MklGUOAUpoK5hkJCTAQgg40jNrAEhFiLQxVQaRJFHh4BlplpVFqZJUUA2gMAzyUke1DxCq5kABBAEhGs0sJUUInoglKUWEEDyzIBE4cOBIawCFq4NUlls+klrGIrJC6bq2Kvbeh8bLXilVlk5J6fxkY3NrM0llFKsknUwnVV1HSgEQUg73D2ZlceXiRa3U8XBQ1XUSRc77pV4XgIWIdQSiuqpMbeq6Lqui319BsCGEJx57HMCTeEortbt3v6rKrNU6Oh5EkZZSBemkjN1CEU9K1ahGgsAcCJBCCilEFI0njbZponQsBNXGgTCrqtXVldvb25793Z1dAE8+drXVyrxz0+lMKZkmSW+pG0XRq6+/WdX26uNPAChms6eeey7P2nmnM5tOneekFfsQoiiSWne6bQAHe7bVUb/xW7/1/ve/J0lEMfGT8fGdu6ON9bVeJ5vOZgBkpEVgReQ5BCEsB+s9CUGCjPEA4iiKdcQ6nk0nbG2mdTvPl9dWjHW1sTrLACjnRBRRFFVlWZYVc9Bae+caoHcz+noOzDydTEBIW6mcmlarA9IEXN26IAMA1MXsD3/nN6XGxsbK7bT79GOPTr2rmWov0yQCYK3XOlrt92pjb96+m8RRo9IrhUySWAoBYDKeLJ9b668uE6jd6+Z5Tm/dtM4nOrbWJ1EMINa6ttY6X1aVtU4rlSVZY8QxmU7nSHghPQcO0EK02+0sb3kOpq739g8msymALMl73V4cJ8Vsao3zzisp8ywviPIs9Z4BDIfDuq6YmZkh1fHgqCwKWxsJcs5PZwWAqqrZhwCMJ8VoPJ3MSmaazaoxF0VRNYRuz35zYyNNsyiE8tS/KAAAIABJREFUstyfTCe2tkR0PBy2sgyAjqJrt6+NJ5Oj5CjLMmPMeDyuiipNkiRS01kJYO/gMEmS2XQ2HI6MdZPpOMsySDOZTsvaJnHUzF+zolRSbm6eL6sqjhPPyPJ2Wdf7B4fLy30AWmsgvOuJJ+I03t3ZORocl7Xd3Dz/1huvXXvtFQDHBweXrlwlpcqqnk2naZZbY6uq5hDKsmyoBrOySJJ0cHwkSPb7y+PxBCAOzJ5JiLkPmDGTyeT4+Kjd7ZbLpZR6OpsMhiOt44UzLqIocc416FilVSIyU9fsvWcuq6pZkBFJa51SCoFISHieFaUQoirnJunBWZDwzFKJq49e5RDYuzyOhVSe/aRo9EY1A1LMbYoDIKUMCMxBR7pRDq2NybOsgQY3qzqlFYfACFIpAOy9c56E8N5JJYgoSRPHrpESXNjwEkDeeymEIFJar64sM7N1jogaD/osxframhJiMiuIJHNgZu+5rkohs4b7X8xmzrlOt3t/58h7DhyIRFXVdV03JtQgJaQiEt45Zk+Coih68vHHCPDeNW69nn1dG2uNIBFFmhGms1lVVRAiTuK0sZhvXFowF6WxzoWAKI4IIoRgjWkGAaVlHCekSZAQBAaEkGmW1SAAeZ53XT2tqpVet+P8LCgxdrOilEK02+0ABmCstdZKITw7IYTWurGJUkoJGeWtNgAOmExm1tRZmhk7yVttIoqj2DkrlWwunEiw93VdZb1+J+0E9saYEFSko0bwQevI66iqCmbWcVTX9fdefrnV6l64cOGxp5754z/4DADBoVJR4asEHoEdO0iylo2Azjt/CSG3kOtcJAWE0CQCjTUQEygEQRAkYq2EIClIimYgIsuQAkDQMirqaZy0iMjayf5g1kyCnVYymU3b2ZIU0eFw51tv3na2Lku/3l3iVTc3ql4kHszBGxc8Hw3HgVnJqNvKJ1MJILBVSgaprAtn0p4F2XlOLzr57xymh7A4+DQ/ohMetDghctNJEvSDVPrpDOH7TE5GpwS2k/+dwWCegCXppBth0djZjs5hvHOgYpNZhvBgS82BixMJgQFxxkJ1ISQm6EGvmPkdWnDGzya3Jwbki2Oa34wTm+zTI05odqDTOxHOJNQ4ob8vDjy9JWeenT+m07s4N8UBPdSOfBgP42H81Q0FgOaucw8ytn+Q6deZsZsAfqDQeFqVI4Qz2r1nTqUHG3mw0Xea6QTMh2QSdIprb36GBT1azoUnQXxWzvGMMMdJnRDzyeidL3tmMj2jAHJy8gPwUQoL7ZKzHT0xTztD6AbYmsZOWwqKtIglKSIKQQrM6hmsNXVhi7l58dJa55lHti5c6Dz71OWltc7KuVxnzquJjKakjkAVALXRXfHu3qtvnr+4LlbapTFLsfNmb1pcajsHwDsb9zqTO6Xa3atq981rtzlCv9WrjVWQcRQATKaFNdxa6ix1+tt7+8Vs1ImjRx9594efkdeuvf7Hf/I1ABuX14XAxY0rrlbXt69NR/vXXjWJzvaH46oyo2++BCBJ6Cc/+Mx4eLi1+ejFtfU//eoXLWUvv7p7fq370Q8+CeDcE5u3X/3a+aeWyOuyLtNeW+g0gIIUDiGNFIBQ1OwdgprcnNw/PE7O6aV3rVhTO4Q0jpgsAENUlXK0W690VjnNQjEgQAoCgR3x3C6gufGN3y0YwjBVLnhSRBGAkojhtAa72pK0HmApAgHEp28aNcLSHhBC2qTjEQcKkXLKM0kNwAU4bzRBCAhBQggSsvn8hTDn3ZNQJHNSkQySqCSpScjGT4AISjcdDtZyTZ5EUFKRiqUiJl2xlsQAWBJklGTrfSWHxu3Ppt5zpESuPdtZs+x48rEnd4vO
2zvf6PpgykldFjohivS4MpaDmHcG7AMD8O5oOGE7LaaDTiZG48m3XnnLGwAQSoJc7P2VzfcG76ejMos4UoKlYCcAgCkgZK0kBPFrv/fKwbiiSLpZUcwcCfq7P/sMgMcudwfT8Zu39g2wtqRH4+LSlfPCYm119eXrR59+/lUAHMl2Fl/fmU4ns/VO6z/8xR+Nk2x/VJpQn0/Xmy9VJDVYtjt9CFXXk+e/se+SamO1O5oWt49vAbi9e9iNw3pX9nqxVuysV1VFWguSJyMWCXGwx+02Ja1ua2mZBO/s3FpdzkxtokRFuql9sIq0lHFZT0ez0nvL0wHMSCsqEF25sA7g4iOXddq6+b23SlOMZsP+7KjVa7/29tHLr98j7AL4az/hn6Or928NGHplo/3Xfu4n/vwvvvLSSy9f3Hqks9r5j37hZwDISM2q8T/42K/85u995vNf/c4H3/MYmdl3X3vrU5/65PTg3rMf/DCA9lJc3XxzasbjV9+eCJuk5tzWE+uH/vlP/9G/+/d+2VQ1gBe//uLvf+7LMztdurx5sLv3re98//Hz/Q+879n20vInf+pdAG4fTQ9H5djVxrKpHHo9TGfO2SClZdZaAahNrZR23hktiSkiz1Y4MtYKLUMmFIAJhdi7KJLOMZtaaxU8G1OBwqyuGn60Dz5Y752XQgVN7IOWqqlPNck6SXLBc2AKkFIKKYPnQEFAePaNFYBgYQEtEyGlqco4i0xlEcEaLQiNnkNlWMeRRTA6ytY2OnHMECxFXVeByIEAaJIM6nU63rkkjiMdXb9xsyyL85ub/W4HgFLSe6d10u10DENJlUQpgVbX1kfDwXQ6BXD18ScPd3fiKBZSWmMaT4aimHrr2u2OEI0CGrTWWmsiMDMRSalAsMbEgtFIc5Ao67qsTe1cnKbToiAtifB//l9/lKYJgFt37ownUynl+tryjbdvv+vq5ZX1NRnFJMT27XtLy30AL37/2tr62tLSSpKkAI4P9uN8XpayRbF79yaAV958YzCoP/DRZ+7dv9vuZIEn169dS3L97JOPHR4PrQeAjfObREEg1N6zt1mkjTGD0TjP0m6nDcA5X1nnZsWtW3f29/f/H/betNmS7LoOW/tMOdzpzUPNVV3dXT2gG40GQAIESYEgKZKSLYdkh+zgF4cl+6vCDv8C/wYrqAjqmxTBoERKdjAoigAJihNmNNDoRo/VVdU1vPfqDXfO6UzbH/Le9141QNofzYg6EVX1XlbezLx585599tprrzU8Onzj7Y/6v93p9npZnj1z4zqAW8/d3L5w4fjkhBmRCOBg7akQWxtIvfdZNx+enMQQQ2N7nU4/62iSq92so+Leo0cA/vqHP9i+tvPqzRc6Wfrx4/t3D+7vbl7WOhnkcJAAOITNlf766mpR1gdHR500DRy10vNi1unkV69fA2Ctq2az6IPSJngv0PrYLKzO237YPEvX1jequgzBj8bj4XCMVer3tfdxMBj0uj0AeZ4D8M4rqYwxVVUakwohG2sJAkCv2+33V6RUk/EwhFCUpXXNrJiX81maJi2UUBRFWZZEsN4JIY9PjqfzWVFWDMyLajqdAZhMppPpvMWbxtPpbF6MRxMhRFXVRVW1M5YU4uDomJl909RNc3h46J1Ls2w+n7dA2KDfn06nZVUlxiitjDHMTIrrspKi2+8NAMwm8/FoUjd1lmSj0WRaFJBKmtT5UNd1i5chxqppEqOVl/OijDFWTfPu+++vrQyyvNPa3UhBkmj30pXtCzuXrlw/ePjwgzt30jQ9enzQlBUAkximNPoQOWZ5JzL7tq04ScGoqgrAaDgcrKyGEFZW1jt5XhSld15JKU3ivW/DbVXVo+Gwrqut7Z3EZJPxeFYURNLolDksoCUpq6pKs0QZ5YNzLpAQxDh1VpRShtY+yHmtjXO2XVB575UxbWAPMUYORPSZ114fTU4iuNcbkJRgms6ms9kUgCBZVVW302VGVddZmmZpGpvYSnunWQbATidCCB9CKwzCkbUxztrWyR3AbDbPOwBDaQ0gRK+UqqqSJAfvKUa0VieRBEEqpY2OMfoQ2+J0Vdd107TfSu98E/yHd+/evHFjc3OzqitBEEpnadYKLPFCwVEyc1WVVVFaZ32IPvjWNVsQKW2kFEsTJ6mNufXc8x/dvRNjzFoxB++bupZKso/O+xCi0ZqApq7Hk7EgAcA5JwXVdQNmJaW1VpAY9AbRuSRJJsUMQAihnBfOWSWVIBJCcAwATJK2MsESVNXW2ZCZPM+EDKKojnrdbtU05ePHWksAqTHOuVaUloD5bGp9MFp3806EbGdC7+Ogr2MI3tXBuzzrKKUaW9e2Xs3Xlu5GSWQ2JmlbzgMYIKW0kLJ98Jy1zlohKE0zW9fD4XA2L5ilSZKqLMuqAlA3zfGwQJgMFJQkI4JOEyNE+6gdjaZoW3EXeQuf2iu3c2IIsRUBb+UCmCPHdpVKWSIFMRDb1WbkEDiEGJyzZT26u/+gP+gR5XNrayd76QoAxGT/6PFql7M0nZSzndX06CReWe/By6qsxZJmEZkFoI0iISPR9kYvBkSY1W7/sA4AnK8Y3LqA0lnGdq4l+Aw2XEpBLjOpUw1HIiwdWkDteRcXgCfGE7KPZ8kd0YIHsrx7p33fpzneGdp3FmB4aWkTT+kz7eZTvx1epKytyOKifb79BJZaYE8oc/00cJSB1tPyFEk8uy0LHPK8GTcvwcMnj8nLbm1uE1eOC4h6cSQ+3QHifP/534Yl0tlrcY4Vcy6JbWu+/Mk0+ul4Op6Op+PvxlDnfzkXFBbjPLfwPBbJS1IhP7kXLQQ8/ibM8QneIbVQ3k+ZPlvxm/aFLNpd+BzHkegUclxYVi8C7CdizNnRlsHoyfLfGfdz8dY/iaGezfq8PMdi+xMESmaIVtzkVG0ZQvpUKwCZlkZCsrNVUdZz55pc0vpW7/K1y5eure/sbgDY3envbpnVFWpUIK4lpgTL7GMMMYioOgB8VJtXrs6HD6pocqTdTlIMJ93udYNw//uHAI4fzG99/laehG9++0euo1SSTMd+bzpfXc10mkTyAC7sXihts9LtkRBrg0GIXNVlMT6c7D/+2U995scbxwC0TP/qT7/x5esvvfTqrUfjg6N7w4hhurLBMcrExNas9tLqr/7cZ3/03sNr158Rm13XUHd76+7D9z4enTz7+g0AMaXL2yuhSgIKSgUluZ3XFcXuyhpJRlkDIA5JIquZ+vAHe299//izv7xz4fNXq8kx8kF5ctLpSgCV5SizRKaakhaXJkEt9htPXf2YQTCt36oQiMJHDkwETcIAQB2UVooUxyYEdgRiAcjAISzLnpFjZOGZOQStNSBdww07kjHG4Fv3bZKQMnq7fDgFRQYJPlPNRoSKrAIrkCHyBBEjhBTegYhSI9vnrYwcI6SgtbXevRPnIaVKI2nvGwAcqAn8aHw0nB2OZkVRsSWHoMbaDboqSQUAJp8leeV4pd9T3Xz6+OHM+l4xP55MnPdlDQBN41kIjkSBjkfzbpbZPFlfYSEoKLPQoQfBcRpZmw5xlRoBagC23kuSACr
HMrokFSLJJmVzPK5gVGAKQk1nZUz6AJxURTPfWM05+vWV8NH+iKSgMtRz+5UvfObf/uEbAEYTnnZ9WY4U++ncraxdkOKj4/n0woVuvrIebAMA7IVQR0ejxoVBd/1b730AMWTQuCiFFABI0j/5ued+9rMv2GIq42FVh9mk6K12ySTRuYWSnda/+9UPjqf1ajcVkobzMqHkX/zmS9lgrSmOZK+PFoQFgbi70vnSZ9fR7RTDSaib3u7GYHtjw+QArK2m5f7amq2rzs08Zt2UtTqY1kaStwzgwcFwa+d6Y5uswzcvPPvKzecfHnz8vR+8V4z3g4/TygN47uWXP/j2X528/eYXPv35v/j69565/uqLF1b+/df+6Hf/3e/+zOd//satywCuXl47uX3xwbsffm/06MZLV3Y3tz/47ne4MtduPvP27f21jXUA/+Zf/1F+rXs4qdT8KB90j6fVjA9FMgDb4UkBYHPrmtLV/sf3VCLvP9jPd0SWZk1jtRTe+9YavgXQOcK5KIxyLioRlDHeWQhZcQBgokoF1yFIUlHAOiZSUrAEiRAXEuuOU5UEYud8lFJKuGhBECTFskRPTFKCQc47LZRKdIwBrQkMWihBKwFYDyeNCc46KUwTg9HCCDReARBEueTpfBIgAdc4sfD9klII6UILFpitrd1ERSmk9/7ypYvXr17Z23tEoJvXrwMoAqajSW2dD966GGIQQszmMyU573SmsymA/Qf3b9+5LQTFGMqiSLOuSUwn63IaiMjaCoDRSfvWvA+CuGW+eO9Pjg6FKx4PpwBSk+T9wayYR3Avz6fzeZjNU2N2tzdbfh8BK4M+Eeq6WV3pf/eHb9167pl5VTvPZVUf7u8DUEo1TTMZDa21ZVE0TZPlHTeZCKlIisODIwCbmxcI6KZ53bhZpG63Y71N0uTew72qbF669UJ7qda7NDFN4Xqpbh2fpZRSqtZfW0p19dLF3/sPf/DmW+9LoJdo0/fFfGZtUz6sv/PtNwD8N//g7//zf/7ayXgMqRGidw6MuOxmaL+VtnFZp9dSNCQHABRjx6hyNv6z73/b1QWA9a7Zvf58YtjH4uLmVu2ag5P7tcXrL73aSny+9f7dsqqJ0fiQJ6ax1jlnjDLGnIwmTV0D6K+uDUfHVVWH2JAQSkittSDhvTdat3KNSZLM5/MkMWliqqqT57nWutVznExnk+kMwNbGBjOYWQgy2oTgrXPj8Xg4GrZ4ZZqmwXuTJHmnV9dN+0THGEMMnSxvVyfe+163Q0SiodgaqIIECSkkmFuN2qIs5/N5UZbWu+FoOJ3Pi9k8+BDConjWLifG02nTNK6qnHWdbodIzKezxJgW8x0dD2MMFNk1tq6qNE2DD66xxhgXYZsagATWVldtVY/H0+m8EkqXRZnlPaUNM7dvwdtaS1lVdTUvkiQpyso6N5lOVlYGtW0Ojx4D4IjEaClFVVbamPWdHff++0VRiuCbpgbw4mufuffR7WI+G6yukxShbpQxq4OVxjqTpIoZQN7pEpFSKsuyumnqugao2+llWba/vyeWRFciStKUQDHybD533idJ2libJKal72ltGmt54SERhaAYInPEkmvVinVqk3jnI7MQMsSgtbHOgdk6D0AJIYguXbo8moy00S44rbXRejQeHR8fdjo5gNlsLpXWSltnOS40F5WUjtkHv4Q+VVXXSmti9j5obYL3IITgW6FA7z0zCyFMYryzUkrng1TCuoYBtO7bPnD0dcUhhOhDWVYffnT74u7ubD4fTya729sAhKCirDpZcuPqlf5gECM3TR3As+m0k2daagBCSCGomM6IyDm3KMsLap9SAIGdQSakVNporYuq4BBAIk1SYrRdGs66yKylEEZZ73wIUkrnvZQSS3C/fVK10s45pfVgsDqZzsaziYgcea3F1IwxRCSFEEKE4FlJ7/x4eFJNxvNZAWCQE7eLY8Ex+mnRxOibwEqRTrTzEUBd19ZaIUgQSSmV1iSkFFIpXVT1ZDICEJmUaN++0NqAl5xTkNGmLdPWde28z7MURLP5VBAESaWUFLI3GLQPlddGG5nl+ejkZD6bdbvd9bW1/f29Yjra2toGsLOz0/GjnVTJphIC3DBzJBIEttYdjicAQliUltuv8FJOvs02YhsGqW2j4dNeLhlirL3Pgm9L6RAgIqWEUHJ9vfvitasfHYxWVjp5Z3NczKfFGIBW6tL2xdrVlS8H/cyo9d/44jP9HtfzwMxKSwBKwIeowEJItHFXxKYMnmUItJQyBIPrutYqO7t0nJfMp3M0QTyZdAGniRa18x8vW+gg/hYsbZngnXJUzhNdzrMD6TS3W97Ic0jd4tyn9+wUdjsHaKJNB/lUjHJx1cTiHB64wEkhFq1NizPEJbWBz87z/4LsLaiPZ44F50kwjCXES/QEgLjY99Qj9fTvTx79HL3mjJNJOL3bZ38tz8j0xMuejqfj6Xg6/u4MxUT8Cc44gGUQkVJgwZ1clOAIFDkuqtNnXl/nmJXMAnTat7Ckx3N7HHEqwLsoVS2KWGIZGNrf20pYW/uKwVE7zs+9ICIIyBYmWuKXdBqHTsPRsra3jDBnqGr7Z1FRa98scysHTmeMR2aOYdE5gvN1NwZAgqTSPgbNkMweAcIaAfZRp5HqMQBf1XVTceTVtd5zL+3eurV78erWzvXB1rbp9CwlHsB83kg7m8mZQVqU00GWschIKinqKkQtMwBw7JzJzIUSzuR6/F/e62z1w0bspp0HLACUnpKq3j+er21daN57NyEjEuNiHfIsSQc7GwZAJ80P79yJTfPOg7s3Xv3CGmj/3khrqYLgXqf38fsAnn35s372+v07b6b9la1B7+V/+DN/8Rf/JUDOYzHoDmQMAD7/xX9I29fCD9/pdOM3/tN3dm++/OX/9jf+87fuVeP7rXGBHM8eHY53nr3pYq02dj96+9H+G299/r/7EslMorLSA7BOKp9m3a6OaTdhZJsYORUt1cdRG18GAPkg6WvN3HjTYW8DAT4IlXAZpNbSKACh8kapuWuEZIoMIWIsokCSpp4kgJgEFSWilsQuOJYpR4CCJsmCPQQAW9tEUQrRyDTY2IgSuqd8IhRzMWkdrwUR+5jotGaOMZBrjEylEi6ySHISCgAF1kZGxCBi9F4oyjtZsGXjgmfR7a4ByLN8Oj2papexF0xdQ4ZDgGocK1YAlNTTad1ETYH2DsZV4UbOXdrob632NlZ70+kBgNmsmPlHKlFVSYWzDdLgp5MqPDqZp3nimhpo22xNPa+R6rwjh8UUAmvdTWHhhWh0+x3USjDFprGlTydpHJBOwDZFdFEASIwXSnrX6E6uUjV3gZKui9HFSR24ybcAxCS5//hjDX7z3qyfyd5a51tvP/zln3llfDTdXrmY5wmAw6aOLvFKN+VEhfjXb927v39447JoOCseH82mxwC6q9tJd6e6/26Rd+fc1VJSRlPEbGXNDgsA80mhdnfmLhztPe6qcZZ1vK2DUZIH0mgHC8CKntLczzqHteutrZ7sj/28GOtBxoVAobuXAIRoIkwoVRrJ+on0JttcCxhEI4
mrZj4FkBilUvFAcIajzRdeQNKnguYT/oWf/4UXX7sB4E//8q/+4E//EA03BkdHD776l9Vs0ly5uLa9ubl/dPzvf/8PAPzvz1/71K3nfud3/t0vfu6Lr3/2xWL44G7qw7y5/eHJ//Q/Xz94/BDAv/69/9hbXZEel5+54mvxJ7//F9vXLk8nJzJJ3vjGN5699QyAR27yAm1285XSu7F1igyGxV9X7xzNL51M5gDi+w/ryuu0P7YpZ5geHQepFSG4RkjlY4seKSGUkWSZEYIRxIzgXOsy0fI5hAgWEBCRI4TQSzsuEEhKhAgAUlgEjiApZGBwq12wbBtacAeYGUJACcUBEaGdcQXJdlL2MVAAS4rRgRIiRAoGIA61jwIBrUeKQ5J1bF1zw5wL570kCpERfGYSAIg+0VIKFWKorSvqurFWa7O5sTm3HkAQpqgqEoIjjSfj8WTc7/ceH+yXRYcI82IOYDabJ0mysb6W5/lwNISQ/UEfgLU+TdIs6wLQxjCzVloIqYSw1motrWvGoxMZPFpyKAnvnJDK2UYIqYBcq2kVV1ZXi7ICkGa50loKsk0jhZqMpxyRZ1lLzrZNA2B7Z9M2zejoaDgpO90VZ3onh3t3Do4U83g0/eEP3gRgBW9fvfzdb35vddXAKGWSlISd1fvz+ztXrnY7GYCk0w2NnkZHCeqmYebAbJ3zIWqdACCBw5PR2oXd7J3b/bUBVHLj2Vtr66sPP3z/+Rd3e3kPwK9+5ctpb73aOxqs9atYxsDOWe9CIoylRIgIIOuo3qD/6qdfETH21rcOHtwrpycfvvuDcjYadNKXbuwCkEoLdsG20dMnRJfX1m3g/YOPW0KicO6lGy90Bht3Hu1LpVY6SaLF/tHIx0YwfvTOewCev/Xc1trKh6OHVd1IY7TUIUIoBSF8DC3/NAISQSBKyPWVFcWklFZaG6Pv3ru3d3AAYKXfM0bvHRyQlNubW03TANRYW1trR2MAF3cvB8kUSEMmaWdAUkvxaP+hFGo2n7VsuG4nY4YgynRSlpUGZUnqrPc+CCGTJAVw8PhwNB63Fc+qrmez+fhkpJQWQp1m+yQAFjGwd55IVaUFIKX2nlrLER88ESJDkJBSNo0XRE3lyqKWqmy/cW3vb1k3jkkqtbqxUVtbW1amE/nYNrZdXCWp2d3ZaeZTZh70ui89/7wAb25t+aYBRwD3H95/sLf3vR/8oDsYPD44uHzpUpKm4BAj71y8BGBta3s2nzjnGKgr610gIYxJJvPCj0+USgAYkwgSiDQZHt27ezfPezHiZDhSNPaVnU8KAA07jiHGYEwqhHLeS6VBIs9S52opFABrbdOENE0YHCJC5MjsQxAkFnCkD1ppKaVJZFVXHAOBmqbWShHRQk9Aq4uXLpMWJIhJ9LsrnSQ/3Ht0f+/B5ctXmsYCqIu6XYwabaQQDC7rKjGGAjnr2rblXrc7m82hlwthxFYsyDqfSgVgY2OtKEutlEkSZ21TuzTTYG+bGKEaOwGQamOraPIuTecicF1Zo/QPf/TWtSuXBr1eUZYA+v0VQDqg0+2PRsPQuiGHoJO0ccG1Mi/RSaU8BwGXaJ10enVZzOuGm8qFAMA21vrogdDUNgSt9OFoVFd1URbB+9YPpyjLfrcLuKaumsiZ1tO6BhOEkiZp2yp8QJLngWNZVp1uV0gOMQjvHXOMQUgFoPGOSYyLagBTR+YIGzA5Hs72HzdNA8A1JUU3yDveKanlcTVOTGqDMDr1AbLNBdolltSBRYhsrePIgeGCV9ow2k4jCQbHqE0a4pRJSK3ZWxLSM8slOua9D9GQFFmnIwQQiTm4EHRiAKgsDdUsTdK003Mhvv3OB0qpft9N5/PJaHzl6nUAm1vba6XYSasuTF00qqsbirbxTJmSZmswaJOHGCl4kGDiiEgxQlAMkZikUBKA8k1tbYwkJIEIMQTmXiI6UiWZRktGBhrXSEFad3yww+lwvbeFzFfhAAAgAElEQVTBoXn4+O6kbABsrvTX+5s+hMbSpc0120x9KDf7z038sJO6IABA1U4jhBgpBsQoCCGYJK1M5JpVtAUA5hhdTHrGh0gQgmSIkQQI3ELYUuhz7LrTDu3WuIjPSu6EtmetxQsViSd5hFiabLesRNHyBCGAtmSzyBhFBBOYmEiI9mNljjjtOG69aJbiXBFgH9ss8MnOPW47FYjafReLDyJAEgCOLVmkfSMRCyfvRe/3aTq4JEMyGPEMeF3ggGegLJ+RI89e234l9WLvM+y2vWOC6Qya5BbEXjQY8k9hwSzfxHkIGOcNfxjnkNqzjwlM4W+lWD4dT8fT8XT8/3eoU9Ttic38iX/PQEc+nZk/QRjH6ZHo3Ma2OHUuftAS/SO0u57xEhdMt7O/258imFpjNXG+ttaGDzqLIuep9ERLduWyrnRWBlue8wy8PDvX2cGXl7tETpkXB2yZcEtJE8ALgveMKLWWEMpD+ZqED8OZMgDQ6Zmrty6+8vLOi5+6tHFlpbORJWntaB5sUfmqledTekWKVIQoAymV+MAx+hCYgpe5kUYAqMfFweF9TeuJEElDeyc2WxU8nUdDptMBcPl6t4oyT9KdjQug9x6djPNuTp5NpfaP3rt1+VUAm2kirq2E0P3WGx/91zef30/7e3fu7q6uVpOpKh/eunkDwFf/4GtysPaLv/jlf/Vbv9NfS29csZlOhtWRYSMiOWIAo/HR+uZLX/mNX/qXv/V7v/Las6//wguznZW4Mmj2ORcdAN/+/rdurqfNqDCJFT4Mj6qihjTaQzhnAzSAbkf7ogLYhqQuQRyjr6X10ZNe9EHD1n7chFoKgWiYoBAbz9FzVFKfFl45hNg+kpFbE3MmphgDhATAFCJRG615UapuuzdEiIsy9aL3mnRUWmtTWUEMisyREahdcHAEx0gEIySIgw+Ro4yRGS44FgDAxG0/DEGGGKydW6t1CEoygm97sV1wNsQ8z9BUrYEmwEoKFlIGD7T1XFpf2ejIWNmHtbUX1tdff/biK89sPj4ZbnU7ACS0EiaAJ8XMIyFJXiDpJEYLoZWOur0zVe2881mi8tQMVgdJRzaHTGjfiMLpWkoKkyYmTYgQOApmArQSAGIICIoh2EXrovWBXLDRZ2lez6dFOQWw0r3Sy5KTeaMUFTW9tNl9dnPlwcFBHtXs7lHbjOmCaax3kFnSCzH+3p//eUqx1+/vPTr6sGxuXuoDqObNvYfvPjqcvPipG/ujOLN1YNkgKi9aSqpMxN6jx+/VR0ZWyuSdLOe6JC0jedeUJlMAiOK0dnvjWmRqeHjSWJtpWbuAbg3opmAA6ytdJWuhGs89TSJEGK0lKRJRKLEQc2KQVFcvbVflqvUa4MTIf/zrn1lf37l/5wjAxuDSMxcvHx8fvfPx8Hvv3GZ/l1n+6q/8wm/+4//qP/ze//W1D78F4E+//YNbz1zb7mwUJyFVunSTb3z77ur66jWiy1fX1q7nAL719W8+eLB/7er2O9/7PitdlPUXf/0rb/349vOvvfT2/eHK4AKAf/Srf++7P3o37SbWJWR9sDWD5o357vt7W2vbA
KpSjabjOB3NPIA0STplsCCSEIFDm95HAgghhFOZoaUo0yfH304M+KR409+83znGweJ056UwfspZf6IudpphgFlK6UMQUiTG0NIfllkZY5xrkiRh5rqp25lcK932JEIlQylHk3Fmkm6nt7u989F4cjw8CdHt7uy2fbuXL126/+B+Y+1gZaWxNss6zjYyzYlISOWtA1CVJRFaN+3IkZlDDCFGk6SJQNYVAATIC0lEQsi2s/t4OEq0nM7mbbI0mkyZeX11MJuXRFhbWxVSVk0TI4/G09b3eTAYBOfG1sdZ1U11hzCKMuwf1Erm6zsffvg2ABf81UsXfzwezyp0pDRJOq+b2dyaPM2yPDEGwHw6y6RMlIQXEdw4p5QUQpaNTVgAMJAuVN5HAmzl8kEnhlgV1RtvvfOl1YFOcwD39vZWL1xIej1tEpJapkp3VtbmddnUVd00dQMgWPvGN793OJ58dHDw6IN3y+Hjd370/ddfePb5526QlKFVhQhN7UKbSQkSUkklpFSUgh4f7wG4vzcarPRu9lfW19bqxokQy7IWJMFYHfTSRAPgaIWkbqeTd7pM5F0I3nnvs07qrRr0+u0z0x/0BclufzAdj5O05ra1VYitzc32ye91e/3BwJhk7/FhWZaNtTHGXre3tbHVaj5aa3u91dXNzXIybQJPJ5PD6cT5oKViZucdgHnBvU5XaAohNN6FEGOIABmjhZStXEZZVTEyM2KMQogYYwiBIFmEGAO3TFIikGg5p0oKIsEcvAvMgU4T4wUuwMwIIUYGUQgxkpRtFKzrmpkrW+cCw9Eo6XRCDMyoyqJpbOtLHoLrdre8j5Gws3Ph2s2bWsq//JM/nhfzT736aitKeN3og8ND3zTFZGLruiqKXp5H54IWFy9fAJCouNbvqN3NNMvTND85Ovng3sPWLBuA9w6AoJilWaK1UtL7ECODqalryyBeoITKqPF8fO3as9eu3ggxROYQolJU1xURlFrMwBy508273V4xn7kQQ/AcZYyxRXtFKqx1RBSDb+mHSqsQlAteAEpJAFev3WCKQhATa6031rf29h7d/fhuf2WglHbWAZBKLoyqBRY9vYwYWUoZ5KJzXAgphIghnPZkEAkpybmqJT5LJQURg52zzCyIYoxZlk8nYx98lucAKESTJsF7Egtu2dra+tHJybwoj46PHx08BtDv9a5duvze3Y+euXa13+vFGIWk1BgOoW7qXq/XTotCCO+8NoYI09EkTbPSOtfU7ecoldZJwjGEGEAUvB8Nh/P5nAApRWsFrpWSSnnnpvNiWpUr3Z6UMkkMM7cSsVguD6QU1KJLJM4zEBYqipGJRGv23UpzZNpMmZ11i06nGJ0PMVQcnLfUyRIpRXQRYK0XyzippFSaAW201oZAC8VQpZlkW+sFyeC9d3Y5A8cQQwheaRWjBySAllnJYI5sjFFKOue9j1K2GrYQQgTvq7Jouj3nXN00OkYi0e/3i9n88ePHAG5cvqSl5MieQ5ImgmTjGoBCjILRyoee0ixagIwWuBmBKIYFwMccBRae0MwcmcEUIhutEiMBJFp6zy0rX5u8aoISnCTsQ3J588qWtQAa7w7Ho93VnQtXNkbzw2FVp5LnsyNjvCDhQ4skCiIoJREYgoRRRNzYiOAi4mqvD2BrNcHHB8urWRhqn0fD/saITstP+oy9eEYfjDi/FjilMy51tMCna4U2EWupHk+qa5296GwNwNxuaXkvrVXBQnt1KQFJpwnf2QlOr3mBAZ4mmVi8cklnXe62OBdo0dx3dlGfyEXPX+wyXaWzCz7lu9BpfrjIJ+M5JPE01z495flE+ix3Xt6d82zHn0zL6bRDHQDRU2fap+PpeDr+rg71UzPRn9h2KtFxVsc57wnzyd3Px5lzcYJajQvihfpuSzTEEt07BzO2s/IicMRFOWgpQnyKjHKM5xsO2jjbooTn4dBFKWkZQM5VnJ7oK18gTlhE0WXJSbRBKqLVSlngmgsxZyII8vBREgGNkGVimo6OF1ay7d3rr79+GcCVZ1b7O6kcyChqiAnTQQktZIWOQBXbRJe4tD7vknRNNDoNtolaVtaKELK+EIIBhEnjp41Z2/RNo7yZh2xndTf4h6ibGCSAfGU1y9U77z/++vf+7HgyTzuymTWTqr71yqvyw5N33r4D4Aufv/XFW5+r8/T733/7z7/61cqWvV538+Jzx266nW1tv/QygGdf+9L/+Vv/1pfTizv9Xl8cHH7sqHGM3vqaUq3wDarHd978g8nhhMu9k7V/+rpcp3rvtixGkenS5SsA6sP92VxGMcHuCkG72GlYuUiaZFOXRq8A8HUTK+cS8qJvQ0iM9rZQU8eMWoRWSCBZu+abo1TG+Xjiq6jASlNoGIRTORghxBKQJmZWUgKR0KbBEgCLABBEXKAIYCImorggUy4gbIaIWK4pw5LgFSIRWr1wQQhg52sHQRwUWJD3HMBCi4QWco3RxQAIibbBhzKjfFlpAefDpCgA5Dka50loTboFhCJDLrRmIgCO0YU4KmZ7j46cC1rLi5vd69trw0klFULMAeRZdzLhENg21jJrjlolSZrWTdPppk0La4KddREQHK1jAZBOQY4JUqmoJIBgAxGRoCTPZO4RAsfIgjhyS0GhECNzJOE8rOcQwIGlFFUd8lR84823Aby0oa5f2njv44efur612V3bmx3JLC2b6nsfHv3Cl16wTAB8MMIII4X3KIpyPI9XtlePJvbCStrvrtw/mAGYuXlTiXuPZpeuzR7tT8rS8mq/sVEwNAsAdeHRhOe3tzr5PKRdkyhjldCaKBCxb9rcMriSxpM5gmo8ULluqr2LgUhlg9IJAKNZ1XinwYoFy1xmGuxiDJE5UfKsQs1BCFAmDPzh+x882jvsd/S/+eO//OynngNw6cIuKz68b3c3V4db2cd7zXY//f533ly/dvPFz33x63/5HQDf+c63v/Gtb+z0O93Viy+++MIzz+/Mi/moTIcffzg8nK29uAvg8z/zmXd/+L7w/sKz16/sbrxz927syi/+xut1lZksvfXs8wC++8O3bn90ePPmha1eejQZRQRn47yyAbE6nABIksyrbDov0qzPkIFDbrp1NUe7qucIIAZPkNYFlZzJdHxCoYKf+OcnJvZz209bnfi0BvQTg5mXeCctizvnivynh2wZGPjk6v+Ugk9A5FgUc2sbE7LG2rquWrXWNqiUZVGWBTOklHmeO+eOT45bqNEGappmbXUdIdoQiaisyjUpf+1Xfg3AWz/+EYDxZNzvD2azadNUnbyTGFNWVZpmWZo5WzsXALScOEEkpRBESikiIUisbWymghrnACTasE5WV1dn87lSWhCdjEbPXLkIoKorAGVZzYo5mPM8BaOxflaUSqvxfK61biEVipicDEW3J5U6LooL3ZWbvZUHefb7//mP/49/8b9du3oRwHvvvff+ux984YufPRlN79++o4BJUXPgk+NiOJp9cOc+gBtXr7B39+8fSSU3NlZNokIMDFpdGaRJBgBCJSY5nlRN4DzLJ0XVmc6zXv9LX/y5i5cvnQynADr5
wAgznRx9/ODR3YePbt+7v398PBs+Pnj4CCGSJABV7X7zf/xnE+s6wMVV89qnX/7VX/5yRyOUk4S5dBGAV6kScfHBM0KIARCQIHrm2rMA1tbmj46OPvr4jy5sb+ed/rz21gYXgpKqsY21DQCpSCvBYJCcFwUCKylDCM7ZttUUQK/btU0dOCZpptRcKh18CMF7R51Op9/rA9DGVFWNyCu93ur6+tHRcVGWRLQyGGRZB0CeddoewP7qmo308OH9k+Fxr9drJ+gY2m+T7eSR2o7MtqEYDCKltJZ6yW5pm2eFlEoQGZO2KTW1uiILYiMRCTDFKNovROtecgoZEDEHjgxBIAFBYqEISOydb1c7WusYI5EIPpwMTyJxnncjs7XWOdcCc1Lp8WRSV8VsOrq4e9HW9Xg6+eD2h/eN+flf+uV2KXXhtc8eHR4OR8P+YLC+uhpDCM4FcJJlf/K1rwH4xV/68quf+/z48LCZF6trG8282N7enkzGWTpL8m5ZNwBiiL1Ob3V1vSxmgqicF5HJex+dFyRISgBVM3vmxq1PvfxqURUhRh+8kNI2jQ++3+u3aI5z3lrbl3nrp+Js09LxaNkJ2d6h4J0QAkS2aZLEJElSjksiunb1KgAhwCDvfafb3drafrT34MGDBwzs7lwgolaBIUau60ZJJSKUUrRYoUatdCtMCUATpJLOOSIySeIay2ClVJqmZV0ByNNUShGZQ4xGmxB9jFEpA4IgXuCei0UsMQmpzPHJiZRydWWQpsnq4JlOpwNgXpRSymuXLi9MbGIYjWfGyCRN+6trSaKx0IsQ1tqmaurGVkWRGJOmqVSqbWnSSaaU8c43dW1to4Q02rRd2EJInSoAaZrGGEnIXqej0qSbpiFGZjR1FZbfJues90G09iuLDiU6m6QZAJz3AEmlAArel0WRdQQHX85mXFYAhHAMCAEQWR/mZWOd9QFSCpMmvW6/jQBCCGutbZrIMTiXJqn3jpmVVkIZAEqaGGNTC46x1d0zxjDnQgghxRI+4rKYJ8ma1iZydC7EiBhjWdWz2RzAydFRUZZd0anrOjLWVlequqmqqiNl0zRlVQOYz6ZTacukBjkjGTEWTRNsjJCsrI9hGaoiLSA0tBjforGLTkE+kkoKjkyIzO1K3vmglcqT02hCUhCHMJtOd9bXmVya8Nv37gznxaDXmg02WsvEaMcuouykUgmZGFMXDoraucgoGSMLLWPwkVkQCCxglBaZ1r31HQBjUXz9R48BCUQ6tSJdWjmefZznwu8ZB+9JkskTu56yOJahfVHjpHP51ekOC9AWzD8NaMM5OPIcS6ZFN4kEzrQWT49LgtBKSC0+hVMKybkVRUt7JAK3bIXzHdZnZ1ukqYvTLSiHvNhrgW0uz3t2/rPbcLrl9GdqPRaW/eTnm8xpAQwvDkzn7hLadKR9Q6ed9j9x908/nmXf3jJ3fjqejqfj6fi7Np7QjqQn/gFOS0dnm88qMX/zMZ9A/T4Z5pZ8dj6/lbDUxW/n3rY5gBe/hQgiCLQr1CeS3nBaomwVjBdsfaLTCLk46DkY9cl0d7mRAJyGNDq9eBbUapEwIy4CeFvbW+5JkZCqyME39vrO+qc/fem1T289fwk+afK1FEAdKlZjNqKKFgpaQYaOdayMYWSk5gAk2AWeH00C0t5GGskpbdgF71gQBdcASFRa+CpAhDryjA+O3Q2Ra5k4F0JVAnj7vY97KR0flmuDzhzhuKjqhHpGEmor5TNbFwAMD47n/dXrm8/tXFh/fLx/Mq42Nlf6iTuo52+8887nEgJwEtQXvvDK7Yd3v/yLn/nqn7xx65ULb9y+O5vb3Zvb9WhfRgkg78qXv/LlP/urN1d2t9Z3L0/HhzQ80sXJ+s7K0eFjAPExLr/QqTWvr62rJKtrKopofcyVciEoCQBCk5cG0hTRzG0knQgBbgScS/qZdw7Ayfv3/cy7KpZV5eW6bDxYSi1ioBjaxgcIIl7gk8RgJRXAAszREWkAQglaDkGSBEkmQeQDCKTbZXfgCGJIIRVaWiQiA5GiEgvXJqkFRyI4JRRipBhbe9YAEbzytgbAkYCwwF5ICJVl+VpAcEXtXZwUFQCpFFFsGkfgtf4gM4VcqgAshcQFwB8+2HPFvHT1Rr/z3OWNbicbjR2zIaMATKvq9t6w9lGRylXKzrlgq6Z5uH+8sZ7YEAAYLZXQECJGO5zX+wfHm10TKdFGNEK03yVltLCWIwltSBnvS0ZsU2KKrQs1omfSksiABJOAVEyOowoeHC2A4WSS6BhY7A2rzVVRVvHHd/ZefHazv9J96/adlk8SKUD1fT2L7AZZfv3SFVtPHx5Pfv61T3WU/2BvCKD08cXLu3cOikfH0zfvPXKSXcMy7WmjwmwIgIgubO+mq7schjLGWDuOBEGRYkQqWAKQ1AtCVDEES5lJWbjg/YrOYdmFcXelTedqLVMXMiImBCkJrElAKwIjNA0AIYRMTFM3WZI9uns4qzjb2Lh3PPns51/a6AwAXNvZ/MOv/dnth0c/+7OfOdravHN3aDbSf/ZP/8k33/1g/nCvjh5AxaJoGlM0m1v65nNX+tfWHjx8kHu/Q5e+++dfv3pwFcBnXnstcd333/vBxa1uR7l/9OufqygKJW1xIt3wX/32vwTwwcPh//q//Pfffv92PT2iptzo9ZHqx/M6l9JBAah9rH0U2gDsfCUUB9ZCCY5CcvTBAxCQITih/79YMJ6fQGkxJX9yl5+YSM8Dlss5/Pz/tYvmdprmU5GqRYnnXDg4m7GpzQGYGRy9d+PpNGkapRdetwCYSCplrbXOKaUAVkox82Q6beXPkqxLhNSYuqrzPLd1XdVVv9cnIYlw/eo1AB/d/ajb7Xay3Dnf7/eIyFnrnJOJLMqiTfl6vUGIwXsfQ4yIzMZ7H0KQSitJtW19yZVMMwHEGL3zSimjk/X1tfls3uvkAIzRq7ZfVhWB+oNe7SwR9Xs9k2Tj0bgqSgDdrLsyWB1PplPLfeO/e+eH9x9+NB1N3vzrb37nc3+mO30As5nNu+btd99d6Q4EoKRWKnWxXB1kq73B7Tv3AFzc2bx3/z4L9ezFG8qkMcZ5WQCiN1jrdfoArAudTnf+1vuRRFk7jjg4OBwOR4M82z8ZzsYTAI/u3AvBHh0ejmdTx2jttHMIpVWS6bL1E5dC+PiZi51nr165fO1iV0ThZ03RRKmmREpIADn5ZtnkEBmRwZEDohB0PK0B9NLs2Ytb+0fHJyePtcTG6sbRpELDJKJYOD4jBPbeC4G6qfM8r4uKiLTSrVzjdDoFYJRijkKIyXhUVlV7qwWRI+/r1ukX3gcpVVVW06Lo9/p5kjJDtN7EzgOw1rZsNpMkebe/urpeW9vvdcajE0bUSgNomWAkSAudJSkDHJmYObK1rl0n+RDyrENCeB+U1BxLZkRm4igESWEAkBSICCFIJTly8DFya5QkWx5W5EgkFx4uvKQKtzj/gssHIRQQO1lXGrWzfUGlRkohhdrc2n18sFcUcwDdbmc4Gu7s7Hz6tU9vbG7XdZ12e3//1/6BIhw8flyXBYC3f/j
GcDoNjOm8aGwz6PdVmorgm0CTeQXgx2+98+1vfCtRWmvz3IuvJkn36jPbdd083HuYdXpJUQMYj0bOWUaMkauiTNIukZAkSEpnfasdubV94YUXXiqqsqoqBittlNTT+bST5Vrrtrvcex9jlFKFEIWUkZlDMMbEGGM7oUklhHDBti2YQlBjLZiFoIsXL3Y6XQDTYkqC1lfXNre2R8OTR48ezefzm888BxLO+9ZrpYWNiGCMiTEACqAQOZWyFXIFEKNQSrY8OAJMYqy1ALTW7Q/WOa1N09RgCC184OhD0zRpkpjUzOZDAFmSgjnrdtW0NGnW6/UPHu8rJR/u7edZ2pJYL124UFX15ubmR/fuHp8MNzdWQSjKmoSyjW3nbm9tZFZSdXq9JDmJMTaNXajhtWFdSSEFSGmlhRDOOZLCGBNCCCEsnO5AznkgJlkmmbUg8p6IYohSLDUovffOgihNUyJqccAWG+JlhSmEEGMgISJHEtrVjRO6k3f6nW41lQAkkwuxdLWSYOZZVTnvpUwaawOoqS2ANE2SJJFSOO+985HR6/ZBFJmdc9QWMjS1apIcg5BSEGmtlZIhBCFEiAGAcxZA+3hkaRaCn83mzjUmodl8DqAoiqaxkbksykePHimtcxJpllvnrfPb2zsA5kU1RunXpUOMwXOEsy56MEDWLTKRRVV7AVfRUkOcCC2IDKDxLoYQQoBoJz0mkPNxoSmJtvuLBbFAcH6amKSXrTlPa73V3fULK4NVAJNZWfrSaHMyeXx48tiQLstqbqYr3R7gWvRZGMUESCGghFTReiGglPa28Rw6HQ2gk2QhBrGg050zmf5p4Fr7PwtW4k/WJvkTP58leFhAjbTQMuTThHC5xykOyC3qtihHLvdjWjZEL66Hzi0qFvneExd0HgVsEdbFe1osM5iZ0SpX0mI9cXrNy0rp6aE+ka+e4qGn6eiTeTKd7dYuEqhlOpxlySQWOfMS6zzVtuQW2G9vQXu3Fkk2ET2x/KJP4J7LrfzEReC0wPV0PB1Px9Pxd20snLU/MeiTvy658mfw5FnZ69z8tygJnu/+Ow1LOB9ScO4l58pX5/6XT4turWg+RSzIWqfhDMBZA/Xiqs8okKcVy2WRjD5xEXTq7PZkCCY8eYVoi28L4+zT6f9c8cp5n0kllBj001deefa1X/vUePJuMv0w1gWAREEK9jZIRuMU66Sj6yg8UcouIM0AUPAQ4ni/SFfUQEhBJIVgx5GJWLaIjw9Cp9l0ajfydO/HH7th7R+PmeZ2Vh3drwF8dHv68pXBFz73HKj/H//8jYeHs53nbxw/+rhPepxkvd1NAGE0efvuPQ5CNk22Nri8tpkS/d//6U/W8uYrX3jt/Q/vAfjo0eML1169evmVX/offmO//O2/+KsfGqGlsqGZNePHnSwCODw8uf2j9+585werKu9VI/n8M0bTvLHG0fW1HMDutZcrgY8++PDiVQsH7cNaN8u7vWJWaCnau2eLCk0kOai8H5duUjrifHY4yft9jkk8PgFQBS/06uz/Ye+9YmVLrzOxb/1hpwonh5tDZzbFoGaWSJEihbFsyR4HzYwxfrDhB2NeDD/aDzZgQy+WDXgAY2TMQIAnwOOgkTnSKIwESSTFEZNINsnuZnfz5nzPPaHiTn9Yyw+76pxzuzXPtuC7Hgrn1KnatWufqvX/a60vBBmP55kpVNRSe2hLyBTprondUdeEF/97o7RWpEjArFQAIGw1oKFJiKAVlFJitFYKzsdOR6nT00msgbUszB1xRJEAWhvSBkBiU4lWSehlhcRQ1zULrNZWmVZU9B1kRndW1KxgyKRJttbrzVFVAWja7gNrSLShIkmasrY6I2gfSYXIesEK1yAWbqNNbKJI5Uk2KAZaq34vK6vQeA/g5qMHNx+Oej1dMZVtm2gtWh0dldPNemuj6Ggba8N+v98PJOVk6lt++ORgxW4muqeUal3rdQqgq2GEJTBEmDkqLQQSRuQAQNuEXUfT6nA5irRq2pCaIrbcKxIAStFo3gbg0d4kLbKt9fU7+5UL8QPPn/vj7113jQNgLFVNGKbDRmbQ6tK555t6dKAf2OHW62/8xd3DFoBJ7EHdZoNiHnOTrYQkZXBdVhlnCwxrbn2SYXO3HMvQMEvjWpP01pQxxCBVACDdYy2itNKZY0XatEqcGqiVc9pNPBkAa+urKtnUGXnvtFaSDxV7pQiGCNzUEwChiYM0zVZWy/3D0bwUpfb25wHFp7/0b7zGpjIAACAASURBVN555zYAIPv5z/zczd/7/djSL37q401jbJpy7X/uEy9P1Ir8+VcBjGt3dDhRQ377nRuR+CNXVrN2PPdhbWN99IOjaVUDeHCjPn/x6mOlPvvyq7tJKW7fNtP62uHew+bTH//QcG0NgK9D63TR+umjo12T1tGv9DOl8/H8cGO4C+DR2EGQJrau6n7eg9L94UpV15PpuCtTAIgo72OW9xaOScvhUpfcF5n3/Rvap3ShFnfQe7Lk8qGnBlDy/sc8nWxPUbmWUIjjV39qsaBlZhfpFDtiDBBJEgtAadU2jdYqgTXWdMxKHwILirwHQFu7ubGplGnbtm1bH8JsOtvZ3i2K3mR81MnCbm5sXTh/Xmk1m02Hw+GTJ0+SNG3aWkTatj1GXCiiyMzMQNcbCQC0NsG3XQHc1HWibVnXkTnGKCyDfi/6oI1OsxRAjGHQ72dpOpnN1ldXsiSxSdo6b23y5OBoY2sHQK8/bOYznyYvn9vZ3Rj83V//ew9v/PiVV15+9eWz//D/+EfrWxcBbO1s+Ogf3TuYFvNe0YuwYKpb/6EPf8joNEtSAG9fv3nzxq3P/ewn89TqtA9gVrqybsbj6mhUAxiPp1lefO2PvyosHEJTNc63lVKTxyEIJ0YBGB88EXCSmCyxORRZw4LYuDQz0rRdnkm1evWDl159/rK1yvqq5uBC6KfWADZywwygNUnwQXVQQFJExMsF1YoD0NTBmOT8mYs7O/GN67fuPHq0ubGidX9WlyvFgJgAKNEiVNfVtCwFqRZUdZOmiTKGWZIkBTAYDGP0IlLO503TjsbjNEmMMTbapm2c8wCyLNtZXbPGTmbzw6OjTphvZWW1CLGDR4UQqqpqqnp1bSMr6Nz5SxubW8414/GRRO4wenmWEakQ2CxNjSJzZPHe+xA7zqx3IcSoWYL3JHDOQUhYmNjAdBbMipQXH0JYjKQW3zXqcP8AhJWCRtfpBC/cpUEKcLE12gII3iutrNLGJr3+IIJDDJFDvz/cw6OyKgEoow8OD/u9/mc+9qnJ+LCak9H6+Q992LXN+vrGvds3AfzO7/4LY0yvVyRJwjEEkSLPV1dXtQ/sHIAfvP76cDh89dWfevLkyWgyXt/cSTnG4EaHhz5KhyM+Gh261gUf1jc2g/dKOWsSYXa+1cZ2biEvvvhCVVVVXRlrtdIi4r13rVtfXQ8xuuABdEqRnW+VtUmSpvPZvCgKjrF0FQBrE/aOmdu2ytLUe1/XlYhcunw5y7KyLgFEjlvrW2fOnH38+NG9B/cODw82Nra2tnbG01HwoctXdV
MrpWyShBg7JClAwiyAMabzaxISpQ18MMaEELTWRGS0YeaF60uInYhE8KHbeDBz61yIoZrP2qYGYJUiovl01jZN0zR1XfV7/f6gl2Wp0fpwNAZAShVFwSK72zuHR4damyzL9w8OJuPxYHWlbSoAF89ud73LLM8hC9eg6VGltDbWAOAYg2eTWG1M161rW+d8sEYTofsWeO+Ypd/PEWP0QULseiciTEp1n0S9hIRnRU8pFUMkpWTRckLXWWaWyN1TYhQw4L0frKwkly5WugGQhvnjg715VbfeMeHs5tq9iW9NludZkvWsTdDR/ImMSSDQxhAjzbI0TRNrRZmFMDepJdUYLAxC8B5E3rs0y5frV3cJtQgzs9a6yItgDZEP3aTBhxjZlyWLNG0bWXqDwYULF6PIfDrb3tkFoDRZnbYcBqntht6ktVbEkZaQZZiF0tVCMfn0ymaMWhhnHyvRE1SnfbmsoZYIawY6MCOquTscj5mkacPmcJAk/VuPHgCo/NSq9ZCMD8aP13vZx1/+yOZgJbgmBDEqdp89AJFZdWKLmlzwPjhNGQje+Q6Z23p33NmjYwftY2wdFs3BZbW0kKOixTTw6aLwFKv5/bItT9EmlhXeycbh9D5DQCAGL1Al7z0KTpqNy4/c0s78vR3D46cs9hMnoJfjHYWcagt26BM6ASF2z1+801ObkFOvj6e7gstf33cmglMtXOnkQZZIzafPXuj4BI9viJZA+5MzO/byOXWyJz+cXN33dH6fxbN4Fs/ir0qYhfbE+7H4T8Vxi3CRWP+yrHdq/LU81qLcOJ1+T0ldLJ60ODaf5HI6+UFE6Fig40RFmJ5+5BL3fqySfHKyslxn3nvCdOp2WY8vyQTHp7WQhFlcne7BqgPFdzRhgueQGWuFTJL84Cfv/vh/fOML33r5l//O53ZUKp2SOlSjCdrlGkWSis380YFaLfZvHR3dHl/+1CUAaSImGEOJi2AQcUTrY+W1gWLdmUxw9JFNFHvw8Ojcxmqi9jKIRHBMprMxgJ99Zeelly4EQ6WLdx6PQgwZVd5X9x8/IOG/+OZfAPilX/ziC0X6Z+++fXdSf/ozv5jT9PHju7/widf+6Ct/drTvhgMAOLu7+i+/+uevfemXvvn1b/zCFz75rW//hTZbmW/3b11fN1HNFYDZpNm9cvUjH3r4ytWXZw/r8f6PIVuzA1lJK2gA+PbDe69eufriy1eO9vd2Vld6BVRmxgeTwSC3umhH+wCSRAff6BglVtO5f7g3DvPi/r3RlUt51PMYAGD37M6kRVX5+bghp+DTOJrFXJvUqizvWgnCnjQRAwJF0EoZQ8qBEDpDDGGtAEUMoQ6ASKSUMjH4qm2DdLwwtlmWpIqUihxFzKLpLKJIQxQATVZBM2sRTVCJFYASa5IkF9u/ph8AIFJaWQFxbENg38ynk/1eP91Sqiqr7nOlla7rqFK0zgOKQCSiFSmzcPlQsErT3lHVlofaqPPbq61v3739ZDov7x00dekBtCKH86ppoikAUqIpzWwzLwdFXrtOxQxKK2105dvS+wTqYDqj8xvOBecD6aTjqdVNlXa9FRaODGENILISCrErSBCihBhc1Xa7Z4mRQByDscpHAtDGuLWSPxqPz2wUWZa1Nb9wYfX1m49fuawEbV0FAKJUSnBBkTZtOf36d9/Y3Vp1tf/Hv/PHj/ceXj67DYA8//53rq0P+t+//W0XpWVpXBtDw21tMg0gBvmX/+oH5cFRnpEP7dndXuvlweGhZ97d3KhbAnDn8Wh/Vvsg4mAUrE7KWP3RN9751KvP12V95+ABgO+8fa/ypp6OqrZaKVa2t3ZHR4fnz215jk8Ox+I9gNVenmRhVLn79+6ur/UfPHxy9GQSJDXpv1I2AzAepD++9fCdHz56+Hj+2ocvfPDcVn97p3SjN98qf+rKpfVUA3g4mrY1jRAPR/Uf/sl3P/rq6ubZnWyWfP/Pvv7q1Usf+PwnAdy6Pr17/e6FnTMbZ/pclU0ZbLr6vdcf/vQHPzCum7U1AyCWMbT+P/sv/8bNN29k0QWjwfmdu/M//87XJ7UHENtS6bzIesFJ07QmsRDnY2sSI8ydlapWlKU6xvapTa4sIA2L1uTp/fkya57yoDy596n5Dj39JzpdFpx6nOCkkXlq0ZGne5FPxTKNEyFLU1MZa4zWRinVDRLAsWOfaa2apkmSxNrEh1hktquWm6bhEERaa6zVdjadkqbJbNLUpfOugz4B8vDRw/6gH0OoqqqqS+u99z7P8ul02vEW0zRp2nZRQkEBiMwdHimE0AHZ2qamJIuRrTGtc9PpdF6WZVVnWVpXNQDmqHJqnRMRpRVFEeambvJCK6WUtgBAmkxydnXl1pPJf//r/8DA/Fv/wa9o17783OU/++brN69dA1Cs9mel7xWDvD/QaRJA7MnkvduP9gM4swrA0Z0HVunvvf7WN9wPMpvWVR1jaFrXNK4TjwuCAPTzgbGpsTYkkUgF7xOb5FoH7wCYJBOO7IW10lpzFLAUiarnVc+on/7gFQCXzm/3+0lhYlvNWYU0yTKb1sGDYpGZgg0A9gjAMfB1sV53pRcpACFw6+rJrDJGv3LlfO3a6/cfRtS9Iu0wcQCyvCirZjafMyR6Z5JMaaVAzrmqrqqmAVD0ep0qR9PUNnTdY5GO9yrQ1gIw1oYQkyRZGQxE4L2PQs65YrDS668AmI7Hk8molxXnLl1JEjaJpRZNXXbfhe7qdeKALBJEwrIZ2TkRZ2m67ItQCEG07gpfa63WhlQ3BDrZSDFLjBFgrazWWpiZmSN3paxSWitDhMiBu0mACJhBGowosUv1zvtYVRuD/mg8Nqnt9/tt27JIlucrq+sArNF5Xkzns3/8v/+Tw8P9ra3t6WRstProhz86Hh+99dabAPKit7OzQxCjSISruuYsnc1nP/eJj3/hS18AcHR4OJ2OsmLQ76V5olIjRJTYhEBFnuu+BTCbzWrTrK+v53kWOQTvhLnfG6yurZJS27u7AOqmKesKgDE2xBAjh9j2e/0kzURi50te13WMLEBdV0qrxCZJYrVSiU26r63zLYG01k3TeEdKUdu2L77worGm68AC2Nnd3d7cefz44fUb15k5y/Lz5y/WTS2CotefjCddijDaBB+Yo0DSNOn6MTFGY0yHzGVmUpRYG4XTNHXOdZ1Ko0wHlyvnpTZKKxWJRJBY2yJAoveuqcskTQDE4AE8fHB/Mq9u3rppk2R6eHDn/t2LFy689vFP5EUfwFf/9E9u37139bmrRMTMIfg8z3Z2tpM0JVCX04y1AHkfnPd12xibKKXqqhIWrQ2A4KN3QcDM0Ttnre0w4855a02X0OqGYwhAwSzWmtRYa1Sv6BlrFcdOmyJJU2Ot1toYCxBLt6ESIeqszLs06FrnWifwUWBN4rwrfduW824SqZXUzhV5pkhGZb2+tXH1vLk7brKsuHz1ue3tXQB3bt/yMYiwDx4CH3zd1CEEpbVJsi7TQ6nEph5ovA/ei8B5r1TXQwWHBdA1eKeV5shHhwdEkqYFEZq2VmZtse4onVgjwtbapmnG4
zERDQfDXr/f1DUAlSVeglVqNq+10iQUYtRkRDqvS6DT01wuix06crHCKTKkly1sKEWyGB8sWtxGaSLqNB8hogkiEUStqxTJ7uZOludZb7B/WN3b2wOQUbGxUnie3q/8YHX7/PZFZbRCKwyK0XSGhDFQ5zrHkQOaqmmb1qhojWHRLIt2pNYaqhMzpGVxIySnFJ1PrbuLTmVHjTiNkFzynuXkgVhSnWX54/GCLicPWTxr2cg73hzw6W3Ee8TCjvcPsmiCyvu2DQupruVuBk+b3dCiZOwgh7T8VyzRmMsjHfcvj3upHSDy+D3jmDYny4OevGGcLqK7bdRxW5NPARk7bPvT2xx6+ubkBd/zPznlH/6+6N41P2tHPotn8Sz+isaCrP2eYvNf35fEolV3rFO8uOdU2blI1IvU/XSFKcKLlW25Xi2e855hF3WWI13jkZ5en5bu1ui8SEmOe4WniunjZeOkMj5Bdp4+Gi2TfHcEOl5bF+scjsk5XRt10WY9GVOlNmEEJvESV4ZrNJr92Z9e+4W/9TnpkQkaQFl5s90nQ+X4kFWQvh69NT7/uYsZGTeamHwdQDu/Jz4IGScShRE8iZPK6ULDLeq3TMnEceujbdDb2UyL66PDUf/Kai/JdDECsHZuOHV+78H+xYsXvvSRM3X94nfefifRejZ50utnSRUAvPmj773y858drg0b726/+92e1pN49Pq1n7x45cUf3rpz6fI6AEoGs703bt+4nsjRk5ju392TtDeT8Euf/4X/+D/9j6Y3bgH4B//T//LP/7ffee3ysNy/eXc+nST9yy+u/le/+p/Mb74zNBHA5MbB6w9GH/rI1aRwjx8+uXbzRt/50ZPxYCdKW2t/CABtj30pfhxmT3zrZ7PKqLXVQVo3vlgfZsMhAJCe7++Xe40wwCqO2uao1Rs9kSiKjyfDC5ikdGWeskoRogYrMIDADQChgBglsggYJFDjqh7Np7ToLGsOpnLO2CAQjSgUpOt1ipPoABAHid5oFVmEJTFGEUWIMTrvF0oDACnuiFCBmWOYldNbe+ZMHFilFKhqPADnJLJqA9IkAclCG50iKAoiAEYI7EezqRHOlGbxtx7vtU19ebuIsAdaAHzklZcejeP9O19hw1Fb5sCu8UI6S11AJ3dF1jStZ99qRanWLobWucm8sole66+iPwBASsls1G05ISARiTEG1kg6krVvXNOQE67d2HnHEmNojbbEwbW8ubkJoFWqalxogqSa2H/l+ze2tgYaqnby+KDqPDob1mKp9HUaRMfkh9fuvX39rhZHQJ6kD/YfA6giaUVFOg8c+4WNlDsE18SeJSUCIIV++yf33333rjHWEhWFpGnRekxnlQs+tIv+qV7JNXQ7LzVhWAzYyT/58p/+09/6yjBL69AC6Pf022/dSikGFkKitOLgmMjHmGe68/qsq6AUKaOLwk7mbb9vrUIUefO3fnteBgC5RpZlqh+lOvzO6+6F82s/e+HscPf8qx/d+sEPH37yY68BKEXuPH5y9PDx49n+Fz/9sXLz6lDLnbd/dPRk/MGffVk0AdjYwDryL3/1xw+Pfuri7lqs5sjVF/7G577yO1/d+PAHL3/seQD10aGZTqTnP/qZ8/VolOzuaB6cvz0Z7d36w+9+H4DRYW11w3OytrLh2soLP3j0eGtz+6XLzx2MJ/cePQRQlXW/P+j4YsskudjLn0yJTkkgLZMkCaBOZeyFUv1x7bAcPp0k8WVPk9SyAFnm1ZNu5AKw+dTas6BbvQf9QN0AgRTRoN/LsrzqBNGsBZBY27kE9IpeZ5NNpIiodf7m7VsAmta9ePX5yHzn7p02slF6fWUjxti69uBgv3sRbfTjJ49W29XdnZ279+7sbO9472MMaZoYo7uzDyEYo8syhBCMVt77pmk5Ru9b9r6ToA1B2rapqlKIetZsbWxubW5W5aSs68l0CqCq67Xh4OHek92tzYPDUQwhiiilldLD/mA8mQLQJvHe/6N//s9+83/9xyHGyzvbP3nrhxziax//qbPnzz548AhAPXdp0ptVjfZhPJsbpUMTlE3v3H2Y9LKFI5YixdIczInFy0JqjchErZTprGyQaR1iZE111RZFL8ZgSAdw8CF20wgClCERAisRdo3jOMjVix84/8KV8708AZATw3v20MqUjNR7CRUzpUnWtKoDUM9jpaG6ZEckSlOnWyboZFAAxRxDnloWSCBC/tKVD5T1+NHB/kEdVoY9AMao6Hy/N5w3rVGKmYP3ZV0lWZ5Yu0TQct00AGyaOx+UMaS1ttYmiYuxa2NNpjMObI31wSuoftGbN+2jvb10Mhv0BwDqsuoVA2NsOZtWzk2ODsfjg6oq11dXx5NR29YAQIOuPeS9gyC11lordUNESZLYJAHQuLbLq8ycJEopYmEFYgEHH5dDI2FWmiBKaUXQ3A1FeWmhRhx9S4pYgkhUWpGQCDRJluWdm5AIfPCe2VqbpGmaZ857pQ0vqIoCwAff7w/6/d7ZM2fWVlfSJOn3in5R7F642BsMx6MRAO/8xnAY2rau5m3bGogV6SXJ0Xi8t/8EgFbKWNM2zcpwJUYoERCKPM+yNM+yKAqdzIXWpFRizWBldZAP11c36rZh8Nrmwk1oMpsarY21znthdt5577c2to02AtU1d5qmUaSsTZx3mrUxOkuzuql7Rb9rER4dHaVJaoxWRCGGEOPVq1eNNW3bCGRrYwvA+vrG/sHenXt3uobd5SvPrQxXjiZjiTGGhVmQsBhjSCmOwXQUShAA772xtmuFO+cEkqTpdDrtUmPH486yTJtONppijDpJjDEQKKWstd6zd24+GXUfYJBOkjQKK21CZGLe3Noq6+rw6PCdt99+4cWXujxz9syZg8PDQb+/tbXVtg3AMUYinff6nXWRd16EjTFK68RaY0xZzifTaTkvlTEASBmBkFJaG1LKOT9cWXWtq6oyhNhJCSdJQollEQGUNtbaxGprDRGFEPRyc6VIAeRdyxyPd+6dCneXpF3rZpPxZDK17IJWeZJLiDp4ms2T7kIltuj3yAcfJU0Sa23rZ60PIhhPptPJFMBoNMp7BUAs0vk2W2Pbtq2qSrngfAQgoCIvCBRiJFIhRKUCKaWI2rbtDKaY2dqkda33LkYXghMeG6O14W5iFCM3TWOMJnCSpgPQdDb7/uuvv/LqB1fX1iezOQCrBkflbJ5Y3daRRZMOEow2HCBaOi+mhVTsokRZ1CwdqSZE1h1I3/vIHGJkSIySQNpgO9XLroWtFCmI9x7gl56/ECK0WWmbejab+ab5xAeeB7C6tjuryqYcv3TmCgLVddkfWCFRCXODwAKgnlcr26tkjDhPSlmtXAjjerY+WPEhSlsCqOqGWQiRRC/W5cXSv/RTecrKBTiurk7bOnfAkO453SdnQbhebgKOV23IqbLwZDU/9pJZkKdPjk1ExEsc5rJI7LqQxxUnnYaWnBSY7y9ZTzckT85iAfMkUtJZDR3PZU9T1xcVprznOMcYylMPPW6udlTp0489huGckEmWkJpTvVMsS+lTIB3CUjV4OSs+1Yw83QnG
skcLQNRSq+x91+JZPItn8Sz+vx4dAwLASRJ77xjsqUblya8n7cfTjUhgCaZf9v+ebnRikZuPEY507EVz6iWOF8Vl1j8B2R/Pz+hYAqVDNS5fe7mWnaiPvWe5eq9F3PFRj8GRx5Z5IrxcLzsR5YW8x/ECSgQb2ibRPkihUVajlRiVa9dJ1761vgXQHs1VX6Vat9PKpIj5GrWRvRjWyokPBMCK9HrFo+ZJSCmEkIApsoqsxbBH7ORpohc2qbIUxY1GqJvZg323tWHN8OUPXQRQNkFZdefxhIBepq5evGK2zt/+jf/zo5/51I/e+cFPf/ZDAOYP7r1+435TrMTa++boXrTj0bh6/ObZsytl47uJ7msvfeDqT700nzx868/ffvlzX/rPf+1Xf/W/+K8HVr39g2/effTFT3zipwFom7x1/+EXv/DhNZr3d3rnPvUq8iKcXW9/tKFu3ATw2gfP2WTuq+mTJ+MPfGT3yf7EE5RQWiQym7ZuDiBnnaQcyyqWMyBORxUfzEMTwu7ADIfu+k8A2JC5uo1Wyjo0c1c/HvHcF6vUzkoJ87gwvJZOAarrZRCglCJEBV4MrKURCLibbXNkVkpFJh+jMHcutFpZF3lSVYZM41pNxNBMQmiFncSmOw7HxiaGNAuYhYNwFPGxNdENMgNgPqudrxgExCKlYX9l2B8cHI1jlFzrtg0AplXop4koPRz20oSsJa1BFIWiiAcgEiI4sG/qpkb1o9t7r5zbOLs2eHLYlt6/cHUVwPrumVp576OO3hrSSknLOjGRdKLhQwQQIBRqQiCINaqs2kf7o3uHTQjcOu9mJYCmaTJmCFTnCEydEWbUWjp0pHOucdqJTMLce89EIQRDNkYvwLSOAEjru49milRZ+zduPNCKf3z98DMfvpDbwZVzZ7/z9mMAbI1vufWNhvG6UH14X7aUQEgSFU0PgI5MhluEIk8nlSdtoFQwtiWFjl8GR4m2KlHalhxMrzBpX4sm0lKXJlcArNXsAS1aa5Y4aRwgva21VEkIxuoBAGgDDkQwpEVUJKfZag1iJr2A3SW5aGMjy5TL/uaG89JK1AZGcaEZgLK64lA0emLLxlN98+D82cPr3/6WznevPH/u/MVLACjdPnN2eq14/c6da999894rHy7f3Tv6/T/42r/7xc9nF18+fDQBsDHY/sncbe28cO1bb7gr2y8+9xyG6eGD6to7D1/+3GeP3roDIC1HOHuOVjaoPLLDs9i4cuN3v/343fsHZdWZDmtrh73isGIgEEBerfRz59v9yaRs292tLQBPsO9dwwKY9ASjsEiAx+lSTvbKT+9s35fL3/PHf91f6NRKchqhoBbgC7znuCfJX47XEYCIQgxKKWMsofHed02WPM8Ta/cODogUcwe24zzL0zTFgjEnVVXevn8vtcnW+uZ0OpnOpxcuXBoOVh6rR92Z1XW1tbm1MhwWRdHv9UlRR7RsmpZZ2rYB4Jx3PhBBa9VRNUWElNJKK5s4VwPI8rRYXev3B1VVhRhX+/0kTdp2vrO1sTLoA7hx63bdNFcvXTRaPzk8FB8b7/K8uHDxEqAeP3wI4P/+8pe//s1v37xzL7e5Jb4/mlJoSeiPvvpdtfyHJUXRjCZJmk2OjkxmfYjKWufafl7UbdtJChqxzMHYnIEUAKC0FhGrNC1hTUoZ8pGVJGlSNZUCNOkAtkTWJADSLKuaUjjkVjfNPFP04Re3z168cml7fT6fJTECiBwDx8QaxNgzUNqKzZRWrfNacV03ALToJDUhcowLLDapDmqJ2HY6gDrPep4VizCzMJP3WZK8ePncOzcffv3b3wHw85//mfNnL+yNSueiRFdkOYhi5MjcNG0nUmltqto2Rk6zdDo+8j5wZBHR2hijUzsAQFppIaVUkfcSY5hZ2WRvf9/Y0JGsCTQYDLOi55rm6GB/Pp80Ta2N2lhdPxztz6s5gLW4mlibJQkzKyW9opfOKwBaK0XoiPyKVEfbFpayKpumgSzsVGNnfQ0YMcbo1GjmCCERKEAZI0LdycQQjUmUAiIFgYhw58utoGSBoGTmLM2DxKZtjbGHR0dQSG1almXk2LWWjNZlPfM+fOZnfi64tj8YtvX85js/bg4PqtHR2c1NAPN59dyV586fvxCCs5pIqT/5w9/PlLq3d/TkyWMA58+d29reqqs2+rC2ttm0EbpViqwxSqmOrM0cFRFEjDVFnu1sbx8djVvXvvDSSyZJjiZjdI0YpYMP2mgXw2w2O3PmnLVJiDGGzvwMIQZmsUmiSHwI1iZEqp5O0iRkWdblBOedMTmAqiqfe+6FJE1nszFDtre2t7a3Acyq+c1bN4JzTdOuDIbbm9tlWXrvk87qJAR0cDGOgGRZZvSxeTRijMDCY7rrdhGhVxRlVaVpwhw79Gu3/+z1et0TlVLe+dZxkmofYBIjwp0Wtk2NAEprpVmAeTlf08Od7W1FaJv64YMHAIqid/bMmQdPnmysr9+/fy8En+f5eDKtqrI/HDIxAJskWZbZxIbIRKSN2djcg3/WQQAAIABJREFUjteud9ccgFJJcD52kF1mH/zCWpqoruvReAQgz/KN9TUW1koRqaZpqiqmRcbM1hpZXoHOw7pt6hDCYgBMEEZncQfA+3Y6nc7GYxUbSZNpGCNEG2PmqrSqAVTT+d54dHZt9fzO5qxuShfmZW1MMp/NRdl+kQNQWiVJopTi2GVx0doQUVmVghrKdPmqqkq9sOWxrm2xVCiMzHrBlYkCxOA9Rx8cQRRpZpYQmrbp3pFSGhBtrCJ1/8GDR3t7Mcb1jY2rV54rywpAU5dVU8U4AEvjvAIH8Uaxb6NoblsHIEbWJx0kLBEQQsRaa9N944xJolUiPgaBgGLruvQn3bCna1p5HzgE4RxxAsyr+XReBo62A6gG11oSW6xINL6ete24aqj1unXVMDMeAqCazdNhkSmFGGBMao2IVG29u7aWZqkkCoA1SpGKzEJ6UR+d4FC6Jt1T+tCLftjircmpxfzEgvo0CAXA6T6ZLHnKOKacnYKnLLhoXbl5Mgs9KfWWPTdatuywLNGOH9btKhjvNV9dkpZPXosWsMxuuLOEnLwPE0rASWmKp5xhlq3VE5PT4zd7qjW4bMriVC2KJf99cd5LaUnqXM5OXcHTlfh7iuqnq/DTn7qT02Tws07ks3gWz+KvaKj/t0/gWTyLZ/EsnsWzeBbP4lk8i2fxLJ7Fs3gWz+JZPItn8f+XMEtvvKVLHi2mU4vJEi8mPktPsg5wDgJ1EirvAeM/rTsmIp34nnRDVGGJDOoA66pz2Oz0ptkmukMTiLCCOsbyR/bdz4KlszY6BBcErJTGiQIJ0akhnl6cDHew9wUVmxRzPJ7DnfIhZAFDKxFeOJx18sOilsTzqLWKkUOI1lrmsHxRpTKt2xA06rY6e+7S5ZXizW+/Pp7h/NYq+RpAefgYO6uDjU3XPCmZLHITS4W+UOtCSAMARKVMmJCQCa0KdVWHYaITFdowX2GCXwEwKn1ws35hyFbTO7cvvbTbN42eVg/mdyInAG6/fSdVZrWXulgnmTH9+tX188N
8dZBxr4pf/OSnATwevVjOys996Utf/2d//OFPfOLnX/vot99848xQffeNW9e/eTNPOzvC0UFdXfvRrX/nb/+1v/m3/qY/GF96Ye2t64d1U9/+kz/41u/9NgBR/lf+w3/vuZ/58JM3v+FE/LCfJOyuveuao8ePbgM498Jz7eHk9p1rJhkohF6SzI/mtpeq1FQ9SnkbgJdpInnTxJ3d7X5vf72ZPrr9ozwGZazS8cn+PoAzK2tFX5HV9GTsDo+kaiUgUb3Dhw9a5dg3ALwGMScqRKY2+FQxCxKbWTsXcgAGWZIlPaWkrsrUqqapVaIO59OaS531ohgAgVRsmnE1s+MJKUq1aaQpLFkN5zRMDqCOkWFD1BlBQ3ecpswa4oh64h0DYA6JTJWCYaocG9BKrinqo1lbOx/AAPaODvT6MNFxbxpL58eTWc+YclbBVKwIQJpo9uDoi9yISgdFkhh973B8eaP36ec/0fo5gHL8ZP/JfpZTYlQVg4QwWM2qWTsgXTdlxxw3PtQS0lSHSDXk/kHjZ/fLQHPPJpRudgTAppaEnAixo6CmNXqpC0TON67T5wrZrdG0EXtQUVVHIqV0VsfaKPFEN96+BmCNzu2uZb1+/9WXrnzlL97YWs8dmh/euL+2Gy4894r62g8BKG9FxVwnLCIUYoikUpAm4pJp4RaiLUGUwiyC0oRAbXS5yUlpJg8gSKIJAcoHkMmqNm08123pfFRUKNIA2gBj1PKL3932XRTPBJjOEyMIEaUViKAIINhAnkSRsh5qYZhC4I7JRraMpDpKIDOTkAUABmWJdapWSBgyif43v/q1TOu6vfXt1yXPMgDGmlnTFFk+LPI3bt/4b/+HX0+tzObhnZv3v/v2tdffugagyNUg7ynYup7eubbx+NWyrP3k4GY5Gu9/49pP7t8AgGomtq8zqhL9woUXiG787pf/gOrW5L0Lm7sAymgmbVASnPOjo1GWZiZf8a5+9ydvrgyGpUkBMBEDqTWdS9aC7QTCKWkoq9Tx0F/oREEp8ql5f8cmIpLFssCncO6L5L/IuSyn7zqNWhDu+HeLA6qlMhOzdGRPLBYiYmFrTFPXea+XWBuiV1qbxHbKZa3zaZYPB0OtjU2SGILS2lqrtM6zHIC22YP7dw8Pjy5cvNg01awqXWiDdww0rj1/9iyAsqrG49GbP/7mKy+93O/1Dw8Pp9NpnuVlOYshCHfGFDE12gkMFPug89waHULw0bNvFHsA0bnMcFHkmoglzKoZ1+bajdsXPvszHdhwd3u7quudjXXnfT/P3r37wJLJkzxT5mg0+6e/9S8A/OBHP0qhBtkQYJBRUNDp8hIpsAfgmCg1DgSbLjTTQgSZVkSZtANdOAHpZHGxSR0v4Mf/FwKEA0xqFIjIJkmI7Jl1x8QjBuBdbSTEWEemS2cGz12+eP7sdtrW7fTIEmQh1wajiTmSAhMxM5EIU6LotPZyjB1iRjMtxA9V9/3vFBKJJAZNCiBmItJMigMc0/MXLr9z5x6Av/v3fuOXv/TzL774UksyZp5WpUiMCgCnvcxxBFC2TWxbAXxdA8iTRBlDRHVTM4vWDMCw8QJmNsZ46diXFEJQQPd5iCzVfM6Ky3aeJHY+nzWu3Vhbi5Ctjc2qLgHkWaaNbpzLi7yumzRLRYREKLImSjpkk3JaUQeDJCajtLYEBkRZc0yHFeEgkSJHZl7uZIAOS7z45kQIkQIJcQSJURpCDFFGGwBRglaIQeaTcfTtSj/L8r5N7O7OVpEl49E+gH5/OCtnVuvrt+415WRlpc++GU2P6qpxYcGSPByNzs4m9kk6mRz2Bv2L5y7Gfn/euIs7uxvDHoDZ5Ojh7Rt5nqdJHoITQVW5sqpHk3nAftHvAVhdW+UQsywb9IYHe3tF3rt++50vfP6vDQfDu/fv8oIpnCmlRKSqqhDD2vpGlhc+eojMpmXRSwDUVZ2kSa/ot65WLE3rlaKi6LWuydIUQK8oJuOxI0XEu7s7ZGg8G5FSayurly9dHo0OAFz78Tuh8a33NknWNjdZUdXU/f4gMaauStfUAKIPNkuM1hwZRnWQySRNYgjetXbhONR9cyTNkhC9Dz7Pcq2Ncy6xHSlZWuc1KREYa0gBgui5nZVkbOfYDnGo/dpgLdXN+bPnm6Z8crBf1XWvKCajSVIMADTe3X340FpbFEWW53UjZV2uDAfKKB+dkghAJ1YbWzeuUy0SAlMcDgaiTd04AOxbrZMQY1L0IVBE0/k0shdwlNjr9wAYbXwMxPHB3t75c2dAlOVpliQuxERbSwubGgnBKsUM17ZGACEWGIJWBqEB0FaVTfL57G6epcZTDNH50HCMzvdTC+Dh3uPr92729CsXzjxfx4krq15uD0d1MVhRmsblHEBwrnVt29Qc+ux9CD5JLETyLBPSoA4dKUSq3+s3dcUCm2bWWqWok1zoCP4KBpD+YBh8CxR1XRljsjRRGh0gMQSOMWil57NpmqbMopU5s3OWmCLR1qVLAA6uv62FZ02jOeRak7Jg8Z68zY33ytUArFVKgZUCR62EFRmKRlshjn6xQyiKgfdRpGG24lub2lwb1pqhuvwg0BFRaaVT610jTji0TlKdcgyT+3sNgN01tVoMxuU4K1S0Ls164n1hQjmfz0k3kQHMq2rX+zQmYvOOe6CVJbh5tb+7ecWpAkDkufNeF1ZHDZYlP1spUrwQp+IOH0hEpIiXkoiCztXp9PJ+mgqtIMyLbUB3T2ccTpAo6CQNyVobY+RjcCKEY1iwC5RGVyaeZmlIB2dcstkUARI5SuwKNIWlDSNOoxiXwsQsLAuosl4ceulBBGEWgTbH5BDpzlk6AYIl5vA0Q5oW7KoOgI/3vNiSybE4G+mkirvnQRstLBKXF0d1KpXCAtXR7QjCIpCO03AMJZXTlxQnxepyu7WASS7Ro0LHnuTP4lk8i2fxVy3MCVXvOLO+P6PJUzy9U55fdHzbsQ7oKYbfX8LtE2ERKKXomIZHnTj3YmHpZL+WaxYRKfB7sfiLNYWAU384bWODkxx9qt4lgogihZM7jgnd6HZskUWYlaKuUCAi5sgxmMQIRGvVyVdrpTsaBXOsGtY63bB+ejT+0hf/9gfPXnj9T7/feKek1R34NAjHunJ1JJ2ppMfxyJ3xbtI2YxLRcQpAPLwvWPYMF5oTiJVIIWhJrIu6c5kcrFA+3Lp3p508bj7x0StrKnn4zjuV0dWcnhzMAOTKXD63VjmU8/nm2f5aP63mD/OcuRp/9pPP700OAfzCv/8zf//X/v7Xfu83X3l1K5VRRY9f+/jZK1df/MhHX7v5s4/+53/4ZQB/+ts/vryTf+Gv//Lrf/St/2v4e3/9Ux9ZW9/R8TCxelSbr/3BNwD8nV/7777wb3z427/x3+xe/uClD+6aw0klQZl0ZfPMPH8EgKq6t715lUdeena4fuH8zt0qjo6q7QOtZq20EYBmwyZRg43Ho/sOSHY2UawcjQ929x7HWPQ31wCo1eFwpfmVf/vlfhKT6bh3Zmf/2oP5/Qd9krVBbj0DSIStSOsBUUZbEUXQeZYLr/oIAP
mg2Byspgm5tuoqjdF4DrY6WbXWOu8AjOfT+eFUZ9YFv72zndikjUgSNbRpVZc7GxsAEuUqH9IsAZEPgcC03Hk13l3YGQC4dH6z3xt6H6p6ZhSfWR82LnKkIk2PZmXVtgB6aT6aN02rtgaDtZX07EbPEF3c3ayFHo/nAO7uHU5nJcfohAZFb1a219zRx14++4GLW02oyDGAfqpTkkyhZ2hnPZcY2hDyVX0weqKt6ucZup4/GVFErtUxHFXlzmA1BmoDmlnT6/y3KUah2VzqUmGQMdWxtRxNpMYzAZh6PSrjtIktk9ISgksTTbElrTODndUegL2D0dG+eu7M8Majxxcvnrl7f399ffVgX771/evv3m2ZLYDq9NdRCN1X7Pg7e0r94Xh3KUBidISHSCfM4xlRYLVSIB9ICEqJjzoKRBSTWrwE2+ODEY7ZwSTRL+VfhYgJJxlAQERCJB11D4AIMQQC5qBIKRVpsWsWWu7UyYKZFcHDCAqjKRjrJANJ0+0pA83boFzI2zAaNRzmeZad2Vj93e9f62cmsAJgxQ5Xh23LT47iD955+O7do5XV3cdP7r/2/It/8pXvleMDAGSSrdXgK98i3P7KO4curBbFURMLK5VvAfQHWU7yeDJhpqK/YpRlRGPM+so6C3dWqkmSEovnCDJ4KmGeJmafyraniw5iPB1LgV38pan++Kq+756ngp7+0zKpnzhsiwhzDN6XdT1YWWFZFA2pTbp2pDKGIMbYJEnqpmKWfp53fg51XQNIhTY3Nj/+0Z8mbW7cusmRNam6rrxvnXPdlRkOhutra8aY3e3t9fWtw6P9jbX1GKMCPXj0sDOGeu7qczdv306SxFkTgsQYiMhayzHaJGFHADj6qmq8i6H1WWaENGuTp9ndew+H/T6AXjFgoWnVaq0Sk+ZEP3zrx0WW/87v/9FP7tzpHHI2iqFSqgmM5WrV1Wld0bO8ZqdvT1/aE0l9Wf729KLcPVSOn6M5eo4ssMZk2gixj7FlTuEAEEVu/dm1/oc+8Nyl3Q0Cc9t250Ini+5potqibDomtC1Vm0UY0lmSSzemXDDaupqLOxrzoshUDIhI3bZplk5mzdntXQDnz1741g/e+sp3Xv/8pz959cVX5vPy6GiMjtwtADOA9Y2NqQiL9FdWsqKnlCGljNZJlh/sP+kUPEOMiU1CCGVZKaUgSLNMKZUmqTYWwHw8LstqpW1FpK6auql7Ra6UqqqycW51ZRVAjNFok6YJBKSodW1n9W6sadu2s7thkRCiCLRasPs5MoQUSWR00rECAaOTYYss3eWQ400ZAEKIXjxIEanFBgmA0iq2sWuyiBJttFJquLJy7tyFui2TrOj1BqsbW53KAYAksVt5wcLX331rPht/7GMf801T9PpZ3tcm/f73vgsgRP+db3+ToHa2NsaT0UZvSGvD9V7P2OTRg7sAds6cyXo93zTOhdY53zRpkc1n08d7j2rnBs0AAAv3e/2mqb0PyprWtYP+6uHhQV3X3vs0XVjMMy/s6ZXSaZphOfvI8yLLEgDM3PlHa62aNlqlEpuI1rPZJIYAwBjd7w8Oj47Onz2ztr7xaO+htubipcvnds8eHh2++eYbADiwAHmeg7C1ud02TYjRGNsdoTNJd96lWeJDIBDEKKXqprGJzbKsrqvuhLU2MQbvPdnl8IYIwpG5u7y0UDUXAIpUZyentWbmcj5fW1sBIBzzotcr8rzn1U+uDwaDoijGk8lPrl+fzmZra2sAOLIXN1xZuXX7ljA754o8O3v1SpplUAoLXRoJMQpLFA4xplnazc29c6Eb8wRKEhU4EiOEAJIsy3zR6z7w3TbYGN21XVZXVvKicM71egVEqvk8urZuagBtUzP367qez+d1XU+nU+ddnufMMUnS7rOqlNbaKK1D5CzTSqUgii0zc6f/szoYnNva9CHUdds6FyK3znnnbJKcP3+++0qORyNhpq4cIDCzsCRpUuSFZ2l9BJCkaWITrXWW5zEeaKWV1t47/n/Ye7NYy9LrPOxb/7Cnc86db9WtsasndrPZZFOiREqWZUqynUBBjES2ATsxEmQwDGdAHhIgQPISIMhDYjsvyaMTxI4SJ7AdG04cx7AUDRwskaI4iD2wu4vVXV1dw53PuId/WCsP/97n3mpKCfImA/UD3XXuufues8c1fOtb3+KY5rMDcN7HGL33xhjXtVobo7VAnPOJ0NDUtdZQWm9vb8+ns63Nrczmt+88VxUlRxHHAApTbI0moVta8lFZDVdkZlQUnhmis1EBABR5PYKtr9n15pmFhwQGLEn3UykiFoaSEGLdtG3bz47PMqu1lhBsYdsljFGFjUbpYnxFuASwv73jpd3eLs/mUyKaTVea1JX9LS36ZHqcjKfvolaatEbwUDrGmGc2sxkL6qaNtkuuIM1+BAZJrWGudLIzLNCkeinJoT9Y0rCxtfTVZcoJ/agz//3DgMud2Ol1wij7vuUk1ChCfSmr9xfrvxgoMiCipFVJlx1favoeLsVTSWH/Oete7z7FJfRyuhdB0BDNJL6LCD81c3v4osvF1d/n1RrKTB+UwlgWQtJg7iHGdGCqH+P9ByGI6Ro8rea5Tmaf0kh7ahf/gE97tp6tZ+vZ+kO9zFAZAjBkFRdiIWvAcL19gitJhrr9WuN4nYkko7v+nH6S2GDZtdaD8C4YIswpv9dGDaIgvdJYSmNSaU5+1MqubX1flVrvcr/z65l3w4H1e6CVuiRMmbwspaPh2H+VsDAigBT5QyFysKafn2iNCbE/Oq201dZ7H4Q54vH771/fu/nGF9/YLZVBJz6NC7C6U2rFFbIA1a1WBdXcMauNhgvXZgC4W5liHGFzVhTESGTX5BqklfWtgQPQUTe6uosHmIw2Y1AcV6PKtFG99Knnd3aXAO698/HJnPdu7G9c2Th48dps6pkXf/4v/ex0JfXZ6t679wGE/Gf/5C9+brK5ufjZL739q//oo7vfuXHtihxU9373m7Rx49/5V/4MgP/0r/4P05l75aWbP/Ppv/B3/+E/vP/ed99/70PYfFXjuz/4zmufuQ7g0bsf/PL73/6xMQ7ujOXsYb1Yqa1Ne+UgRD3a3gbw9a/d+9Kf+Mnx1ecVlFMZU/7kvDk+nL/BO9GLn88A6K0tYW0klOJvb42ubFfXnr+NOzeO3n1HnnSbz90E0B6e3xK+/eVX9UT71RMAG9f2tDHKddA6TS73Is4FBmrHPnDOiCwQIWWs0gAyY30IkyKHovsnp8Yo54M1iKu5LbPp2RJAsVFW44K1rqqNK3tXjk9Pcp1JZLaaSdetA7BCq6u89ZEjx+itVlorgLoQW9fuTDYAZLkpSu0c729sb1WF0fjw8MwFtjoj0onHFJgWtW+It8rRZrXzuRcneJ6vXdlpg/nodAWgzB++88GHzi+1zVeN28zVj71848buztnSN11jbQDQ1szK/9yXXnj+1sHm9q5rfcvy3bffXMSgdJapHACJMHSMnCtVM4+qYtbxay/dfvXGncPFvOmWAKZ1WC7bbjHvuhnChqvp4enRZKMSEhNjempnq/DgqINml
ak7t/bKyWYUp4QW06O8MgBevnlw/8np0aqVw/l4s2q9u3Pj1tnKnU8f3n30fl6USFiBuhhrQlADcJFmPfbP5BAqJ9VxUqQiR6GQpjrEwMYYJSqI+Ohdopz0D3lcF7M7isPbfTDaa/7IBaaWKs0DpVp81OgnJg9YSj+uBZGDHlK4VCRfR9cm1EG8IlpoE8QrUkZZF10mPXqnrY2cOQllTkxbDt5DZa1+6dbV0shpFAAb2xsPZm0OmHKjlumXPvuFnZ3Nt38Qp66ZLpfzugWwu109aTtmU5VVpnlXjNaTjXFYNqcJcz8+O5vWXVu3VTVJUWwIripG1maRxXgPYNU0LJJleYwywH5Yw1WDWVyDjIMXWFvUNUaIiz9Yl5Uux800/HhR3JKni1qXw+gBESX03mKNc6pL/keYm6bpnIPSi9VqvlwkOkyeF4kU2bRN4rvVde2cC8yT8RhA64JwbJpaZ/nezs7J+TkAbXTTtnmaO5Huma6rynI2X3TON22ztbFBRDHEpm1CSMOLdRD2wXedY45kjTFGKQohtPUKogEUZb69tf9bv/1b77zzzn/87/+7Dx8+iOT2d/edj6QsgOl8tlitiiLu7e0W4/HDx0/ee+8us4TAE1sGESS+Z1msUzfuU5P13b2WWHla5znV8i6hzJ9IdS6llOuTDgAB0WgNiDAHkAZZivBNGqu1sVG+9pk7n3nu+siqrmsis9bUM1nWgBkN128YfHHpiy5YsyLgPnkXvtiD9XBT4chKa1IKUBIlMJNRLjALBccADOTK3v58tfy/v/7bdx8c/eSPfX5rY9NqrZV2XWeMBaC0ycuSWUyWkTIpWY2ROXLwMXFUx6MRKRVjLMtqMh53navbzmZF07bLxSIdhQ9+uZg757XWzrk8z53rVk2zWC4TsddoIxDtFRFFFqWUDyHLMuf9ehAC9c+NDMVauQiOCByHGCkVQVI3xiWpszQELMRgjWYWMBmrQQgSwCJESqsoSewS3nsngZRSynTOC7Vp1AlHtkYDiCF4EaV0VY3aZiki1WQCEde2Z6dnj548AvBLf/rPvvji8//r//jLu7t7L7z0qVGWv3P/rvNudn52//5HAG7cvr2zu3f0+JGxlhRJUmomGo/GWZYnOo/W2ntvdKaMbVb1g/r+a6+/MZ5Mus6JSK/OqZQIR44i/XgTZo4cbRLeUwqADyErbIieQJm1bdsSIXEYO+4AjMqqrlcHV69sbu6en5+VVXlwcHDzxo2z09O7d9/v0lhnrQUSYrh163ZRFGfTM2PMcjEHS+TgfT//BImfKzDGkCLvfYxMGbHAeY/eeJFwbLuOSNke0CStdZqPVFZlomIl2EOYQ4iJ6D07O7t+/QBAbJoQXAw6y0vvHWstIlmeaa3KskjPg7VWRBaLRZ4X8/nM+1DulE3b5m1elmUq47m27ToXYwSR844FxhillERObUZW5cZa7qf3JTpqnueF9wGQdHpjZO/DpCzaDicnp+Px6PDwSPb3yqqad23nPPr5XSaGGIOPkTvXgWixWECk7VqbKMAxdm0XI3ddZ62GIIToXYeua6IHoJXZGI1jZG3UZDQ6qdvIYq3NrO06l86eoqSqRyFEIiWCEIP3AT1xLaHPaSxPRz2URUbryEopu6YIJLHLZO+MsaBYVpV3LXNM06W00caoEKLS6uGjxyGEsihu33nu8NHjw0ePmmUDAKvVp6/v74qZ2OLDR6cqtmVZvXD9ilFiNrdfOGsB/JNf/8pIJ6Jab197ttqlhrB0a0WOigwpsMSU2GijQxAAq1VjyyzGyMzKK1ONfb34+PDRyaK5ee2lamwAvPfkA8PubDF7dHz2uU/dsabIlOUgBiYzeYK54iiClI8sPticQFCkcpsTw3kHldIZZa2JUGvGXb/Pw8AUwoVCYnIpSZdQRIa54eijgBQ7XeIkrmfBDEYdRKmuO3iiNQ9xDdlh4FCKJG626hmKwzfR5dwzvacUrSNEXGZFYh04DNnoxRBtIohczJYZBpRe7C/3k7XXQVCa8SMXZ+Lp1BaXvnV9xAMGvT5/6VBZWKme8C7UF5toOCW9d3jaJV8cIvVzgmQ4pRcff/kv+llKeGrnnq1n69l6tv7ZWSkTu5iwhqEoA+lbqnEpbQDQA4EpqJanRqetDfdQMBxyV7l4jy/e7FPcZHJ5aAZQgECYeUhB177v0tf0XdbJG/X89bT5pYFlst6n9STvfhxcmn8iEQQRIqUgpIhiCIkXGYNPnTVaI8+ssSZGr7UCRFiM1uujM0oLJNNxJXpy9eA7v/tb4ObVT9+uxnndmjTKJvhmTFtlps+6biU5m92H3b03MjNSztWdzXIAy1aWi0XG1rLAMZhhpJhU3vo2RmMigDL3i6ZRKNmpb3/7wyJz1/eqzYNrCGg6D2Bnd+u77xx//6Ppp+9Mrr54a6MKQczk1ddLMW9+5a1f/cpvAZjdf//6jbzrllevjT7QVq/UC9bWH7yzM6KtUk6kBXBQyezU/OO/83cPblwb7enf/Mb3/DKOrl7N/So34//or/2XAP7zv/yfbO7lf/Y//CV0T6YP3inHV6qtG9EHU8f//StvAzj8YLF784NXbuy0XZNPNne2Jh8dzf+Id25eu6OTYqcC0KqQMWP25Iufv/KLP/283kB38rFovTHKjj+ej1+aALBqqkrdhrmqVSbceC63N2OIMMrs7+pxBsA3bdSUsUr3WowcYuTohWOK3b3nzVLPl6vFaqaI6pUbTcZtFzyz70IKUrvGGa2qLIu+PTl5PJ+ubly9BqXqtp4uZm3bAWC0Vak3syrzPL06AAAgAElEQVQErxRzYAujlYAks2k4BFrXtH7Zer+/OZnVQStSigXwkYk0KQOg89Eo0YaWXXc6O39uv3CuOZ83j86DtiUATa1EZ8sSYOFgdXUybw7P71rocUXXdkYAlOKNgrau5UBzdvLQCl+5clAUZRTJ8jz4AKDrOoaA4VwkoVJRcN2WCtdv7U7OukczDyAri9u7mxNs1XX77kMczeLZDJ/d2SyVde4UAFk7stp18bxZVKWdbIy2tsYh4MbVPeevHp2dAcjtaHfLPzw5XPJ89XhGsbt5i+4eLdsQRJd11AAYMUGMIFHQ0rcIJykGujSieeBDCwAOnpVSgX1iseVZYXQWiV10YIQYIaKN1qQZPbFo3ex4SdJchsL5J9RyabAdRKQxVN37nZG0VxRFOPYhZbJ8Km1AirVlGCIVSUdkBOWVicga6Yv8mgwZan0b2FhTOPY+8tG0c3y4v1kSawCzjha1Gylcv3rgD+Wj4/q8bU+XrVBX2FGWFwDqGIzKdG6OOt823ihVWb+/u7HqzpINNtqWRodcB5bADiCjVRRWOq9yK5lP5jyItD6hzGsiwdOJxNM/rFHHJMLevyk9n+FSuf7CgwxvPWW2LwNXn1iXZOmBgWoyxP2UOB2ZteOqOptOO+eyLF81Tdu5dH19ZONckechhJQed50TiDAvlysASumb129SjHcfPLi6u2eMMcZYYzKb7e9f7ZwD0LmuKApr7dnZ2Wq1CtFPxmOldCTe291Pk5qn83lK
4wWijSYgxOics1m+M5kskuxGt/rgg7sfPXz44f377967O5+eT7Z3tre2P/zwQ5sXAEKMuzu7u9vbDx89/pVf/82vfv2fbtgCULYoQpQ0G8GHID4oUusLNGCRBOL1PXwpWepripSIIxfluzU1ktZJ4GUoOW2gbaYSEkYSQxdcmynZyOXG9T0An3n5hev7W75t6rZWYCcxRil13meXl0jNw+1D691LDvoiM0+TtYfIgIcLn54mFrCg70cW+MguxCYwwAwirQDUrWOgGo1vjzfufvDh48OjF+88p0gpkDF2azwBYKDGW9vprLX1omu7LMuNNhyYWRJtNs9L77q6bsZjXVXjopT5w0c+hKIoHj15DGB7a1uAo6PDyKK0WjWrw+PH1w6uE5ExpmkcgNli4X1mtIkirnPOBxeCzfMYWWvdP/5aFUUxJMAKwFIvnkqi05GTiIKwiJAiUqQYEURaFADRihEZBEYIrAbrAwBaKdYAtNEg0TrLsyzGUJYVAOecT33rIQKwRpOI944Rsyyvl8vx1f22Xn34wb233nnn8OQYwM7u7qc+85oQsrwQkQDMZrPJ7t5quVjVKwBHjx7GEHwIwYWqGhubsTARNjc3TVaG2E/5aJvVZLT18YP7zHz12sH+3v5iuQQkMQ0B+BhE4FyXZVlRFOl24hjzatR2XYicHuQiL7q2UYqstdHorqnteDIejet6BaBp6o3Nje2dncVyqq3Z2d3Z3786m05/8O47bd32Sb5Sy+V8a2vr+rWb59MzERRZdj6bWm28d6vlEoDWBgKbZc57EGmtsywLITjvtNZpdnlRFlopH2MS+OBEu38aMNFaC3OMvfGPkbVWBFWONtKznBeF9y4EVxZVjMFaO52e+xAnk0lZlgkKB2ixXGprM2uTxMLR8bHWemt7y3uf5xkA1zkXfNM05WjkOreYzUmR0oYHf6aMyfMiiITgRURAMYrSJssy75waHGRiFTDzdDonRaenp5PRKC+rsffr4pMxmc0sgYzRxpjMZosYI7NzzugcwGrZLJaLRIk+OT3ZmGyURZVByDUkAUCRl1ptn04Xh6fTIKSUyqwJsTk7O2sjJ4KqCCdpJIGkWeHGZgntzbOyGikASlvnnGgmiA9ORJTSBJBK+DUBiCEYbZTSikjnuQ4xz4sYPQXl+xaBgoibZhGCny8XwYXOe6OzrnNnZ+dp7lYmsZad1hTStWKMd25Mo+2sFHY6H2szAiBBDezIwfINr1NGkw4q4WScpkMTYgQRFXmWbEDTdqIo+AgiKGLfrJYLo7Tv2IKvbGYAPrp3OF2tIgJpnq/m4lfXdnYR7HhSehmJUgBGeVEUuSkKqqrYdT4EIiqLsUEwRolWABhQSkUaOpwvsrehiLVOnrAeMJew4Mv3eI/UJfrfcOT9YV/eStZl3Yui5rrhGKmzOIGdFxVHuZS14fKStQlNZewh4ZM0v4UuvTOUgfrPGFiTwz4whmLQZcbKU3Or+0uXgsb1VV03UV8c4o+ESgNumcLHdb2WL2qJ60LdBYw5fNDFrvdF8ksF4HUccOl7h4s2OINPhHHP1rP1bD1b/wyti5a9gQnUm9Ce6XDJXPcApCTfJSnpoR+xgJR8xIUbuPz/p7a8ZJERI/ffrIkgzBEiitTaJwAX5vzpGPBS99qAew4bDhWtPvwf3BL3wx9BQqRTki1Ewpw6pUIIPjgAiqxSShNBq1RxVUQxRmFR/TjL2LGMch07z2W1WE2//ttfs1E+9c//xU+P9zieAjh3WsVcuiKfXM+KW3/rf/vB/Fj91C/dWdqHi0jKLQFsUb5Y5VfvFLrzHCMpo40GIwpMJM0RQKsK0VW3WPHKXdmaeDf/6EH96J13X7u1P/cdAD91P/X67c2r24fHH9f1IhsZ9hQf3nOT3edevvLHfv6zAL7/rd/9oz9982S+bH/4ZFH7N3/v9Kd+9rNcz67cucbnDWZHAH7ix9948/0nB7e3Suf3Rjd+QO8dFrKYno72N3/8y1+6+nwB4Cd+/NXNnUl1q1j+oNvYfX2p/WRzJz4+XzxuJhsFAN7x3/y99+YnG59+4WDDqHFBr9zZ2cri2cMHV65sI48AxlnZnM2Czrdf2DSeQztfHh1VpY7lRitWQQOgatzMzrgN0vDy5LDa2RXKWEQJMSFJpHVtMFaHhBEQGtc511mTSWKRAKWqFm0DiW2UYjwW672QyrNxNgYhtB0AFYMxhghZZozC7evXZqt6Wc+KssgzalcLAG1T+yrTm1rgJ4VNkXBkEeJRYZZdDSBTOjOGJMxXq85zZmyVl1q7uoueRUgBCFAEhiIWKXXODq6OTMG1kVsAQIilNYSgSMpMPX/9ynP7W217+vHJojlDokdd3bLG6ihx2dQ2M63vPnh8cnrSFJtWFRJSX1II1tjNyWSxairLFanS0Ne/d1/h4xdvb+SZBlBmZSvZbKFOPl49+PDtd+8dZlVxVE++8JOvPj/aBPBgOv341PmAGOL2OGdpTk4bq/KPuqU2PF2sANyTE2PNyqlAarK998O7d//63/m11qtIVaeMzcYAQrdS/cOoBLQesdwTh9bV3hRjDXRJ37nM2i74JL6T2VwbU7vG+5jZQgQcWZERpSRJAgEAUho/YFt9bUUgIuoiQh6sR0JOmCOwDjqT9VAAQRTEgtSAwimg7/FWIC8spDRMCq8VKREFsrrHV+Gj12Qs5QqGIwkTk4lEZ7MmhLhZFQDOVj4EyUszDehcvP/x94zZWLSLl2+/mGW2XjUAPMxq2m6PxUevddWG6GnhTlekx9WoACCS1X7FBAErTcoYRBciF1ZFZt1DlrpuWiItwuugWYawt2+SujSesTe5Q7KA/691gUiut11rY/wBwTI9ZcoFgtRb17NLOfkF0VoXRcFpdxWRotza1FdrtCat0wsfoyVSWiUVhflyAeDqlQPnXWbz/Z2dvd39um2D90rRbD47PTlOIMv+3r41ZrVaVWVVVdXh8ZOqqJiZWW7euFk3DYCu6QSIMeZ51nVdCEGYR6NRcN1/9V//ld/4ylcB5FY/efiocfFKXvzy3/gbHGPjw7/+F//t5198kb0H8NKLL86ns7/99//Bm9/7fhf9ZjGxed66jqP4GMq8ROojIyU98ZQGAc5PILmy/ufp9oV1/S39tvfcaqCvDESMIT8UYe/EWChFviVXjwv9wq29Ozevb26OAGSQbjGPHJlgjLZCKoSEbvbEmosvutittduXC0qtJPHQAW/u0yoigHS/gSCyBHAQan3sfOwirDUYdj5CtMnqNrgQb9+8tVytfnj/wXQ2u3ntep7lIUYAbduu6lVS7PKdq+smRk7aXEQ0m80BBB+IqG5q5/2oqrIst1lWluVkMlksFgDOZ1PXdVevHnRta605OpHWdTF6pSjEmEpPiqC0BpH3HUPqrhWg6dpxNY7CfQqvFCmFNZtUkYJKD4Ws5VbT80eijUKIIhxCZGGlEvseVtkQvdZahGIMIkJKE4RZmH16jjUAQZ7nMcaTk+OtnT1GsDZbTKcnx0+Ojw8BjKpRjLEcjW5dP5hsTK5cufI7X/21d9/9wc7
OduR4sL8HQMBlnl+7cbNpu++/9WZus/1r+xyj0vq5W7cA3Pvww7wotDYmK7qus1mRZZkAT46OJhub1WgEwFpb5kU1nvzdv/e3TWYnG5uRIwGBOUVQAGLkGKOI2Cwv8hxEIqK0TqY6beOc35hMlEZSS5iMJ+fnZ8vlfF29nozHo9F4Nju31u7v7V25eo1Z3nr7rdlsrokSctc5l+X5iy++zBxX9coY07p2NBopUW1btz01TyUVcwi896SglAoxxMhrlQphSV3/zJxYWsICgjZaswEQQ9RKM0Ekxpi2UYr0clkbnfXmlijLc2N74Wkf/GyxSJOasyxr6gZAWZZVWQpBax04aK1D8E3bQKRp6qLIASitjdKr5bKoqsg8n545H7TWIErnhqMwD1w8bZQ2zjkitbW5NZ2ex+DTA6eImrYt8uLgammM2tzYtJmdT6dKODX2sjALa22Sb7U2E2mMMRSjiMTIAFZNM51Ou86LRM9sbTaZbFJRZIYnnQKQRdQt51nbdK4LPF81q6aNIRilQEi00OB98N5ok2iPPCQRIYS81Hk5BtB0bU/JFCilnHd5yIqihHDnujXrVmkjkJS+2CwLMSqlBUhVJRZZLee+a7Is25hszOeLZd08uP/x6enZdD7LiwLAeDxeKl1QvpFttOhqrynqWFTGc+c6cOIHmMGFJV/aBy8CKOqFF0VEa5VSGgIUoBSFEGPsQxVSyhqTRMfbiFz7k7Pz8c7Bz/zEpxSZ87MlgEmxkZcT5xpjRMEUVTmZTMrcQtHmxkRnJhlPVZSBlCGCNsbYLM+s0RI7UspBALRtl3jptO7IHlzGEC1RYuqtvQgoNajQ5XQtsTho6Itel78uwLMBC7z4rDXpcnAYSlEaoJ6YJb2QhST5I8K6qiUyKEgOtWW6+Clp/V8uy8lQmxs8DCLz8OgNV2tARC9FJHLpv/RcCwjr1u8hfb3cIt3//yLSIbrIaHv2I0SgiWQd/EiaNt77M3Wp3Hvh3tPPl3w6XRxSH8dejqUuOfunEM9n69l6tp6tf4aWWcs3AZdKRZf5Df3Lde+dXJI2kksVpqesqQwdC/I07EhKDV/Vb7KGQ/t/+6EI3JMqGaI+qVCSdnEooSW6FdDLLw27QrSO/y5V+tJe9XAkESQqQUIhhQghBI4xckytCdYYrVRkTgI0QULkaCACDlEAGK2zLPfCIyvOeWN2JlnTnZ1U+aiOd3k1B1CQrbIXitGtd776lb/+t/67wzcXPI4Pjk9e/9ztn//Zn863dgGEYzep5qGWcssslishUbHrfGeqylZjChFAWe5Ys8er03v3j2/WVVWRpvF77z3YuHL1y7/wRQDTux/Vs27VLA4OtrNMffDh/d2NrSvX9kJYHp42n/3CpwHMFvdcmT33Mz+x/GB67yG/+dX3D4vrV0eV9stG7M295wE073a7XE7YPZh98GTpfuLLf/K3vvGtWVDvfXzykz53b/0egOOzx8WWdh/98PAHP6yub032dk/ev6cbx8tqcdYB2N7bkqOz9w9Xs/MPDw6Xb3z+c//Bv/dzyLu2fRJJm0SHaZHZDQdl2hpkeDKuHx5t7G2MX/n0/ce/982vvQXgJz5zY3m++vDu0VjJaN+uHpxsXJFse5OIlqsm3TVasdHkPbPAhQDyQws+Uv+Oc6tV28YQjdHTZWut5cikZVJu+eieHB8B2N/frkbj2fmyrCY6cF2vmIPrunrVlGWeFxVSCBXD0dlUaVIYFVaTZY7CxAI0LgLISguGEtU4znQmotsu1l2oPXURShcAtDbOraRz2+Nq0bRNExR3kZig5ysPwAcXfdia7GxNRgfb5tWbN27sbrx17+jlmxuVypq2A4AYuxhZ+HwlpsVGaW/dfPlr3368IrdjC2tyAEa3TdeWNs8NrUKIyP/oj316ouI33rr//senWW4AGDU/njcnT2bH03bp2I6rWRf/3j/5xtd+4xuvvfEKgFvXxprk5sHmaB4+ODrc2ZzsjEbOh/l0FnxYtB0AYtnZ2dmYbB0enU7b2UeH8zZqUdaR9lDLZg4gU2Yo6qqhB2VNVR7CPhn6nQZjwoiORUSKUQUgy6z3wXVBaQsk8cfEuAIzJ0iRCAyFvsllHWSm6rp+Kp68pOmwZlCui/oCkCQUEpI07wY7qPqImZg0FDEpYU66/8JBK0WMfhgLExSEyDNDETJjocEhEpYdvO8AiDLb43HdtQVrB4kUTmfHuR4RGVNsLukQwOx8Kg7L1WyzKje2NrxvLYKKoRzt5LYA8Oh8eXR+pq0x1vogYEQm750xVpE6X67SmbHGdt7TJRN/+XwAkIG1ht7ED7b5Ejmgh7J6OEouf9LTVEe6IKP+iEjSRYPSBbFOGGK0WYOiSVpeJIqI974oChFkRZ5nxfHpKQ0thwDmi+XO9tY4L1JDdyotWGMBrJZLTdTVTVGUi+W8851W2nsfQ2DmrmsBeO+ybKtp28lovL21df+jD5KwXQi+7VRSJRtPxiJSVdW067TWAoQYvfNG27/0l/7yZDIG8N3vfZdDfHx82jmvM7N03R//+Z9//dXPWKXe++EPAfzjX/31b37rWxZkjdkcb4YuOOcgKoZQWMsxADBKOw5KmSHzoMsu9Kks6inxevrEi4sGheEp6CczoecRp/ZCAzLsYtsYwovXdz798vPb2+MuutjMALAxShlDJCD2CAKtsl5dQS6pLw8Xc80YutwfkX5PCuvksqdGp6PrtyIBRUaI7Fm6wJ7FmMKFoLQyJoEjkQRRSJFpncuyPMvyqhqdz+dt5x4fnwAoP7jnXJtZq5TWxk7n8ywvtNaZza0xJo0l0TrL893dPa1NYJ6fnjKpsipW9Wo9KI/Be1f2j548Ojs72dvfu37t6s7W9vOfeuXN7/xuai99dPg4iomSZNBUZPbBa1LW2tZ1CUETYe/dAPbHtmvjmmska2snECES0iANsGKJqceTWaXzZ60BVIRwlIho0qMngZRJmWsMURQM86peHR4fitKdqw+uXmvbxjnnOgegbhqt1HPPv7R79er0+Ohv/8rfsFq/8qlX2q4djccH164B2N7dcc43zkXvbJa3nSORLMtdCMtVDeDmjRsPPvro+vXrxuaRhbybbF7Z2d0P3rVtm/qCBdiabK5WS1JqY3N7NBrVdS0Ca0zbtpdvy6Ioi7wgUkliL8uLrmuJkHqoY4jWmqosu9aH4LUiEV6taqN1VZUARuNJ29ZlWd64cWNra1dAx4dPvI9KqSLLmQOA+Wr5udc/e3D12qPHHxtj09ne370ym84ASpcpcdl88EZbZg4h5FnOwkn8wdoMwKquR1WljfFda5QySnfREZTRRqkeYB2iThFhrY0obYyJLG+/+faPfelLAJ58+G5V5iHE0hgRMEueF0QqBK+VpgoA8iwfj8en52eZtb5xzFyWZeK75XmRnjJtrLVZ13YgRURd64JzWZblZTWIlsL5AFJ5mZdlFaLE2Apoc2u767r5bJr2Uhvddo3W+s6dOyABjrQ2s/PzMs+SEGqMzMyAxN63kveemSESQ4w9kiapd0Fp65slM4E0BGooV4iAIz
Kjy8xCSTedr5o2MMqytLYHYTNrrLHWmhBiiOx98N4TURQJzIYjgM65EH2WZcKRiBJ2CRFmVkoNhESJMTJLCMFYE0MUxSEGEUkSn23btm2XZ5nSWmu9WK66rrt7931l1M7Ozp3nnwdgrT2u67sf/XBf86J2wQc1O5l279zaqV5+bc+WBkCQmJEe8K2keU/JVivSwz2uFGkCsaQRJpRbLWmelwISvyHJyzJr4a6uy1EBRb/37t3pqrZZBeC5awf7+UbnmrLSNrMbVYUgAIeua9pVXuUAECUfV5EouKiVAVkA1lSeY+e7jhyA5PeV1sJDj/Qg3biuY11mfvRA6+DQ+2rlUxHDEAik3uJLClprqz44sE84riHEEghBoYcYIeu87RKRZAjj0t3Evc7HOjNFX9npP7XfhTVtVQ345vrAkoAG8zrQG/b2IhAc1HiIkrzm0Ax98b0/mpNeqtrS+nMAkKIhbEwKA71JB4uskVyiS95Snj46XISktI6K+muCi+x9nYf/Punys/VsPVvP1h/yZXptIwBPV1WS8VOpMNxHFMlKJjKh8CfaHmWoJg1meejofmrFyOv8s/cSPc1Qer7GRcUOn3h1eV0IffQlObmYNNbvrE6OafArac8HjDMdlZCkoIpjCCHTOoYgECJJcIPWSpOKMZJWVhvvQ/AhMybEwCEA0HnerhZZUUKrKJJz5hWCgdTd5thg7/MAvv+7b3/r778/P/rW7371e1llx3t2cR6+/tUHv/qbH3ztV+/+8X/hSwC+/DOT5283erVi1yk9ykZbikQK56GfHIfFaQfg/OP3hR7wWVy03cen8afuvLK3f/vt906DV+2yBnD3nfsvXd8vc2Urq8bV1VtXlqfTD5/c37lzZ/dK8X/+o+8B+BP/8mceHX1wZXM/33Tq9q2/8G/8hV/9R9/5t/7cF2Onbdadf3gXwO2XXvvWV/7eL/3Sn3jjS5/7b/7af5+NtrTy1uuWSZUm2i0A9x+e//y/9HPZptx44yWo0s4P2/ls8+DgraP52YcrAL/4537mv/2b/8d4s7x2Q0+syXONkYr1SnSmbSUhgW6KIuXMykdhD1WONze7rMpbHk023n3nHoCXb+1tX7nezt1suWqbsL03oSIXImR5UeS9d2YJLALNokJgkGiyEBJQwiO6xTlIjDYiuu2ayWjShsa7ru5851tjCIDrXFmxVTDEs9l5Uy9tNSJNVqksz4tyDGDOEmL0PljW03mXWUwqW2SmzDIfxOo03Si0IXgfdV5qna261gc3X7koVpRNrYIhSt2yprho40en083CjjJFJI2EJrFC0rwDhK1JeTI9O96ZTsZGkd4abSDGbGwBPDmctp5vXjXVpGzb4LuWhJd1YPF7e7vjogIQffvkZH56ttydlEVVhagfH7XPv7r1hVf3f+d77bTuAJzVyw+fLGauxXiszcivgg/nqtAfNd2Df/oOgC9/6fmDgp6/tfOKGsc3I0gdHFxtlq5tfTm2FUcA07PTjx4+2No5OHXtkw/PvC5gqYtQ2Yi7dmQzAM556JQ89/qMzMmsxIvHeIhKey42RGnFUcqiGvUClLH1DYQVVIxOOBKUxBiJwXwRiokaLMxAFSMIgWk9GOuizp1AARIGEdQFHkmiJBmDxIW+KP9TzycgQhQFpZWKUUhpBYqRNUynQ8+gtFYrFUMgQBEZbV3bkdas8o4ldB6AzaLJ80IVHz86rmwhqkA+FUcfHz0s6o5UBiA4NtrqPK+ZrfhRplctuhjaOK07BrBovRALSdt1HIXgxdhxVbRN7WM0NgewMZ64rm97vzCvSVH9EgS8bq3COuD95ODsC9s8UNVpnQBgSFGACzLqZTwy/RuY+wbjIacY+A+MwYtorZgRI7wPdb3a2MptZq2xWZaEPwRpoogxxmgiVZVVmoSgSAMyHo0AZDafzWbv37373J07o6Ks66bzndE6MhdFeXtjAwAJus6FEPKiCDFqbbx3eV4IsFr1syyKotDaiI7OO2EuqlIp1TnXte0bX/hi9Q/+PoC2C0VeatLFpPIc0XZZNf7wvXtv/uCdr37t6wDaGMbVRGL03geBKYrGdwLYPJcIHzsAeVYZpZiFCLxOji5pbg0XLr2BNUmRLxKTHhQZUrn1Fe7xAerhSFaQ6FyHeGNXf/al525euR5Fua7JTcgMAYhgx8EoUyjiwB6iM43Ilzv4LqgdRHHQQxxupku3jXziJur30yfkDiREgcVxdEGCgEEhBGNtZq0LCWRRBCKlGBwiGwNmybPsyv6V6Xz+zW9/G4BW+saNg7IaJZTAx2hYtFbGWB98URRIM50IWZZba7z38+WiGI3KsqSczDDFzvv86MnDxWr50qdeUoqszW7cun37pU/F0N288wKA733zt+r5nJRikdliSUT1qpmFunUuy/N1Aqq0El7fz3rIg0kubvjhZHFM5F9DSRCGBzU6joFTS4oxSqCIiQhKWR8DhwhAKBqTRY4sopVyzlVlOWC+/aOnQNeu3bh554XoFuV43LbdtK1HVXl2fvrSyy+/9rk3AFTjcaYxnc265YKIDq5e67qu0FprleU5gI3NzbqumRlEZZE75+vlMsbAIlVZbW3vAMgyW1UT1/nxeHz9+g1jbAiBQF3XMXMKrljEGjOqqqIoEhaZ7mhm1lqvVst0SoxJ0pTwrgveMUdrzHg8KcoCQNs2k8nGzvaOMebo+PD49Ljp2mvXDh4/ehRCCC4AqMry2rWb09l5iCHPM+9DVY1DjE1Tq2FOrtY6nSNjkhhP4jxqjiyAVgQghBBCsNYoSsgXKaWYxQevje6fTREiUlp5H4U5kROv7F39+m9/40/9mT8NoKomHDutiGBGo3HrnTZGx1DXDkAa4b1cLp13V65dn8+mk8nGbDZt29YY3TStsSbdSD4EgdjM5kUBkLFWWwuQzfI8oXIBaUZklhc2y5rFKj0O1uiyKM7Pem1lZrbGdM43TT0alW3bCjOEsyxLwH0SQvUhQMRkWaoyGG1ES8LfAaRHQGmjjamqSTXaqKpJ0668922Xep/dqnONC61fkrFlnheZfTJbLFerDDSdLwDs7Y3xWWQAACAASURBVGznWa6VijESaRCRUiJijVVEzjkA1pjRaGKUci5opZVWIQZAda5V2qQm2zRoW5ihNUA++LIs2TGI0gD0hF0qrZXWWhsi2phskKLOucxlJ0+OADBLleu3fvCuzKdZbmLoQuh+J353oype+Ma3pk0LYLRTxi7JS+CC0zc89okCrZUmUhBK06OZY2aUIsqsMUYBEOa2c52PSikXOQSjBOfTJ7/yO296h5985WUAt159IeqqLDUpVgqRWZgVibBv21Yl5oS1IkRZaY0EFzoX66Yb5YVzvGqbTpUAQoxJebGvC13gZwOilcb5DWVIDFldX5m9qCIAQyVsnfElzPFiBOH6z9duv0fZnnIB6zHTclGcvOwdBvgzKcUoJSLJIyoaMlQa/lpk3ebcf58AQIoHaA1Qrgn7ooYUdL1vQ9yzFjb+/bPPP2hRf8SydrPDflwSNcFQc09nfWCNXpwUgQinFvzLV+kS3
plCJrnYPRpO0f+v3X22nq1n69n6w7PMUPK6NBT74lWCCvsldGFeSfUq7L3D6bfAgBF+QqDt4vfJa/fJKvfkSfQVpLQVUd8J08OJfKl412/x1G594ivWTu3CWq+xSAyal2mb5EWjsPfeBw82gJh+9tyFfAiBfAjM3HZd5zrm6FyXdiEEX+RG2IRAuizA0TfB1xLy/ElQZ9++C+BX/uE3a4ovvbj35//s5xcUPz466xbNGzzSDrOj81/5n/8xgAff2P03//KPbzz//Ib3XOfvv3P2wx/cd80qelmcB9USgJdenFzbH1eTIlyrPp5380DjbBIi2Y1NVRYAnBIyaLuVrxladu7sbz730ru/9/3lB7PPfvGfe/t/ehvAl0cH17l+6ze+/5k3XiviR5//Uz/zlb95+L/8X9/9xT/24ka9urfQAP7oj1/7m3/l6Icz/wt/8hd2Xv61j48fbx/cOHnrB5XG4cdn/+Mvfx3Al37s9Vd++ur88PscaHPiZGt3XFX2hdv14Wp+5gHMGm3L8suvv9Bm887x4wdPdm+RdXW1WTnkCAQAriXPum6oUu2qlaPF5samCyF2okn91Ou3AKhyfHL/8Ojcvf5HXlfTJ2SUGU/qVZtFRlGlGnTq2Og8NKkQ0QXXdT7LSh9ChAegkS+75cbm5sGVg9PpFIztzcmTk48RxZCZlFsANibjYrRxcry8dfWKC77uGrEGMVNeZrOlzbYAaG2dj9qo3OYhBkWKiLTRVZEvWq/AAJxXFE2MqirtbNXMVnVu89azCGdFlkD8xWpZN92oMK3no8UqxHJcbbrgI8KT6QJA62MQNS7tc1f3vj87Lkp8570f7pbh0dliw3gTJgBKrXJQdKQzs7dRnM7iybRbLKaV1bmitmUAraNV605OWqN0tbm76uhotnp0WC9c/dpL+yetAvDobMWjrbd/+HHsonczFyVmhUSs6vYLn/8UgJ/+3Mv3775lrc21Jg6PzxaeTNOFrq2tHkYYByd6dLLE8UnrJdPlaLVaRlImcCbGkAEQjBF4AkFI1rXzNf/roja91prre52UoiofJbLGfDH1rjXWcN+PbtAXnAlKr9XJIw+fNqjTpp858jo6XiOVKdbuy/S4ADABUqIgSogFChAhTnl431IIIsWkqMcnSdIwbiEUlKXJrQQik5qSVBT0HWrQjlPbjgBYrHwbpntbY0bwQcq8Ak3Kba5Z+7Bozh0AUlXHIUZLoQtnT0oxLz333LxZna+WCVZu2ULngRFiyIzVpFReWUMdOMRQVCMANsvPZnPpv/opI7lel+Q1BzvaY1t8ads1Goa1bhNdhqUGRHJoWlr//g8oL10y5N4HUmkiCjSR1kpEiXAamZpU/7XWxhg1zC4QFqNN17XHp8cQKK3Ho7EL7oP799Nn3rh67creXlVWeZZVZam0dt6tlktt9ObGJoCz87M8z3e2d7a3trM8393ZZRGltAhSCySAs7OzwOyDd84TRPdteMIxuKZO066Ojk9y0oo0AGYUZfXevY++9mu/edos9yabAKwLilRUMFa7yI1rbZaHGIiIOWamBFB3PisKwAv4qUyl9629o70EtPf3ev/QDJctQfu9jYSsB8InLh4SBI9Y6vipT9148faNzcoGv6LQlQQVdAcAyI02UFHEkahCWbCSNojpr9tl/7smKq+/f53mDrfE5aNZxw0+CoDUUBlEfJSQ6p1EBAkhuBADE4A8L1ZtyyLG6CgwSsfgBeRC2NneXiyWAH7t61//8dc//cKd5/d297KysDZLKM+qaco8984BaLk1xgDCLJPJ5u7ePmt1cP3m3pUrmzt7AEKIZVnWy8Xe1YO8UEWRt01X5Obs+Pwn/9jPtW0L4E/9q/8aMbetK8p8uWx95//qf/GfffPb38kym7iZ6fASGT/EiD637E9C0ojpLyUBhCTzl/6QFGmVGW0AkFJt00AYCsYoEQpdFIgmGGPTJGUvQWmlSFlrNza2lTYcY7Nabm5ud107YCR0797dBx99eHV/O7d2c3Pri1/4wo1bt+++9/YH9z+YzaYA/sVf+tPnk0op/eorr9Z1o7UuMmm7dmtndzezAO7du3ft6lVmOT06Go83Fsul1nJ2ejqdzXb3rqbhUXlZRs/373+QyKfK+7KonHMJCUrIow8hz3NjrbXWOccxZkWhlOLIzrv5Yo7e/usYg6ReSGajjet8WZaJrFdV1bWDa6t6uVzVq3pZN/VkMtaalKLO+cR8fP6lF4nofHpmbcbM1lhjzOPHD9ra5UWe7kKbWRBppYigtY7CIYTkNTjGNGCqLMsY2VqEGLTS6UAGXp4AMMYwR0VKlIJIjDHEaFm2trbfe//uN37j1wF8+Rd+brnoijxrmqYoSig6PHyyWq20UjRUHEIM3rngfZbnx8fHq6be39tZ1XXkyJGTLWrqlQ8hK4rReCKQvCiq0dh7B4hOs7kSXS+1oDOL9LsXQmCWxWoFQJi1os3J2Bj98aNHeWZn8/n2xiSEqK1N90xwzvvgfWDmzGapY31gHHA/CCjGxXzuvQMgxBRw9Ogxt4trpZQaADoXTqdzq1NxMXbeC0tmbZ5lnXNJ8DexxpTSAPWz4pUOMRpjlNJd8ACqalxWpl4tQoha6zzLBOJC6LpuPM6SZEfwIeFTeV6E4JTWCXqMRCkqEMAa2zT1eDy21hqjg4/G6sa13vknDx8BqJvmjdc+rQR1XberUyVixeko51P17vkRGwUgo9iSSrEF9YW1C0Sy919Kp+BCK62IhL0mFZkToz9ZwR6I07o0WqqN49OZl+7WwWhc7N659RKARsbarx4fH02Xi72tqiqrTBujaVRlHKO1GkCWZ48fHj6Y1ZnSRunSh86FUUYxonUu6AAg9nz/mBSQaTDMl41yD7FjQAkH/G5do30KOHsKn3zKEVy8HvrvPrFSQ8xaw1QkVaNorXIxRFhpP0kISmlmZvAFjJdwVcRUm6QB3BsqPQIgcqR1q1x/aShdFFpDfJQEjvsglBNRv0cXBT0sS584sh85pOFY1qXDFMymEU3DOWDmob64/qzhMqx/6k89qBfTGZ64pxN0efrM/7/t27P1bD1bz9Yf7mVAlFkjUVKkgl5NKuRl5lxnyQCIHCX5CUqTXpgERrGw8FD2V0qn7JpF/LrurzDQK/usRFsdQiQIqYQzDgomYRC4V1CKIFqESVNWGNc54YBerBoAWFiTlpQ5DxxMGer/AECASg3fylpFKrIEiJBokCUEABR9DE5EmAiKtLF1WI1s0fw/7L1prG1bdh70jTHnXN3uTnP79rX3NfWqkV0OrpTLduwoMSQxSjAEC4SAABIoQUFCCEQj/gQJfiAUkvxBSESKIQlBjuIQkpKdcu9U2XH19er1793m3XvuuafZ3ermnGPwY669z7nlipUf/MDSHbq6956z91577bXXGmvMb3zj+/qGgAuzGQDfNov1klS5KnLr4L1fLyNrWRSbac0QKevqpnI2F5CaYJ06QzEUzox3GgD/4X/xUxeuzrwsW9EZqI3M5HI1tOqUS7QA8KXffTMcTeyFC9/+7tu//BtfcYqX
Ll1aeXdxb+Qu8Dv37gLYne3sF6WF++zrn6gePHpydGr123sXp+iX0vQAbo5HzmVm4vJL46bvHn9wz8zkuR/6zAe/+e0H3/7Oa3f+EIB7H72zv6c2v8D2arV/ePzo4as/9iO/8MU3/6f/4W/+uz/7hz7xwz8E4B/8nX/03/6lv/Df/9W/9es//hPX/tCP3X/7zR/+8T/84bfemrLe/fKXfudwBeC//q/+3OTJqplcXB3dp/IFHmk1msT5ktfz137oOQCQrpu36+aJW4vOzN2DB9c/9Zm22wmrY9cs9PgEQHbx8uH9jyia3TvPo36QTXakiVyJOhN7rUclgJl2k1H26g9dK/I6VFMNIiePbI8PT9YvXb2alRMAbVxVnpu+h0a2zjeRXWEsl7bs+gBAjTjgdPG4NOTy0ihTkY92LonvlChSD6CJfgoqC/fxvUcqPogWUdWrJ3fxytUr+7sAHj45WGfWuWo6nTy8/8GFWy84Z0JsYu8orgN6AC4fe7LRoIkkyMRiBZysemMwcqI+AjB5oWsT4ZreZFlxuFwTU+x1Mi37PgCIPuZWS1ccnz7J8+ze3Udd46uYB171uS3CHEDJmWffxoBOJsZevfRide212v99rdEJMQIAsVmz7IPIjSu3JYaPH9y9ULnxp19bHj7e2bswJgJg3fLe43dIuNPYRu17Dw4kzke7M8oAHJ0ufOzzXpb9iVLwQQ+fPO467/KRMhK3tLpw+97HTx4fPmAqgjO+izAlE0eNajlsLm2oVVJNttcJMVSCsnE2+YFGESiboc8clUgJhjkiLNcdgKbzzhUJ60qD1ynZyBbYBKCpeT6UboPBI5iUiHOhCIDJkBhBUCgpRQizAyGR7obSbzNYEyQQmMAMtmzOFCoSdqACVhg1xsagzuYSIRITrBcT9ZsNgUiJQIEhkjy4jSkzAAoRwvE6jjMLI6FtM5MZW80XJwUyU04ArOsmz0ofhcgxOa7ydYz7uxcP533Sv4hdYGfBVo2xLivLrOt83Qo4KzLb1jWAZr0GQEYTLLudLyMavg0AYHMeRNp23A22higJgoRCSQHBdml1vjymTTGdftj0vUQVaUI/kR0kJoEzEBEZYqIoAQTSCCDC+NBnNjPUUJbFKFCKoqoaYsytAzAeT3yMFKNhds7e//jjnenMWjOd7B2fHAOw1onGGzdvxuAX62XhXO272WRy9dq1+elx73sAIrFp2hDi4ZMj7/3pYsEuA9vEkwrBAxiNR953RklCKKpKIjSwg4OyGIlsADx8fHzj0hUyRtgKq0Z89M53s6zcqaYJdIOxUbFZirG1FqKWGCJkKSRjYssS+8RmUpWNWsnAP90yUBRbQsQwfC0mD32bqHzD1ySAUASiXxORtdYw9W3Xhg7AjjMv3Jj8wOuvEGBIybcWqkxRYtBgXA5ASJhgeQvpsxAbFiJSRRgmQ8kYE9OMMVgkMAuUMpNBuY8RQJXbKCBDfYxJQ1BUfVTLZvD5hYaoXdQ+aBAUuWu6jkxmnZMNE7YPHsRQYbJMEnxgZu8DEbWdT7pvN2/ceOuDD7/+3bf+9J/4UxdtPqqq9GJnTJQ+Dfgvm2Z/dzbKs9qH/Zs3/+RP/0vDjPbZxFz6d58ALzHG4DITVWcXdgDko3xzwtO0qNqgO3vVh+9+ePDk2LDpfYAk9wh0Xdf3RiVdDURMIfZEbKwh5pQJJcTEzAaBkl43ERsi0igeACIZpxoZ4OhVVGCggq4Vl2fCPZLmNmO1agPM4ZPHyiHPq/3dfR8CiLMsA9A0jcRY+77xs4cHD+rVsqrG3/jWt995/91PvfHGctUAEJGm1yihBXU+lHnWtO2sLN57//0iywFcvri/WC6Xy+Xuzm7ABNaeLlYCEuiqWdd9ByCrqqBYrOuTk/nOdCfG2Ife5ZlAYwjJLtw6MxpVEkPb1EGiSGwaz9VIlGKIfd8ByEtbVaO+E2bK82q5XEWErMxXzWpnZwfAzSs35otl1y6yyZ4sTwtjjLV9iOJjjKGYjtL5sFieprn+5XKxu7O/WJ7u7e3VZQdBCMkknRw7gSpRULXMTdMWRc5M67oejUYAyqJYLpdZPg0xikjXdZnLmCJh8C0R1RAFGo3hLMt93zHgu76aTGcX9v/Xn/ubAH7ghz+nQUMgylglZNaRYjIaLxbzzLm2bQAUeU7Ahx9+dO3qlRtXr50uTl3m6rrxfYii6UZJJKVzXjTLODO07PxkXArb4GOaa173kdlcmM3WXex8cIb7CMscYggx7M5mAJyxVVX2Xdv7vrR533ejojg5OZns7GTVKGGsEdyH0IXgFW0I1jlAQ4xEFEVlGH9u3vzud3zwhtmA345eJUwze/OznxmVYwBff/e7j54cTIpyd1YGMUbMcrWMASDs7+9b4wAIU2nd/OQ4KMHHaVEyUBSFELE1kywHUFXjVdtYWzjugvfWZiLecJY5FyUmHN84ssY4l6lo3/k8z9u2KcpyVZ9uepIiGokYyqtVHWJMOsWzUWXIpiRdTkaZ07/45/71ojkiTxoaBY0sG5CwW3sG8Ff+l/953p66LI/RBxv7UBNr13tiY4wk86i6qzXNeBiR6BlRKc6KseoAyjtj1tEXDn2HrNDgnJuU/eOTjw/rn/zMi8/dvAQgt67x/stvf3O+nk8rtmROF/jc6y/ceen18e4lmHT7jXLy5H//O7/y/r0nRUmv3rr243/4U3nWGevVcEeJN1pqWGTFMEWiEUkDV2RYhSmUYZNiv6oyb9xNz0qrs1HioW4gkhg3t/zzZtUAwMRRYvIAHE8mq9Wib3o7OBdhg95qml4HSEQoeRKmm9qmk5nWeCHElERBvB1XTvZnSexVYpqzAQPWDk2CrVn3tiuqAlE1ls6GqM91rRWaFpeqBBgddJsVqmAQGR2KGE2o6Ja9KBqGmlMHhikRbzwaz5dUZ614Bg9DHqRE0LPyinA2Zk6qqZ04bIA30xI0YPiaCisY/j4jLM/iWTyLZ/EHIazE6AFiijEYS9DIEOs4Z5RlnpZhykPG3LRkoqqyNWnsMgFzxCBSUSUVJh4o/grhRDRMA1CQODjNqWzWtKpMfLbCH8b3CBuaP20nuzd5/0zabNsxGlzcthvZ/qsiYlJ7ighK0fdp8pIzoyx971XVclbm2Uh41dRVlve9b7segLW8ajpDyOZ9EzVz1hI7Z9lkq6YGoIjF+nSyeyHG4Ls+dH5S8DyE3tnCzMdXFQBVtfKyKoIEMpmL7amzZbvsRgX3YeIbAJCm/ntf/N2dr46fHCyv74yvTLPKuLzi67OqrgN1CuCorsdVcXln1LXrx4eH0bJfzVeL1Sv7t7X3AJZ1H+frzGSTXtrI1XQn5PmDX/rN2f7N9Tr8/N//JQD/0cs/+v4733Ino/uP+jy/QX1/9/D++PkbD967+Y/+wfEf4SMAN29eeOnW5L/7L//8f/OX//KH9x584V/8E1euvvji6y+8+c77j49WprQAPv3a7YfvvXnjR+5kF/oYusfvvXn11Tfe/tY7Byf4yT/xOQDtdz/+qR95I4a5Hecgvvfx4f4/+ca1SzsgD4f3780BPL9
3ue51sewuFpWZXtPg664hUz16eJDvT0MXAHzrzQ8//dK1cvcCoTNFp+t1nF7slv0UvR72WRAASn4dQYHTrK2KeFKEANJOFYBvWg2xGFcH85O9vaxv6xyxLDJTTp/MTytTArBk7z860BiNUNOqZTtfLtd9rConIbx/9x6Avl87pv39K0Cc7u4Za6J0EptV5AhAHIDGGzau7fvj5cpyppQ1Xd91wTruF+thfGw8MsaFKOu+55ad4ZN1qDsfbdZFA6DpvRIv+g5zuTAr67oZlVm0zkCUKLMZgIeHq6wwzrr9aWnz6q3Hj5v7X9+7eDHq+uPHB7uTCYDTVccmm07zZb1ed6egvovh//6t74xze2nv6uliDqDzXV13sQ++930kH1gJLArVarQLYDK98MEH716+fKV/fFS6HLqum97lWRvEqKRz7/FyeTKvlZltpjK0diVNrsiAlQwsIB1IG8NlqwDgY59kH1XAzKLQNHxNyLPCGRtjTGRDM1hJ8DlBoN8vNulgQwV4isp9fgMkGlPa4XMbFo26yUTfaxAybDjJG5JutPdT2YrzLt50lrRU1TCl1IcNlUw2crwt2FounMnKUZaXbBsft7lOffBMRsHG2Dwvg3AXMB7P1n4JwGlUts7lOZEPIQiibCwftsBqSri6ZUdumernDgRtSv2h6B0eZSZR3S68kweyYeaN7cBme2fH/ft8HUP9nwh7uq2wNxQMHaTtN4xLQHlYT2hyeKibhjZNrKSSlny306ds23Z/d9cY896H79+4ei3tTlmNmtVy3TSlcyHGpm0KsodPDmc7uyenp2EjHte2TVEU6/XaWVvk+aisMmc7Zujwdpm1McT1cuWyTFUT7YKdFVXfx3E1G7bT9L0PCAIiZ/M8K0IIW/6DnvuTDnIym4c+NWqVvqNzTIstUka/99wdFkZQG70tyhg1+T4Roe07iX5c5tEaKIn36ptpzjevjwG8ePPmpf2LEpdEGPByFSUSJsiWkbF93zMeDZFJvlEJD2WiGCTBkQRWJWczZup6jTGkc63tgrGGkuewSu8VBGNMZrjXxHJVH2XdeGML51wQWJt5QZ+ywzBFYRNYR0wbS9UB6RtKBgDA5YsXTxeLX/zlL926fuPG9RuTybRtW2Yy1jV1D8BYOxlPqjynvj95crheN6NxCUBlY6GlG0LQ75tlYowWYDZseDydTadT5ywbliREAFhjDZuoGqMoEENMm01zIVvl3M2yeMsV35Yxm47LwNQ5g/fTejfG4GwGIIo3xoiqc67rO5uRqnbe930ffF/XNQDv+xijdVZ917f13u7Og4f3ReXilcurrk0sy2o0np8cHR0f7e9fKquyadtLs4lhXL167fDxAQDCYGzx+PDJaLqX50VZFovFgbPO2Syl8dVyaW1RN7VzrsiLtu+o66yxnn0vA8m6LEs2hojW7ZqJymrEbLKsAJt5f5qGfJkotZyJaL1e+rZVDnlR7sx2RqMxgNZ3TdeOqsqHnoGu62dZXtdNwhauX78OYF2vF8vFqBq1bVOVo7Zr1ut6Z2dvvW763gefbismioiKiTEZPaeCNA30JsBFYiSi+fy0KAoVLYqi7/rEPUz5x7BJ52mC1wMxGMZYVezt7n147xTA13/ndz7/o59fnp6ScAjBWPf8c883dd33ffC+LAcnq52dHTZORZu2CSFmWUaE09PTLHOz2RRAUZZFUYYQfO/joLALJu6jJuZ+ULVkT5ar4+PTZl0/eHBvMpp433VtoxKXqxWAxWIxHU8mk1EIvmlj27ajqqyqyoegEpPudnILAZF1DiogFHmxnq+SEV+Clq5fvT6ZTph0PJnkJjOOxXe2q40xddsCKIvi6oUZlDPLJ/N20UqRuXkfvA+G7QAjWuOM8T4EIEYNMabzfl2vm77LTAZgtV4v69q5zDc1gMMnB8QEiaPxmNg0zSpdNCH4db323vZ9F2MMofc+6/qhxvAh5JlTiSJqnbt96/Z4VDV9WM3nbdscn5wCuLC7MwJ0uRwZyRCFgw+xENaoniwhDX33RW6MNV4YIBUoiA2x5RhjGtZO0FXip4qIsLa9tL0/nzkN8yDMKGXfn2qUxQpX98errj48egjAUGkdP3fxdthru/DYx35/PBpXeZGDM1vXDQDjw5Vbt/78v/+z947rddsdHp0Ym9V13fTzID5wC0DEWeZemTjJYcuZHc0G//pnJbotmEaJW48hLSVm6Fny2jyQfiYmwzZ5h1pjByDy3Da/7/tt7jrn7z7pgS2ouClOcE5rfLhZbt58MLnZCJgM5MktkLdtOA37omdpeAN0Pv3m6VlbXuO52fKnP4cOb3XWzP39EMJzBNPNNmjAF3F+9PAs7Q/7t1nfnn0M2h6yZ/EsnsWz+AMX33+q+lk8i2fxLJ7Fs3gWz+JZPItn8SyexbN4Fs/iWTyLZ/Es/j8PCyAGYQOCWkusaiAMNSqsFBEBmKEXIwAxWElUxYCVYTb9GObUihKBWlIFRCGDMPJgFqGAxkhMybjGGBOhKkqOh0GBc4THLR2SmCGyJUcOalgqqkqD3u+GIDkQODDQZ4ihUA0i6QOyKhwPM54CZRBLakFFSIDEUZZ3vY8Sl80aQFWVV69cXtVr+I7Bzjm/9Ou2q2xeVmMAjrnr6iBS5bnCs9HOr7MCsli3I1ciAOi7Js/K2Njc7EZbacFNv8hLlqaP3WlR7QPIYQ4fLTNrLk8rY+nhsr68Y7Kc5n0TAkJqXUa5dOkSBdy/f8+QFXKvvPxCqN+eTcq8ygDcfuGKm04X2mRVJZ33nbQHd4lw+ujk/aX/s3/sCwAu7l576QuXw1cf/R9/++/9Cz/6Ker69cE8892/9W/8Kx988+2v/tY3Afzxn7qDD37t+s0f+Ev/6b/3r/47/9n80f33vv7lP/ov/5l7f/WvdevGMQFYtvM8j/H+AcXeL4/LWQ7pDWhnVv7al78C4GWyL7x0+cGhPPzo7tWLO7dm4w8+aGPdPffcbl+fjqocQLs4HWWYh/ZrX/n6p1+/07Z1MXaHJ/Hk5OGt1157eP9jAObqZXfryuLB6XHbXR9F7ZfUVeXJ0iMujtdvv/cEgBNdLnw13q27vouR2Bgi33c+xCAMwBobSFbrpu+lyBdGMT9dTaqKrK9K19UdgExi17eicmEyPj5drpteoURm3foYTpP9buFgrXb1su7XGUTB0YvETkwRxXgfAKxXa6W2936xWmTZKMuqRdMoMh84xpgUV/MIwESJTRe8Nrujar0IPsqiX3atAFgtm1GZwZhV52+48cP1fHppRM7Wde9As93LAFay9Cs6XPQ32mVZht9+996v/eaXP/uDb7Aa8fWqbQA8fHB85cpVt7e6kAAAIABJREFUYjx4/Hhvt7h1/cp4PPrlr35jZ1Syyd47OARwuFjWjTQh9CEEyVVNahA7Q998+30An3r+IrGNbGBtqyzGFaNx3fcwWdvFJ8fHAI7WUZhN5jqBDC3mzWiUng3fMGzqQg9jpFt1VkW6/A2TgVVKM9smsy4rCktUr9aJ2mBAPgS2MNZupI1+T/v5KW+ujTS70jl649D73q
hFEIhEhZQ388vpOcOUDJ8ZRJ+b+jn7jSZxrsQXgyYCGG94ZOd3SQGYxCQHbdhRw8BSFIkhlqWLiNx5ZY4gqIbQAUizXQxSFSYh8ou6d9aDrEcy+kTmiiwvoogSR5EQFVtHT2IgGZlvGKpnjFFsCICJF/B79QoBIAShDb/AJLFOVZUYNnn4qc789jhvyAoDD1LPsRI2h35zjM6YAoY4fd0AmElFoDobTZNaKzMzG2dMcpCwxghQlqUqnMus4agaj467vp9NpwDavoeCidq2c1m+WK6ss8enJ8YYqO7t7gLI8/xLv/Kl528/9/qrnzg6OhzJeDQap+EAwyYRhbK8CDEaZxODVFRdngEYzfaq6W4iw/oY583CGpdmqoh5Xa9clp0dx80ZuzlF0zdAW/rkcCiGX22+lHNcDvq9381wuZGAXO8tU4AC6L23Go01PopRtdpR8FcuZG/ceenWpX0AREF1LdRSkqeipOwKElba7hvT2XjZQBuJQiFKlEEWzHDSR+PkAwOiGAOBnCVmpNPDMIUgogpSQ5Q8aYmoDzGqARCi+iDEltj4oKJqjFEiH6OXDadOhZgY1IdoyGyGKGhgMm70xrwPo2oUY/zg3r033333U6+/fv3qDTKmrtd5VgGASF2v2/XSK7JRJTHoxpb33Ems3/uL3xODQOQge0dlURpmNmbDbEUIQUWMscbYEEJ3xr4ERNPtQGRj5EAKpY3f7bkvWFk3dM2BWJ34oIZ87Aubby4eitFDNYRw5er1+WLZdd3pyVHb9VseMRu21vW+q9u6yLOqKmbTybVbtx4++jh9TePpZJRVL77w4kcffbBeLA6ePLl19dKlvd0szx8+fAhgXJVMKMuKQW3bFNWYjQ0xjkZj61yzbgB0NjTro4ODg+l05mOIMVrnYggEdS6pjiJzTjXu7114ef8lYn773XdFxLmiadc++OA9gNu3bvV927RNIljluSuqyXS2E1WrogTQdO2kKkbj/MmTk3QKBO9VRWKYTKc7O3sAlvO5s9YZu16vilkxXyx2d3aY2HvfNnXKPZlz1hgfEpEtKjETi4ozzhmblP5ijM7atmvzLAPUsOl9b9k469ITstzFyCpCtJ3HYWM43R729vYB/F+/8Asvv/rquCpdXvgQ+hBzl5VVlWU5Aet1DaCqyqOjI5sVi+Wi63vrTJQYYhyNRjHGEEJKA1FiDMF737ZdosGv54u/+XN/Y/Bdg/W9t8bs7V+SqA8fPZqX8+B9WeSkOqpGAGaTyeGTJ11nVus1VMuy6Po+hJA5F4JPp5Qhdi5LynrWWmtsluXJEUVFAgTAeDy5cuUqM6y1RgEmdSaEdr5c9GEFYFLmmRnVTXP38VFVjiej8vG6zZxj4sE8B7BFzi5ru5ZdxmxUYrqxdl3DMWtjC4CNbfveGkchxhi99+PRSGFijBBJ2xEos0n3iSzPnXUgtc6WRZnlBQDrnHWm7zsCmM3e3u7FSxfm6+bw4JEjHpUFgAuT6dXZ7MIom/lOfMMmLvuehb1oLxwMAHjxSjIM5RIbNtYYImHDccMbtcYYZmetYQNlZ2zXSx8Csd3cNMkaC5BhEn987/5bbV8bh6rEe48eHK5OARR2fPfxo0k1vnXx8vHCV4Wagg6O1zdvuqybF6QA2Oqqr1++fuml1162eVb3ffPxvS/98m90zWk1KjpqAYQgQVpGsg7EkO6YhrHiNL4m3y/f0Tn3bSQlxMFuINGQQdjMcJ+9fKM4OVQKIYQoohsmpp5VBBhKEd0++BQrf0NU1GEnz+fFzbupDtlfN2XWUGeeZdtN2sTwJvo0ATJJ/pxTWx5++RRFM829bBab/yx25/nYEhzPH9anaZ/6fbczjA8Oz98OhKQDlUaLNvzMs5pW8c+xS8/iWTyLZ/H/w7CGSVUtFBSNhJ1JkVlarZaGoFFIhgQnSXoNIAhIIwZpJGCbbQc14pRHJVHWhaAQJcUgcSGihggyLDaGyUGAeLvISu/COlh4g2gY2tk8vuGqJ7NQTZbcG2PuYZdoWKSoRomiwpr06OBym8ZzJAaSmDmTpH+C70XFsFGoMZQMOibjUVUUi/VKfFDr+hgJTCqwZnd/D8Akc4fzk9yVucF6uQaCSDcdlYWxbXRFCQDWNaoNddmH7x6E3b0XP1O20agE49RwG7wBcHoabl/drUq21DVtrHu/P6sQEMSMRuM0Q3H0ZE5wfde8cOtq+9HH88hXnn/u/od30a3iugdgtTcsRZHl+7PM2rrtNLP7n75FYffhz399MmMAjx6cfuK29jbeuf3i/bsnP/S5N07e/ZU//YVP/K2//Q/3rt2JF24AmN269uTx1/HoOzd+9Ac++bkf+OznPv3z/+Nf+emf+VnL+cN+/cc//2kAl3bz0w8Xp4+xc3knuzQa3Xx9cVBfefVO/c3H9boBYBzPMvNRi0YUEZZx+Hj5T7756F/7Y68999Klm5deAECVXYQPx5P+pOnichXrxePD9nQ1vnvv9PVbta1bALc/+9q7H37wD3/ud4PQv/0nX6BKR6NC6oPJlSrMdqqiAHAybys3JpOVhRmVfSANfRdEoyBGAuAjCSwDI4cnJ+txTlBZx9i0B5krjM0AfLxcusx2vn98fHyynFvKhIiN8d7P684ZA6CPgUjXpw/X3pdFeXU06TUGTwBF4cN5A+BkWXu11lIUbrrOZUzOKqGPMXPOZQaA99J1kY14Ulbq+6Zt+qzIOt9mNgfgxdV9XHdh5Oy33z/s+nZ3Mu6CdG2vq7gODwEcL7um7w6OFm8/OM6rWbV3xfHH01nVr1vv+yIrABjiK5cvtc364YMHxcXdqKZTiorD09UXv/rW0WINgImr2Y4WBSk4MMgGoFdBkb/94T0A/+TNHe37X/jyN/em5e5s58FpXPax88zWHS2aR/OkzmkyW/ggvQSQBSAEUagM4gybSzfZOyRYbHO9KkgHTYdB9lwVpNa43OUGFHwIW3CKyBhDxATaoHnbgRjCmYzhdqoltUOG9zlzXhwsc3QY9tygGgBEZVN3DxXnUAmeAUjbuVoaBDDPD/gMu39OUuIMzRnq5CR9ucWkFKQKEQQV9tJ2IUbpfOdYmTSZNoho7pwIg7jz2vqOWYGu7aNsP51xqm3vPSe7SVFRYeHt6PMgWMjbJQGwqdrPrLU3Ze5WMCp9Dmba2mynBQZROkbnHB03SfxcTSxPHZvtlFXySSPatp2GcamETNN26EqYKaqoSu5y0SAxRiZnDDOn9WeUSMRd1zmXOWdjiMawiMyms2SKVJRYzU8ePXmyN53deu75d999N3f5w0cHx8fHN2/cSiIAtfcX9/YvXbg4wLKEEEPXtV3b9GySv5khrspy5X1SAbXO9b4n5nq1+u63vvMbv/UbGObuSYlUtffeuUwGA5mnjtC5X9A5gFG301jnvo7vea3yUz+enXUADFMfAkUyCTUWZWYiUFsrxVGVvfzKjVduXZlmLvoGgJJn9o50GMFNMDoh6UJu5vjSKlFpoyGmQNKM214IcbjAKIqAjTNWotdIBIqiWULrhA1DJTLDMAVVUYrgzic0EsY6CgFAV
LC1hu1qvaYs96KSztoBVSclxCjs7OZc2v4ZjqiIWmNDiBf29mKMv/uNb3x0//6Pfu5HptPpetUAcHmphKZpO9En85MEN5+PeO7/v48shLGOVSSIEpr1um2bqGIA6NDDIOIYBTFAkzDuZtRRNc1vImnZbROfDhceRYA0paLhWhhcRIZvndKFHExCEFRUohBRjNH3/YWLV5arpmnaRwePmqZWjQDGo1Hf97PZzmp9mmX5nTsv/fTP/Nmr16+VRS6KjSghHPTGjVt3796/dPFyXpaxbxerde791StXACzX9YXdnYPHh7du3nznvfeee/6F/QsX86z03p8cHz8+PATw+mtv/M53/2mel1GiiHjf53lurGm76H0/Kqt0WHdmszc++amyLKrR9DtvvTkd79RNrSrjapTkGkXEWstMi1WdZXZ3d/fC3n7dt2U5csYCOFzOJ5YWHUZZ1Ta0u7cfYyxcRswXL11OmKZILIuy6ZrpZOa9z5wbj6fL1YII3vfDmSsaJDlokSEOUawxEkVZnHNJyDKGmOeZ9z2BDJsYQ1kUMchZBhNlIiGSGFWFmVNG672v6ybPSwAffvT+3ffe/+E/8kcPDx6V5ejx4eGJP1XV09PTPM99CABWq5rSkLgoAO8DiIosZ+bMZWwMgHq9ni+WZZEr0DRNDDGGeHJy8uQ4jKdTAOPxjoLJOM5KCfOd6U7T1GmX2q5brZYAuq4r8tw5N6oqFXHOisTee9/31hrZQOfGWokCUBTp+y743lirEq016ZwxTLu7e6rxwYN7hcuarm1WCzSrPStCPQCbaefDo+P50WL58mTn8u7ee49PvQ8np/NV7+fLJYDxZHJhOpkvFnlRqWKxXLVNHX1/+eIVIUojyc4VOy7PXb48fnK6mI+r8Xg87vqOGSI6298BsFwtVYUNuyxjJmtclmVZbomRjHeSeXcIEei7ru26Lkbp6qZumnFRJqC2qqoss5ZZ+k6izxwzqbUUVaFgQ0iS+hIBNczWOmMsQCF99cKwQ4rerIsgAtjkw63OWlUGIDrkNDYG2j4+PO41Tme7p7UdlRmzAzAaFxf9iABAnB0T+tNVvVsRqQrHXj2Atut9H8cZh64jlqLIfukrX3nr3r3nL4+std4rgNLJuLJz3yq7oR5gHvSgz+lynAcVz0u5bPqqtJmRHhZiGzWt9OMW5zyreYiNSKzbde975iTMteGQ6CAcrUOldi63DrDfGWZ5vg98dsvcQqLDPp7rnMnmqedeOLwv0ebh83DkuY/6NNh37uVPN6ie6hptoU5s+ozbse6nY7vN9BzdDqbT5rPRpjlI2/bxU23yTWm7vUmn+umpY/QsnsWzeBZ/cMIaRLAa762jjLUwMq2ywo5i1LptQ5NWHSBVy7zNnAyYbadqYz02CPQzpQVokvtVAc4MDTY3IB0eG/4DSZq/AKCU3GVSiSdp6a567kaDzbpsQCE34srnm0yqUdgkE1JNeCYTqRlISQAkRGi0zGyNGKMWQYNEmUxG3vsYIgDfdQfrVdu0UB2PCstUr2oL5Mz1fA7g8fyUFTvP3WKDyaTUrpbArrSj2d6Vcvr+O98GcOXFvSi9ifZrX3l7/Mbotc/PwurEkg8xM8bMPQNoO55NxuuwiBqcsePSjapp39a+F1tyVToA9bqPyL1visl4b3dcEKv2Au92KrebAQinx4jdKMu0ngfr8iIbX51qrvHJo5/4iU9961AA/OJvfucnf/DFfJYfP/jyNz/wtz/zk92HTX/j/c/emr13dPLk8g4AXH9+3P9wx/mjD988+MY3P/+f/ye//vzffbg4eeWNN05+7TcOnpwCcLuT3fnkH//81z7343cu/vQX+qp67xtfu/HJNy5/VF958RaAbFGfHLUvXbz67Q/vHtb6hR98abSz+/Gvfvcf/fb7/+bFapBQ7I1fNrmp6kX71/7Pb924WHzqk9d3L1ydecRWjx4vAFy7e/cbv/n2d98+yotMd69mti0uX1u2HbXz44O7fYgAGh+oRKzbVVMfzZeRHJiDSCuqg8WtcabwofOIl/f2naXT+XFUVi9t3yUtcMvEGdVNX7qlivbsuuAzsCJnI6kANcax09ivomjbd1GlV4nWBWsDcd16AIIc5ARWIZ2EGLTMXFQFMZEh4wD4vg/EDkagfY9V9DnlXeDWm2XdAhhluaCfLz0KPjle7k6yg5N1kZu2bUkU60SX01XrHx41xHUxls/eet1lILRR9eHheh8FgMl0oox1XZPoyWKdu+LajdF4Mrt37+BoftIlZhPYzgpyhDYgqkKVWMhylhl0AH79629l5Hf32jLLVsu296EJ2gbx0izXbUh8T1c0XkOMmSu9yKYYG9rUtDEa3Or4bP7ewF2qCUkUqCKoCoOYCazqQ93WioFjFUHWOU1cxCEzbas5fG95qJsCdYNIbvvn31N5bwgCulFpG3IRnSMqbSrj5Na9yVYy6EombUZK2JPytiw9B+gNLXWJg9+3nlX6A8TEhBAgEXnGPgBEhpM7JyJUyYKZCQSOIkoxRoXGdHSLIidICD1UQgiZtZJQTggUG1l3YiYZAMqzpDpAThuSwrni+uxoDV2hhAoxY0BA4lPKk/Q9/w7Q7LaE39Th6QYxqLGfrYVUVWEYTBtcmcDDhxCWCCZnrQJRhAjJQ9YYw8k5hZBASWPMo8cHn/nkp5qmAWCtvXzx8sGTo3fef/8Tb3w6MYyev3X7k5/8zJPDA98HAOt6PRqNj46PHj8+UNVqNFqv1773Xdcyc1qZr1YrEZ1U4/npQmKUqFVRzk9O/uO/+Bfe/M63g48AqrJ0WV7XtWGTzoesyPVpysnTy5kztf6zh/TpRSHOXzZPH91zrwAIMRAhiBA7AGU1it0a/XqUu0/duXH7+rXpuAixE+qs7QCI7zNmJbvZj0SJHAxLN+fM2VWlOpy3677JrIVyOu9FRJQU0UfJDUJQInLO+CjExhgHoO5jbsFElgkEiSJkjHFZQYsuAuj7Pqph5wI4igbfwRjvY9ThzEunoWFjEmH26UN6biUKIm67Ns+Lum3zLHv+9u226/7BL37xp37yJ65duwlgUddtsxhVVew6jqbvu3JUIn2SBMqmfETDKfzPCh08aFREQvCL1SKGIE6IBgFBGjKPDBs6Z3++pZri3AE+t8qHbliTiTKZMOpBqG2z8Ez+4ACYLRR5XrjMNV0nIYLY+46IiNmmr0n1dDEfTyZ/5k//TOiaT37mMy++9Jzvu4xR13XiG3qQet917aXLV3Jjy8mYJJDEEMJ0OgMQfFeWVVVVi+VqVI2CD8zGWDdfLK3rEhH4+Ph4uVy/9PzLDx9/LCIxRpGYPg4zudylz3x4dHhw8PELL7508dq1z3/uR/6fL/7D/d2L+/t7Jycnu7u7AOq6Fol5nhnDly9f3r90VUKY7ey99c6bhQDArVs33/n67+xd2Hnu+TtdVXFeMnPXNKPR6Nq16yfzwcnKex8lVlV5fHx0Yf/SfDFfr1ci0nfdcDglhhiyLCMiUUlEyBiDDyiy3AwneTRsMpclz2tnHWW0rmuJMSWiLZ4bowxwZEJU
nWu7fv/iRQAvvPDi73zt67deemUxn1fj6erDj7779lsxBsP88aODLBvKg7ppXrlzZzoeG2Oarq3bZjoZjyeTsiir8TSdMiEEURCnsQyq1+sf+7E/snftyunJMQCoFSFlald178N8sRCJ3ve+7yejKnnNHx0fAdr3eYwRqodHR9YaY6ZEKKoqnZIhBo0R0Mxlfdctl8um7Sj1DJKHOFAWxWgygcYYQh181/Unp6e5hCrTSQEAi3XTtf3eZHRxZxyV5qt15pwxujOdvHDn1bbvAMyXS982KlKWRQixqdcfP/rY933wfTaaWhcBsLXWZak+97631rZty4auX3uu69vVejlcrQRmw8zWGCJia4nIGpM4wlBEicaYEEPdNOv79x58fD8vqvFoTAqfLldjW5GTel2v1mOOmSMCWbZqIURgCwBRo4jRbecvdQSQViUbMlsCjDatHjKZYyYwm0Ry7UOEdTGmRmG+u3NBRIRYkV2/eLkLAqDz3bis1k1Td+udsV3UnWFqfPPb3/jtl2/f3hs7AJAwKgvKKwaaTk3bGJYr+0Xp2Pc+sSv2xtUrz1356gerdkDwmJiJWIZSapvpns53Z4u8s2z71Grs7Idzvz4jVA6/iTHJcNP3yaaEYaaGafsC2k5UnG3tXGGy3dGtTOOmaTaMemyWgzp02M7vPpAmADbp+Gyeh2Qg1Wx27IyhuEnaW93IM1QVAM4PEfxzQ4I0bBHn6Ke6ARmH02YoSGnTsjpXxeqmqa+bvwlnvfJn8SyexbP4gxPWUjCOLJQoOkMSOlJT5Xbd9cawswbJMUCGtZ/vA6kOcCEBZyW1AjCJ8QKCqFFIIhidy85pymOjJSzD+l4UG2sKRarBhwW6itLGm2w70scYbjsYxjW3ba4z6d8QPSdqSNK9J0o/CMQ6A8DAxgAVgYjLnXFZ06ybLiz9UkWTeZk4G0MgUS4yGGOsJctG1Hfter0GoMHv7e5J3xLH6HujZNhSpL//c188PXz04nNTAH/q5U/4qFWecTS9lh0MxFt0p2uZcObKEYBqvDv38wCu8rztAWLLeeQ2hD7E9uKFDMATHyyj9u1iFS9cufzx0SGPynJ/lO1MQT0ARImn837dx0N1ZcGZrQXzxfFesXdsFqPqNoDlW+99+YvtD/7hO4Z4Ev3f/Rt//eJ68c13vnv5+cufv/P6X/4bXwKg/8GPU2VG5HevvXrjE7f/1v/211947XXfxcPDx43G+ekJgHtdP7Pyw5+6bCL5eycfffzOlVm2dyW/V8jJw0cAVl24sKv3Dw+uV+7Otb1VJx3cJz/54nI1/9V/+uEXPnMLwGp++uDuPHL+93797pNF8+7D4rU37nzjrbuvffZld/vCk1/+JoC8qJwbtRSyi3vF5Wvrj75mu1hOZrLA/nPPv/ZqD+D9Lz3SHRJGUeTj4OtgmiBEZIwJklTS0UssXB4pADCWreEokk2nTV2bZEKt0ndhUhT1unG2aGAYFAQKZC4P4gHkJutiV7iqyn1s+yePH/Ttem828Zq1gfoEOtiMKBcloZhnGRvqQxAoGxsiOvEAjCDLyuT97tj2Yd2JGma2BWkPIAAabdP42EtRjl3u5qs6Sr6s29Jle5MZgNMGrZ4qERnTB9903XgnX62OR9XsZF7PFx2AV1+5HaJngnX8/r0n+3u7d7hwbtyGA2WngzM1eY8YTdcheESOsGRgiSxQADhtgrR4tFwSaUFODDxIBRFw+SS3BkAbRCQYdnKWDkg2rdtNM2LwmtoO1ZwpM4AYZnP5R07zTWRU1Ifed50rivTsGMU4hxg1yoYndr78eqoLnprKZxy/1JvYNKK3tMBU0BpihaSEti17tzDlBiHCprmyKYGVmUgRmUzUyGQkgjAY5G72ic52CiSixLxZoQAAD84yycaF2VgvgBcYJjJpS2ycl8hEKtEaY23W+Saq5lm2Abs0bkwNSJH4LDgrXLcHnIhI8T3zPmclfnyqnj3DcUUiwRpOuDwrECEqEBEwbY/x9wQPr5ezhcV28v0sqw8HkwicDtZg/pmWA8IMqC6XC2EsVksB8rwIwScbX4mSZc7lFZPEGJvmlIittfce3DcDeGo0hN73h4eH//hXv2Stq8rq4PHhb/3Wr7793nuXL14EUGTZK3de29/b69r27v17WZY1TSMSnXOGB9t3KAzz/OTYGhsgfdt95ztv3ntw/96D+5NqDHgAeVY0XUdMzIZATBx8N4z0fr/Q7/nPuUXeFnc692zdnCXb555fC2oT4TSUhhgdgHZx4qB3nrvw6p1bN3ZmGluNpxJ8K1RkDkBWmNg11m40V2hoGQJIiORwujy1hwolx2yZoxKGfg95QZblecYSegWYEVSjRGeUEAFkJjUXh20a60RNJ9YLOmUAa9+zMTEoCNbYtq+ttT5E4yyYt1OEhhRKTIyNsQC2jYftoSNmNjHGPM8TCTHP86oqf+U3fv3apasAnn/p5Qs7s9g1mXPz9aperye7O3h6ITmMnRPkKa7kUxH6YGya5jd5kUeRKNF772NIEHYMkRCImAghxL73mypFsck026uSlLdX5FMgaFr/qxAY50QnlBDFG7IADNsowkCe51BE0czli+UCRJPxuKpKADH6q1eu7sx2Xnz+pWZ5HNYLDT6zdjE/HU93mj4AMIbyzO7tX1gs15Z4dfjYt82Nq1cWy0XiwNZ1c3R0fOP69SdPjouiKMtqvV5rlDzPp9MdYgvgrbffqsqy92FVr5x1zlkiDtIzUV5WmcsAdKF/7ZXXXnv9DTb88N5Hr33yM23bffXrX10s5j70KSMSkcvccrGYzabXrlyLTOvGF1V5enKip0sAn3zt1QzxtRvXjhYni7o3ZcicWy8X169db7t2vV4B2N3ZPZ2fTsfT09PTzGVd1/ZdN5vOjo6OQwiJ+GytlejZcIKW08w2gTSKiBjDAEIIXdsWebZcr0XUjo0qpREfGu5NkXnbwNnmd8pc1rTdZLoLYLE8bfr45je/NZ5OCVyV5WQ0CjFYY29cY2cdgDzLRGW+XK6YnHOjsmp917YtRMfTaQJ8i9G4KCsfAlSZOcbYNC1Rcf3qldJZAEpZ14WiKL72218hY1595TVjzHq17Lv2+PgwyVDcun5juV49PDjYnc2MYWvtZDxS1bppd2mwC091VIwRBGddKvpDCDGGtm2NGa4X37W+b/M8C8Fba/MsJy95bqwJABaLJvR+NiqMgcLVQS2TihC0rtfrugawrmvtuxijqqhIcrYBNOlIlGUFIJllOesAlSjMxsfeurzrmosXry5XCwAqYq1Llb8CMQRARRBF8iRrQCSiLs9M1Kqs6mb96PHjG1evj6uRDyE5YgeVxndRtAvBsKILvaALospENngFUtOFRUUk+tjbmKsqEbFh8WeOWKkiEtUoqiDDpBCAQgSAEMWBogCK1XwNtdNJdjBfQsUSuTIDcP/JUcaRSHxsjxcra8ylnQud95ORnaGwpgJgR2Xfd9oFdlzkhvN8b7K7Wi7IaESUmLSGpjcu7r/7mNrVdklF54G6RP44t6Y7i6cZ4meSKwPDBOdXgtgIv9D
g1CdRNJIpjKKLLYJgU/OIJvSQkyqOHcqmoSOjG1QXG7WvbZo/W3NubynDx0mVm3rcAAAgAElEQVRu16mddIY1JrrLWVs0SdKcVXXbtPsUn5G2NSrw9DufOxbbbTIl6swAhn5vfN+XD8iu6lOFEM6+Bd2gjXqGvD6N++rZ5Pj3fd9n8SyexbP4AxC2KrnMXcZZ1zUMyS0Z1bbr+s4TYA2Q7qmszpKCglcwLIapISXZukMCYOY0R0msOKfmslGPgzKLCKWqTyXNRUYNBsO0lG4czmBIExxphjtNesKAhDJIKQGa5wCEszUJJU4NeEOmgCAaw5Y3Kw5jXEy8SyLDTEldS4lYDTrvAfR97yVEFdR1Yaw3vg0+yyw7k5UFgGo8GxVjH7ogTVs3u/sXZmV5c69c3D0R8uPcAsiLzBS6fO+0XfYTKmMPa2ddfWTzkqF17wGc1qFRUQZzPhk5S6x9FB8nJUuPxVoBkI2tXyq1phqbwu7tFquuXXpxtdy99xGAcO8QZMxEd6+OskmuEvro/l/23izWsiS7Dls7hjPd4c35cs7KGrt6bvZANklTNCWKFMURskjZEg1aEAwBMmDA8JfhP/8YhgHDgAcKEmFL8Adlm2yRzUFkk22S3cXuYg/VU1V1dVVlVWXl9Ob77nCGGPb2R5x738uqasCfJpCRwMPLe989N05EnIi911577U1j1cDUe3f5bg1gPG/2v3n8LfetYrg5wPTo/q2f+2d/f+G6a9fWW5j7r34LwGd/54s//3du1N1eW+W0aP78M5/9ra/9xe//5u/83h/8viVsX7kIYGRzY0flT33EHnR//JlvLWZHP/QTT9z64hfVePMPn78N4LGr46ODWePDpz7yFKBe+OYr375zcNK4Tz1zLai80dsArl6/ocy9o1MnfHtSt9sb1be+fRsxPHjrXguu1koAa08+O36pOZx9rZ1OdOS10eDlv/res++7aXa28ss7rx59BQBUMV+4alg6ZscRYo3SjgP7mBJdoQGiECMMTaczoSpGEIi0zqxO2U+ubXNTmGw8DVPX6w1wiGxttlregWNmjM6soia0fnZ6XFhb2gqwBqy0BRCEokgUTvZQYO68y42FCEdmrQBwjNYaEQ6RlVbW5DEyyESOJn0dR6P0op0Ha21ZetJNEGk9wy6iktMFgBaZi9JGWKM2y/HJ4nSwtTadL8rh+mh9eHzvFEBZZhziYFCN18dvPmirK6M2IhKyXHnSEQpAiAghxkiRtZAErZRSiBKDt7oAUJrstDsWsgqyEALrjgDv8qJQKgvRAQiRtTaKlIsMCPXKiXQO7FpmgK5Czw9FKpTqy2ppUWS0znUGgg8hepfokOwT94RijD2miZUF93DrM3GW756BgWfR9WVUevX3CTRUPWC5BHyI0FvpKcqyxD9WpCWT8FbucTMsU33P4JHerEx7FtLGl3C2M0M2DVDaDUkBqnXeKQnaQulIDCCjDBSEhAUssTdDQbIs5OpjTL9EZqN0iH1mfE/TWGKAicmxJG2lrxc6J4eh5OGOL+/DJgYQh+VPAqAUKTKB4/fLZ1VL32UF8a4S5tUyv2s5Pit1JBaOfQ6ZYkHUCoQ4mRzXIRxNTliwtr52OpsdTU4B1G07Gg4v7OzmeQ7CdD4X5q2NjZPJSZZlAGLk0Dnvwwfe//66c8PBwBh7Op3eeut2jDHPcgC7u5d2tndi9CzheHJ448aNqZlyiNZmRGnKkOflrTdvtdPFbLo4nkyms2mW22Ex4BBzmycGSlN3Pvqyqrz3woxMLX2z79No5dC9A/STcx7geW7IO+SssJwhIaDSlGcF3LxpHIBLg+zjH33qsWtbwddhsWcz64U18TBXEA+gCyTFSLNLPVldLh3uvFSvPOex9gFArYyIjkzaWACeqfER2pZFubE5tEoVVpM4CXUM3WLRAojBo0cvhIWUMiGoaRvbIHVQAGw27EIMgX3wuSWls8npVBkzsJkindLDAWFiJH6v1rR0zM47gETsIxttnPfaGFJw3mdZNh6NjcIbb98G8PUXv/Orv/z3n3riqYPJ6WQ+LctyKbvQq0L0z5YIlgIT32/2lgoDlGV5VZQpSb913aoGfeec944jc0QMvPQvE6q6fCYSR5iZhOTMWloO+pK5jP7x6P34lA7eA7UiUBRCSE8BoC5evHrv/l2tVIgxKc92XXvt6vWyKj/7W/+mqvJnnn3myQ9+wIeQVcPWs0s8d2IvXA2HruvK0Zo2JipVVgNrjcQA4MaHPhK75jsvfmdzY8vmhc1y37kQo9GmKof7RwcAQojDalQvFnXdbG8Ph1Ulwsao4B3AeWYAXL125f0f+BCRIjICPjk++KG/8eP7h/e/8+LLSVUQQJZlx8eH1pjHbjwGSOxa8W6YF8OqOt47BDAsS6Pw+JUL9198HYLhaG12OoHw+tr63vHh5uY2AHC0xiRRyMF47fDocDQcra9tnJ5OQ0i63FBKpSOFiJLUg/chz6wIe++zVGOa+fR0MhqvGaWbto0xZlmW5bmsOKzMaXrp/MIgdM4Nx+OkqaFt/slP/WimYPM8MmdZ7nwYVOXxZNI0jUgN4Nrly8bojfV1pdS9vQchhKLIa+fatlnMZyp1ZntcFuViUTdNR0q1bau1br1fX1uvjAEwa+Lp5HR7Y2NUVrNhNCIEjIaj+XSyt38/1Qovq0qENcFaY60ZDasQgnMdOLquTfuPNUYp5Z3nELIsM8ZopZ1zIuJ8l6sMgPN+cnzkXFvkOVvTqrC5tcP16cX1fE01ANziOCo5mdYu+BuXLpqidPcOYgwnp5P7JydVNQBgs2xUlnmWQaRznSJljM6Mid71KrWADyFycCwQFEWplCpNGaK/c/ctY+3G+haAul7EGFMGAzM774wxEtl7X+RpHgUiMXBZDS/sXrJWDwcD76NzXWRJ0pxECN4jIFdkDQGiiGJkIUXGeJ9EQkRZpZWGCkQA9bE5CaKIUk5D4kBopRIbQisdWUIMWD7RWmtrrUCxcDlc1+Zo3rRK2XE55EC5tQDWq83pvLaKC6tdEapMKajSlLvbl814o+MWAOVa24JIOe8Qox0MimyUmSrXSnwbQzpM6c7BwfR0TnYt2Q8pj3x19qxOundscv0OJGf70flYCZ97/WxbTLYO85IDTiJJa4KSTa6S/PT5K0lfkfosI+NhwVxemW9nQbxl5KYXszzbJOXs97OqAnzu23rro4f6ZPm1KRU9QaHJ/OtJ7it76PwAncdtewz1odP8YQN01bXVhxRhZcadC9unG+oPtnQJIqzAWYLwsqj40pwlYMnXfARJPmqP2qP216+ZUWWHRS4U10drruvAkYg0qdLYWdMutz+BsNbU65wkkuPyfOr3aOnFuxUppVRgUQpKiRZALcEDokgknHZ5Yo7pnEsZ3Q+570v+fQpH9lGwJZnqLL0Py2zv9DvOokfaKAi0UilpjjkyMwsKq7hnUyLLdZZZBfIuOB8GeR5a13q/sb6m6w5A4zuldUQQhK6pnXNGY204jD46HwCIN1kONljU3faFrRuXrw5UNDqYjBRzVgwABGVBrcTmwb
0juzgdV8XBA7deZUWZuZNjW+4CeGvveDCkwdC2uhxWlQ9x4jDOBxs76zpf37UbACZHd+1wZ5APOzvMC8Tm9Ft/9eKTVy+a8ZbYPQBrF9V4a02vaVUIc1BWFTWZ9YF084ubWfGBDwIYfu3gyb/1U7P61fd9aP1etfsjNy8tPnTh8Yt1+/qLs73x//Tf/mcA/offek7GF37qlz5SnUzqg+Ptq1e3nnny4jOXHKudzL78pRcBTN+un9y9TtXVX//Mb8+8/Wf/9B/KaOZHzjQXbjz7AoC/efPacLNys8NJzTYvXr0/2zttt9aH37t7uOi6N+/sA/jVn/3R9a1tr+ugRZfqwx94X6i2FMdK8fOf/dLla2MAk1v7bhJJWboze+O1N57eWJvY0kCprnvluS/deeVNAHZQFZlx3ilmDXKMyMKkWElInploIuXZZypzLnqxw9F2iLFdTFzr81EFICvLtfXthZe1LNs/OlYs1hhHUUGarhnmOQDnm9zYwWjTdROvG51nw9GoEwntnIAQNYAoiBDSSoS6GBTpshhYBRdCYa0ucgDddBEjGxAziiJ3PoLIx5hgegBaqc65qspIqUhq7oKPoiM6RgzyYP8IAI0HigGjG0/tfPHgwe1LG8PF6YmL/NT1y7dmNYBRlbHo8ahysbO0v7UxnjazCFcOi3YhQhZAiLFpWxHrSRtrVaa0JunYR++1BhCEi8H6qWsoxsoOyMC7tsgHoLzuGhYPIMtL5li7VutyJc2AlSPdG1GK4fuNh879BGJ0ytj+BRIiKEMxhK5rRGJZlJ3rktdtjAk+aK2XFKOHYbBzkMr5FO6HYsrncpHPmY1InAVFis4sSU6FeRT0krVED2kMSc+hEiFh4ghtVsbzeYv9PA00GZkKDxmOQkIplzE41samlOymi0ohee/MMS8UcyBCFI7RJ14Ph97GzrM8ijCz0ipwUi5jwZLJzqsdM/F99HLz7G+rVyla9fWdZnRKY6WHBq9PLX1HdtBDn1zyJM6gzTONTvR4TW++E9SSJM84Y0eKsFIawot6YctqfTwOzNbYMi+2NxWARdvmWe68b9rWWHPtylWl1Nt33r5542ZVVgDmdfPNb77w+I0bO5s7EfSF575w7/DB3/6Zv/uj/96PO9e++NK3AVijne84BoC2NrbW1ta99/P5PE1unmUAsryYLxYvv/xykBgjV0UJkqDZWLOoF4kCaYucHYKP1lqKVLeNsfmqKs25tdgvPel5EefycHt/5sy3W2Hr5z+/fO+hF4vYNtNufZB98geeBPCBG1dyCovpsaguMMOLVZpIicRUz63QRNF5FZcp2Ut356HLrvDUs2Rtq21IytJRAZguujsHJ6RsnucffObJe/sPfFeXVnbX82vbpQEB6DrXeJNpxZAIYaXbgAitjCnyCklXGvxzP/Pzg+H4ha//1Vu3bz3//Fcef+Lxsii11qv7pRUmqpJZcOafLlcuEUgbXaYHRFgp3Su0sFy9cgXAxtbWZ37/9568evWHfuTHRuNx51KNmbPdoRcOWLnp36fZLKMYgo+wioXLsoBIjEEplZzn4EOMnNi1IpRgxOTdrnxxSlIyIJFEdaGzBbLaufqeyLmVA4iUZdm1HYAQXW4rKLLWeufqejEYZicnx5ubTxlt2rYFcHp6UlXVbHaqM/v6t16ww+qHoyhSSsRqiqF32rXSwYfA3LZt2zRaqRDDoq7XhkMAn/6Jn/ziv/u9S7uXdnYudC5AJMvz4XDsvZ9MJ6fTKYDRcKy12T86qMqqyPJBNfCu7domxDAaDdbX1gF8/OM/qI2t60VelGVZNc3i5HDvp3/uFw8PT772wtcfv/EYgMPDAxF+8okni7yIMZTGkDWh66L3GgrA/f37RlHTdbnRgyyv63nXtTtrG8eTYx88mAGcTE7W19ZPTie727tKqUFVjUfjpq1DDAlWA2CNcdqISPBeoIhUjMGYkkPw3qOHI2U6nTHLeG1sjHHee98zAaezaVppSb4zSZGGGERgtGaWKHJv7z6A9bXxl59/7hMf/ojN7KKeh8ij4TDEuLm+kV/YbbsWQCpZY4yNMW6ub0znM6VoPB513rd1nWDuxfaYFDVN03WdiNSLRVFWTajXR4MHh/sAKNL25mYO2dravDuZdXVNhDzLvPdlUfSCuU2zNhpVZRFCiBxSSaVBVRZFUQ3HeZ6nlRdDCDEQUXDOe98T8AmKVDrIvHOz2TREX5aVAEqrPM8VKtJSZDmAEKMmWhuNREIK24QYjdF5ble8eRG4rqvblowJga01xpgQvLUZIEn2kUGktCFipX3wi3oxGAyszdbWNu7efTsVCwoptTzLyqKMIYqgKktBFAnLimRZ14WubdbzXCudZUZbMzmdWm1omXG/u729MSxie7gxKAZaCMELCTMpRYWdhwhAE1wQZYQUWautUUZrT5wkdKMkyDKVRhKKHCOzqMCcTueEV1pjssz29gAhSHPr3sGN3Ws3Ll+OyOraA7iyednimKybTCelze8fTtaq+OGnbsaQs2qNYgDsnTG6DaShjM5kxpd2tzWca7ntFsZYAEfd6OR795TR1BcXlSWCttzp3h05o9Xu02veYulxrWKz5zC+d6rsKqWZo1aWwcxMRNZmKY3gHd+zOvDOTKJzbl2ymVKRqPTl57QjsQrXnRki58HTFVK3xAqpZ06uqi2eWWs9DCh9WTORVXL56qR9r0zsZXT6Ha/0fu27jo9V3xWWVYDedbHlMYHVzJz5vcs+nI+oyxLHfIRGPmqP2qP217GZkhsKTF0bRxsba+Ojk4NZ7a3OmFCs5YtpA6CyZjqvQ2QmJYTIQSsTvBORLLdpSwzM2hilFAFRAklSSeOYlNLIABDREFijOckIiepZSUqC92eHHUvKmiEFrUkRyCjSlMoXEkFENBCcFwMCaa2jjyEGa3RPN1CQlA2gFRMTRFulkqg8WeIOgM2MF0KQoQWL3xxVTExUTE5OXPDGFgC00orEaK8bmgdPXmyRz9tFSWZ9Yw0A14vhuAxeVcaslcVaBkXIiUhc9I0oD6AwZevnM7Ef+8nrF57q3KkuyqxlZeaNtMWo1ADCfP+Vtxcu1IpMWVglLBoSsF5YCcbmGsDhovvGNyak9IP5XHzY3q4yFdunNl++Zfb29gCcHD14//WtfLTd6eh1ABUdIRuOpR2qfGN0MgOQ33z2Vk0/8Tf/wVee+4tudv/S5cd/93/5rWevD9d3Nj/96Y83wycAbH/18POf+8u/+x//4/svfe7Kx37w+PYd3H3tc597vlDQooIWAIf3FrffmA/c5NW/+uZ/+F/85+bogaj14e5Tp2++9olPfQDASGeIQZWba7ntNBqjdGHG6xvr6xd8bF5/+xaAz/3Zl37hH/7c7sVnhX8/OHrj3hHy/Z/+kU8+eXP8p19+/f1rlwD84Wf+8I2DWbmpDVU+mKPZ21mId4+P52/Wkpfb6wbAvel8r3VFuamVYeIY2xDFRdGm1EEARA5WDGnjnbcml6AkU9aomBekdDNvARit53oOpRlKsW4lEowxWeBImrxEADHCBeU4BtF6sNFOD
7Pta0EHbUs3bbsQAZA2pCgR6ozWJASh4CNHbsiPkANAroMPZKwtMjKIXvkQU533JG/kwFrbrmkGg9yqzJrMO7Wo67qtYbQnABhSVruamCXELkNFIxdBOopb6GKt2N0C4Bs/qmg6PUbkjfWMVXG6t6fz3FbrPDtM9mWW2XnHIi20ChLFpVQkAJLok75rssFwp9qazKdzX+eqMFDCCNJCk0LCNANA1uSyjDYkoICWVEEAUeIK3ugjzctSNAOVdxwAuOgGeT7Iy8CxDZ0hiDE+eq11z6wiSkW0FKmICCEiTuU9VsJrECKliZbBCu7F4knISwA0ehuRDNlkyKrenk1mKJPqsTBiRaJFJRBBhM4i2AAEFDgkqCCyKFIcGURJfGK5xybzdqWSSNDEwgRaCUz2DAKlCEnnSiDEImR0G6PrGECmRLzNtCXRwgGQmPKt+2RuuBBWEhoAMTMlC5UhD2ebKlJpKpZikRBBpJWwZy9rl6x4z1FrLUCq3KKSZjBpiI6RIwdtzvEM0lj2UCULxcQ6pT6lvbfxVwEh9L7Cyj6PIgQoq6lPmuucUSrqoBVFq0Z5tnAdlPLMTe+pYquoAHgf8jzzIZwcHW1tbF7Y3Prmt75588ZjAJzrlNLjwVodgp8vgtICjMZro2E1Pe2uXbkI4E//7POXL/60MeZ0eloWhbH29u03jTE3rt0oy0HbOgB/+aXn/uhP/nhcDpL133qXXBDPUYxJ52AMIfmZITIAm1mAU8GCc+4RJamwNE0CJohArYBuAgDF0EDUBFIqJRqQEm106FJSrWJIkmxUHHRwhKgz+uhTu++/eeXiRgUghHnT1pa4sMbrCEShVFpdpVy1ZZ6B6lcm9XAXcxSITaKlnB4LllSpRoiIxKR86ZCZAkCeF69875bKCg7xBz7x6Ve+8o3bb7xhRP7GBzcu7/yQUi0ABjITiVRmimkTuiCnbfS2+OgnfpTLDQBvvfHaC1/56h98/vOu6erZ8fHxUZGXAmWMZRa1fJ6YxRgtPd34XG4sLyF34VTHJj0MySZJjpwXhK4DUBX5zevX946P/u/P/F+f+tjHxhsXiFhp3bmeSOiDz/OMjOIQvUii0KZ1eyZ/JuI7p5TONTHz7GRy/95dkGXvF/MFpOdZuqYzxopI9N5YQ6xltUHEfjORIMxRGUNajLFd56zNOtclHNbDZ0LQZtF1RV5GqY1S2lgmCq5NwpRag0jW1kdWa9FmcnIyWhuTRjEoyrI4OtoHoLSS6CXGvCjed/2GiiHTKlNx2nZQRqwGoCR4ZkOkQF30ALKsnNe1YcXBAdgZZ6GZ7uxcKNa2Ljj3ausGbC05S5gvZllZASiKYnZ04E8Ps8xGzaxcjNHXs93N9Z0LFz7+8Y8B0Fnmuk5bS1r54EHUtS0p/Yt/75fLovzS818GcPHCha2tzdGwAsmiqRUwGAzrutakJrEGsLW+edtWf/zFr11/9sNVjO3pdFAU1XhD+3Z7e+vwaB9AUZQxRGtsnhdHx0d5nh8dH7Vt61snMdrMAmic18aIUhwje9GkrDHz+Xw0Gnnv9h/sAShttjPeKEbVfDEfDIYiHGKcTmdFWaZQWZosYQneM7Mi0kpxDEGoqeu2bQBYYHLY+SDPPvPs62+8euvVl5+8edM7F4TzzI6Go7SVzRcLosVgMJjO5oEjMx1PTq5cua51PpnNALCABZ3zi8W8c865jhQVecmA2BQ7yQ1UVg3NcKRC6NhXZSlgRWiaZlDkALKy6toWWi/aVilMZlOtcHF7i70v89xoBuBiBNnTab1/57CN4j3qegGtgw+1azKjADDCsMrmnYnCGiqzpsqKWb3wrXNoASyaueu6rfUNkjCdnehspHTuQs0sm+MNMQYABe9ZBmUpIZZFWc9ro3RkiRzLvAzSASAXBmUZvK81FIFI2q7O84yoGo2G09kpgCLLiNRisRBhgBKjk0idBhdDBoCg60VrtDZKd75mReNq7ZAP5m37+M0nEn4qPpKAtO0IhRYtgFCVG++pdSyiAZAySikSbls2ulNmAGW1CgxYZZxrAVitSVHkIGKhrGdobXzHtZvOuwWAo/nphTxjCYpU6ORgMpnV/qRtZrHJVN74HIDRVBQQKYtsce/+PrcYb442xzubow0bW/TyKVYCW6OVUtG3pjLaKnYquri+tp1nQwAyl9FwgNMoopQyLNyrZCm94pSg18XuT4dlXCaVNzuzIJJiTR+zUZoeIgSmrZHTp1IinOo/g+A9eo4hMYRUqmsaFZHWxByI9FkQhpBEOViYY1S9mHdfFhVATySXXtpBmBOAl2pP9zknKc60ND2pxyUlJnA5fRJM6WaYI0cteikpxKkKK2kREY6sDC3RTyzrMS7tKD6XWEPLggdYdXb52zIJnUBR4vICK+Cxx2GTdSgMYRARiwh4mZIo6fBjhrAIQEluW6IApM7Pw6P2qD1qj9pfj2ayomRtfNdQN3P5BoGMUaR0O52gyBOMaKwZDKooHDlarcAmkooxaqP1Uh86nU6RxSjlWi+kYhRmWfndwDIXYEVo75NcmARLPiQgUEr1pRKWiiFIx1JiSAj3uu4KkJiuDiUkpPtgLQiClLooAk2klYgEYW115GBToU+Br+ddcHZUBQnaY3NjvL02eNvIvb0ZOwPAEyBsDKBtznT56Wsnh/taK702GpAGkK8Pm8W86+aFiroIJydd17kcUIhhHq/cbAGEZtH6+ZXHNq489lhb0vHiTqWjDqHIzAK+bQKAx65dufHU6N//9M07d29f3tiwNizaeDSZh7Z13rjoARTV2IcoPl6e18NCM7xvFmHhv/vd54kVgO1B9sp394Icd0zzJhRZ1S0WNscMheumqZ446fHx6b/9/G9s1TP/2ltvffmPvtQcHP3JnK9/5MO/+4d7XA4AnNx6/e5rr/3a3/61xeG+Or57//jo137hH9965bXxxlpw3nUA8N/9j/+qPTn8Oz/7C3cOFv/n//HZf3188PgTj8Uy99OT3YoA1MdHZI0PXDHP2b99cMQIr969vT5tCmu7JgD4wzePXvvfPv9f/qe/cu2J6w/2X3rt9psbm6OPfnDM7frT15/90PueAFBJ/cJb32gW7c6l0e3bb3ZVvHlt+61bBztbpRmP5nUEsKgdZTpwFCE+F5CUZUk9TZohlKqdCuq27TpRCsM812RJNIDGd6eLeVEUZVGRBgXyMYBUimMvTQRlrR0Ohqf1omudwBiT+bab1tPY8TkCC5arVwQQFpaYVFRTkN+HmKKwIQbDnPSFRPBQdBeI0F2MbQgwJi9zXSg2yrFkIABFNuKo20UXo8+LoipzgWglPjbaq0EpAE6bk8k8dsHnRXZhZzybPZjOJmtbN8ajcu/BPksEIMjkDCo5awJKGXPW5iSGIwjKGEPQKsn+yJKttPzAewZmzxS4Hw6An48ZM0n0DkBuTVkMmOBcixiVNrzCLFeXoH6UKYlHilryiJZfT9JnCfekMxIQ8Zno3rIT52icD9nSZ9NwRmX7/xZ1Xt3qqiwpaKkvkURse0nNM+vzXHfOQEzqA/UifRULRIYLTFBGUWIL
IkGsspLrPN9HWTE3393O9DTPoV/f509pqacuzgetDKUE/8BEQqQSLVSYkxzwil8HZgGLMGjp6qDHIt/za2RVn5sEiFE4CfYRcSqiHtl771wIg6IIgi5Ga2wCiUIIPnjVc8wIkMPJMQTvf+Z9IUYAQnpzc+vNu3cYUmg9Hg7LvPzuK9/Z3NyczWej4RhAXgz/7Lnnd3d3L2xtV8O1F77xwv7h4Yc/+KHg/Wf/9Hee+9KXARyfnmyNN7x355kZK+dtSf98j9E878k95JqcLep30VIAEhbmAOmLXgshEjMnQUyBSGQjYsESmmFhLm/mP/KJj1mNzESROQCCL2aW9AoAACAASURBVDIY0kapIHGFOZ4xVPv1sOzKw8s99ASfFGGgtLsmxoj2QWlrVF/wuiN40GhtuyzyKWk9XDODQc6xjarIslkjABiiyHiW1sWFC05MFygqNJ61EQBVtR4Y3/7Oi/fvvTUqhuvjcWaLMynas8F8aG0uWSOrZP8+T/bh5MKzDSgJGoQQQvA7m1vM8dsvf/c3/+X/+vO//CubO9uZ0VEpAJk1AuEYATHa8JIvQ+c0FhQpZcj7VP1ZzWanrXPM0RgTok94eowxcJRIHDnGECEIUURIiFkSEK8AKAKpyBEE33UE+K4DOISIFCaRPEYqbKUIQgJDLnbW5nwmuqA9c4BmbZn44OTgwuZmrnRlsnE10ili0Ta5zWu/aJv5QMnm7i5EwLGwuSZE5wGoTKXKwUlKpyiKzGYcg7a59wHA5O23b7915/JaPRA5zcqL1lJONhvubF+8dfuukg5AcPP50f6Nq1e3RoPRsCoK6wLfzbKd3cuPPf20LioAznttM6KEyIu2VpFq27YajH/2F3/55PQEAIe4ubGVWdt2jbAMRsO2bjJljDLp0d7bP2lb+e6t7+VblwTY2r5gsuLe/TtKq+l0whwBhMic5aeT4xCcc95o3TkPSF3XIcQk1ZJZKyK+c5Gj1caHbjQesXCzqF3nmvkCQEfNaDAYb65nJgveK0CDYgzz6bSsenpvkRed67xz1lgR8SFkWUadb+fzosgB3LvzdtPUr7/6vXtvvzkaDqH0yXQWoteKmOeJhd12nXeecltCTuazqixtkWOuA0SMTqGcGONiUZPSBOIoSpumrofjsmtq5xoAogSkc1TtYs4cjNZd15WZFSCzNolUxhiM1qJoY7zWdk1TL2azxbDI6/m8mc8t5QCaeeeazij9z//lr//AJ3+YQyxtMZ/MSFBkeVq8rY915xsXBlWeG9v5OJmeLmazLAsDCQCs0lHpResMgrGmbTuOrLVyznGMRTUAYKztYrTGOB+U0gTEGI01EGnbRpEBAKW86wBKESaw5GWRWds0tdKmLCoAdVOncjfOaa1N2zoRsVYbbdquAzCZTGJk1zX7ew+6rhWW0+nEu+Ccq6rKaANgPp/uy2JjTSEX531uobU2VjMgoWdQpmRwVmplRfQsZ4Aj90GQfiPnCFYQRcqQEsD7kCKC3nnnfdv5pm4oSFUOrm4ZAU6mM8g0Ik2TWtRcFmWIXVHk13a2nnnsyVwXKjohXsrWMJRSelkFRemDg/tH030SjUU4mZ0C2F+g6WqRbLWJvudp9Z6Nzsr39Te7wvcAyLuOrx6hW5588rB9hh4YXBL/lpaPgpKUD5e2RAgzJ9RPKVpaGcsrLruF81ipLLPDz6zNvnfnDltJRP9VEsvKF02pOaSkT1sXWuowrHq0/PPlNWk56ZH5/HDScljk/JklWJn57wBwzw31GVdz2eNVOaRkaaQ8lTPqZgKSIeqRfOSj9qg9an9Nmymqaj6dgSS007nJjNJVWR7NWlfPlAAmB1AWhQCT6QxJHwUqrwr2dZ5lmbUuBADMEkJkjpKhc1EpERBLoixRr0hCUSRt80t+ZB+nEk0pjwmUVOhTldtlDCq9SIlc1cedRGlijiBFYK1ARFpTCg0JRLTpVdKgwDE4JxJNllFmmRSAtnXkO/FN08aqyCx1Vy/fuLY1/t7rr2+M7fHBDMDmzsXJce01ZZYubW3lVnmJg3JQ2ZyDBwDnXHO8Myh8156eHE6OiZiurY8+cWPnyvWd7ScLAB06UcxoG+etViMtc6ftrNUxFEaCXwBg6Q4X/uKND60pKcy0bueXrm1e3bG5UNcITAbg7u37saLLW+OiHGmo2XwxP/Ft4Cd2Lw/KEQAXalatNkNjxiqG9ULVKBBhNJ2yu3VvAqAw+f3D9c89/9oHPvjJbBh8s/j43/qBr72492Mfuz6dHcxOjgA89sTFxy9u/+s/+KONXFXDgoC6mUqptze3Jyd7z954EsDm+tad1xbPf/WrlMk3nv8LivTqre8qg4tV+XZuAAysmSNYY0LwDKUMBoU9mbrJ9K5WUGAAW+PBGy+++mdf+CpEFYPSsPqVX/xJMxqymxrLd259G8Dm7toHnn78xTcOmtb/+Eef/e4rLw6rwcZGbaza2Vi7vD0GcH+2GFajmfcRFDnVsCZIovgQAKNUFz1zNNqASUSUVkbped1aa7S1AAqCj75znQCenVZFSLw5Ikl6hX1UlHwI0+nMEGubzZqmndV150QsJ79yWUiYUu1gSKq+SoCIpBz/GKO1lohC5BBTyVEWKKzqrgLow++aiJx3Cuxi1Dovs6yJLYBF61oXBYYhISBF7DVlMUrn29HAAHCtF4aIZEZVldk7OQidZxZAx0BMAYDSRc+eW7LiVvhR+sVoKwwf/DI8K8RJhaHv5tl2QstBWkEafbx4RWFaxn7xUDzYwZMwgMrmNjdt1/qu1VpDKfDKmlxCCikg/pB9LCmR65xRrPr+kQKIhPpc2GVCbN9FemjAscLplj/OvbsMq6fX33XrOA+8iiizqmUjfZ9FIjGBSKlzI7fCHyV1sw+A91+T6ioSgCCQIIAgU5Z0P5ZEy0Je51J+zr73DLlZDXuaEjn/7jtciSWIDxFSPczDIiLEQkZZABxd65rMZkpp1/kytyCBxL40OoDECsZSNfgcELnq57mk+PPWN7EwOCZKngIUUYQwp3JD1DoXQczSdS7hI0opRUpr7YMflJW1dt40pJUolfDKrnajtS0CxsPx5mDwvdu3hmWVafNg/8F0Or356WcA/PRP/dJzz/0/ZTHcvXzN++6rX//a9ubmnTt3f/szv/32wb3d8SaAjcEonWTv9LwewuTPr4z3LHKZHKj0Y8USlXd7hlYBxoQQnY9QKrOGCNG5VIoNQpkm8g0Fd2VNfeQDj73/6afhD53vvHeS9EY1KUMcxcWQEgd6aL5Pze69uFRPYPVACpZp/MtpoYfmjiDwzpGBaJ3gaSaCKbLxxnBtA8htMYAumf28iRmRIQOASbqALornuHBgRUF0hGmD1i4A8IFtlg+G4yrfNAY9dbPvxXsM5HKXPT8bZ89rIvosXbbecVNK+RiwLFPTeZ9ZOx6NfvO3fvtw/8E/+if/9PL1q21bA9Ba5UXZtU2ZZ5yoykSpjlNkTrmuXry1uc5sXbdWjM6yPM9A6FxrtUKvqc1GK4msCDGyUkqsTsNLmpYxQkgUEFxwSpQQjDJQZFQmOu1
h/QHkpc10ZikXYohugs9JCwKATOV5lVdFVmiKkalpOAQwT44OwTEl3tqy1ESZsYMcs6OD0ipIBEQprUkQHAAxlpQ21hR5Dq2KPLfKuG5utE4j8ye/+9mBtqFrTmfHr+4dXty6+p2XXph3/LWvfbM5mdTTAwCP37jyMz/xY8889WyhYwjh5HR+//jkNMuefPzxwdaFJgYAHFJcuY/aRZaIICLTk6P1za1f+oW/B+C3P/Nvjk+Oi6xQhBh9XTcP7t6dnZxOjicFFQC+89LL8+NJWWx+6a++vGgWH/nwxx977AnXda3rRsOhDz5NpYgmgiIqi9z7MKiK+Xwxn80GVZkSeF3nQoxaa0vKdc4aHbzXSkXnm/kiIUc2M828mZ5MlFLBR6VVDIGUslq3ixqAya3zPVHMB59gkshstI6QGCIAH32R50brvb19a8z1a48xx6aZxxiOTk5GgwpAnuXr45FAHty7VxiTG50pVWQ2tyZ03TL4obzzIORlpa0hbRECRx+9S+EKWKuZFKFdzCWEPMubetE2kWPIbJaAOUVo29ZYo7MsN2ZrfW1zNIwxKqVns3lSa/WhKcrqfU8+/a++8L///E//Qjxe3H1wf5TZtm01R0gAEJwPnfOL1vvu7aYejdeUqCzLhP286QCURVmYTJQKnrUiqw0RvA/M0jmfiwDQWmdaa23gA4iYxXnPkUWEmY1VAKJI5FhVQ2uzzjlmMcqURdW2TQyxGFQAJicnwQel1HpW5nnedS4GhjAzGtcAODk5NkbV9Ww+n3Wum8/nruvKchAC7+8frI3GAJqTfa/C1Se2D9t2YKjQIqSnSoVIXmVttEAvNE2UFBGV7mUiSZPqgtc9A4MSLinCUH0ERSuKkVOxex8ji/gQfeeCc+vD8tjzcDzoQht9iGAAb++fIMSNjcHxom7r+NSVtes3b/h6xt2ClPVdBGC0EGmdZTH2ehBHk5O9yYPclEzxeDEFsFejcUbpgpL5lY4BOivZnHb3h3DH5a76sH4I0NtDCXM8F0ZP15HVh3ip7tg7fLy0O/TSABBZxpRSD1RvIiYyiiKCppS3ksBL9HHP5XZPZxc66+3S6uhPineYcikwzSJn5t65dA2SFEJVQnKmJCBEci6+S1hGt89Q53cmZfdMzXcOJVbmHeTs0+8acF6Fk+XcnKwQSln1QZYm4jmM9FF71B61R+2vWTNaExYzMeDQ+HZh80GeF34yFUL0YVQNARxNJkVZFGU5m86jjyYv0raYIJV0IRE45wElIIEWOpPB50iJfsWI6Aticu/vsIAEJFCS+ESrYtjp+CJAWKKwtcvjMDHUISBWqk+qFAIJkV4VZBDuWo6RCMoaEfHBa0WigOgCKQBd14pryzJfG2/GGKaL2fPP/flrW5uD3ObV+nwhAGan8+g8hIpq42jvyJbWRplPJsHXtlsAmLjmxs2LlzaH68XmVlVaY69tjm/sjsalVTKjqgFwenyvHOVilFNRXGQq8rColI5tKwTEBQAo3jtdYKBH4xKBx3YYImvh6EOeZ01kABd3B51z+/f3rt/cYceZ0RcvbDoXRWwkATA0eeOZo4q+1RqR+fR4/6W79bNXBxOhzeEIALWLG1fGTzx24bk//9yFmxc//gMffOvu/t4bb3Yfvhjb43YxB3Bv7+CZj370xjA316+jbksab1574u6d/bXRyM8mTz/2JIBXX/zG9NSJv59DX7uQXbx08cal8f2DiQQ/LgsAxuiWg1ZSQHdBHc87Ubw9Nk0XfAzbowEAn5VXtsrdCxtf+MoLB5N6oyo+/L4r8e5dW+1cGOC123sAnh6MyrIQINP6C19/eah8nqujk/nayJqTyXTeAIjCjXOc5n0J6bEgCieIUGJMRIm0tFjExRA5KhFrk+cA531kDkAbmhAl1cIlgJllWVYiVY4+ODyeNm2VWS20fzwNbaO1CYGX1got1bCX7DthpTVHRp9aAaV1CnZqohgCS1/QeSUR1xtoQtGHxbyOIrnVEoPWBuQTXW62aHwMWmujY+dD54MQBUc+etIBpQEQAldZAYWmCy543/GgGnTdfFELlCUYACyBljmPqc99oXqREByAzOaRfRu7EL3RJlKQvlLNw5bPuyLk/c5w9upDptm5MLdwCGVZAjDG+q7rmoYUKZ3KI777OgBWHKXVt6qHIMv+rWSn0lISiESdXa7/SctJk5VKER66tPSG5VJp6eyee+zmYWBPSLCqL4E+SXdJ3iIilWilPfhzZsjS8iblXCCcAOLe6CdJoGuECGlSmpKxv5y7ZUid8A7TdXmDZ3AqfR8zeTU0CbckgEgpAUUWbUwMkoqTtZ0/mZ2WRVnkRdd5iQKKoEjEKQlZE+mUZ0UrgarVt6/WDp29uvSFeheGkOoFA8IRQmSUGVYjpXRms6ysbJZP6zqVHXDOz+u6oFxE2q6b17UAddftHR1XVQXgeF4P8nxv/+A73/h619aj4ZpvFrnOXnzxO1/8yy/8yZ/8MYBf/Y/+0aCwoyrXEvcOHri6+Ref+RcC2RitXV6/4J0D0HVtlmXvXuPvtfDf60no/3jlh6zm673/2AVPBJCyxnBKc1OidJ9uPLCKu8Uwp49+6PoHHr9ZZHF6ekuHDkQJoU3fwiyAaBJAn/ec+s70z8dyBVCfrZfe7veBhwDk5GmStZkoHZcVknpFycCKZXZ8oqCzbGBUmDfHRycnKae+YzXvoosURLeRmKkNEEONx7AkAItmvmhqxKg0kSahs6Dl2eo+t4qFefmUrGDz/k7S7aflRdTTJQHE2JcusXkeQhTBfFEPqnJ3d/flV179b/7r/+rTP/iD/8F/8k8AKG3apiEguqizDByF2ceoFBmb2dwA4BhD6pciImXzIqEPRpEPkWNKcQdUvxuIVrzcoZRSHFkbDYAjiwAsmhLQDEqF+rDEiIkE5H1d5oUSXriZ1UWu81xrF+pBXgHIy1wXNsIH8V3XBedcN/ddfef2awd7NtXECN4xR0UKYe6a5v3PPJ7pn4+RWCSA0g6cwjfaWGOsgDObWaUMMtLZxUIBKLrFD97YRWFeePnl//m///UF6wgR0GhQ6XpqCQAub2+W1t5+/aWTyaHSJijbMn3yU5/INnZO5nOxOZalsUBKq1T3kPv/KKnnp+ONEYB/8Cu/+hu/8c/vTGcb6+tEMhxg7/7e9ORkXI6r7QsAgvf5sPJds0HZsFzjKDFGEeQ2M1pXZQHAh1AvGhHMZjMCtLFaKa1IEUXmtmmQAoTGKiJSSjMTqJnXRDBKa+mlLYILEnk6mWqtsyI3gNWaRRSQWwMgiHjXWpvleZYkHY0xIuKZO+8Tlq2N7rwvy7Ju6oPjyfbWDsCnp9OqKpTWNssBGJtF0Ouvvrp3cHDj+jXfdWWW5caeHp8MyiqVxKkXtZAKPpIyBCXM2mTBBe99ghopanaehV3XcAyLeXfv3h2jVNvWbdumzhRZNhwMvPcxhL3D/UGR+xhijMP1LR8RhADMF83B8exbL72yqOuvP//n8+m87To03sRgYkg1x22Wr29d0Plisn/39r07N2/aC5s7RZHp+WElAHBpdG
kDEUQpMv8m5q5pwRsXNc1zUASQVZHKFwAICnEi02AKoiJjqQbg02QKIFeyzgxSBzMS6HNq7VNAyTYUyW7HNTgxkzF51QyTnmXlQBhCoQE5MHTCwFq820JXV53VgC8OG9o//21//8tz5oWNfd5iz3PQMgV6qDadlbx8XpBjAwvmq7J7//uCb85ICGf/hUbraDbWzCO0qpir1R9vYX9h8pJnb6jYP/hJO+3X4aMHEzmjEe2KZBPf0z2RU3Dz6YKPsFsKb9ksGOXIrdrGqCc3/wxz8A8Ht//Cd3g//o/bu/9Oe+U8/myz5ushk7wCqmKALApRysaLCwY0eAifR93CS1DB8aAJ4dmYlEOA9Q30XvOhERSSCWoocKNVBKSVVySgDBFCTOcdd3ANq2LRXeYei6LYAQPAxZxPNAu5OcN902ZYG3qBpVtsm8t4orMYQygMcglRlErJCNyzjPBWIEspZYoIIUZMTWJxEzNXSSAay7uFxtU7amroNvTuaLh5X/8mrT5E2fNtuuA/AvfvDCw3unTx7UTx60nujR0dGsPl60znn4UqiaDLnMLsuigX2X9XqjnuV47t5/+BhAtXj3h8vOB0lwBGIri5RhykEuO4oZcVnAjEwJChhBBzIeU9EhGUpmldCOiKrmLDFGAETkXcgi++vYJFqzN6xsx0kfV6+fHnYfRueOIDCsIESeACIWySXGX7ZhYLTeR/IlEY01c8brGh8iALLpuwfnHINShxNk7//9Qx181UA07g4HGzGhWKyj8Tj9A4wpJAfCQLtem9SEys5j44b+dfnvt+223bbb9mej3SxieNtu2227bbfttt2223bbbtttu2237bbdttt2227bbbtt/y81f1RVm4uXV4v27sndPm43b14+/eGnakeb7Yu2fTe3CkBEN13fNs2mj/MFLY7mV+ulB+rQxhyLqHCKKcVkKgwmcNYsKpWv2DvVXASbqzDfxs5z/Wa7dlQ5YoIJCYIPRucv3wDgYPPF0bLLn764yII77z65utr0fb9Zd1pU86HJFI76FD1XrgrNrM7Buboh3zoBgO2b8xPV5epaLbMPUfrK+96iN5xWTbfpAJxfvsn3Zmb5ex9/+L3vfPPeg+P5g3e+++2P/snv/p9nb14t9BEAFe58U3nnOSyX54nVceVmRwYgMABvLiZd1Lnr1o253uLVZYTCVK+W17/7BysAC4etUut1Xrdn569NyRz9+i99//d/8CnT1b27RwBce/Ls+fPLy/O4hs/WOvngg3evX3yxyVmruLwUAKFuXl/1dVtfrfJ3vnVvtZRP1psanoSvr5cA+k3/5s0bIhHh69W2v1iKdW1Wi3byjceff/KvAbCjWvmHP/hR085huQpO8+Z8uwRX89rRkJV5781XV8+++PL47n12LmnqJUOzcsOiRAFA7CSSOcRtZ3dOj45Ojs+Xq9Qn8CB06Jv6+OTYmFKfg3Nm5kOVUmSCZ982DODizau0je185rOur6+3q217/5SSOjI2e/XqFQDdpqNZ0Cyh4jrUOSUREyEi8o7VPAAxEgiwoyvtYrJ0I/Q40eAG0g3tERSGj5YQ84F+4AF7z7ETVTExg4oQk4o2dZMGtSAaouQmE32RmYlReRc8AwieUjamwpYZqsraftXhPb4V4TCd6gZbYch3s1KPuQq+qaum8kAGkPpeJTvn6qZaLzex78l5I+ecc0zdGgAYJeudy9lsOGOpj6y7a9knzQGiElPy3g8dboBpTqmkJBedm5xzElGzUgOByWXNuMlPsf1Dj4yk3Z96M0lzn1uye6nExvcYKDZ25UHEeEcSw9jruyNMA2SPMDhGyw2gIad1d4CBtDXotVGhbZYYv+MhiVJL1rFZzLmeNR//zMfBhy+++GK93Pi6BhfRfXCRrcCQ+jQKEu6GnmounaimTGQKg16eX4rGknTPjiVnNSVyA3vPzKAK0b3EWaKRaHUzo/2mDvpB5rvBTBU00DknxuBwj7LjPdDe0Uam8DQpJ+bbjbaj2AHe+aw6pFiKCAk57+qWUxezf3i6sD6ZmrGFkg5PCHV1XFHlHTOZmBmR42TosgLYXm+ymIgqzEo2vDoy6qLsD4mJDEYAmw78RgLzwKktSbaSdyvMSK8uN1y4lkPSOqBURPUJ5iXUVjkHoHHAjN95/6N/8uPrdbeqKp9EiYgZRNCyNqk5Hp7UHrvMMBCB3f4DooOM7nKd2Be13ydxFDKJjYUXyh8KHim8RlT4apYVSQAgKl92+bOXVxfrLlRzdZWSY2Lbo54UukrRhXTM5UGaqmRNMeWUAWSRnFKyNFRZ9YGJxCTnVNjTQ++VauTj8xh46vgaWlfwlRAodY2fW1x7pm99630Ax/P25aunXC/OLy5MNOVsTCIIlZvPjlabJYBONp6DIy+WDeopTMXEaFxwdhTI3SghG5hEU6dOhJaiB7anHbFPhykVpjGlCtK4+YzDCGrENFVoHum3B/Q5m9gwwChwLJYHgvGoQcxUBAjITaW+dnIkU96/Dh8jB0OWFGNOpYwSV6KaJKmJqsQYMyfnvKT1dArn2AwiWVWJyA/ClAcPaeRxD6NuytEeFRqhYsTkiJVURVWNIDBTZnJcFZkFhakYswG9SJB+ztJabCl9590TAP/9X/rV02N2erZZb0Poa+8br1uVbBJ8KDW+mKfL23uSN144fHVvtbxJjby5ndx8295aR8tBBo7Ujlw47CmFJHZAtBpI/ztW9tcumV97UT+VWnbDjnnrbg4IWhN/7EYHTLe8YwXvvk1D3oD4qibSLLlqTgE0kC5u/80nr569vPpz3/nwWx9+0C58vorJXIYrEp9d10ldMzExxxhTTKrqvDudH3F7cnR8F0DqO5W87Trnq+uLy9m8beqamGbzWUqx1AEDTNWc47ZpSrknMwnezWbNxeUZAOdcToPSd6H8xxibUM3a1mAiCUCoKhElQhaRnMi5LHnZbckc4OeuKadxrmjgDkqjTKxkTKzjKCeARhI4G7xjQLPoatt3SQGkLJsu9SkDVM8cXOj6uN32MQl7un9nAeBXfvYj15wsZmGzvci6Wm3yw2SzWYBzTbBqfCzZBDCIMAGG2cyfHrWw3shC4Za6WmzTpcg1AWQKsl2xRwA8MCUBM2KDGUMNClMiZccAvGfn3EBfNcCgkrMIDCkNqghVVRc7oQxuTLaF7cyy6R3sjSKznzaUi5pLUbIZ7NNhK2PHRIACkjOxOWZVHc3Xcbu0koBhu1k1nWvnMOychnEP2LMI8fZqsffrSBKeLB5gmvaTQb/HUz+wZPcN3MEvuTntJwvrILV7fAy37bbdttv2Z7n5o6OZXF3n7QoIzJCcZh5L46qqnz396vKqBbBYzB/dv1v78Pry+vrqmgh1cFVV+eDjKo4bjcaUYJY0Nr5lYsuqWTrdLqrjopKesqj5ZFKFmUokQhcTAgkZIN4YQKgqTXR+tYr9VznnbU59Pls0TZdjGxoAjjlams8WfYTwjKvEnsXVr88v7hw1Cx8AeM7MfHx8tNlScKwpZ4lggbflpou5A9Ce3Fn3Flebv/m//7O/+Q/+0Xfff/z
Lv/ir7fw4qTK7y6slgJ51NmsMLiJQe8pXy66qCNVlt7qjAuD0+OhkdnQ8d00V/tkffhoCHt07XXc5iz66uzCpATz76qvTxbFngshyk09P2qt19y9//Nl8Vr84uwi+AfDmxUuF3b1//PpqHXXTCTqJ9Sy8f//B9fkr6wTAk29+mK+vzlcXJFKH9ula2/bOzPTN2WvntgDc6ck7Tx5/9fJsvVwzX4fAFI4eP37QK7/66tPt1RkAzNzMH1+cX7lqebxYRLWU5fho1sd8tlyqZgB379w3cS9evnHX1+Ir7fv2aNEt81ayaQ/nAbz3zsN569ab121Vf/SND5n0arVdXZxH0cXRAsDJnTvOub7vvUO21PcxazQyYySNJR2saRZV3aZsqjh/9XzmrFtfvV5v1IPZSd8DyEnhqIs2c03sowsg846DqWaRoaoDO+c4i2JnId+wsyc/b9y2acw02+GRmKSsuMADIwaA0XufUC5mNoFCQwjMzjzEtIAWOmoJmSnMSu0B5xzDnINnAAiumBa6Z6AdJKlMSS5j9sowyWjvreK02ICJDZ5z8C54qgINxGd1aspsRFwFD8cp9VXThuAAtE0FIOVUEIzxLPumTrHnpgrWO7Gcbrutm1ZEB0ExMxCato19T8xmws6DXYFFR3HDsQjs217Q+AxscHj3/7zR9u2/3WsH2MEetHbDpNtLfh/e3vmIQ/GJffdrl9Sz93J5bTLiB7mlCYAo3aQAFwyQwcwwJmeL48V3v/u9dx+/+/f+wd9ZLj8TtUmOnMkRhyJbVNz4Hcg0GLLMTJpz6R52fH52GaWvPJeEaGIYTE0dHIPBpFmG2iDQEeEprsaoITdd80GnKu3em6xm23dl9zUCih+285v38BMuacBv+bP7jw07B4XKZ5mJzIpwoUfVaU6qzoWa/Mvz2P65edNGiVmp1PeAiAXLm631DFP0SWDkHIuiAI5dFAM5JmY2QEWtVxGpmXFwcWMGM6AyPnEC5d2tMyFmKxhlcBQ8Oc/el4o2BqCt/Kx2oSKwgtQxHJNyWzeoCvQssiI9vnfsq5qpD96NCn+GEZ6gYUgMz8wGYHzovSknq/i8Axy569RB2Q+AjiAxHeCaVpYmDPcKNVdy7kt2XjaLgihIFgBcbPpPXy2XnYZ6IeCsZkxawifjUmElI1vNTEt4JmfJOZdKR1NCN7MjJWYu6KuqqGYVGeDIguUPDt9Bht04VnZRBgOcIUFnzbwhWsbtNx7Ov/nwDgDuNoK83nSXV6u6qs1MxSwDhHY+TyIAkiUa8F1jOJ7CHtMKMV7JXhHmnV9KexGt3QJGux6x/QUIU1XUAkeCeFCjHJzqkipvtr8VjYG0cZaNBb+nJXgq6zwOk4Kej4sHiNjdQLGmcSxWhDmHARm7uOk2m35bzkRAluzYOWLvPQiOnSlKzI+IHPky5IzNsdt10NQbGP124rKrDnmsB4gXHByIGM4YmnNWIxUX6uCrElnKOYmpJ+eIas9z613uFl7/m7/0733/m08A1HZJ/bZL8s7pYh1CAfC8I5UJ8bvZaKfr/JMwvmnzPcxXHoIVh7nf/w6I5JRMPx1+qhYMxo1g4/SRnerc1255X3vdb727y7e/gWzshWmnSXzwuXFP+5q2l1O+P8rNjIDgXMwpqxYFkkQA4Ni7+XHQ7dV2+/t/+MM/+Nc/fvTw4eLefSG+SolTAqBq2azro+RMxN57Zicis2bWLI6PT+4AyLHPsSPmlDXGeHS8IGZmPj053Xb9xcVZuf6ckxq3aKqqpsFAEe9dWzcASuBWzKoQjo6OltfXq+UyNU3dtkOiLuCDqyp2TDGJxXi0aEX1/OoqJ7ALWy01c1KBIwkEI0fOMYOIGDaGucuCW2ROYEqE2aKOYperLmVDKSToXBLbdjluV9nCOkYfcOqbLsuPvrwA8L1vfPgXf/EX2fu/9pu/tY7LpvYvzp/HvKqu5Wfe/yjUMwCmGpyHqkGCcznm5XIVvNaBV12+Xi8BcL4S07qu4jRezGA2akQCMCYjU4MStIjPshkVpcgiueO8946JDSZqpqZqBBJTA0JVA/Ch2uaNqmB/0E9DebfzTL/Y2xNp9/db9oPtvVX2tEEvIqvC2MDDDjWZzntrzyDrc8MwLJFFGg2R6Zy2V9AaOPjObgb9ae3mArH/yiG8OVzjMPnoYE8YruzgiIdL6p+a637bbtttu23/X23+zuNHQvVm09WV/Plf+v77T977W//r37g8z/7oOHTXq00HIMa8A3RnTgAAIABJREFU7dL9u3dilETW9b1Gs1ZzSimnylUAqrqum6bv+r6LanDsfKhOTk8uL1zMsSiXeQoUmMWYSHKEYzYLEti73HUVMQCJkuE6Q9pG51xHFEKj5Nu6tiwAek0PHz08mS+ePf0CnhdNs9lcU9B7J3dit111WwBZvMC6FJMpAY5d7COTSUZCLEaUwhmJa9rnry4a53/v8vNPPrv4j/6TX3v87sM3S3l+mQAYOYnbaweibAo1U01b6Y6MNjkC2DbbDx48Jvi2Dj/38ePauadnl1ki+Uo1FOVs5Wq7Wq9i/uc/+FHm8HPf/uYPPvvsy9eXdaiI3cX1BsDL19eOQjXz33n05NXFslsvG8v3Htx9eH/+7iJ8cnEJYHW1mRVQK9DnX3yW0vXPf+tnP3367OT0zrJbATDPfj7/5gf1l8+f9lHE13eq+SobOTx7+tm9ew8ArK6u+2p1dNJ+8fyNr3wzm2f1VTXv4qVoXq8zgJa65vi4ujxPOc0XJ5cXl5Q1estZFu2sqGJfbZdVe0SslXeXq+vtNl1dr6u69uBSTejV5aWlRClT5eq2baqqqe9EyTknzdmMATTtwohVQI5z1sVJy03jhMi766tr5zyATDGrcuXBXmA1kxjnrFFMVG0sXkkjjnDY9n3ZPURyp+WCya3bvU9lWy/FrAe7fOTdkZERSFRjjt75pmlWm81gxozGjELU1FTIrHgbjo1IobnUZPRupNwR6egSDJDeQdB2KDEx2UcFR5i8AStlaM0IYAKTEVRSl10IngBUASKSRFO0ugk+uOvrrWc40hhz09QAdCODxh6ZjcI6Ixw1eNE48DtgQAgVDI7JVMqFmZqpEKGuqpiSmZooDI65lE6hvS7a6/8DG+qQvjFw+H6S1/hW7PjgTyLa/+ogBbiPPh9CDjuh9F08eyoqgRFGPbBNMTCcaAADjIehyIUoVSA5UikPCknFec/BGwGu1JGg8bkCquwc0cQrOrBU2bngfTIjmHdueX25Wq0Ce+8GwE/LdKBiohsZZSSDFbrZCATv+n6418m+neaK7Qfmi6qjlRu5YcJPhnChDRZ23MHDK9At7T69b33r3kgeceFSAsscuyQJQFQBXM0Nk1vL8tnz5ZcvF/calUwT3BCIid06aWCCmqo5hicjtaIE5ohETbLlQXPKODhytk2JxiFHBOdoYIwRqoqYyTG8I++okJq9Y+8otEIgYjCRZ/LeeUfMZN4ABPaFoKYmSYaztS1pluttAhCcb2Z1gDrOxY/SoZ772IVDt4+KfxP2CBRVQLcvKblHfJueL43syK
LZWxDLUmZh4G3vTzhYIFYeWLwgNqWk1ostewPwxaury1X2zZFREDEix+w0SVlGi8afqaqUUiW66bdFSLBMOExagSCDMBH7YGpioiLM3FStWKnpbPsjEzvCLcZ3h8FTPDUH8/DRsLk8O2mqb3z88XK9BhBt6Zv5RtL55dJwVJRZnWOV5EPV1A2AGLsonVliIkecLU8TgPbPeMP3LTD+yPvZW9B2KOawZOzwSB4G+BA+MmKFDFio7dTnho+MVzDVoxmOklKiEYScXjYYT7EiAhUAmsayQv2eFM8+okWkOReCf1YpRWZUrRhpxAW352n5VJFs5n2YcDXRTCiZAUxEpmrDuBtRDQwcoSx5Gm/DqsYD0CbDxlHeYu8DADJSyUqUjQCIWRXqYKpxVatUlB7eC3/5V7/3c+/fbfQCAMt1U8FxFRM6CBMniUSoQ5Ny8hyAMRSz3wH7693NNlzq/ksjNv6nNTr4uQ9h7Ghf474ADNZEGUk36VIHB/iJL7z1gbcrXX3dZd78zI3g3RipvXkfuwOMm+d+Nw4HkSyenHmXNYtklKr0zomZICCQiWbY589fVedXpw+uvvnxt959510AfYzL7VZEnfOhqohdzJKzppQp5curSwBNqACu61nWrfdVTDmEpu/65XIVQh18DUChIqImWYSYA/vKB7GoJjywsEfM0fkw87N2tlmvnXOxj6ChDjOR88wx5WPvNpsE1ESScq8CUd1s1wBCwR6L7K8N20Gp3ea4yCpiqPbCTOyJLDj4suADPGjuO8eOiM3AXNWuPl8uHeNynZkxbxmAIT19de3YOd+cNMdHtes2+ezy/P5x3cW4qBoMs68kumQwW5R5U8PMM0HtcnkJYHlhl1etytz8BA/quJQVM9QYCigPepHGBB7WT1fYkUU30oDiEJmpmhKzpMTsClFaJJtalkmaeWedHw6mMtdgNqnl3gDyb4zTqRT3dLjh8+xIBaZFBl2J2BGZI2Bg19NkrNlo1U/j/eD4hftOezH+CS7dNyani9ubS3u79fjCDfbwPhy7f8ydsOXwxWHrmfDRfd3Kvc1m14llkbnFIm/bbbttf1abX9x5dH6Zr68u7frqqDnul5t+tW251sr7cLR5cwHAt/WDhw8+//KroOTnCxcqYSn5oYH8tGcwsxkEagpVVYkptiH4JMW2hJiqastVn3rnHDkHggltt5tQhcgOAFTBxKGxDM/+TnOct3Gbt6eLmVECkHO63qyul9dZ+tq34ABu5m175+jkgituAOD6elV7t9xs2qZStZTM1CXlPltC31AN4Hy1rFuynDza+4+fNAH9+s1v/sPfuffksfbrEABAibaS2zAjtiyaDcEksyGiPTkG4IiSbr//0ce//4PP752eeJGz86/aWfPg/p3cx6tNBHDv/oPP3ixP7t05O3/10ZPHL67lR1+uM6cnD47Fy9XlNYDV+frOk/fY5/feebTexmvg/iK8e2e+6teni8XjugLw6vkbm51mDk8++PDzz//47tGpEc0WR6ePP6hfPwVwfvYm9j2HNnGVCYswWy/PL671nQePHj14ePfBYwDoE1O/7Nb3T+Y117WvI5ZfvHh6ebb1bX18MgNwedW745abWjbSHJ3mq9X2eu1CEFVJAFcActrGlALYsr5+/ersbNvFWDetARAFoDnXwQssxVxVpkbOeWeA5/W23247AFmE6nreHKsjVRzdvffo4eOXry/XV2ehmc+OjwG8efmy63ov4h2fnp6urs9FNGcC2LO3AmHDsgphyvib2mBu2H6ZyIkBVVAbjB4hFTPVSqqh7uUpEyZroTDWKPa98/7Rw4eL+fzTL57GGI0H+3JgR5qaGsEcwQzMYDLTrAoA3pEjRFWQO8Tmblx+IUnQZLVhuqbh9sz23GUiLGatak8DSgDV4miSKmLK5IhgmmKfk6hVVQ1gNp+v1lsC6eBR0pQlLHulPkZuSDHWqQqh226dd1oIqmZMRIbFYu59tYLFnFPKROycl9KZxgfJfNPD+EkulBWoa7rZCTkbDzF4mnuO+w5rLJVZC6JZivQMRWnGU+75+rQrnzvc/Q4rHS5z+Pwhhjd2BjHTkPdfqpiTheDLJZSqsgWxVDOQrTdLUWN2NLHzyrBiRwMxd8xyBPZ8VDIiHwJUYuzPX5/BzDsHaCmJWz7IRApVFQIppFTiOfA/yYyMv7aCw2T73oDFUaxyLXUp9z461LFxCAW2KWyLoeMKQrsverDjeg5f3wE6O2PfnHcGEhuBTiWVqE7J1ct+88nZqrrfzNipUTYAEIdoRiW/mgBmAbaZFEhgADn3oiiXVnTuhUS9LY4qx+QdAQieqspVnr0n5+ho5oawxPQPwAxmYq9mJeAANRIiISKimh2ATlhAROR8CA2DPZHP7MFgNgDma2GfxYfgVj0Xnk4pmk27BPpyxiJOQuPYG35xY4Hj/Sc0+kaHlC8bEEyiwn0ccrWn5z4UX4axFsIkiVJW7jO2Cc/erAFcrqKvjkD1ts/BBwabWNniRSTlDCDnrJrVxNSSCDE7Km5sGf8CQFSDC4qBmKkqkrMxu8odjjWbgD4q2YRk9ta9mlnSvvaVJAtN+zM/842f/9nvXb5+CQAWDSFuUM+ONzkIlIwYYCIRHQchA2TQggPmnAocdnMy7GGLN/jXGCbAwZq8N20mYGhyfwsqVwr7cFmNyPb3rImhOU2TA3fZpok8rVs0Vdger0etAMOisk0Rh22PdE8jND0sYMTk4AHkLOQIsCwyMt+halnysNQDqkZcnjExSnYL4aB/CtVo3EZpzE3GCPKOebLlK8xj4SOYg1NVgwBgoIKybGtN77Ty0ZOj/+LXvn9v4U/sbEYRQA4B7CjF3tAe+brhbIYszjkCsXcATPLhU30LNtjrofFT+xvTTwH4RkhhBAvGImgHpzhYTGlg0+8/Q6BAk3v4wj62uRtStvfu/tjY+32CJA431Uka4C0scp8ESXvj/MbF294Hym+F7GsAdFhPDKCswuRhIOZZ3UghsJsM4XyQgipfmQvbGJefP12eX/B/4AGcnpzMZ7Ou6/sYU0xZtItxgMAcF4WHiFKAXYkHo6yuapG0XF0vFsdVqcWk2QzBuRACABWxMYBjUrSnsqiYap9i5b1zzER3Tu+knK9X1+X2nHMg2vT9saOY+quVqia13FbVNsp6swbgucTO4IbwDwFQVSJTUMmN0J2eCxOZY05ZjdDUValqWAJRlXfMvNnE2SJsuw2xikBA33h8AmDdxS+e/UgN9++4Fxd5pdo0vIn9+QZ9TOttBNBWROARI0xcWVvNu20KcKZI0gG4XHPOtaeQS0zedsNtGmRcrCQyNgPBl+gEwftqsEgGK1eLN5dVzSCSUYrpiQDIOQ95S7tIzjSSpjWOpvVgbxjjxnQ5HMPTtw8znYlKvryV+I6ZQnkUfIhpCP5M0b3RSLE9wHCctoOdJjRNCwJKmTkCcEPECXvzbYxrlt6cGA9FBml/gu7f0I7tuAvmluXhMF4yXvm49+3d+26xuUUjb9ttu21/dpv/4uVlL7q5vk7Z/Ysfff7wtO1cJTHlbs3t/
O7DBwAs5y9fvgbxNsdGxDWtD5VIrqrWO1dgm9hHIhY1IyjUyDyHzWabJGfLTB5AQgq+MTViJySZNJNrmybHzCLZDMAi1BBZipKvEsgb99qL5ZwjpQzAOYNp7iOLhtxjI91q+eTd95ZJASPLAMhpt9kYhyZUq812m4Tg2fskxuzX0gNo69YZE+fjO83F+no2m2ULl32/fHax6qWpT0oH9cvz+ri5f7R4sbzUuE7EXsidzLNEAEeLBxerq3/8r/7vd+4++eb7H/7Vv/mbDdysbq+3+vknTx89vA/gPOazsyupXHe1/rWf/fhHZ1c/fnH+4LR6erbuRZ8+OwdQK58uTsU2n/346dnTpwoc311sReatO1vHs9fnAD778sWdOysTjdz2Oa1i/PTLVyrndx3FPgKIWTnn2F/OQqi8W50vs+ZHTx66UHF19PzpSwBrig+OHzarSx+aPuZu29fUiOa6UQl1lBmA2dzBVyHU637Vk5sd3+nPrxSp8SHGfts3AOZV3WWoubvt6cvnX26j+FD3WSTJ6XwGoBcVb5GpbryvnGjerrd931fBgxEqB+B61T9Y3I3ZItxxjXkTPIujfH159eSjb1UnJwBenV0SS+XYEVQikWOCIyRF0mSldCY7z24Sq8GQLnUAwQCYbJCf0kacCd6xDYQ6TIxENYOZY27quo/p6npZkkRmbdvF3gY8dBBks5F0B6AQndQGXI5LRk9WsJvKQdPkvr59VYMCzeQtfK2DZADq2jOZd4Pt1fXdNmcRUzVjhz6HOvRdjDG7ysc+Azg6PSUqRjWNdjOw51KNFmXJ7yvwp/Vd54LPqS+giXc8a5ujxVEI1eXVOsWoZs5xCFXgqk89AAKNtbpvOjtvGXn7b77lPh1006439h2tQwBuZ+Dun4YOnFPaNxmnm7e9h1IcbNs517vPEUDEWnxEAkAiQ111773zHkQlOYucC3VNTKIqot47DOxCV85APPIjJzTDhpGdUmrrmoxfPX8ec++ZiSpTLdpbIDhiJlbTbJkMIy/S9ghV2EkQHjjJux/7Pu5BV6ru9ddgFhvUtGgPFqRLJ/vesH+wcsQb5J0BoBhPPnoaagDJWN43ONKUTdVVgav2xap/76gKntRGvbxAIDLWbEQwYnKOfLAqsAvFd6rr4Bez+nhWM1HM2aDewY26e8PZyTDi+yuxgcs1oHkEwBE5JpJ2eFLe1z6E4J0Pzrnj5g6AmLWPKabNprv86tXl0zers+tu2Wm3Tl1KALbJuj5++Ojhm4vu7oMjEBUX0tHAJSz9xIeZkqO3xOMlDd1po89z+CT3S/SO06101mH2XHFmRJKKikKVs2mXeRtpk+zl+RJA3RxlqpOR9zUBOUnfxeJ5ppyTJhSmWwlIAFy0I0fAhMcK4eZMVVPsVdWH4JgRnKmmHOnAKxsG3TTdBqeRpjuiAR/0dRStLccUP/n0kxA7Dg7AZnNtbrbqrbPK2AmZd06jeediliJkqaZMDoBIMiYzuzk2DxaW3Tw4AJoMNhbQPkzpA9Gg3YFdIiQKo0xVJjec9xAB0KS/aYcTcbwa2jGxp2lpu6WTSrHsmPuYYs6pqhYDB3N63LvDGZGjAaomTCkDgPMoureiApjjIS9bC0aDMV5jMLWski2Tc8Mr071AYcOqSLQ3svfb8LqVEAwZFUZbJgvkwoCdZaQtS/feAr/y8ZP/8Ps/c6c15LVv/bVWADhpneOR66/ggqcsRoALVcpipqnPANjv6MS096++vf7t9fbBz5+ASL4NnLyF4g1/0MjIG4Ibw7MDga0Itt44lBLcZLTQIdSwj9rsr9Zff5E/vRXMeLffDQPZDFaGx43j2rSq7N3k/qWEttlut0yudoEYSrl0gCOnIrVvLWgfuxBCqNtQ18vl9u/99m8DYODJe++9//77JycnVVMbwddVynE2b+4/fNDFDEBzqoPv+i6LdrF7+Oj+3Xt3601FJENEEGBj9q5uZoujY8lZ1Zz3A0WXACD2/SBpquaYY0wxRufc4uh4tVmnrgMAVDDbdv2rmLTv190WyDGlufcxpss+A+j6TtTY1MhhMBYLUg/vXIHyHbMQ07iDGCiJmKHg+OXxOucCO83axQ4bqPYwPT1qn71ZJ9kCOJkdnW/Oyfhk4RZ1YPOq+ShUR/VcVEv17Sa0445uuY8hUO1bEZg6x6aWAMQYYRRC3e30QYZi2nuGz7CTl9cdk3PsmILzk0CNYRRHNy1RjRij9xVgMfUAVE1EnHNZ8qHt9TWI2Z41NRkIX28J26Cvi/0hSeNbhbMJJigAc24IfnOWARYc9VD2tsy9zA3aO+4ogbTrmHHZHdnveHu60Vt/7llQb90LdmEAG884GFeHB6Jh4Oy+sh+ixm27bbfttv3/ovkf/fjT08Viwwuq7OWLly+eybKzqj1K276uyIcWQBPCV1++mM/mWwo5CThFtZo4xphyrEINQFU3cW0AkRPNABmVTB0wnIMDUPlKkgqXUgzBoBXQpa0CQlxREWTJoqniKkusuOpTb6DaVS64XiIACKd1ryKhqcS8d42r9MWz/4e9N+u1LEvOw76Itdbe+wx3yszKrLm6ulndbDabbTVJkKYpmxAJgoAtwYZgGYZsv9kvetGb/4Nf/Av8Kth+MAwTMm3RkCBSpESTbIpNNdlV3dU1V+XNO98z7GGtiPDD2nuffW5mUzbgBxK4q1B57z1nj2uM+NYXX3wOViLfbzlSaOG84+v1Vo2ScVKQKTGJIWvhdRKTRVLDrYSiLNVvWlmnsGqaspzNM4vNjGYH69W6aerKFxrl4NFhKNgHH2ML4HJ127Tbs/PLj05uP79cXa0iU2p57TmQs23bAqDtdkv6imdE+/jyalmEwvTstr346KODk8MHx08APHpYsbc2UqdN08ZQ4Gq1PinSB2erpk0uHACo5uWmTXVHuPn0W1/9ymZ9u6j8qj1ax+7g5CGAzvj44EDVUtf54/Lig48fHBbgcHFxE+P2+noN4I3XDt5+9ZW6eXh9fXbWxc1qu+XQJSQK29t1khsAoTooNUqkWBTtbd10qUVaFEs1FUbmO3TCVKdieXTTikVulSEEEDveSq47LyJVVbBEtdQ2rQFi0iUquJgdnwCwy9PV7XmoZgXHyEFdeXGzirHZdlLXW8syi6zEkOhuNzFUkKyz5ZiInJENJoKZZvVGNkbOD0JmZEDm7g2eN8CDaQgAjpCFtACCOeJ+9RczsEEVOeiQh8wcBoKYEKEILsX22bNnBI5JmQtVAtC1tZk4JjFj7yV2nq0I86oAIeZ4IiOUTd10GthF3GHl7ErSBFPm3q8zEMjBenpUTKJmJuYcOaKC0tFytiiwqVtwHwVclX4xq5o2rtatd+xDcXu7Pj6ciYMZ55xOm6sLAxkkqYGc80U2Cl0oeAi9yQRDRw7szERUzTMBs7LM9IfS2dzrkU+fP/1kEz2HwrtZIqQYnaPABqBL6nNjEREFMCftVNWBS8fqPIC6izHFRTkP5LepBpCVHxy7jMoliaZCRMwsKfkclE6AkQ5+vOPAKlkwkcGWrTmCJycQm5jHhgwBADBWG+O1
R3Qu238OLh8tlk+nLNfoyCXqmV8MEDnXQxtGZNwzn5IpFEzk1CyJwiSJehe896OHz8SWUSgiF4p+d92UmDPtl7R1zpvIzfVVs20cOR+YoEqaU9mYqSClHHRJQxT5ztEcEdWMkNhItprY5wZMIoR6N6HX6c/OaWYtmZFZDk53DBiUyIgIQ4aTvnrViO9azDtznngYmaODDQMlM5D6EQWFUCAGk6oRbp6uwlefLObwRMEDQOmIGcvqgAylp3WUZeWrwpWea1EAs+CVQBZray21VVmmrpu7IspW4CJmABQ2cwTlauYbDSWJIJn546MHFw2CdwDarjlbd4XU6whKvpzHtLn5+Fm4kdR129QVANpan91c1OeXf+dv/9I33vnqf/c//E+nrYdKC8ujAOaY5EcffXAYwmuvvkxqpbPCkWMn1qtPlAWpOh6oi2OPzB6ODCr+hD74NTelc5y0r+JeAwB9XJsZCEp5ZlTkLRYAphCDCfnCNV1skiqIQxlb/PDzq9YtAJh4ERPtDIBxF1PbNWNsfR4wDk6hSc1MRRIzwzkmhmYejQBQUYCInWcH5HBFR+wxeH2ZVt7HD/ZiX6Q8bNfkNxtSBxnQdU1VlUm8c3y5qp9uEtACaNs2+Dm5wvtAbAxnSuRYYeUiZPy0WzU5MxMzmYG9G6qkp8GMHrH1HriR0bAtJCPRbBeHDBB27G8xHfB0HXitTMYwKDLe3gNByJgtyBSiMm4eZBm6caUawc2B3mo9zjnSY3qSNjMZO7Bxsgkr8AXO8ghU2h50ZpS5To4dxpBq0E5/tl8uQcSul4ZQGO/vcHCedlR1Mur7WSf/lcBOMQ/syTWUuk6DdzP2+Y1MOwAh1cccf/KN4he/9sbXv/z2wlYFOQvOolZ549nDjGtdGENEFewBtpRARvCFv/PuEziS2Kbkf2Dc7Ru6Qh8ksBuCeYLqXy3XXR9WSbnRR3AnV1HPhDST4Wr9dXacWkMfrz2FL/ZCHnbIyQ4aGXsppgeN0PPknce9nr4RaEDlaLg/mChpCs4HX7Rt3XVNCCGlWFbz4dI2XqBfEnWUX9ldPs9WGmPpgxmJqIgNWcBgUGInlmAWfBh7hptVh6oAnPfPPv3s9uLy+Ohh3YjAuI1tUlV42MPjEwBRbdtsmZ21jYkZsDxaCiUmVwTXNTUARwXEHLuHD09Wm1VVzmLXaPJtu22zQV6VZVk29boqC5eFbInXzdYXIbYNJMvcd52E+ax6Mp91lTdCSrHqOlZ+5Yk7EQ/gu+++S0YqqiZE5L1Zq6pw5JKKZwbQtBsRSaIOCcTJWq68phRjG0oGsKmfbdqlmCWtWy2kbdddZJ6dX106b7mLmq0vL28MeHrqoqbXX1ooKAS3WBy8dHR8tdoC6CRVpcHNJCmziWxUGmZsUhfIic4A+FC2HQsUBpVkop7gmJis76KmjllEVAWMIvgyhOAcgGSSzQxRjVGTWoyipsyuk86HADMR0X7Pyxy7gdcP6nMC5i4yoUcOA4mIhpDqUeRmGB/9OoAMNWblkZ7UjrweEINUYapF8FnfSE1T08/SIXg1UwWSaf5txIJ3+w7D7WnHXp7MFflWUxiQpl/lspvpdqJL/TyAQZN0eksbkMd+9E4tpwHfNww7XnlLo6/PyZo4wTkHvP2+3Jf7cl/++hVft0baHh8/ur2+fnp+G7sGjGoRTOPp2dVLLz0CELvIji9vbublrG7aB8uld05UPXsmlwVZyEDksivuXXDkQZREDMbUp2zWXlAKBAjUoNwvT2awwfBVQxZwIWb23jtQ024Pl0fnbQTQSls4xx7sisXhEREt586Bm7bu2rReRwBtp1U1BwKBkkjSZGYKEVFPvhe7dCB1psn7Iia5vLn1VGxiTRwInNUYiezlx0+urtd1s8YyGJlvo3luAi/9DMDFOrEvfbn84mzz8Rd/4YnqmMBWloWodDkLLUqm1SdPn/3GL36z8MXNtjl8cHB2vTp57fUf/eBHb3/pFQDPrltfJSK0nQg4kMVkl01HHDqJ2Y+dzY5SbOdlgMpN02277vrs7GDmy/JB6T2ArgyQdLScX0pk7197/ZVZefLxZ+8uF8vGLc5vrwB87e1j5nB6/fl2dbPe1jFR1LhpYlUtQ3WUNf4OjxdNG733Bcvt7U1HVs0XyQCVRbnIq3DTRTOtYxc1ybatip4fkSNfAMSUzCSIa5uORVRlVs5cUcW2q+tNExsABktJ2m1TN/Yg8PXtWlJc3944xuXNja9rAJu6dWIgYSWvvfRP7rs2LO5qfUaaCfLy4nX5Tqhddj4mjufk5xghSBPNPCJQjjLrt7Zt4pFMDAMa0r9QhnfK0ocA1ZR5EDlJBRlBeZC//HFleOAenzIMLiIIUPWeVWJK7XxROGd1WxcheMcZl2/aNqboiGJKB4cPDKzYGIHIlUWVc2vgIGzqWtRi1GQAQUQynSV7pATXJyQmQo64Mpmbp0V5GHB5fQOAo8y72HTt42N/ep1MfUrr0sRMnTpHDKAQMyZn5MCwBBixKWtU7Wr1RQH3RFnjAAAgAElEQVSgVJRFwYgpNXPvmrZ1LssC9WE13Lc1EcA+ZNkz5xw7WOrDCX1QwPfoRp802AhsyB0oW6J9GdM9qOv9cqL9bAU22IKADoktsh2pBEdOTRUqkAw35WdLEgN7ZK16dmLUJVFTwJi9c44cq5lJBMDkXBEK78VQhBBT9C4ww8yJxBxTz+Qd0dXlxdnpM4aVRdGmLbmggwbjqL+4EyQan35nAk9t/clY2Ou79vzvkysOtddvBNwZajS91nOEqOdLhgCep2qOPsv4L8GgZvN58ZNfevLWsZYupCzgW9ebtikCzT3E9NGhc45Sl2qBEANoEhL4oJoHCtFtr7fxsOLLtg7Om5sZPQTQJTpftUnt8vSms7ZQvPx40cXme//399cN3dyuANT1drXd1uu0jnbA9A//m//0f/7t3/qzT1diYNfLAwSHh2Wo67i9bJpn119+df6OX85KnS+XhTcAy3K+LGfO+3/yL/5NG5XIUd+/BiQqTx+DigTtKLf9Vzn0UHue9djeOSPBCDH16HKPKQ+imT3NYhDDNYDJXNBO4MgXIbSpPL/uPj5fXd2uxS8ASNIUkUQz2pFUokYHb8+1b/6TuZcStV62wsav91p5UsYOPLBS8mxqvYtne/1qdOgKF5w6hZoYXEFukSU+2S1ip2ZsSgrTEVyCATxcPyc992AQkAFTm9xmAh3R8M3wwZ2XuBNBDQDgfN0etqOBnkywXSYiIIOT40YAmN3Iv6YBZxzvu/vR21AAIWkat1BycHS/ZfL/xTGdtstzCobDx9Pfd2zW/IOtr1+bHDTA6dPqm/xeQLnkaNqIOLjg2KDivDMNqam0A3BcxH/nJ1/9tW/9xGsnVepuZySdxBxA2kQB4MGFd0ooiAjGpAR0bZPIh1DknNpJIvYbNz/Infccvt1r4aHVh4VgwAInp01gxOcmPB1VPp6vysHMQB6k+18+p5lMY3T29LGHrrl/yX/btHvn615FwWS9uSWml44eV7NyvV3Vsd17qAGBfdEld3jlxDqj/QP
2TaShvgmkeZ/YMJvNQ1E8Oz89vTz3vvAuaNMWzGUo6iwKT1SUM981ToWZTDXF6H1g4uCDBQFQhFIldW0DwDsfY6eqZVmZpjGc1syYSVVu662IHJ+ceB/W600IZTmbA9hu121BD4+OD31amYLJtCjKks0VhY9UADg5Pnn67Gx8H7UJoDRMYzZMg3lDo48woR0ql09SNTU7XBbbNq3rpk4UUyyDzzIsMVpR0LIMtzWCwIjmYfbF+c3Bst7USc0D6CKrxBk5V8whBu9iXG9bkc4q1uQCgHVDbdepU2IjInJD1zQZAHJTFSKE4L3jEJxzWUAGBbmsyyGieWO4CKGNabvd5J1Um75/Xwkv7CrTA6aH3gkfvzsrET0vrrrrZCJCxCqDLTSNtjHt8f5JXDMGKrpNBo1h3O3BtB8Dowuwb6RMDsDki8n83ru9+99jd8fdvgTtfzN5NxsedJSQwIsL7YyBF4/S+3Jf7st9+StbfJcope7Al+xnRVmSuSZ2F9cbpxSK8vziCkBgVxUFmG7WayLv2Ds4VQMR9QpoAJGPQUxExXHw3gMQNYO4IaOuSNa/535HPe+YwQALHIaNNQFMYWIipp7o8ZOXP/n8I7CvqgWApk11ikxEJK5piKkoOKp1ZsShMwHg3Lwz1sTMJAoz1SwgRS5wiSEMsCw9QZlMYlKxbUSgSpiIkCwCgNjZ1UXpZ2YS69tF6ZzH6vwSspgvDwDUXTxZHjV150sjH0gEMR0fLNTgHj4OYQagTuHZJ0/ffrQ4evQ42fYvPvoiMj1+48vbREfz8OzsDMDh4mBRPDg//bRNPqo7WZSzUKaUqsK3BW7XNQBJ9fLgpJwdLSv39Oz0/OysnBVV8bCJ2koLgMy+ODtv28MuJQ6zh4eH5eLB+x/Ep+21C4tMqfv8uo7v/4h163gG0ovVFVMAh5gklBUhAbhdb7qYug5Q0iHNhJoyUWcKIQAFuzbGnElFBV3SHHRRwPokw6rZFRYYO2ZAVEUkSSTqZRZbRVGUYValTayj2u2qDGG9jaGoHHlHDoBjTqDC+VAUBpixDbzHXRQcjUuwYfDELHuzoxf5omJ7uaqH7dnx1z7+jDIxBn1O2yGaxVRH64CZiHoJP8LgU+ebKjGc98TJBkksZCopDT7qc2XEIIcDdqaYASJ9B85OtSUxiUUoPEnwnghmSpSRREuSYNQ2SS0WvjRNKqkI7Ek7iwDmRZnEixog1otb9i9oaczaMLjVpmxKJgIrm66JMQcEH4S4XIa/++u/9C/fO/3od//N0Rxi7YK1dFnMEABUbQubBWY4iYksJwVCZDMPkywy5dZdC+8K9nW7mRGZDNvSuQnYg9lMTJEkFqFQ4qRJzBH1ZEMQ71Di3ocykBLzuLE8OHITD6m3WfecudxEptpL1ufUQ5Y1S0EEJibNG/aZGpMnRYaIc30spFoPYXvmwrsuJoCY2TnXC94TM5NzTlWJuSyrlCKRc457/BcI3qUUry8vY2pm5UxVXJ5ITQdQPieTHOETHjbs94QLsnE79aLveA20C6yceAOgFyAcY/UOke5jM+3JG+174DY9exjCu9ON9r3WycGAc161DT7c1JuCBcgjzlzhQ0lQCd61RnXLM79wPBf1ABo2o+JabN2oJlnOyk5uvCdK/NnZ5ve/9ymADz6/Wq27qLaJMIIq/sF/9utvPDz8337nDwHHOX2J2dLjuCyJ48z5L65u5pX/hXceHVUzvzx6slwCWBzi5UP9+LPzr7xl69tn/8WvfvPxwWGqn3VgkwYAGNWy+vQyLecEboBAw1bKEG4IQg9TWS9vuBdTl+PVKENAexmZFbuO3WNco19MPQs8w5G7xiZDUiOUpS/V/O2m/eDp2ftfnJNfxhQBxChJ1XSklVOgIiaZcvfy5z2J1jH6uP2scjEyCPcw5qGHGY0Kg4NTOnyjNrnHHbwGQHCeDDAVqKi2yZQYQFLvPBnYkDN0UR9Fvo8M9YAkyPqssLSDfNBXal91w2AZ9MMmjzTugQ0f9XPVoHk7gA+7sZPzq4wmEACDDtu01qPGRMOo7dtRBgHEIVy9v52aTO+upmpZL1hzbvqhru+4+HcAspGh/FwtTz6ZsJh7/HjC+HsexzS8aCSPN2ZG17XsvbEzcg5qmgJA3bpy+vohAPzKz7z9s1973Qh1fXvoxEAh889FK6cAknZRrCy4aUsvrfeoXIAPmrX6dNzq28P2xh52d6XdPePz74Jpx71TWXcBjGkuMDPeP35EIQwY8nTnwWH7/ejOfHsHaRhaYHrEPhA5nUmfwyh3T6SwKMk7x+wc0UtPHr3+8std1/7Bn/7J5Ikz98rG1yW607jP1SDdfYG9aqBhfUbP/BUzImra1nvPPmzq7bvv/nkn6clLj1959c187Hqzmc/my9mihEnsvHMAxa4TkfL4pChy4uxBhQBMxE29VUkMB1hWNuy6jqiXGtxsNtt6G3zYbrYmSlkNFVitN4+WBcC3m+2madkTAVESqwqgPgAwYhsCmDEhjOZJTPvsdBmJRBZKVeo3SXUQWchTQw5+7mIXY7qtW2OvsDolzwRg1cqsWkah48NCtG07e3R0+KVXKyJ8+PTTsqwAtK1r2/XrryyPSg/OVFpioqgadSTuDUqQKkwgECOb8ELD2qEmwbsyBB8cM/OwItmABhrA7AAVEUlJkpAnVTXVHbhGtI8t2uSfPatjN/zG3dRJFxlPz12Qxk6/b2DkemZSVbWcc892k7aZwCgTCwf+811ckUakeNi9Aw3k9+FB79ANnuvQd7/alYlxdGfDQvtEc9jtKGG4dR9iPgknH1/5uYiT4eP9N7sv9+W+3Je/PsUbFaK6aSW4wNWsdMHqetPWwReuKNvNCoBI9N4/OHl4dXUTk1xe3ajCeQbxzp80IyY2P6AnAyJjNNHoybNtdoh6GaS8UyhqagmAaCIa0puRc95f394o8OziKhNCEpJj58n5ItxeXnrvuzK0yYRQlbPk+uzbRGilYXUFF8xF0pZBBZfeuRQFQBRzzEVg9uxViEhSUoqFmylsUcwBNLFRFWKiUK6un/3cL/78t37mJ//p//5/3FxdLQ+PAPiiaOtus63B/PClRxZTtV0fLQ+367rw5IuMjfpAKJz76LNPPv3ik7NnK/KlX/J2sz5+5UlqGgBdalWkEynDPFKsY3e5rqVuqqIEu1VdA3j54UEZ3NXNFelsuTyMsY1dclHSwJabF8Xy4MBAddu6tqnXa726UgueqtX5VSMGYNWm1YefzQr38JU3fOnItaJaeN92ddNuc/uoojO04jpiY8fOc+aQqTnnogqAgr0LlVi0GNkHg7AZkYn2GFaOT4sxFeViNqsktrFrkqp3LhQBOTJ326mqd2E5r24b0SZKVGYvxm1MygzAM4MTOVIgxUjwOiSR6PvSuMTf9Qmm1k/2Gfc8x2x67CEku7A1Gwk7A/VlZ2k5zoQ422liESiLgg3X7umaAMEcQ9WERFISF/OtmczlPIyjfTYZlpOH3gOS+mcaziCCaSKV2awMrCaxmFV1WzNBmACIiqROBUXA8eGyaWJwWBScLBWuco
EBpNiYiipUU/ZvmImNgneaMg2TjDhHwEONoIAE03advKOHZQLw0on/r//+f/jv//zP/dM/+O/nDjG2b755+A/+y7/1ytuvcNFreM1Lfu+7f/Hy609mhw+abUptUxUWzW5rsfXqt/7JdwH88P3TzkInomn7qPQtpM8YPThexEkNXbKYEMkxpQROCoGBXN4F8SjEulyBmdCZ+Z4JkXZc1AHdmeoD7c4Z8JX8nxmNsTWmANGQhEFUB3gaYqJQIiIhhTjLqSHUjGwQCajr9Xy+BMxU1JKnjE0gqSJFciwmDHaOkyTp0nKxrOstAPN4dvr0ZnVZeG+WmrStXJm06zllgJmOHYenI6L/bQJBPWc073vL2IX97BAn07vepQEDCXiCVYyVe8fxmFx/AgFgh+70qNsLHm70SCCqbYpVOYPVNoxuxzQvQvCzels7siaJKP/Fx6ff/eDiatMCqLcRxm0rbYcHR/wP/96vMHfJuoOHr5x++Mnvf+8pABgFs0h44ImA5Ezr1cV1/M9/+Wsni0WNGsCB80dFUnbz4D+6qN86igfffvz20qlz68blVL8t6cvFfMvuUSvLyn9wdtnG68evHc8PPPk5gMXipObD937vXSLzFKiP9TTeZYTom8DxSJ3qpxOmHPYlY31lvwmWp4J+xiGznhDY15wZ0Pu/Q1sMc6aBSHOsLexyvf7k/Pr06rppfWGleQagEIA45zmxCBDD2QgtT3uaASC2nOHBzKQXQ5xgVoOXuAO9e+hy19b57Ye4PuoFeYdr7PqV5P1GkPMMeCGDCoAYpSwKNVIzFSMiJTiQcV6VdnfNrqeSDu5r/81Qi3deb5KSddo9M/6EgX3af9TL5HLPee293LwtMpBUe7J13lEwM4GMbvkAlu0AoOFJ9obPSLGxYTxYJqiyGw+2gZI5IgB2R1t5D9h6MamJ9xDLoa7Mhn2K0aUea8xGOQi8oNqQ1JbVYd1uhZKQecMROZLboO1PvLL89Z/9CoCvvXLspD0qFSqZSUzMub77XF9qztG6i6VXdgpF0pT9dZHIHNBPUXvUqsnD3J1u+jlp8ti7jRva1c2kW0xA2f0ymhbTjZzp+dOtsN2AmjT3eJeJ4WIDDEh3sY2h7GGaL4JH7tRETnYuknzwMPn+e3/+/vvvFlU5mZbzZXcfjDTD/atOLbAJBLO7450lIg8ayvnlNUlVFiqqMBOdV3PvA3X8R3/0L56ePvvqN34GwMHxA2u2iUBmy/lMY3d9cZbMyqIIIaxurwHc3q48k6lu1rfbpkkpqaTUdKA0yQ9nRNQD92abzXo+X6iIY7epWwCixkQffPbZoyKRRZYc0i5IFJNHIACW5e77He0hkfRgVGR0UkRtgNoUpmaaoTvVPtsgkWOXrUpRwIXlrITzguQ9OyIAhWez+aqON5uOOT4+Onz9ySuHB/M/fff7Vzeny+UMQOWo3myXBw+KypERU0ii3rmoFpPqkL+dQSpCrJwT5hlMVEk59wQmb1wEX5aF8wz0hhNlXHU0DYiJqWua2LVZPN3URHUyieQj9W5XmyCSk0HTb/rnLbiJefCCeeiu7dLDntZPCiPox+NEn9Wl8mDJBMjhwrzrrAPcaAM7ksYFZ2Ke03Sa3Pt4UsanpnEp6VecOzOhYTeux+E0nWj2h+8dZ+DFhaY3uC/35b7cl79GxRs7YifMZHaz2iIlH3yo5kRutW2y5iMRNVFd0uOHD1e3m5v1LZtjIiMSBVKPwhjIOSKipKrRbMhim1SybHyW+RiWCQf04vej0YyJ7WW97A5u69vgizo2eaHyvlRNzvN8Vsl2ZWLbTRONXVEaHFzId8yrGxtcRhXybqCDDSyfJLLRLplb+oqdU9PFsrLad6oqlkOtPLtZOWtj65jnZfnF06eHD5ZPXnpwhvbi6gbAg5OH9bqZFTMfwqyqLIhaR47mB7Nue9tsEgCiCMLxo0eAO3nwpKpO/uR7Hxar9rW33lgeH108OwWwqlN5dbaYHW3bVkyr+XIxn9/Udd1127gpZwFAEt3WKx/mN6v1fHnw6Pjk/OL6k9OrlxhlOQNwtqoPlgdtF29WdQjl2dlltSyK4EWL0pXJWgBtElZJG9bVSpOWZdW0dVXNDhYHlze3GX6qStfVjfOh8lWdEsM55rprCvbsvPbZCc07jglJtQyuZM601p4+2Buv2SXVlMRUTS14D0NK0rQdACKoyGq9VjUCltX8dl0DrIplUQYfALRdG8XgAEXTWXCDRTHmm85gIEExZE0dM9n8+IX5xxnpOxLi4CFbdoH7s3jn0mHn05KZYaRiUB94MrhAjonzPrT16A3YnCMmNcholtx9lKkPY1OPBpzZBJKK4FJTzwp3cliRtqmLbddwppSmCKBtu5TEAVVFi+VSURNxTAk+Jysw5Lh7mCnlNsrWdU7v7Hwf3Kw5VFnVLMGECSfHB2+/+uiLD97/O3/73wPwpSflV042//i3frPd1j/xxny9jf/tf/Vrv/L3vyXLerNdzx4cAajjwclP1EevPnHLYyA4Ukkb6aiYndjH64vzKwBvPnmgYdFEqFoySQ4herPMJOrpBk2Ubd01XWzJtYkubrvT89W2TeydSgTQNBvyAaAQQuFLZsrIMWVCYt9KvUu1I2CJYvfZ+JN2dc6M0ZjuU5xwEsn7K2YmSVR6moDCFsslAHae2DEHdsH57AzwcrF88OhBVc58KADk6ZGZfQjZ4nXMpuqcE5GXH78M4POP3786uyA1eIvWVb5SJFXdubI9v4YmTvSUajj1FSfG8vNlzwnenTyiDnf65vj/1Csfx+a+Bwra69V71x9/eR4eyP6BwaKkuoPzvjDvoU3MX5sHN6mdzbwo5r64Xbvf+c4H7352m6/hCYGwnIXFrGiblh0dLZZXmzXXq2599avfeALg0XGlhEVRMbvKl6vN5suPCwf50pcPqxnfdlsAqUkvL5xo9H720dObl0wuGrlRvP3O4y8XIQUPIDA9fPz4++d/EG2r80WX7LJ133j7685tVggAPrsNxwfH6t6PmrwrxpmEJv4I9eS6PI3swp0NxkRZmn+nYDdhYvEw5+xcusxrQh+/a9jRVoYmsBB8p/TB6fXHz643rbL33iNqVnsEDEye2RsZW8rJ1DPvcOwuuQv2t+Lel1K1qT82CB68qNONLtzU4eph6EGykO4eY3BixjkAmmGimvWG82RmA5xo/VWgGorxIjDTLPZnagpjTGim08ExGTdm+x/umm0yDPbfaKBdDnChUactDe2HaVPYkDykp4PvYvkM5nsl2hekve8Npx1SlrE6UpHdy96p9pGLNJpfk2u+eOW03eDdoybt05lHJuz+qS8oRL6JdRFCQS6JIm5U60eV/tI3H3/7q185Ws4AEDXHlWqr5J2xakpF8EmgamXI6r1wBFUjH9gFODZDcIVa2jZb9gKgcD9WnXl8ulHDgvbqYWyjvoaG7NTTV5pUHvFw/ItusXdZyqmYB47cOPqH3tbPezb5a++O+xU8zvZ3jnue0WWTf4eHU3XeJ01dakPwRVWKyGazyWHL1k8a+Ro9gv88ADvpEjRALXfvN
Vxk8vzEgDkOAJrYEOW4Ktda68AwFEVZafrssx9eXjwFcHz8ZHl8OD84UBil7ur8dLu9LWYLI7u4OG2aGgCMYmzrevPJJx/WTeN9gEnXtIAs5zMA7JxznFIys8PDZfD+8uoKkkJwBjs5PAFwfDATbT46PbUjnlMiD0ekEO0Y7LhQAG3bqMKsF3DJ8CINmFVuWO3nRMr2obKJaVJN2qtYMJH3joiCc0bZeWGARUiHFIo1SZ3OT5bLqCmwO1gctJ0SF8FzUYVZVQAIsM75+WxelTOJ6otK9CYEFjgBEvf2TK784JmIHAhZTMPMZaF2x0VR5PRsIIikgQNLTJxzhQuJmomoiPbxzjmB3Tj750a/MwqeGxTTHRHr7YseuO17ak9b2REKdMoYH3rdsKgMjiT1EXc7HG8YSjYxhTKoOkU3qbfy94fGxC4x2i2s+wfuzYr7Q/ffVnZ8e5rU0PBUw2zTk3Mmz/Liq+VN8/9XN74v9+W+3Je/WoX/7Yfcl/tyX+7Lfbkv9+W+3Jf7cl/uy325L/flvtyX+3Jf7sv/H8WLKjuWJGqQpGyaFB1SMN+KkCUAhSuSaJTtwQGDnacyWcrqJ0OMdebCE4jBKqJmRsYAGShHewFw5M3QZ6hUJUIff5ZhUcppEC1vFBlBTWNKVVGt6w3xkPlSmYicd5ttHcpZjNZ2DZNzVKj2W2EeXEt05EEuqji24AtiUxNHnK+TIKSGKNxEySJZUhd+4Sl0McXUAahCmWJUM21qBL+5ufjBdy4fzmdlEaRuATComPnNekNqJ1W52Wy3IDB/9a1XP/r841dmxwBOLzZGnz+9Xv30T35Vb65ut6czRyq6Xl0n8JMnLwEwjTHGan6wio0CzpfVbHGDK5Bd3zRvv/kqgNuLS+/Kqppf3VxXKbXSsuNoWF/fHL26AHDbxW0TxfTgYDGblaGkxcEjH2/P1pv5S4/s9hmAtl7PF8tu24W6aZoaIDGNEsFMzrVdB4ANIbjM20tGotJoFJHWtHRzbx6AicC7LnUeNK9m3mnT1ID64HMsoUjKe+SG2MacttiRuTZ2BGTanYNX0dR1RN6A2Wy22dSdoiz80dFRRsq7ehMVBTvnQnBqlFPM3BWl2he2o5H8ZQP5YBqpPQ2AsOmGJwFD/j8a9kOHbf9RRZEVomZqqiqa4xJJAdLMWnIMoJd8A2BwTJ7JEYyR8y2yo+A9caOiBj95nL3f9l6qpxqMGjcgUxhBZTGbL2fVdtuKWte1i9k8ieQkkioCgwAp2eXVeenKslBJXVXOVbokLYAiFEhmDEckMDMxI5iaaowdMoOByMwy+8gRHLOT9j/+tX/39vbNv/d3fxGA23z6O//qT/yi+PmfevWTz26qV91Lx6k7/75ttxfPbue6AMDzl196p0p62bTnKRmhC46cLbqL1Z/+9veuL28B+OBaa01t7ryk5HxITgHNkVUAUhJYEh/JZFkdnt82EluTrgievYsEAKLCzMxcBF8U3nuf5eOIc66IgWzVsx8HzQjqpR4zm3r3O5BPpX6u2lH/uCcPkBlEJXUxxphSMjUfwmJ5AMCHEMoSFIKvDOqdE0mHR4dlOZvP594XAIg8QD44M6iqc0xA8GExW6jEtm0AfOcPPjdJwRdsmvlPebqzCQ0x/9zFLPWDYDo0bGAP/Ngykqww8GsM40CacsUyN2E8aRRsevH1xpilPYbmXQLlj9v270d8VVZOOiYmIzXLwXelC4sqXG6bXG/VzN2u2sOZ/49+/h0yD+BwGYqyOKr4YFl+9uyppE3XJJLkl+VXHs+//epDAAXJTVs71spTGbofCVm3Xgb//icXRw/8628sARy9vqyK5cxZxLL7s3O4Gdf6RUdfP3786NWTxgcAxMttnVy1WHfdseOj4K47++Hl1Q/f++GXfvpbAH70RfcAp1VZwXHXuczmGHJb9fyq3H6eSQ1mpASAHGexQ+pJb2PXHP6BWY5kHStRbZwKd8pfNpkY80fk3LOr7YdPz6/XsSiXxKwW4dC0LQAmds718qdZqmGoedrxR3LEnpkZ51/6CZVG5tekjHSSvZ/9KBx5iDR2sNwBR45kfzqTF40K5MlYRDJtnBlmkquTMKSOMTOzFKP2GfOMspBM7lwv5P1NWHDT7j+SZPqA7hf12aHDTsdIz0vtSTSDXs3ufXI0ofUspqGJ+3cXSc9xdwACM+8T0wCCiQ5z1d06H5/lBQ89HHiX2NQ/3S6+f3q5kc80BBH3fapXzMxPNM5Gk8dgS2amUQNhSSlZ/Xiuv/LNt/7mt99qts2DogbgLK3rVAUvBh8KYmNyRwezNopJArBNnSebz8K2rbgNYiQwo8jg4Lz3FQC1hLt1cbcuacJBvGMV7HrceJLtzppW70BmvHv5u3X9HCP1Tsaw/DnTnXZ4AUdy/+99scb9ZpqaFLZ3H8yr2aZeOXaz6qBLTVKZFfPouqnQTT+P7FIGD+e/OLL/xzzycA522poGoKlrAEVRePaiElNnIGWLXU0ueOdKv4wxAXj27MNPP4+PHr72E1/72qxwRVXOlwccik29kdjMigLArFrE7VYkBuesKmHkXQjsvCeYAGBmVVVVIizmi+CL6+trURVT5/3yYAEg1aTbWk2Do9TUJnAEgaaWjJwTD6BrW9WdmZeDtXMA+kh26+eW3mod2c6Wqdno3SZSMzVzPhhRUiyq4pBmMvD5Nk0Mwbqkh/PFslwuy4NtvTk/by6uL8oyePYASFQFpiRRu0YSdaaJGUVgMDdwALRRIvPeGYNgZL1l5RnBOwA++OVyOcyXYsa96IqRqmXL1ukgxRsAACAASURBVJtv2nZbb1NKTJwk0bAGTazlPADuTI6232HGYI6xa9G0kgijnT+G0O2Iyth1TexMOfQjtletoL1+vlNWHBOBDcsixqlq6J8Yict7/f+5ATCesrsT3Rm42i9dADCo0NP4pHe9EQxRAfvje2Jo4e4Evbv3XXPqvtyX+3Jf/roUn23fPHurwXORkiYjsUSuIDgATZSchOF2tVVVEBtzl6ILQWBDPlkQkGn8MDA5EGWVPybHGOIclZT7bCCYzKtRUz6GiIzA7Jx6YlYziTFqV1AplgCQ4fHxw8UsXFycKRfqDapi1GqSttukLYCYEthl/KvXnSPu9U/AwzLDjhiGrrOYNLAzMYE8eHC02qwDE4CkKamYcsGzLkgRzOI6Mgv7TloATYxKnZpt6u0XFxcOWJZl3bSffHGmgsO5B/C1t775g++/V19dn15dfv7FBxenl49effTg+GRe6PmGcsbAdtssTh4E79987bX16pNnF+sfzM/a69XBrHhwtDxYHgEgdRfnp9tnGwbF6iBKmC/KB9qurpu6iwBK701FRcrCr+vGQN4VN6suBJ0fVdI5AK61Yj5n71IrzF5NPXMXm22zLXyoAgGIbeNCYY6imECSqAJVVbXNtqrK2BiAtqmXyzm2ZKZFWQApp3Mh16cuJTgimFqfmm8Xt+fm83m5mAPY3FzVm9o5511oYne7XquBmefzWRLZbNYA2hir4IhYxIIv0pAndQhBzOGEWSyeyGhwBgBk
53t0Yl9QhqMnDmMfUmg90GRZm6sPDTeQWY4sN+pRLJjB1HTIbMPgbAqBkEFMdjBtjCJUc45p55xznpktGUGGgZBtlMEx7tGaqatJWQAuCxcyk6VYeLeYFc6RZcxOtGmamFIbEwBPIEAUy5mDdArMA3etxLZO3JWFA5CSeO+RLDl2hj4BJIiIfcgaC2aqagpTAoiYmc+u1//of/lf/5Nf+fqHf/4hgHm65BR/49s/9T+effC9734+e/th59vt6fb87MKOF34lAA6ra976s48/Xz5aPnjpZLO9qdysqIsf/eF73/lXHy4WMwDnnazqpghlpxrVYh0XwQMmplEEQNOlppUoJPAXF9dPL68vLmoOoSiKTpBjnxcHR5zjzilHVZtaIhAbwVzuEr2EODG4hxSzxlAPiVCftzgroBvIGbG5bEvqgAXZkJ3SYGoimlSTmRjgmHO8v5ESm1hSEga6rpUUvXdSKMg0p95ix8654GPsqqpIIofLA1NRi1VV/u7v/zaAp2efzMLc+tQHuulWnp2Rke0wwjHaiPrMrPs4RT8gbLSGgT2jd7Dnd/74DkawHJ+7O3wiDWnjmMsdJ//aA2F/GUC593OAr+4cZej1A0BA3WyTh6iWBAblZEGOuOtwOJu3dcdkUDms5FtvHX7zzcPzmxbAgWtDkdbbVKCcsyBuLTgQSbRtJ1fXtwCePCi++pXHKA/KUB0dPC7f+/Dji1NFYUSfXrY/97PfAOCWdF0XH67RbdZrS43IvAoxJVj84+9/8P75FsDP/sLf/J3f/N1F6GaBSiPAQkEIx9/9i9uH7xwAmB363/u//tnX35obAy45ctTrQmbcH8AuCVGuUhumIQKIEJwDkMHxfqhOIn8HNEn7mu01JCxLCeZ0A6q99m5u2PPz1XtfXNVtLMsqSZDk4FiNsxIroyAjgYjGPCUaEoGn7WYwg+aULKMO5ARdnLpcL+gMOxdtRNHIcjI07M7ceanWD9QECJMDsTHZLn2EWkIWce1ND8uyIRK3bd6nMdO8Q5TXEyKe+MDTZyOMjunuq70/hxNpCs0MQY4ATLHzys3AzM/Bs/3rq9p0wqcBySXaJeS2MdvRiBMOmQ12Yew9mqsvqvChOicDfX9g95DK9NDh2QfP3PZmm9GDnt7tRZsTe396Dg4m7e0cfs71m6+Hv/XTb3/9zcca9eUFWLb5GF96gVRlCSt+//2zD5/d/Mzbr7/y4CRpCcCFpVPzxr5ApFkIZembVduY82GIzJ/46CNiSgDcc6Dk3elyDA7dBcJjusGzLz4xjIfpsaABJLj7+sNnYxvtfUt9rr7dvL2PVeZLj48xBQ7HbjFBg8Yo875pp9tIaOpNFUo17XFARZui90GH7En9y0xu8vzaMbnz3YqYvtZw4IBIEgAUZQBAIJFkqkkTe09EZVGpqhgCBw4MoLRKCmnr23f/7F/fbm7eefunXn/1DQ6FaRRLs9kcQBFmpsJMs/lswb5rOyJLRCHQdrsBkMRUAVJmNlNRSZKCD23XLpazLKjTdHHpgxqS6jwEZSEYEUOdmGMfAPgQTFsMQ1ZUHfUDnzmbdbAM6REZKNtuzvUKVf00rVDVmLSLKXZdJ/AENUkinqkTBeAZhEVBi8cHBwp/cX19tb3WpKqtqBXeAwiwNkaVjkzIYtek2/U6p+5RRScJQBslWQIr91slSmTBcSh8WRYAqiKEshQRkZRnKkWfwoaS5mlQNHZd13Vd36mGViRimPZJySZz9a7DjgIRk85i/f6/9XO0DXSAoYJ2Y4fyWOkRxHwhBYigQCAaEMkR8u2XDNph38oEISPGgJXabgwNdzSMaLKZ2ZBssNcpeEFyrnHADzjuRKbmrlHTo6/9WmHPXYcmB+72vvtn+ctnkuHCP9bbuS/35b7cl7/CxXvniYhgnjg2lpJGSy6UbFBmEgbAxuxcVClDgKgvXapTlFTqqIzU7+ibWZIEYu88CF1UhXnqE8iqqhkyOzFrvQ9yHUYgzrkyjIRSv1rmZG0qy/Kgjg0ZAzCCiG03tYqB2RdhHvy27QwE7yw5AEZahQpKpETwZBZFsphVgObUlkAwFTN1FAw5gS2L6vnVhWPkNT5zLwpf3GyuDv1MzYqDgwhT0yzl1LS3bWeU0ny5uGnqk3llFtum/nx9/fJLL13dnAF45bUvvfno4Sdn5xuRpr19cDD/8jtvqeH07PIrbzz58Ec/AvDKW2+enp4++fJrn35x2omo6NXt1ie5uV599ctvrJsE4OL6LEl3fHikyeZFIIRW4oMHTwQXN9sagHTt4ZFv19vrpmXn51V55J1/9CWJV4HlJls/hGVRtEZX29uyDAxt2rpk8qbSNk0TAQRWBR2cnCAsN6fnMC2dT7EN3nexg+7SOnvnFq40WBejmjkmEBw7AFVVeee6Lm43a7CQJBOtinlZlWU5IyoAgHCwXAj7tk3eUdd2BLBz5JiZJCUAkhTkTKEG5xwzKG8s205WzSAAuT5pAJDdbLu73N8tlF2FnTL8YG0Y7RxkMrOdiBeUwMzOYGSZ5QsTIzPSPjf09Aa9UUHaxa0nE0n5gT0xswOxgajHh2jf5RifaPR+aLBXNDeBI0iMs1moCmcQkLLjtks3qxhCv0HrC1ZRSfb6my+/9ebrm233CeTps0szsEGz5S1acCYAZoijV0UMwS8PDgDAJKWma2LbdCKS96+rw/Dh+eof/eYfS2oAeKy/+c6Ddz95uj7/rAj02puv/o3f+NV3//n/GZRfffs40hzAsz/5aDFbPj45CG7RtAjlIxP84Duff+eff05I57crAF9ctOtN44lgjBC8DxK8QaPIpusAbJrYdCpGgDtdJYAWB7M2GgOLqjJyABS0WMxFsy/TO2XMxM7ZzsXK4M/Y1mDnCLCcR2uY0zIioKpMxDmPeW9wDm0yOORkmg9g9gQ414MOTHCBLBmgRC6blczsHbOjnArce28G1VQE74I7PDxMsQshLGbzP/zj3/uzP//XAE7CEQiAGGUoHAYrXJEkTvtctlx/PAyRD9oTyRvc4anKJO16XW+644VqrAPT6wXARz6U9/68A+v0Y+y5i04fa3IoEJyrfCq8Y2W1VLqsH8dJiOC9o6oMEgXiY2PvfnD19S89BPD45cflbH5Qeqrm7/3jf2lF4SouYBVRjNYkA7Cp+Z2f+Ru/+ycfHB7P//jdLw5Q8NzPZ4FLC5GbbQLwz/7oz2W2uN0ef/7+D7795mKu2rGrLFWPHv7gk80PfngN4Bf+g0cffHT6tVeKh289KEzEgZ0tj2afnN48O98CePjq4267nfOyLAK7ABo2NoZVdKi0nM8tY0ZEQN7WcURwWWmUCMhZ1TNUpZIDCwjQMT8Q7YAx69HoCTsyn/7uxzfb1LHzUeB8UXh03TWFgsDo91dItVWoo+AoEFHSNFwxN0+ejBVQtYGweReLHI59jn83PB+R5diJ0SnczY17s3S/O5TI1FFQM5HU1J2iBdBKwwYyZmbvPHtvYEmSkAoOUToAqhJcAHY5qsjurhj7SpG9Y2k7oDjPxTaOkaEijIDemBlyp44snJHF0wtE5lF
BlptSJI2r0sjazsVxD03abtAQgJhSP3yZR/kzVYUheB7aea8JbMDIxg0Fwov3DX5csbtOuSJzUW1vuGf5bjz3BHmV3giOkEqJy9D+8rde++VvvLUswbJ+4IjUU5gDiNY5lwIXqiB0yzJ89/ufvffhUxo4ofN5gNliXrhQHTx45fTZlSPvyBEXjnnbbAAUIQyPsIfa7Sqxf+Ad0PYc3DaduPbraQqw0NBN7tY6TQ+n/S92K8hdRBIDQEK7U/euOD7V8NdkW2IyXvaxoede0Dt2xCpiTI48eU4iTb2dFX+55uZdNGXyZC9agCaBHS+sQ0csFn3wbMzko4hoDMwENLFxzgPI+4EVhTZuj+aHH3/2o9OzZ8vD45OXHj589HCxWACInZopwQrvq9lSKjVLq5trWMp39+yMlMllGoEkcc45uLJAXTdV1QFIUczzS0dHh7Ok7QZEjgGGJ68UfDkHMK8WhvXwcv8Pe2/WbMtynId9mVXV3WvY8xnuhHuBi4sZICiQoggNhCmLohS2FKGwH/zuJ4cf/eJf4weGI2zzwXJ4lGlTYoCDCF6SIAESJADe+cxn77PXXlN315Dph+ru1Xu4F7CsCJMRu+LE2WutnruyqjK//DITw3i+LGz5y24iNEyGdwNbVFJKMaU2RFAqCwuWEMP5ahOTTMoCgGEAFRw9PH1OROt6UxXp4fNw96BsQ2uNA1AxtT6lEJlgmQQSUywME6NJeR6FNexgvGUyjOz5J3KGJoWbTCcAZtVk27ZAFBFRZWLDJEpQNc74EABs623jG2MsVGKIAwueCGmAz7Ju/MkOkV3nd5xI1eHYnSRdfYeXZW34SJxVV4IwBvS9P7QbkNTVfAPnOmI7lWQHKI4TDl8axHnu/6mmyeH2+3Tbu97vRrL2/M1uj8vSQv1N7x5yQHkVA9//+nVJx9P6bbttt+22/Q1qNooQkSYNEDAnAmuhgohc7RUAYEwQEHMTEhFijOxKQ1z7yOTI5uhUaISoMFtik1RVwGQdLBTZVchgmEG1ynRF6ZRu1SAegCHjyEWNIcVkpDKTvb2jervl3hJwbLbNllSjknrfRdRKgpom1AYEgLmUiK78hQJJWMCkRLpsFoWpADhr1aiKNN6bnBLZlZLivTt3FufnAgEwnc6w2lirVTWpfeuID0xlKBj1nhKAdoP9w+P1eilCVeVCStsYClMw62K1XEkD4PM+8XzPLRbtdjOzRXl4b/Hi7GR+sFk8PYU2QQAc2io0zYvN+XqxVMOREJrkMHHT9ORiEUIOqrWT8sRZWqfo49omnc2Oy9lc1Tx++gjA4eG0Xi5WLSWoruskbj5fUWnLvQMJvDebATh9eiZECpm4STFxUVsXkSRRUWqKlQEAZyZ1W2+X57MDGKmjcEPC0IKsNDnEBAHxxeK0DXH/YNLWK0iyUAiSF5O1WJEmhNa3qkKR20DTqkoxMct6E7JgGSaa7k1t8fz8MTNDxDKzLdKqrUHldALAN4GUA2M6saneRrjCFRJjDMnazGHNtZKGFbp34tNg3SNKGkz+XfCFQEXI9LatZlPYEKmQiHR6AhF3hZD604qkTrZEk0jMMYqi1hgAwQeyJnOQFMkaY2xpGc5ZFUGuDUOWVCYlSUQTJBELdQGbuXpEUlVJGavqtbWYdRhVzfhTAZpOy0lJjV8bBpNJgmnhRPwQj0usRUGzqZkU5sGDdxYX6/n+cSJiUbK03QYAUNLCJpGYkhKYTEqS9dSMCK9XF4f785dff/XB40fr5QoAO5NoFtrFg43/P779VwD+i3/xjb1J9eD0xS/+va+enf150u2SvJ3f/e3feJLefufnf+41AH/85xefvoP943pv//nCR0/F8dT9+PunD58HovLh8wsAm9ZfrJv1qj0+KI7297ZtGwITsK7bZVMDUGUlV0fysSGaiVjRSKwgxNTmOryFm8bYdihDnneYMwucjOkNIiIiBqMP0wYPjMnONdKFeBOloKCOg0VE1GMNIBIy2WxMlskQBUgSVa1mZSaXFWXJylXhiJitbWMs2E4Kq5KmVUU50gqoyooU0ZRV6ZzBfDKbl/P33/nx27/9u4d2DwCbJLkyZ0raaf7iY8Mj1ZQuh+uNFHodKclZUUbPl8lK8chg7oqGY7CpqNPkO5W+k61RZcqxkTnowtkGkGvK8W4HGuM4w4+QriRVNhSygdcxPVMMyULgrBX4GBIDEBZjnWOJnDatn0wqamxKshI9+cwXANx9g3iDD1bpyLxW7h8gbIp4CNq2QUhlogSgnPAHT/yv/U+//w9/5Ve+/Qc//MdfurdnrNqp34opHSoHYLkOmu5s906ePf4dfPatZHxZyWmDpnGL5+snz9cAjOVG6tVai4IPCm4UpTqut6EVbFYA9IXUoVnDQ+CSL7myxEzkQ5qUNs9FbVDbFxC21gA9RQOAqs0Ogw6AgmqeiBRQQi7d2q3bllRUVDWJipKPqXBECSnqtCoA1AE/fnC+lpaNYyoB2yRtRJ11krQFAES0RExsLayoBPVQTTkgu+8+GeYmkDFGoVlUMYyzEZw9HDd0/QDeDZZanrxAO1DwEhesj2djNkkyv5jrsBlEKWpSgJTaBAqkfVkxz909sDFBfcZtu7NdtvFohPiMuGRX64XwpYHTSbWOoMw+GnEcC9gX5xnKPXeMO7LWdoNoRLHL25P0N0IYcWdhjb3JNrfoht4VOBfc4Z3duFbtQnxHWB31NRl276K/1a77mC4Vx1GY/iWlYflVKIRK65rUFLZSgESJUtM0k2IO4E5oom5fveP+wVc/8wufeWmvaAFPhhuVednTrBIQnTB8SicHk7vzsqyoqijZvToZAKvNZumT2zab9UUdTz1cYNK2CSpFMdmr9gF4RGTqrOYMQ91TSOZv0qiDqK+tyF11MuQje5BSIcP6AYLuXlp+5m4yR6dpdFIddOenpB5w6XuWdxPqrtNVVdnaPnZVBkJwVlt2HZ7ldIjFEBlRxwBQx2Lui8536Qv6G+vvIYUUwR3HXxWG2XA5qknYnatbOQDt499HhPput5Sk9yegfxPdJ2Ya3RuGF5ujPaIKMefw56TRkCqQ1U3DlE2IBAEoSbKmUpXC2BA2T5+cP3/+0cnh/VdeewXAvbv3SEJZFEl1Va9P7tzdrJZFVYZWWQXApJpaaxYX50iyeLFYrVY++LKoCldslsvMKLx7sKdpU1YV2eRQa7Kq0YuobjUWZXEIgLTIil9Oo+HYqKSkRMRRYZgBxJC4rJoUmYhhUqs+JLBpfcg7eM9Qsqwpat3WgHPE2xDrNikkaAPgZG8ele8fH62amUhdTirHxfERizT3D1/y3gOQ1OzP5q6cCllYFEqOLJgqV3iPKQjA8d7e2TmqsoAtQmhVxRAO5gcHe3vOMIC6qZtm42OcTqdlWYWUmtYb4pBSakNd1wCaplGFMRxDEklMpHm9USHtqix2s6SqYYhqSElBxliFaEq7RB+D4I8kiDoefychnSLf7SkEo4DIMCl3NXqiyGw62caYNQVjWKApc0tHMjcquA2kCGXt7pSgBCZFLqEemJkME6iTUYlExH3sF3
Yr2aA+9k9No7VNFUCmL+yO6u6EQZAu2qknng+vZITndxpW55nrzpqdfUOsezdPoVN3b9ttu2237W9cs7uPO5UL2JUw282SvU+8j1bpE1UM8E8XSEjUV9QcnfZao7HGrcNJO1YFg5jZsjWGm6aNKYlKphsZskysKoVxdfAxRgGSJCISKJQBMJnQE3byIiGdQauGbQ6QDKKGuqLHpGwM16kpyXofVLplrm7bIElTgkJUTg6Pjw+KFLfbepuTvi3W69nMk0iMUbSIIlVV7U+qFGNly7OzjwD8+N139g/n770XPvjgvTdeue+bhku7iqrgRVOXJQPwm5U15vTxkspKuQbssvWucO229UJWBYBD3NRBy4JtWXuPtln7aFdnlkthBbBaNoWb3T+aLhcvTFHG5F+cvTCz6oDocP8YOARwdraoHBXqvG9izJ5PC4pAsNaVZgogRD05OgZzMhYQBirrYkDulBwgmZIhkr3J3IcAjRqDAgJlpvx6C3UZpSNmIjaixhhmzcu3MABMpnvT+b4lqgwFRWmojalIbSK20IzmKKMNAZGauk0xGVd0EMlAofkp2ph+1CMxNFKhd6LaoTDXa1QO+3TZ+ogBoZ0rdRQxl/eXHvUCEUQzvY5ygiFrEjOMIVAOz6PRiOmto85Ep16t2Q0XynY+I3qvReGskxREUulc3dZEEMXdkzkAZl6v1/t7syB+XhWcSmUczF3b+vm0jD4BWFxsjauMYWtNaLyqB9mkqa5rNAAQmuYshHIy/cZXvvRv/+BtH6XeejKpYiMUX2wbAOW8XK43JZvDvf2XX/9Uu3rx8F//ya/9+u+1m/jqnemDd54DeOvuPDhXx2iaVJlJRSff+a13Th9vLU/PmuU6B1GCDvcmmf219sEVRlKKQuBiOikANDGt6li3AWI8RCSFmESzmyMTysCE1rdZSIjBzGKYqY9q7OxNImIlAVEOzc5II2UPeYbkMgxNMMYM4gMg17PuuqVLEqqSBDEipRwK69s2H5JCSKIlGyUWEWK0sWVrimmptjMMRXSbWmeL0mI+m7Dqwfzw/Xd+/Gv/3X9j+kSrDKJdZNN1wfyp2g500N38TddGwcecusOARl93n3c1MMfYzSfew+W14TJgeml/6pekyy6FrNGjo3WqKBGYu3wKANiwKwyA733vwwen577Zr2bP5oa9QFnrRo4mpizKZd0AaKPy3rxpNhulxjcPThdfe2WvdGZSuo1oWVQAnp61e3f88T1uEiZkqsIk0cOZqZyuLpabzQpAWJyKgAyRiCdXOpQGFy8+/PqX7xleAuCk//yXv/z84ryAgtDh5GMiHLpVdTDbhpmrB3Dza+lW0MzK6aiPl+ZGdG8MXRXfqjBJJCaUpSFjADw/XT04uzDMPuZi3iaHDUZJSZW5zJdTzVVes1aQqYTSb+oxi11M3BjbGnfkWCI6gIuu7XVdBi5J1Yj7cS0ceCebIx7WYPHisrwN8nrDFW+8Gb389d9763lGV368vhvo6h1d3ZduOnC08ZM23HCgYhyjOHJIDAcOwEKOhOy7jKiRYNnF2BpryRQpprnlSdoCiFh//jOzf/T1N7768jHF6IO3TmZlQcpQjREAjDOODBixlfW6femlV//JN5t/9cc/JHFsKwBraXhSgRxX29VirSywRTGtJmzrsK3hAbA1eUwwMfqMQIScSXCcU1X7vMnaJ3QFoJSdXZ1q2sHw/ajq8Oj86tIwKLrB0jVzlSW828Q54TRUx5uRk0KnS7Qxop4127/hDqihfiQRDxmr0S9QusuU2vXVKDa/G6Rs+k7OGHAH9psbsgwPpGveYUTds3Ufc/zAaGDqIDTUPefV6eHyMtBd4WqWvuGI3k4RJGbjjCXmlNLF+qx9pwbw6MEDJT48KQ9m8zp4jbGttxApimK1jABSElGx1oqKtebgYD/GCLCqxhh8CwDb2uwZXW2bI8PNZs1kmMQDJIk1RMks7C1z7+jsF6YePeo9ocxmUJrB1Af/Krrpvetuzd1YvfHaq2307z89lyRHh64NCcDZorl7MgFRZV1ZVM4U88khk0kSiNrsBVdBw1SWlXEFExHMpNw/ms0AXtfBhwggUSgL5oob7wtrALpzdLI3nxeu2NRrAOt628Z4fHgEIutcUnXWxZSYeLldr7drACklNiaEkGLIribVXh2CXsHdcl8zc3Zh5NCaPAZHiQSu5knc/X5JMHBpCA3X0V1YB43WmJH8DapaZ7X2BusumSSgPZGdMq++70HlnZzToOz3o2AgQA6o4JU1hrDzyN4ws15hmV+S8T6EZVjud0/dJ8qg7Esa5+O8+Tq37bbdttv2171ZdFAIoQ8G7UFHDJghUe/xyT/mjzlhX+Y9AEOsCRHnJCOXQ0aGltUM1Zw/stNQiKkz+E1euxNbdjlqd7W5ICIRYWMBEHPs4jhcDglnogIFyDTaZixMRMEWw1K0m+Kp4CJIAKCibNmySYhRREUJnKCrTS0iRhhAhBq2tW+JaFIWxnJIQqCynBSFASBUwLIwE/G28QXz3rQ4WyzXq9Ubb7z+yssvA6jb5nNv/czv/uGfHk+qr33pZ3/wg+8/PX/8SjmxVNpqvl5fALCpCSKqsk4pCltbFEU52Z+Wbg+pkXoBgGBmk2kr+vnX33j47HmbpCzNxfrioChfPj4G8ODBs+mdyVff/OJ777/zYn0xoxlbapWKoAeF3XIJoGnDwWy6dVL4rY9RknZRw4SUkhcPQICL5Xp/7+jOvbub2n/46FnlShVJDGLYDI4QM/HefH6xXLBoUhgmx2StyWlonLOqQqRsrIpCW4XElKBijO3CAIlY4vlyxRCGExCT+BhnZanR9yGLzIZIWRLANrN/Bg1mCBX5ZCnvbIYhiLCzVbvQwO5Uvf3eS2x/zhsjIwiZRUeQbKWTYUkdHL/DZVQy3aKL9mXEGAEE9sywho2JUIWSMu+M/X6YdafaBfV1qk9OHscanTOkWje1YYJiXdcCMoSmlRgTANFgjDk5OIwp1iGwta6qPvXKK0/Pnk8ncykVwGrrfQxERkRFhMA5xVwUsc4CsNb6EJ88ftI2ddN4V1bWWmeKj/hrBQAAIABJREFU7XaxVxIbAvDh88Wb99znPv3qg1M5mNrf+qMHX3t49F/91//s5OjEv3ga7AaAuT+V/dIVsj07PZy/Gl8c/F//949QOJ5Yh4P7944BzAq7XCxWqzpEKksXal84Ll11UB0mdgCer9cXzULUMhWAgBQsyAywFDPMTSQpaddDYJCyKsh0WN7oXaKPkyHkkkTZrpAMJncgERE7kjH0Ix2vQ2WIioWIpJgT4quqZkgHQGMaw07mCjbExjkXjv2qXipxac1LL70KoCwndb196eVPxcWKCAb04fvv/st/+eshtkUx6zM9ZZ6O5LmMbhbMjxsIdOXTFa39MhTyU+AtN2KWH3OcXtvS22E34Cm9mX3pkJ31TsyUMhIwMB0ytKCinBclUiLNwy2bgevWPny6+NwbX/jgw+/Prdy/s+8lVUWRgu8zpaK0hZkcPjvzWhyua5yvL2aTPdEmpjiZ8NbXAJabIJMX1
YU5PjpstxEk8GQdXpw+ffWwfOMXvgDgB9//3r/41tcvXpwta//a8b4D+7b99P3X/8v//MvJzgCot0n8b/yb3y7FiLHWMIEMkwhR/0oGhtbQyUMAFy7ZV8P/2oULdvbUjlFIUNCOsRcinDPW2fefrgD86NF5FE1kFM5w0QZebptNExNIBppFH0OnA20RpEMNh8E0A9FIUoERINix+WgsCz0wAfQG26jHx+J6OZJtJCz5tsZBbcMVezuOMBL18an1quxdP33eeafA0LWtl0946Zebz79Dpa6PibzpGtR3zbqmked2jE2O7hk7bazbMOJmXgsKvtau3fmQJmTY49Lzj2iuRIP13pOvHWlXBEMlFZIq8TP2AL7x9de+9dX7rx7Z2DRTMtWcFRy8TgtqU7esFNZt6tZZrgy3UZA23/zK55/U7jvf+wtxEYCdHNehTaqRS8sBxkZjIwBKpJRyltUcALQDIKgXUAAyEIx2bCNwShHQ7N6SHtoWEBEMTA+99OMCA6wuQxfQuF9GMdRdHwwM375YEw3LTf+mE/WJ+QhM3CVTYQpBhvP0MIr2MwbtBgENWggRkFRuVGQAiMQsFJS7r2eMDsFR2HV+v9oRXz7ZJcfApc8j1LIj6V6h46LDSvsfLk381B83vpM+60TOi0uOjDVGII1fA9jUi5hSqNu7J3cme/uIwTEBJJK6bjLETMaYtm2dtfPZbFJWT5+fbuvtZDLJEd++2dLUAkRkjvZOLBtCqpNakCVnJ/tAt7ho3wf5XxJVyCiAQjMOnud2VSGYJMJEhnezU1Il0Kww5xeL9x+eClHhrG85Jz767CsvOVdaLptYJ6vR15Oy9DExC5Ed9BlVElWISoJhKopq26ayNPN5uY0CYCtalq4JdcGVsbh39+XD/aNyMnn27PHDJw8A7M33j2Z70+mMmL1vmI21SCnVdb1cLoY0TTGEXe7DPuj42qzRibcIkihIIMzEypf3GH+h4XWOZE4H9A0d8J4V1AGy4xGyeeUu8jLQ5Vym3UkzadGwiA5c/bxM5kWXMSTkvayG7LDLfpyOWA1yw8UxXO4GGb4Mww4zc8YhxxtpvOTnc480gd09fQywe9tu2227bX/9m9WhwuOYhT6y0gGgq5iQK94MPzOQE8ldti61d7JeOSUAdKjnLqIkrwIKQa8tiapKlBAlJo3wyjCWbDR+UNZVAeKiLLdtDVVnXFUUCorQjMKQZIf2rmROpkZSR8nPijIbYmMNEbWh8clXplJJCmKYEAWAKdy0LFKzqZuNKawCzhUppr1pUbcCwMs2hBhTLIx1ZC2xJF1crDjps8WLC8MACpjD2WR/Wnz5U2845uPD/WcvHj579szC3JvvL9YrAFVZTeYHIZlKiJdN9ClxWqyXJ/sz9UGamB+8rPilwwPvPRMS0arezKoyCdWbBQBX2RjSew8/PF0u9vcP2+22VZlXe6dnz71gdngCwBq7bdpoyr3J9KLeqkZVtqZkY+q0asIGQDmZagiLxYXbvzub7VfuPMXGcBFTiClawwB8ilNXOGeZSAWciaaGMhMQQIghxuC9J2sNMXWvnWNKgpSd9qFZPz8/CzEyTEzihaxxVVmI+u22RV0DEBhnC0kgEPoYNIUOmf2BDEBcq0M5ar3nGr3Sj5GeMYjtTiSHAJFrOMqgzVyWecLIEu7CqQgKFWZD0JxQjQgxRQAmRSYYa5hz8kpRIeVdBBxRT+DAgIHttDRCDqdNhXPWRE1qjCVL2zpMJhSiskFOKSiSUpLz9TJryzNnDeCM8SGu6lOGAWAM+TaIakwZM6WURFWNY8MWQKQ0n0/bEB89PrXGeO+tgY+ytz916pEUgFVeb+W33/7x65/74vvPFoez8lf/0d9+3i5Xy0dP3vlhMFsAb07v2pMZ4uRwUuo6/eD3/vTZo4uDiXl8Xt+7czwr9wBcrLfPL5brpt3fP8y1plZNKCoT43btAeDR2WK1aZhLw8rkQGpygUhQrq8LgDlYW+SXyEy5TgwzEdOONkWZFz2wGmByGQ3qXj9Gf7voV+pza3IuOgFlSlF6hLKLWJVMrlSKIQAwClsVzhiQiSKb1erRR+9Vk5lxrq23h/sHALbbze/9zm+++ebn3/nenzHBWvvBu++fvnh8NDv2qQ7SAiio6GkIuxo1lybumwT/E7b08noFCbpyTD/KejblZX/+MBDGo/HqsXRp17GlelVNx3CuGx6JAKURl5CoJ510Yal93ClgiKwhIsro/ybQBx9cfOFn93/8rx985XN3C+MMSSOYTCZc1bIkAGTk0Ts/chbb0w+PSplUbrn1ZVmJ6sxybJYA/uHf+myazV46Pv7VN3/5j7/z3RjjtDJr72Oz/uff+lo1mQBoIlfz6e//1u/HZuGVvMdsRq3Hk/ff12ofQIp7lMKsKMvCISRnGQQm9M9CAJgVGUvv2yhzIPXlAro0hVkSO9ixf6t9rxCBmFSIcpW5qiiC0g8fLt55fAFg2wZnCxEYU4TEqzps65BgyNgURPsg0zxtZtA3SZecYswu6Z06GVHNViuugJLdsj8qg3JFUEZNr/18A0a2w2V33Csdbb62P3Z7fEz6yitHdM9HV369JLZ0sxD/xHPf1G4CMfu3138ZGaBXF6fhnvsjxgvVbpef8mZ3R3z8ATlghmF6amy3K+c6EqoRapwx6mK7mTEX0h5V4e//zOsA/unP/UyMKxPayrKhaBhJWJXrkOazGSsBsK4IKo65Ip1U5UW9La3+0lc/9f0f/AVPCgB1oq0PKsarwNq6bZAsMUdJzhpKEcjO1vzkPeCRIURjVbXvZcUIInQ5NbiqZNhc+jdMGnKulJ4Sh1HAfkr96rLDPggEHq7QwZhZQchUSgMazoc+rQGgmuuU5TL1IkmR8qXyonypj4YbwIjQ2FEIcr8QdyUAr071faaCXESKBvozCJHk8v69iqRqLgvRpYHaV4XKW0YISgZgdsPliljpjR9HuA/6HlKAQI5c0pSQRIXZWHAOf1ayylK3F3/0h783nR689vprr732KRGpm21+UdvtxjlHEMO8bdvF+bmxdjKZtG1LQFUUAOq2NsbsT6b3j6Z79tAQSQpboal1pasiFwCmZTXck+aEi0mSSA4UyGijVYFKFjgRIgiBRJS5i+bePREZZ0trzOG88km2gYyjlBTAutma1scURFCmctMEt94S0XxaBR97nTG7SDMPkWHcpLKPX6xEaW866xKCgx3zxJVk3d27948PjpXogwfvPnry6N7d+wAODo6m032R1DTbJGKtUyUVf7FcqGjGRlU1dUnSSUX6AvA67reh/5gMKAEsouCcAgGt9wCKcWbSq0TI8Ylyb1P/tVMi+hy+2stYt+2q6GC3QGDYg4BM1yRNWf/Jp8nRNQRSHg3h/DdXGO/tyR5C7+fcyxJ9eTyM7vuT2rCggbKaeglqxKW3urvK8PJpeFu37bbdttv2N7DZnLwRV6IobphZc2DqTr8lZJr8DgYakkld8aUPrV82hn+761jTq2UZnVEFNIl4+MqWTBSjz9w9TmyYnSnKqtq2tWAop7MzxDQvhCM4kpGz
gyCq7zVGSiJGlQ1bcT4FMFWuKqqy2WxrvwUwodI4N+N56+uU0sVqQ4hQ75MkIQBFUV4sTi0MClRVFb1//OIc1p6c7C2Xm6VvANw5nP+vv/m/xZgeLRbr7XfJlvePX333g8d2ZsPTRyoJwPm6DnU0ZUkUHJk6RR90VrnD+USCNrIFUFTFul6x0Xc/+qCpw717h03tT+aHk4M7p2dPAbx8sv/s8ZP14rSuV0ezatusXVE4RtL43pOHd4wFECiu1yuyEYgpehVNSiRijOEuEzJa78uicDzbn08MT86fPX262hojqkgSQ8wpb8LEFN6H3NvT0uUIQdGUSWEMNsyFc3FIC9NReDDUFpxOimbdVGUhXLTL5KGTycQQYiQmbL0AoMIQMZIUxnkSdCWsdQhIRKf1f9JCfF0YNSvdO5uiK6etOx29u+VuWIz175wJG5kGp4Pw72DOJL1io8zInvGOwjAEbpAS8qvoRDZDlgNPbUwx6NhPvfqTUyCRiqZkCnLGMhsRLQpyhSOKRJJVRssGhlabrXM2+tDqljaNNcZBI7BeNwCsJYCYlaRzFRAzyc40ZlhNxllLZJL3hTMCscZt6hZGzloPQLj64uc+9ev/y+/87N87/PzXv/DD727iOvz+t7/zqU/Pvvj116d3SgBh/ax9dsb6+sMfr5598Oi3vv1RE9uLU10EOZ6VwScAzdY3dSPAW5/5dFGUTb3+8PFTAVb15qJOAJo2EFdKduODs123syFLxhiT2drOOnZF7nVm6qi8xEQ7GLsDInsrEETWdAFo1NNDenQyx6Zla5FzlXghAZASEmnnkskRPswsuQc5py4SJBYNPgCxbv1mu90spmVVLdarDz766H/8H/5b5KEY/e/+5r9yXLGBaqqKsigtQUp2dfQAFGmYOamfKj9e5H9Cu4Le9zZRt/HyvnrpYjfhjh93kfGu/ZcxOUvRU7z0hiF8xeLdjQseguM6LgNShvsFCjWk1pAzFFXb1gN4dnaukT548uGUp9aAmRmwJBLgUqu5cBp8Wjz9z3716/sH+s1/9ouxXp49PbMnzlnTeGE2AL71t9582MZZtT8X2bahLAsmloSjg2nUsFpvACTs/dGfv1M37b7RqDIpXQjt2XLx9p/98Etf+wKAi0aPRSpT2GrK0qGQzGSRJS+XUjUKNny9fxXQIVMVRgAWUZ6+CSPmElGfggAqCfNpWQf64YPzv3ryogkJgLVlm9g66yMvNu1i5cGl48JLUpW8NmXfTEZruux7smNh7PSDvnOH1X88gwHDrLpLTdhbdthl37pyAIYJE1eiWTEWL1Wi6xSqG062+0b9fwDRpaLY/+/aNVXnk1ai8X3f1MbBhlc37YzP7ofrJ7ty6RtOdPlue9P6k+aRT97at5gZSDp0Og+5L4CUyLd7kmZo7x/j73zpU7/6jc8DkLCwFErWwoIMiagIFdaqio/8u9/7EMCyTV/+9N5n7h0sm3ZS0dRG+PWEnEXwzRoALBeGfEplNLaYGRAkkeHACihrAJCi9os8BALpcg9QUfZrPGiIqRxq+w68ZObdcqAwPbqIHZbXvenSuIzC61ARo4NBFf0EO7CM86ULwwOorgrRXCEKClhj81HUJSymLhHkUOc939BoJJJwHvMANBeE7ymS0odry5U6H+jImwnU1YBXyYe5Pq9xJwiEjuZM49wy3UvC8E6w+58uCxvtXnbe59LSMOw4/sQDoDn6uXPCkTDYgHPiQqHY1/tQAEyQJIvF02enD8+ePTu5c1KWZWENgMI6w9w0rZalpNTUtSpKVzhjog951d7b2ydttm0LmjahLYyBJkPWWi4cZ+qutcQm11ABMrmOmZkVlAb/8TA/g4k6kD6JUK6Nk1mrTAARm5RCfbGcltxs0rYNHFImwi63F0UhvOGjaVW3Ngmt62XlSk17ZIp8nhw1lJWZrCkmQUhCzE0SItu/aS0Lc+fu/Zdffk1Af/aD751dnL/55luHhycAysk0tFEkNW1DbAxbSbq4OA/el0VZN1sAImKtjTGmFIkGckk3U4wYwAAQYvQxERGYQ4yAGiLOWZgudXrnveizFg3WJ/Un7r5fiYvK773f89okflkKqV+i8jLFmRHdgef55yEXgQ6XpbFsj66QEcOrKQWu3sBPraF12XOGh+rUUN094uWrjJb4f+e167bdttt22/76NNvrtVk76vQYGmbHAWHpp3Tt/0PW3BTSe0MzY4gG3LI/Si+vUUo9u4eUFAwQIYeF5p0kwRjbp+zWIN6QpT79NxEzW2IOKTFMUpEkASmK+NhqNtaEDfclM7uLdqAPsetDZzRIgIezBuhIbWQ4qTaxzYqNKOqmhWUiTppixHrTFlbW2xfWZQfpLApU04ytqMaURHCwv89sVRPbAkCCffzg8Xxv/vnPfPb33/72ay+/Pt2b+xhjoLiqs/25aeuKS3Yh+NoyrOUQGml1cfGisHBFCaCYzqZIzPTK8d7ZanXn6GBWFU0bDybmcDoHcOfOgV8vWKy1dL65ODg4Kcpiud6SMY5ds9oCCFGXTZjPCh9qjTop5sFI025j3Gy39TYCwMF+0bZtoNVyecpMZI1hxzBsjYhkwhERiWrj66QRWanKC7NoQmaoEROYyCiIVDSFFA2xtfZg/2BSWgBNSuWsIutShMbWFjwpTLPZmqIoXJmQM0xxh6wZDpLQZTfPIroj4HwMo6oXqc7ZOCQ/pZwnSbt6oJcsu77O6FiTuGSUEbGopCQ5EWSnhPWRTcRGY6Je8piVu9eVAM0ucSaWJCJi2DALd0XmdaRsESmRNdCMfOoQJknQwhkASKmqqHBGFUVRGi5jbI1xpS2Xm3X2q88mZQ6fr4pi02xDXVfVLElUl/aqsqoypkbL9dpZ5wNWm0YFtjBEJoYgUQAURRVESltuN1tmZmPrtp0aNWyEDTQACFQc339lbgtLnBbPnj45+z9/4w//zi+9uXc/mLtybrYA3CGnd7Z/8r0fGb/3+MOLbd2+eufw2WqrjSzqEGO+GVls6oP9/aosQlSouXv0yvPV+cXFxXLrARBNS1cFUSKNoWU2CsnZmfpUe3DOEfezUj/wM+wxokeR9sl3sliI9Ab1brZDH4WW1WTNMduiXSx87NI8SAbJ8zkzU8/0OSuixPPFYv3Rw01TE5kQfIg1swMhidpcR6Wojmb7qsKxItJE3hBDVSUE9T3SIr0M6A77pqshQj9F27HJMIygcSDQpZH0MWe/MjjymS5fI/serkBSuBnA6X4Z/t9Zuz181RsHAlIyHdl9h3sRGDCMJADBWBiDFCnFAOBzLx188e7fdvuzX/5P/sPvfO87KsFHmk4MlxNylMsfsaFf+PoXfuFrX6oDWTbvPXpw+uw0RCFIUppUDsB7j95/3Npk/OqD9/cP3aQsKMEquUn1p3/1frPaAPjFv/tL7zx89vqchKUw7L2HxWQ+K2CcqwBYO9s+eTgpKLLOXI7QBoGYCao5yI/AolfDIUcvbLyeam8zdfEAQyYB9Bvzu6ocbxr5y4erd5++CH3mk5DI2mobUtOm5Tb4SLa0XlJK4rhM8AAGAe9
CGUBs6Gp4dT+shuA36gnjg7mKztGjl9CLa5LQPdXNTz7ej/oY1Q5TomtSpYMs6XDMtXNdhkgu/Q66nkUYnQE6BlquPsDNa9FPYZlq9q2Od90N1bzS9bAOxuP1pmvd9KTDrp88Z+jo7Y1/ueGE/TqMDqninRxAiEAi7Ot9QmX8F96Y/9Nf+MJbL+2HegGg4hSJI7gimwIHlWlhQgxVUS7Vfe+DUwDPzpdHh5/9/Kt32bcKJk3O7ZVCX//sq2+//xSA1VS4om23ktRNy4qrzXY5LYq7J3eddVYNgKxidtqJqOQ6TFDxYXgPgmHgEIgktKokCoWIINe1E6hCJYU+BFVEc4KTlMW/Qjf95wVberrlDXDCzouZhlfNl8RTqecmd7O95FIwpLkcXg9uondVAoipU4dyZ2f/WJYn6eGi7PLcgYxA0hx03DHDkDkGI/dq97kj3ymQo6Sxe7SxU+TS6nEJ+rwWStqd/9IhvazpANFcmhB2U0QbW8PWsmUiyYBrZ8l0lHliTGw5qSanzx8/e/6wKquj47sATk5OTOGQJPogSabTWeGK7WZrmK0xuXwf28K3YbXZruoJ+UVVlI4pwLR+a2ConAFoQ7CGTR90zWyIMqvVaJScuIK6fKWUy3URSUoiotTT70TEGs5J2VXaut1MpuWR2rOVhcF+UQBwXDw6u1g3wXufBEfz0ke/rsnHyd3DN0yupSfBxxBTTDH41hfKxpSFLZMIFNYyAF+3Hs3dV+Zf/NJXz16c/sEffUeAn/nKzx4c3cnrzrbeKJEAxrkJc9O0T58/WSwXKhq1q1KlkpIIKZh5lMBjJNPIJdUAICWx1h4cHu/vHW7qzcXyhW9bZLWZ+0pJ2vfoZREZ222DdHWesBH22GvtuvuTM4H1rp1dOrFrsx711GbtCscNe/VFlog7VxkREqRjNA/XJYw8uVcTGPQTuY4e6soMvPPODkv5MLfrx7qmhv1xGYv8RBvott2223bb/lo3m5WXNM5h3a/+CuxiKwYjYueX0n7m7mI6+gw6uuNQXptQs99JOy1k0IAQY+i1kL76IVFOxkJgkBpyTAaAYZPTFzZNGyUmTUyWkiQRKBs4ACDKFI7+Hjt3NROMMb3ylEhJRELQhESgpBJiCL6NmmyOCxbZJs/JTIrJpt2W5WQ6tU2zDFEMK4D1apWEo2jr2yb4wphMg3t6eh7F37l7H8Bq1a68OTTVz//szz948u7zF+ccPAy2K79/MKtKBiDSzPb2o4pLxjoy4IINxRB8+9K9+1985R6ANnJdXzx8fvbk+Yu9vakSHexPSXBo9CKsABg9und8/8//6r39vdlkfjIpi+PDg9o/VK1M065OzwDs2eLenft1sw0xpZhsRca6JBZJysKaggFMJ7Om1u2mfvDhe865anq3nBgfAgMhxlxIvbITUc2lpUtLhXNEEElJJbMjg0iuuedcleNPVSQXn2DuUusULAf7+7acnC/rKGunWKxWB5M5nDMAtR5ACimJODZJo0pgsjkYkJl2igDwk6y9ESROu2RXjAHCGtQKyurMDqa61pgoioQQk3SFelQ1JcklR2gs26qcs+IziSihy7xp2ASJMSaCYRJiQsrDR/pytCykluwQ35gZQNk8nkwqANFL4YiIQozOFZPKbeqVD20AtluZTQmAYWp9GyURo3CVr73CiKZNHWbz6XQyA1A30dm6sE40VyICMxNIjRZVAUCCqupysy7KUhVNiMZZZo4pQkhCBt3Se+89/csPX3zjdPun3//Rux+++PAF/8dvfYP4g/XiYfXyEQBCIdv50d7evflLf/n973zm5XtibUOLJl4UVfXo0XMAvvUxaVXy+w8+DCGtNhtXHi2berX1SS0A6woRxNhKDIYJQkSG1OjAQQFSZFsMBs1OOLIuO+CT1JPy+uD6gfzd4VzUGQ1kjMlU2I5bKZl5AkkSfUgpxZRSEhFRhSQRFVXObILttjY2F0/mwjBBC1cWtvApmr7ioqSEGA0ocaOqRgksBqYNLRt0uS9y/ikodGRM/iT3+1Uc4SoO2Qv85YMu25BXj8eg+A65BcY/DoddQnnGcGenn18Nc/3ER+lvGuhTpA2hU5k9BEUuOJSBhzzH5KR13/rFLxvm1lO1f/dPfvh2DN6VRdOEKjZGyZAAaOq42K7++//9O2997s23/+Ldb372lb0plU6tJSIjUQH85QenbbG/MrPNo2dfeetgU0dXWBPaJhkzLUyzAeC5ZCiJtyUZTY5FnSsK+/hF+9LKA8Ce/Mk77715b8Y2hbQmOpJubSJBV5nUMCdhpvE73vUGd3bsuPM7yZTrb1E7s6Vw5rvvn737ZCHEzhUhEQDiIol5sVyHWPhIZJ2PqW7bonCGkQsgKJAJ6VkEO4L2IIK9MTmeYXdLOzJi3htf464cCecnkhNv8DRpD2GMUcjh80jme1uZusD2S/KJ3bE3XPXK38srzfDrDS/8atOrf6+yRq8f0AcX0w0rUI81fcwNj/M4XNe+PvFu9do++jEm7iVKjhJgCWBAVQ0EAENYhaFOUTrssf8Pvv7yP/m5z5Zumpp1ZQOA0lYUURqOgqRmYlxK0Rht2zjZq0pnARg2ZHk2qTTWHpx4wpwOePuf/v2fD/YvAfzxD99x1YFzrvY1WmjwLiXe1nW9XqQYhQAEhSGyxJazk7RrzlpiY3IiZmuNMewc57wzswmhK4DTOzs6ScxBpgOEPUYaUy6Dkllw3Ku2RDG0o3c3wCOkgG9ala6llPLqn1cQUhFNSSVJ/j9nBdHo6/7Nqw4DMFMCxzEUXRqSDlE33BcsvsI5Bti4TmI69tkgQT0eii6WfUCLrsMrwzfdSeJ1WbNj30R/OhqVp++lT3NiqMtRKcMO/RUUGjVmZiCBuK/+FzVaY61xIfqYIkFLV0Bw+uwRgNNnj5nN/fsvHe3v02x+9uL8xeq8KksVYTb1dgugWV/MTGrj/ma7NdIkgTOcyGhqUyRbAUDru1CdbGZQBrnzDRITJQDahdD0ddgYWU3og2IgqsYYw8zMk3LmbFFWk2YPDxZnF3WzCVsAKs3edGpMPSnspKwqa73hp+dbwzjRlPNci8QkqUPMVVKM2/WWjIaYWNVSjrNOr9y7861f+ce//Z3v/eGfvP3WZz/3hS98ragmotK2LYDZdLasg48+pgTR07NnDx99VBRVjCGE1loHgIhDDIYNs4kpigoP03Eve4N4CVFs2hfnL5brddPUwXvnTFd7p8fSehVCMQjZ1f4e/vbu4vEu/ZqUL0s9u3KHcV8OM8qOiV5Su+lAetdZHwEuHYm415cIUMqpcnhUCwrdprEeNE5qo72Y9jI7nlG1m6Svg/TDwdeG12ixOBXIAAAgAElEQVSwg5DlaqdMAYM74BMW1Nt2227bbftr2GzWXLQnEGS+/NW5bCAI9SsDAKiK7LSxnXVBwwLQx38P8M9guPbmrPbaWS4v05+9S//MoKTkuIAiafCpARCTn9KcidrYWhhDsMYRs4RElMEfpD5Rjg5WK+XKO8gubQAGKMgyIJoUysRJU5tEgNJU+T6TCoOJqW69iG7r1l
lNHRRlYgdABnFcAmYF3Ka1uEp5tSRT11OviUbNOmfY+S5MzOTDAXVLSnVktAaM0feB81ZTJ1nEIpMvLGN8gX/Tsm9N+1Ne9PetH8/mh+jx5FDVGL3MWlOcy5+YkBM3+yP+vvfzmVG4wUnNUqzw5T+6JNMUB2rgsZf2MRsgooKAcTISaLG2rFqcf4syXlW1dLLcpgZWQi0f/8DllLp4AEGQTqx4ZxjLlz+osJRHJN5X6wEgjVVOF62WrndQE4JnnKCidZUmGWqMQ39IJT6nE5Ojq+ur6422/vL5dXl+itv3QPw7r3j41Xz/HwXLJEMt09Xbe0XFUBirvr6L70L4JPnzx89eXz79r3z3aZXUdaaSFParDs9Pd3GHkDrwzZrbzkDKeYbx7zrUx+zTsVHprjuopoY9O17tz599PLVxVWnybEHtMAE2g9XV1d+sdCqSYmDb8AWh5RS6lPnnQNQh8UwJFGEwDCIIJDPYkxqwGq5AjAM3QYagoNkUg7eM1OoKi05unJ8Mjo/P1vW1RAjQG27EJGYonM+TuieEYmomjhHfqzgTmpQZTeeice66/K39joic6BnNY/NcSDOicZpEB6QMeYEZHnI05iZUtrllJhhYpazaMqxmPyUgjVVFScmWkwgzdhMJCVJCUBdV0acs7IPy6MlSJOkto4pas4oKOFqWS+8S0kBSoIAP/QxOG9qBb7POTV12HXUdd1RFYQkpQxwjPloueq02GL2zjGM0hAJrJLDUdPvBu+cmYVRR6ZMEU6S026og/PeF3UhH0COAIhY1S5jHCxngki2sGiaG0e79aY4axMDlOuKiJVMiFTyACZTYzdOszgMRFYtGjta2uXLq8TkrDp72fcnqOrHz88BxJ0squZyPdw9PT5etUmsG+zlxfrV1c7AsAYAOWPTYmCoFi0XRUijWbVtn0weH/C8Dn0hCrM91jgvAtOjfu1V85dckiVmB3JcpEAxvC6rkx2enFEwlLJeiqIQtIvG6OE7j70ZF0T8vHbYOft5X/8V7Re/5PB+HVyZDu/m4T15rbJ77/00fYZp8d+bpwCeydS88yH4ggubSgF/dzmGEDwFtayqbMZEzsqhwhmrikC48qGqF1mTDj0zQ2UsgVOoN2JmchUFlgToi+evfnnJROM5VsyqysFcCK7vd8QVWLniIepHnz4F8MHjp9zWf/ynf7jJa7ablWNlqbnJsePgvvfxGYDvfX5hFd26cePV2avGu+1Vlzbx492zP/rOz54+7zwIQO7EBZzW7If0vR9+8u7d9tV5f1L5ZV0ZURsCgKO6Pr7f7taXklLH+Mbbp+/eu7lYHrHZnRsVgFtH1dHq6POrq8/Tk/MYD0urJvgIUwaPxm11XqjK3YYBKEgeFylCGAHO6aBS+SYlPXYC4EVn2p7+2Q9f/fn3nw6+VoMQK/b7LhMImqZiz3nhLEVsxQK1jNrCxQZ4JJmY0kFlWZkkyiObZKIGjRCBjR0WYmbHMFXJZhpKMKAQnlyiACJmMAjOlWJZMxiDRv0+oNBLCBSJmRyhUI3FjX7gEBNmX/AVLnXvImRqqn6iw5RZXERcR/NfUfZce/hQbn9QYyMfTQfdbxZmdjjlih8eQGYqqknTkEVBq0BczFggBskmqiyZRWexjfJh95MIMDWZbkPBc/YT76C9lo843NcOIqEvvHBGMA8uONKL55/s/+7nLihf8KQ6JJo6VWOvTuuIrB2AlZP/4rd/5T/8+vuI54aynlrRR94TknReGcf/T4Hka+8/DSQCWZGOKyRNhgAgjDbWtneinzH8qQJ9v8gdBIjEf/oXH//qe+0QOwB23vaSFsvVZRoCmycfmX2gSr15qbgGUNd16nIIJQYZfSumahsbn8UMRtD+/6ZdEyrULQBRQt2aD0O3Ccp1Y0g5W2DzripKknBZnHcTtvaFR4kDWcL99kZFnJCJq5FVKSqaMpsxO648ag91cF7Vk1fkDIBCgDDbMD2EfYETADdxRMc1aP8fQM6JYFQdgKZRyM+yOM37YTQtaOP/hEkFAhg7Kg/STIx0vIcGQrFHHhlfRilCiqJOUSQYB2FgFfLzjqT7twMABE/lGmYEiMET1Cgnea1rB4vvtuscuZxlGzOAqvLnvWxsiCTC5otij8mu77qYxRyAmAYq9x2jHtUYoZipjIOYecScgKJGRZvzy74XH1zBnqTYnUfy7KuqdtY+ul7/H3/009/+1vvq6+VJDUA13Tw6uXFyujpe3WwWL168/O4Pf3bn9Oj27dOGeJcigORqZRtEF9XpxaDf/+TRn//gk59+9NkQ8/07J1+5dxvAW7dOiKnr1yWdQgeTfk+BZ4NJGnoje/fW6vZJ8/mr9ZMXVy9fvAzNEYDWNZtBdjGCFMxU+ZTjGPjsrUMdwRGcgxiRsiZQIs98zG5VFAnUTDgMcCBTZCuKssZb7baFua/VqmGIbbtOAPNk0VKKsJHjUgY5KTGX0U9aAjFRkHOGQufMEomolA1N41lVZeHJvNcwhiKAgZEKy8GZUmHSjjHG+++++/Dtt9X0o08/apqmJJPYM3u2bHEYABQ+zeEiVv43iZyWMTAPu3F6F+Ey4rmEDzBIArm5CGLMFUxzDyJK5fRks2QCEVEhab6p1X7T3rQ37W9i8zPlpeAWJd9tqjRnXucA0cYylwIwThSzffp+3thKIDb+ve3Ps+NJYCIGHFY8OR57AjEe62DELMNMkNm8mMrI21EVhZKJmvEcBAAHlWRzpEYzHYrGxO+0WhdB69IjZkfMpuZDIEXKsRx+quBXi/poWfvK5Uta594SnbbhrTu3JEcAjx4/NbM0xG3MVVNJ8K6q+m08H7K/fXx5vQVw++Tk+OgoUPfg5s3f/ubX+t35MjjW+Gxnivjg9hGAIct3f/jRyZ233377nReXQ1TZbq6PKrz74O26pnUXAVjfHx2v6qq+3uyKt93Tl5cXVxtjqusAIJumPvmA48XCk//wkxdnF9d379+92g2m6HsFsO1yn8Qtr4cqDyLsvGNW1X7ouXFgA3B2cWFAVTGQRJDEjJzCmEhUur4DoJrJu+ApJx1iCo62XXfsg2QpdLkSqEpMZ7uuCkEhTbMModrsrplGQgeR845BllKO2VQFMO9d1w9EoQ5uHE4EkANURObk/QGeCMDUlDGTBMYytPkkPB7pJ70/MCmUv7RxE/FItXXkXa0gBk8Fj6502PngYTlldiw5s/NtUw195wihqgAEchliUO9827btonGeX3z61LEQj+zdk6MbN2/eyBln55tnzz8PVb1sa3Z+SGnUP8pi3vkWKYmIVa6yFDMQmiqZHC9aAHzUAlC1V69eiZn31TBaYzhNefZoVjWugrGHpigZxp4ZhC52hZwXgruxOmGm3Xa93ayXNxZGPqYUcwquxGqUcqorn5JIFsckKo68wmb8NMXEDGq5z7Lp6bgOsYsvYnbB/59/+C/Kkc9X7tV6+979221T74acUz7f7V5e7bouKR2NXBkzYiETVZMMIIFHu635uGwzuDedp2eCzxxUl3Xm8ISCeXU6ALMPMhb7eJwwkwfL2sVzIIgy6mhfD8
TOleq7jEQTHp6RaIouqZylR3+aA3AP2H+o12swDz/JL4otvwgdfHFIf/lbAmazr32p6h6v5y/oU05/c1BVtL/hAMwsOB8tRonefJGFVbOifdGGOueUNHrvahcASymJFNjFecdw3nJOQywGEEwaYGClkSKmkAg1DysFiXVgltgGl5G18EzZ4MgZJ9G2cRLz0mse8sfP1v/L//g/AYALRxT+9Z/9v7dbXK/1Yo27N0PuA+C/+8HFDz+5AJCzOOMs26OaVzdqOds++eRlVYf37x/93V99n6sWwMlxA4o3fXN65L/7k8//ztfvP3957QLdv/OuD7bbdgAWC3/k5ezcWU3vPbx1RMPJMU7qDsGb9ADS5XC1W0nlfeXSMFaaj9Vkk8pfqe6TcfsqjqH74Zoma2bPDDUxBRmxGQk7N/QDDM+HDKA5ufWTF/yH3/k4GStcQRLLKJ2ORARiN+lflPzQfrq9Bl4dnKIAm077XAAUGEHZQFBmTzAzEYmm4Bl+Mw0EB4VZlqymBFeO8/uK1bKUkxAAFTKdKjCnD29qU5lzHyXUtWNKQxQY1XWhc/ZJmsqbaSnE9Waex5K+yu0NCxwRgx2DqGxtcIGItZQBk3HWkFFFpReb7TzU8fqUGw+T4xajKSkEKUFJRs4XwUBqmrMmRVYrFcRWhL5GAENtsrQylH2KvpS2mFHUX7AkHK5qGI/BdPhblOk/Tfe5E4cvOHi9Ta+zgzVpovaAYWCjBE1pqHJ/ywuAb7x/8x//R1/zFh3PRetKZES2f9i8v94ec8TEPD3Y3OewMbBBjYueLwkAR0qw11b6cltpr/g81RqDSMefEwzuOh8LNxebDYCAqGxX/ZYp1yadqXGAiGSrF/yV998C0PepIgVk7jbIYY9CArMQUNEsnr41LFX8sCYAm80uhBBCdj4YHDqBX/iq6WOqPLkiGJISlPf6rDTehEmCWMr34zjev6rkU52Vwggz11SAQcUk01imbGQCUSrAd4Zj1lKxPaIeB4OAx7B/3CUmnVEAcB7qYGM5Qo1mGj86mi6NPToYewaiQKIQLXQ1mELVTFFqRsbLlzrWkQpGYv7gtyLKRETMpnmX9nfndWOtTc6F0Amlkh6TUd5x7ElJMKrZCGUDLFxkPJ1rAVwpfvB8wzXu3z1BypXjBMspy5A3QyoCqSknGBUVFjNYMTou7nQyThxlqE76sQwmqgM5bjlUBYMtAumiRmQxhsvNVkFn55vry53YQN4BWK/Xl9u0XC6Om/o7jz5YBV6umpOTxWrlO1WvAcBy0VarO2cDff6Dz77/kw9/+MFTZX1w8/TBnZs3Tpalil80k8jCWbR5BwDGAvMSNpNprioGqEuDSFp4/vq9xS/dXj16+87zF9cAnj+/gEhbt5H8oJTFyAIzg9XKlgUY2BQmyt7bNGTMCOTNwYpCmGQFE1hVjXzZiohYwcE1ALai109eVM2maeo61DFmESEU3/Y823yzQ5YECqI64sIAbCyXmaaIqY4l2yCwczD1noviOgAyiAqI2JF6SjmKiDFrziIJgA+83l6DaLPrVGy363MUK7s3jfpRNCmfTV0oQmJQE9DBTKU5K2KFTco8unrNMSgxHVDwpxqWaZHTMpEJNtEqQaPWyhciyTftTXvT3rS/Ke1LUmpv2pv2pr1pb9qb9qa9aW/am/amvWlv2pv2pr1pb9qb9qa9af//NF8yRnO50FgiAZR6qC+QdkoSB6/97LBcqJB9zHRS7x3NqydeJM/y+Jgo7cU0uNDvpVzREzsmdgZjkZAhYlkmjoiaZSnF2oUdaTNzYy+2RobJGIRAxqCJwuAw6sqJicCI4Nk5csyUTYOr1ExyVyhCy7o9WrRNcCpmQgyEOijw8vxMLAMYpNOcuRu6OHDw7MLxyQk0EdNb9++eLBYAPn/+aog5OOnicLJq7hzdePrszKk0VbveXryKPYDTG3fu3D4/Xi1fnK3ViL2ruQLScnHUpc31ZgOghrTHrgLWXTpeht2QLrfdoq1htO0zAKZw43iRYVe7HZlbnCz6692rTd/1yYa86SIANk+Wu6ED2BjdsGt9A6a2bo3RlaIbEgZlibuUm6qpvGf4rAOIDXj16gzAYlE1odr0O7DzjohJRLuu8+yKlmi/21R1vWiaQfNq2arZsmmJMEjlyJXnnzNqP8otqSLFFHP2ZDAwWx08gCEKYZbe11H9aj9YAUAh0GKjPFbGlKThnCAtWfC5oIbUiEx0FGOfCkyIqRAhjYlyFIUJqxlyyjpxbdm57XbnvK9ccC6oWcoyVqPFAYB49h5EZDpcvHp+NskahkDOjXI2jx49/fCjTxlo6rBYHEVREZPYC5lmARCYHVLq8mrRZoOquODq4ELw282uIgBw7Lqua9pm2dQxZslC7ALIFZMUFQBWrMM9jzam7MixmrFjMtXRNFDPhudV3QwxRbU21EzOqU1G0nBMADF75ASAQVktmTkfLEtJ4EM0q+QkkjvzPkVVzyE0/+Ynj9670d46OQJwfb1bLKtbJ0fbnvqYr/vd47OX17scwiksmOXp8U3uPMaTIS3N5Kx5HRqrYWaF8JlqhELz2BNmXluxvsQumlTVaOQ8TiITB6/+4pJ3YG1BTGZkpWQbICYGlRWQ97zxkZNpB1lxHHCX5tz53P+fy4H6d856/zU0qqK6aoevnO6eTLXCUyfGy0gaAEy85lI/RwQi5kGzD5WlLDpSXYrIYAhVFGFy7DlrUlXnHbmRTRDIoAJVSFYVYLS5p7FatRTwqmc0nlYNNbX3pHeXzb2TmmzjnYGL/a4RMZnWnqKiqipJ7feebn/06flOGUBNuXO2YG6q6sknV7e+/bC3nav6Zy/4ydnut7/+HoDbN28en7ZtzTdvLN65sbi83DDbW7du/vhnP354o2VPAFg7iUOy3Wlozlp9d+nzuWxj3736MI+KcthdcR9CTHBWWSY2OFNLO7XQ9xFAlSXUFQMwWdYVMzmGG/WtxpFVKqKL9m2pmh5peAAMrjwVMlOowkAheIV0WYdBTSgSeHEbwF9+HH//j/+ii5YoNCO/uxjk2DwQR14PQFQW0b3dB/N+iGJcTsdZN0ozA6WMYCyYVSVgN0TvnGMCOXIAcSFuZDMHFhEQee/MSEclSh5ZbDYvx6Vs2aDKE/1wLrLmafpV3kg7J9QGGFG2pBwAtM1isPG2BaaFd8ua28qC0z7GmQ+8H/IjAUUJZlrciWHmolgmDHLI1HttSplZ8TojAhOXwvSsprAh53L3mF3hyilBGEIIhQvzWhwEjA/BJoKPvTYFx/WMsGfQ/NxG0xh5beYeriblFVqWyOln+zqC1/h2r9VN054QTXuGNIoqjq/BnLuFk/duBQD/3T/8nbbxw/rFSesxegYqRhrj/JH04Mr7uxtKCfC8vs/0YUJgMyhB1UZ5X4YSKVP5+TiJeIz8JrIpC8qoh46m2WzKIThUnhetB3ArhEjCA9RVFbhqsKDGSA3y/OyyML5DcBiuQYJCsKTifsPFlRvAqO9JNJndlb2DmIVD9cd/8GMATvjGDXZqrceipaZZRZXaWczpqDElAtAT3
b1/09ftwYMlw0wemGhQmEQXpkcTNdVNLdEAWMqOLEPNsQ/erBgcqQlAXApIASXHPMfk+w22/FL2HTikxhrIkRGXiu9xCIkYIGre83SJKeqamsKNtEtmcmyqUCHy5MI4lMzGrX0eABgNc0ZSpipN5VXuaJJ9VptDu3KdU6Sxy1pIi2PsRxxRvKHFUhIRyVlFTM2Ss6L/m/wCwJ/9+NmPXm3eunPz7XYRtE+xyzl2KqSUBBYcinVJUV4t11cTmeR152oDo3GBNSNjYnDjG3iYJhkABB98aKLQILEfcpeGmvg3vvXNsJTt9XrlPIAMQkVGdL0ZssTTu3f/g/d+tXLNtrvi1i/qBQAi9/jpkz/73o8/+uQ5YL/88OThnbduHDVKFjXNzFYGyUSAL4vqROUrXFtTM8nmPbWeYs6S1Dm39P4rt49urxoAd25Uj1++PL/a9Fp5qjpFCk1OKmLkfQnUIWYqo7lLmS1jkGXgMeIqGr2j8EYJlwiAcAhFskCzmmraddplykgpkafgvfecsxQbwKryoQrOuZyTihIzUwl1zUxKmOG9EymDgM2UCd47JVxdnDtHRVpKxVJKIXhix873sVdF3dZMo0rAoycWAqkTAAAgAElEQVSPn11cOOcvLq5FeOhTt+2aptXi5z45H84EYwPmRXU+opYl4ZDWrJbHughF1rF2ipkdm8hrk87GhzWuK6qqWhYhGqsC57j1DTvyTXvT3rS/gW3kl5tpUbIfaedjNfaM5oxf7MUtpuqUOc7lcY+bqeqYXmkYSzrATDYqxR/Ughsm2nmRFFECOXbOEcC11FE2UsTiy5ZpZDBVneN0Ahik+yrecRdAWadBTGNkOqlhzh00Ip4CXvaOAcuaBMkRA3AM0dzHJEbqgrkYRaD88sVzKRVzIkfHR931drlamVrsoyg5cLVYPDm/GjbXAM7XXe18VLBDt72+3FxqtrNdt7pRr9ebTQ8ADx7e7WJ/td49+vzzbczKYVk3junF5cUQNzUpgMBu/fSsYpU0nLTHV92wi/L+u3efvzgLYAD3T4+fXe66PoWqCb71Ydh2V2kTU5Zh25+enALY7jas+ahdtYsbWdIRdL3e1M1qvdlBc6grAMQaXGga36dM5AYxMxPJHOrgPJc9Xs2g7Mgznd48vbq6qkJw7JeLZc8A0HW7yqossW4WzK723jlOOddV7YhSVgDJxLEzgB1XXsA8xBQtemI1LYYtWVNGDuQAZfJq84ZbdvkiMGdgkKNxzJbodD7PlDho+hstMjPTeWZfkaij+JSZMRMFhopIFDGbzlGmIjkGBkHSbu1D5chUpQouxS5UHoCKQOFIAclRHVPji7Gj5gFsHsDpgvva52whmEo/ivA4yjElUwDOwFFb79L2mlQJWJ0svafYdyct57gFEA3LNrDLQ5/atolRyQcdOkCCm2TIyJxzrJGYXU1Z1VRMtPK1kpb6aM+c404Hg2gTHOlA5lhzmE3MiYJj7zwq0n7Ilk01W/J1UEExq3bOO8CylPrL3WCNR+56J4v/5h/81p//5DmA737/06/92ntXu5QGu9xsfvb0+aYTCotBGQwzD0C0zxIll4ltzPUITEzLC83hHqCmc5A3QWpfBh6nI/ThP1+C6ggYBSK5JCnK7xkokrWzH9cIBc0xX6k7ns7Lo/O7WTG/Ga89AuRlAGIai4fVsFMZ3kH65+cikv82bb/y0s/7OcYPRl/+OR38a3NkPdWl+1AfXMwAlCNBif2zZirSSGUAE4NYhh4MJmY100RQTqZqlDOAFC0AbR2qSrNozqgdFjVurkJd06qtAazaalmFozYcL9yiZhLnyG6vSHciKpU3AJ6tz+JY1cyTq6j6f77z6acfnB9p8yvvvA1gsUpHx9X7d26//979jz/52aB8RxZLv6Zj919++0GbawDLVZV1twzW9Wd2xh5hu4WdrJ6/XN8IEU4AVHC180nx6jqB3NWOfKiHIbvMfdYFM4Cz7cAtTm+vnu9yCI2oP7uS80o1D8UBGWzDxeWzdf700fbWg7sAqKTfaATDAXiACUIEQ9G+l9FXF6X6C0ASJStgAmXRIdsmWiC2RfXoZf6Df/GXAL7/+TZUVb1qpM86od6HBb8ENRiFUDRWCgxuMy6mh+AR3DzsaRRunn+ggIELIH+0aKCikiQnmDnHYAbgA5lojgkwqiomspQN5EM9ybCAJtPfgmc6QstGZKOrK8AkjjPDiGhxdFL5SrOsN+tdglLIUgPYpYIwiKgawZlz7KCanQw524S9m5HqGBgYTJKASESmWSBiWbksRUX8i0Y5wgNkZvyCZm07FDUvVGMpspYAY9QPKegFCDA6qH6eZHGnqXc4M+fvfnEhix1uZ/v/v3ZNml/42opA+5ruOeo7/L3ZwV/sVRsIIKNEdMNy47B0+N3f+SaAe7eOt5vzk5U6y/N2jf2hfLzoHhCd/iHAO8BGCLP0hYvoJMHIHNtYbD3GkVZAe5iWSG8yCKKpkhjlqRFAJKPWC8M4WEbGYnXzLoAh5tWyk6hPLnO7XGw2Sgig3OfUJYqp1N1H1uTcdFfLG1HRQGC89tnoEJRMOTZt020NwH/2G/ebygJLjuoJJkDVEGkekncUVQHEJNfX8da9xXQH9iKfRAe5MEy/mmLpCgpFutoCSNe7Nqg5ZefBFVXVqEpQYCjnAOQc2YeyuI139HD34L9yyCUVIsCRdyXmUVMiX3nYCDJPm/RrfAbbgUhYFQhGgKqpc2wyS8sc4Clli0ceH+o4FExEJts6GyE1NwLdkzoKiY0pm1HXYVKCplzvLz2CveXYgcvd+ni1is69SkcAPrx41NGKFrfUn4SQ19vYS99nrdg5V+eiqU0l3eusmI+VGIInEaDpeVEJANRASjC4KpDTHMdlIcuQhqhhm3S9jUPOjQ/vPrhncq6y6iMDOD66s3D9aXvDUg4sLlPbLNvVjWsnN5uT73/0AwB/+m9+8Ozx5aKmX//l27dPby6WxxWcksS4qz2qwABihiplMcd2qHKqU1QFw7KuzUzETNWDHLNk2cXBVXHhKgDv3ju6fWv17Hz36PnZ5dV2Ufmd48i0SxZlNMZix+SglmEeMCI1KBFs1JQsLmoyTtVZCJFUTRUZowCIq31raoPE9XVWmPdOvHc+MPPIKVGtmZumXq/XE4ZdXF9U9yigK55d5fBXRGbNNKuRK9KfEFERI2eeCaCUEpM3BXvfNhUAVbpabxftMmdJUdKQVajwa4gQKgdAVfY76HjALXj8fh5Ns7nceIimIj5g+7rrUVRXTQ9ePUa4ZWQxO1UxjPLKUlIVzJZ1nj1v2pv2pr1pf7Oan5JjVthBtk/rwWy/sY7RxfgV2SgAT2SzQs7UZtlIg5U4kucgbXQLISLS1/bsGZwkmGfyjpjB5BiEHemY+SzSlVxEQqb3wIxTgA5iaUz6RrNLJIyIRPOYgyq6+pNvjZhW3sXYD9KToyLw1OeIbQzOmBzYERfbT685wnsAR4uFb5rd9W55dLRZb589e9kwNMfVatVl/PDxEwDXXaqam4Pg7r17Scw5MnXq+MXlhSRZtScANt36
etMlOmt93Q/WmYrJ8WKx2W6IQCQAEPyiWUjsrrf63Z89eXj/lho/PevY6qKzNuQivkZX6811d67ko6CmQERN5eAdAL9YpLQbdv2t0+U2drvrSzWslsuYc+oyBQbQ8mrb7QwKdqIguOArQk/MktNR3ZQnEVWbRSu7rQ/+eLXqhq6um9kE8fjoGOAsOQ3RJFtVSVYYinf7JKF4xN5tu5hzDp6aqnXsoeKcA8F7D8CoLyEGYMQOpKP61CHmqCpmxPuD4vT4pyAx51FqBaaqUwRPBp1VsQlw4xAyYhAcTCRlA4IPRVdaVdmUPNIwLJcLMphJqH0/pMpTUymAOniGpZRPFu3Nk5X3brPriVwXO8AkJwB9H2uP5qjabuOgsQ6uizAozH7tV+4BOF1VsdvmYQ1z27VeX++ut1sO9OC0ve77VcMAdjvhnB2oDq7rOyaqSSNyWwVGctU4r03EBzKxNBh5VjgRC0myjpSLWqxqaLfrVbBaMFIkVGzsvCui42QGsq7vmMlMc0rBsTf1kCSxUJIdzId6SDlnhWY1u9rot987/q///t976+0H4UcfA7h5erRqTra7y1eb689evDq7zi60oEagUCrutKrF0VaJPLEjYhsnLM2Ma8JofHWwQJVz8eEJ/vDQRvPx9TWQ7osQZRk6NI+gg9fNax9Pp/7xnQiOQcU6Wy1LzjA9VArCTIp8XThyokhNR9hfFEUe9tIOvz887x2cJv+aPweQ5xB5QkJp5EzhYLFXvPY+xTJ1PG6a6TgLYZyTiYamdo40ZwDMxExJI6VMRFXw3knKysDbd5p3HzwE8OTZZ4vWn66Wx22AmaiuGn9rFRarpqncsi728a72vg1h2YRFHWzYdjlrf1UF7UUYBiAwxFnFLKogjbm/fep+9+//2q//0ju3bp4AkH69zRcruJh3V2G4fQyxvI751mp15yg/e7IFkHgQk5jcLiI1XFUyZETldtFkqi7XHYCjKrzsMSAtasqm26G73m0fP+3YUT/kk7YFYM496obvP+3eeWfx2ZPnP3p8fbOtQ4taadE4AO3KE3i5TI83KPbWjs05csQ6UfqZMB4YyUghKIYs4+ZWl4URwjCoDoP0SsTee0quevxc/tc//MnnlxlAXXkLIas1nrOpTTDNPCYLE5EcFbKlwUxKygYwuHKotxmeHmFKM/OOFUbsJsd7YuJy5KO00xzZcuCRlO3ZAfCBKPgBlkUdJXYsTOSbULXn11cooB6DiBxz5bhy7JkQOyYiktnAxEGIiMmG2FfBN21gf+IH3SXX7wTANkYXHIHAZGZDTjnFDfKIxoyDecS3puiEAKdGxYIWADEbgeGUR3anTXp9r7VR6ZrGrYZARc9aZuTPJuvU8fmp25ufjFN/vGzB/L6AFf6CdeHLPfpiB19b8GY29rju7Pk3s3ruuLC9jkUCNhMzx8M0j2qhZDU59JcnTf4Hv/XVv/cbvwJgc/WyrnoHY+TxQzJmXttk4ENz/18n+REmf6Hy1mN8OMYAk/7kWHBDNl2RClmRmNgVlHC6bi70JSpwMBU+VoiqqI5XxzcBuFUzDGfHd976+uLmEOV/+5//yR/8s3/98HT1X/0nX8t5dGwhUlQ1kArEQMQTIjkFuGMoXMYDz9TX5uTIco7ZAJjkSGSWqoAhGbNKTK5uqT4a1EF3APKwCTKZXE3Zt5FXtU9i2aiFPiOSBGMHoqtNBoDLoV7CvIhEG3bkIwhwTuGVPXkCMORhdRxw521Mz5wOh+VfjYCbqY1RPAHY7Ya47Y/amgwSluWRFZrqCAmXHqY1NS2TqQiIyJRY4RxlPbjytAWXJag8RJGJJ8GOYKTEnHWHkWAwGtPPw5xGmuk8nqdQ0TfTlctUnKFTC7WB4iBAdRNAZ2ah6YttvZNsyZANymTO+VLJ4dwYZhKBHJUtzzlmRyKva3tONTuwkvMU8lRsvjeb4Wq7E/PZ0KdYVYshbtP1unXJc7h9WvYvgatitxv6IQovl414fXH5/IOfPfpnj/7k0WfnALKmb75/6627d46OV0YiecgZYPLOOUdDVgA5ae3YeeTRNGy6XzOqTSRmDOcYShlmqsYEzxylDzAAXtVTWNw9uXu6eHZ29fLl1cV6MyAE8DZbskJ0rYxA4IorNSkMQp3UIcf30lLNpgSe5GIBM8niXQBAzg0yqMJXQU1FREWTJk7Jsa+rCkDdVCF4UZl04VWgxMzgKfZHzkl11J8106xiInGI3pFjN/oUGJmZOZdgSaTyDRwraBhSOeiF0HZDD7iqqlKKIgh1BZhoNqOcDICblqw9DD+r4r62oI42rwSi4uBDSgzHI25uhpxKh/fJiBHwPjhHM4GL0ZspEznnUlb7RfvFm/amvWlv2r+frbh0Gk+7Jo1JPzMz3jsR7s/xMzSpRZ+ZxjV0ZAcRCKxa0ttlRbbDEGp0VyxFAjQhCgTmwiArO5YRKYN1hLZ4fCtzKH6spa6SeCzmGdue0WnAxGHHHrSY0kvlm1JdhTG3rqYWVfrYq0gVqspXpcMxqjKZDMRLptB3XXXcNkQSDMAg0q3XbNYNSUo6XuR6m95yPguurhOAe+8+UD7Fpy/eunXnwe3VJ59dbofsXNhdrhvv6kUL4NnlpZH1wyYhwIJpRkzIXlK+dfP25uI5gO3QN8sjM1qs2s0uqvMG3XVikgqk9uzyelFX3rl+6K6vd81qWQWumwX3vchgngE8uHP/p+dnqevX242v/Wa3uXl648XZq6OTE6gUoC4Ng4gRuRCqIWlgb3DBewOyyjAMAPyiCnXlSPqUNtvNbtctmvr0+ERVN5TLY09JF8tjyYm5oIDqHKeUhhhLrdCiXYrZMPQxo3bjDus4MHO20VKAxyoEFhPPPJ3quASYc46Up0MjDqArmk0UJqDdRtIFDKZWaoXmAyocsZnCDFbCA2YiH1zbLksRimQh023M7Un79r37Krrrrxnw3a6teXV6C8BXHtxf1fXLs7O25q+/9+Du6dHFepuvt1fbXRc3z65eALjcbh/efef9ew+fnl0++vyDdbKXV5sYqambv/ebfxvAf/53vxm36+fPPnv37Qff+/DTP//xhz/+2RPvm9/8tb/96PLy/v23AawvLs/Onh8t2+Mbd7/74Sc/++hRMnnv/TvHiyb1XVtE8VWHLMdHxzGrDkN1dHLdxc8+eWSWb99a3Dg+AdCwaxb+0dPnBnr41sOX5+dxSJs+XV3kRXAAiNHW1WbXt3WtEntVV8GzrxpaHp8WF+zL80uQCFTJlsvWVK3bmaNbt27WTGdXGwDvvHPj7NX6xcXmwyevLjapbk/7SEMfQ12lpCoRhWxY0tujn4PsZ/CUIS4K9DbN9nIsPAz5Sk5lPuFOo2L+1+Yv9xkWzD8vgON8UCmYhY1vgBGv3l+vXIYUMNWkkusqtGHR5ThheyMuPPdm37ERkfxrMIfX4NO5HbAz/8r218MUPPvPFrrQYfcm+Gl+k2nBxARIobgQzPW5JJGIKHaOiv8yYpfAdHfhV60/X6dbx/rrX3urDvzs7OJ40f7mV28AuLpPnrFo+MbCLxtXOaqDb2o/SGo
Ct5UD4Alk5MqpkEFLH1RJOhfgptjbRB1s10vbsmSpff6tr5/E7fPt5efriw8ArMCo+XHMPhxnoj7F41ohrfij67heqwDYbqWtXI65bv227/PAmuHz5tX59vx82zQOwEsd0qDXvQRHID5ZdT96tmvb6uGtmydHbVv2qmBX6zWdX62WzfHJ6u5J/a2/db9FxewVRQqju9oNN06bu9f1J9eJCpLH7Jm0EFVKsTbMZiuI2dsBJcNAAEQsSSqCI8umjuLX2/zPPzr7zr/6ZD1Q1VYAEojVco5c1ZR7wIEcyu5pU6ZBVYcd9mMdM6aCmOcviQjQiYZMPoSsRDRCaQZyRCDzpGZom+r06OjOjUXrXUop64hQK1GKTd8PQxrEVBHUkDVOcE9xZlUVM7ARF/cHIzNiMwYgRlkYMIKrJb7avCRm9vUu0fl6iCAAVbNIMRWaHLjcUlNjMvN7vpyhkDpHT2/zoRJQMjdToUuySqcbP3VvDmZg4xZTul8q3keA6JAMWO6ZYwaIlLPlw8VovtRry8O/7Tz+OS+YV4aJfnkQAE2953nZo7lMe4+cvH65cbXU6dpj4WA5FMMYhuGypvzOneYf/8ffCBQBwNYNRxIqKgcj9Du2scd6EBfu+Zbj6kgHuJh9qS8FcCg4V1Fome8dMwqt1hWjibHv5gEFlYiycJfcUXskMfz3/8PvA/iHv/vtb371zv/1Jx/9kz/5vW+8d/fps4tkeScpwgHsFhWAnDfBDN5Nyz7NhsL7W74PMEceIoBura5u2AcAgy3uvP8OU8aQKldxVeswEPyP/tUHz5+8+vYvHQMgsaAZk6eazTeGJvSrvIHNt2i6gwau2rYdAFCbXM3wEQpjpnBUlhZnjpULSii6BrWOwwFiYvsbrn/l2HNVKA9DsgK4eLXenm1TEyzqYE+owJFcFD3m0wEdHa+qG44qTyKlqAJqZqqv4fOjDft0YvATfU8BEDvAYMkMrj4tVyiL2eRxBSiYZAIdbaxvKGE+dpiGI6wI/4xfLx3FNCQ133gAoW53PfUxV85fDH0WA7mKA0B9VCEBkDWZKNhKApIJROZc0Q7Q+bFMocmYVGeVLkVil4QBXHe8lZzMYE5Da5Za5xPrSePrrJUHAKnN3O2ULkITbi7abcx/8p2/+PTTx69eXlcBd+4cAfjqO1+7deMOaYyxcw7mNJQNm0i0pJURnPMMEWF63WVohu4AdhaTqMKxOR5BXga3GnLZd0wqr61mD6tvtfdvn3708aPrbfa77B33kgH0AwSOfUVcKoscGxfAnGw0g5KRW5ABcnAghqmYsTlRASAG551TIiKRFLzj7A1mCpEkzmN0xMbQ95Mx2LgGHxJrpAQqAJEzqKqYSpbsVHSqHIeoMziDmuWciqOrB6nZqGiUUvDeYDGl/4+9N421LTvOw76qWmsPZ7rDG/v19LqbPZBsNsnmoFC0KJuWwtCxZclWFCkS4kx/AicRgiQwEsBGEkCZHCAKkNiwA8eSfihCIP+Q41jWYMmUBEoUKZLNoZs9D6/feO+74xn23mutqvxYe597XodJgAABQuDVj/vuu+fcu/fZa6r66quv2rbzUhFRjGnDmdtI5AyPdnjGm2HqWXlPLsMaRsB4SAiZAqbEvE5PrTddWv+5vDPmGgUzEGnuCnXPfnnf7tt9u2/fM9azI6kvv8re6lCKxby5t53tpzAC9ZWSZrSRlexfHzDKtUeONTDY79WZlmGWXQ81I8v0KFgCTNQsipomzdUpfZAGYIgWiECGdVqV1zjp2W2ut+a+rS2IScgZDWEdANV1xlkTHEo4eBIm13/UFNuQxPm2S6ukdTUpfZmaZdAEwEN9WQWiNnT1qN7ys7vXr0M1xLT37q3tsQPwvkceePFa02p68+3rU9o1UvKqna+ErOc9ojCeFOWpdeaLpTVlUVnXLI6PtyeemYQZQNeFpuvmp3MmJZLTZavdqm1DIa6uBIBCl023XIZRWT792M6bt287JykZAYWXVQwAOjURboMdn5wUk6ouq5hs72Ax27k4rnnv5AQAk5aeDakNjSr5ouhCTBYtKBOdNgsAk2ldjerV/GgV4JbLQqQsitsHB56dLxhASFFcQcxksW1WINSVSzG1XRuDxtgBMD4WX4QYCWDmlHTVdII4qqsEzWysPExRLVocqDi0Pt4HwpBZLs7oI+c8P9aT8B5bJy8ta/oM1TrZ97eh+7H3AuNIICLXh6hIMSRLhUAoHt69JUQhzGFWsmgT57dOAbx4/U1WpBC3p/Xi3ddno8pgtw7vOqqm41FVKYBLvpL5aj++fXByUPqqs3Bxa2u1bPcOF1/4/X8K4M5bXysN47F//dVXD46aGunybPTGOycvvvBtIVy/swQwPz4K3fKQMT6/Ojk8MUPXRB/DKMjh4Ql5AQCFBZ3U26tEx6fH29NxUfljiV2bbLFqLQJIZj7s8iLC+61yRlOe0/HiZM8ZYkoAilJm06nzxWwyOj6h09NVUq3Utkfb5x64vOgigMODo3bVeMdbo0rBxqma8t7d+Usvvnp+fPWhq48C+KOvvPiNN9+Yd9qCo5XNqmNyzhXtsk3GPS5guQFrbmYdzOS9gBwNEyCv6/VrtKnDYIzvMvTvnQj3vGfYL/q4ex0f9z9ZsyvvYU8aknWaEmDMpDF6kelkvDWbvXtzb8iG2D270lnZ4/8JR7gHNgDu9Ss32ELf3c4c2/f+7Lv8Tk2p9577eqIhLw+wRawLnWxIJhG6ReeYC1YAlUPlUXnnRYTx1JOPnq5WN+7cLQt+3wMXAMQYjubL3fHo0sV6VMisdrNKtkbFuDxf9P1N8cBzj3ehS7FLpswgkCmpoYQXt268q2qJCCzJCbeNpZSkspQii+MzFbU0qlkthSTjqjrej6eduZHokgCsxOKJrZI+PBOzgphW4XDsxhJTXLYWcj2uBMNyEU/3liNPXcTlS6PXb+9HsScvXhpXIwDgMJLOVxMDff2Ntx9/7NJxd3pxVo3qBBzDIoB5q1LK9tSxx+6l8c42bt+9TWytUmaX7BZYKk92z4+3y/boADAGyVqkygCADUykm3MnsxOz5qY4AAWnLhHERoUPSV565+gPvv3md96OgYlLR1kpMkVlq0rftJ2uOWZkYpppa2za40SAsAqZIwgjV5+6ogfmmEmERDw7yiDD2/shQI18hizUON9wzjSmFFMyqIMyUseWABTimkAFGM5ZipGY/Tig0M6YVsPUtZRSSlEJSSiKiLDlPrU9Ed4GnlFsu66sCmHplikoIFV+MkmtcD4/ueylZO1AJupsQ+0u4x40gHFRlTmtFTP7CmA1PTtrziiEw+oiykwxqJ6xtY3IYWgwnGuL0ac+QTYodd67/IeDa71cN+NcnJ10/w/23d5EuGcDOvtq/QZntHmhzb9mZ7dh4DO1x/4rZaB4y9G20L/0mQ9VPqI5ATB2IYVuOq4G+cHB6bONbW7zdD775NZX0PRXNhp2+7UrxxtyjRmeW8OpuUx7aKUta6lKG7ir/Y2wGehm036gLOcHJwCmo4r97PZ8+c1XbnDXPvTwpX2N5iwALMJIAF
wh1K1JojQM4eZ2O0jInVFuCUDhFlJOpWAADctvf/X1rZ3pg+fPa7398qvLaT1+aHv0a1+/deetmx9/+BMAaiu8yb3TYQAi13zDHvi4d2CdM6LTkPVeZDwubTxKCdKhRZe3V4tmXXIaAISuKbsE7A63esZSBPCeYu31pQygLhgRFwUXHgA7VxZ+e1JbVMnc7R4MHFxwA4DT1aKYFeBKoZJVZQ1KRvFsCmANwRgBpBpIhN3wSS0088XR4bxt4rhgBgmzEHtxwkz986fkQMhzhIiNoHkJWnIbH8Y2voJsVXo/Zd1PLQC10AWNoQ3tsvDl9mTCQo5rjXa8akwKAKOq3tPTfmvq5ektB0e0ToeS0RpSyoXbkMViaebnnQCYR2U/iho7Iw9fJGvTQkbV1jbffevdg9unAKjkcdm1UCnk22+9fbx/eHLYzGbug++/sHvu8vnJDIDTeHJ6tyh83hwde7KUohKRZ85qTqraJet1VM/85zVxAyBqY2DyhRczC6nXxGIhMl/m8gjSmNIKykIT0NS6c09d2T9prt89vnV4fLw0AC1zp9ylcJqiWeaOCudFkQuyAYFTDgYzJMuVSSACC1zsJ4SpJiTyzGzqkiRkQQZWpaZdAjg61KquQGBmJgY4H6RmptaLLYtwT+0feDaZuhtSotjDkZbMzBxLVuNgx11MqhZDzPv43t293YuXRSR0sfBVVY5jiF3onBdbL5qNBbJeLXkmr5MwQ9za10EYpX6bU4qZCZF/hfNmmSdnZlPSWrKhZ1eYmiLLd5shxkQZq/+/lxe+b/ftvt23/1+a610ey7o7MLVkiZkZrNprOuXDNe+U1IcfRrA+hloTzpIRE5iEoLlCCQYzMslKakwUs0glwRLy7qsw0+SpN4gAACAASURBVOSIshA4iTHW1zJmFYZFaS3l6l21OHFFWbpFM5+M6sWii4mRBXyMcxlFQlfQBCnnj43IWCzXjqhylsTK/M3s+xLnovOYBSgDKNdxOCIRR05YPKdWiIMGmE9SOkoAvGOStBJl0q1zF6Bwk+PaWkEqS56HBsDenXfCwXLboSj19RtvbRVjK8ourLgsj086644AFNVovHtuvndUSOk4pLbZmk2rSSEUQtu50Q6A3S1ZzE8RlNg0dV2xCimuulTVrFYCmJZVq7G1NCq9ErZ2zof2wOLcirILdXN0AmDFXIzK1bKpxjVSWrZdM19OSzm4+Q6xaFIAblzPxvXh6TGiFqOZQdrmBCIWEzvnuAJQVnVoOg2h9Dapi5Di0dGBL4pyMsozqhQSQRcVbH3RDnFReUM6ak97CkZKYXnXc4lkMipc5bVZjLd2UnNCru7aFQDuVcENhqZpyKgvIQSJc5QSAAWSKlJiFqYsbgnuhezypOIQIoswM9RyUU2MyTGaph1VWUbIupiiKVK8fGG6Ojz0HCyoi+E0LBcrAKghBJNCHex2sCJhd4LDJVQx9lAhAKWaE3QJO+NOV3r97sn5i9PlPLXL+V1ezGoCcOXCFHb8zn4XOrXYzee26JIIlcEujUYAPvL+RxdleurSg+/u77VvXL90/vwzl568/bd+rRjNHriyLRQBvO8DF96+cXt+1H7rlXdcTVVZNMv28QcvrY6OLm8XFy6fB1BOtuL8eH50UFaTvZW9/o3XreRumQh4/MELlTMACc2rr908XdqFy+WffPFL44oas8M5irpurAWgId65ea0YF3f37izmaVpABGGuJ3dujmz+1p0FgLhKUvJO5djm0OoUbUH68U8+C5y8/MZrbVcBePckHLWa2BlqJl+WmpJ2oQtGDLWcM89NbDK9ghzlVjFMwmeskB5GU4NzNqSI+yi/z1sAayexr9LJ4T6lM+fxLATvo8qN6HIjyKMhNd1nagAy0jUxhY1GRR1TE7oVpXD58gOXLl28du22xhYZd9PMRVCYEQsxwyzGqGbOOdfrKBlMmcm0TBSI4MibadBWNfksnJ+EhaJ2ZBAnWb9foVGTgAB4JgdO2hHgXNGG1hXeknoWmK26BgCgBQtUzZSIHAsBIQTAcrk5AG9OLVWizuHc9qhtmibqAw9sP//cZyScKloAqWuffuShk+N5cKtRF7hIzLXGrXHld6YVgEnlqsKZ2W5dFF4KL15gmkwDWa/E6l1aISUmVVYFC4uTFBOJaB8F5E4gYqpk5hGpiqURYCxccDLOnj97cExGnApnTDYZT5wczI9WigQgdnS6bJqG5vt7DcKzV8fjK9ur2LHg+lGTkgF4aDJSTduXqmj19nh0vIp+Nn3y6uW9/dMRVtE6AG0Xj0PX7R+OCmdqvogicrRsDpbLiqkqDAA7lkIuXvHnz5erxf5TT0494KZcqJgrAQRxPoleeaKihl/b6xiVg2czYjYresFcI7UMguSz1cgEIEcw6ICPR2OYgi11+s1X99+6FjvFdk3LqBlsVhhBupC4ZDFJMbChLEovNKoBYHdrtLs1Sstl0g6IDHhxjh2MAA2p57lTTtwhkkhV1HVZz7bTS+/sHyyDkQAgSwg51nbQBJKTeWqXh7OaZrOxsANwsgrz02UXKECVqIvStAfJjKXQnGI0kJkjwDkCJSCljAjds1iNJItWWlk2gGnm3wDE1hd0c9AOYGRYKlNgcm8r5bM/NTDc8sKPmmDmBka9EkBiGCCOvMVkmRjOsBeIaXCQwNQT9jX7NeskLdEa8M0QhXccQlSs9TBBYDOLKXhXrGWuvXNmFmLuSBZZZA1hniGVeQ9Yf6IzNo0ZLBo74szly0MZU/BOnLjELsbADAbFlIiYhfNCGDQx+kh4KEkmT0ZaAFjGlryoQSy51I2IKmp//M986LnHLpa6MmoAlJVLyUJQJzm3TURgIRBnnAAAHCwoQxM7BPDYwmJBZVnAo6SwWgEQWHSlpUoi4BpW49zAypL5COBUA1ICqRmrRVByriAxx8Ga0I9CX7jpQAVcgBaUIN7tSBlTymXLyy7WwlvT6cwzjaeL6MhLijTzfh7HvdBg10ZLjhIoT4DMhRzEx/NHGshM2U3OH1StVMQABrBVT9794stvA7uff/6NW/hv/5tf+Pij25/59KerCA/uqhqANXfYb4PU1AAiFmLpiz9sXbE0oK9YV7Kj7ayqY7QKQCFzqyaH9Tkr5PK4EplJWbUn+4ujw3/4i7/3Fz96HkDpk5bnIcjZXBbq5ZLNiO7BIoF7ulCmetzGlcRQuBLA8YoqNOaZqATRqTW+c1xIWizr8S6yCx4woyXYQRxZzEPDahEmQ4KqP2ptjfQarDRdkfMaFUCi0YI1Lm7vViPQvF1MnFNzzXxhiLZICqAoPZS6oBcuz8pxnTSKCDSBaMj+98mdfHz0z0+p0w40ZucAxG4FVFvjcVXW25NLXQyFEy/jUeFv3z3ObSFV3yk9k1MQmYJNGWBXakyefU/x6xqrKgDiXF2REluiqpwdLsIqRzmubFJn4ML5qLFlc8DBwcHDWyOAqCoBdKE7Xd09PV0cHB00y7Q9rZ//0ONXLpyvqjq2B9aeAOgMddnHUDCyZEBmZK/59DAGgyxzMDaeA7QHwojgyQEGiwS4XPQMBiQiK
esw7UR6CQo1asNqtV36S1cf7B66+vrePoCXr107XfK5atspN10ToxJEiRKSEMTyGadExsIgB6WoxlkIC5ypHo4EMBPrzJSto0gmpOILF2LIIuwh6KK9K1qcO7fFLm+d6745EJc1f5FiJoRSu2qhZqpgAbkIwtC5jIm6LokTIqSkpGwMEpcPYee8EGKIq/kSqiF2xsxeUlIGe8cA2qbtN3waPMa+asdUNR8sfSdADERx1VwouKnmkT3WTqPkgy6BorBjl0N0AMwmIFhSTSkxExGt+RibuaX7dt/u2337XjE3kIpsaAtxlrcnWrs+95LNc6bW+haQm4oZamBbEw56N22d2s69umkoOMnfMIZWiEM+SWGcyChXWagjaRCNcrUgDApLMKuLQgoJjcAomZnmvm0CgLQYtvY+T7VO7W4U59KQoB8Ym9njIrJ1MZdSrlBLqUtI+eoxRo2xdADABOfKWjohbpvlYr5smmXsoBaffd8jD2xNAGxtTW7vvRgNVTFLnfqyDDHOVyvv3GhUiysBzJtOyNhSOZr6ha7ahZTl9myqYbGcn+aB2Tl/CYL9vSUxq+myaWLU8zvb5DQDtW3SZOa972JadCtjL6U/ODzduTAyp0VpAA6Ob4McJ9KugSqZdpoiMReFL6qd8RjA6cFe23BVjtquIeHCeVXzkMi5paUBOJ0viZWJYThZLLem07Iai3NMgObQPRAHX44cCsDatiWi1apt2paIJpMaAPk6aIyJjMkVxXg0gueiKObLNTkXLOKIxXsBk7iu7QYdHrVhdgpzX6xN0AGWIkfro5mZWWhgsuS+71lIzfmy6JEPVTUTMmO9cf1OwTopUNd0fJqmhOeeugjgyoMPvnvjxp139hrGX/7s99++cfvmm+/80F/82LgsdmYlJQbwpRe+9WP/4qfOPbi7da6oqup3fusr7//g1ce202uv7LvJKBYRwO6F8c6l3f2jZXNix8dN0fD//htfefqhh3bGBbAA8NhzDz31uQ+PdjxWByev3WWO6bFPvvDSXts0f+1v/FiiNwA88pEPNId8+7X4Mz/1c8cH8xDj+Rr/+r/xk3V52ixefur5RwDMHnvf/KWXX/q9F5958mMvfOvuf/Sf/cJW7f7qf/1zv/w//Hc/+x/+e+eunACYafXzf/uf/uY/+4O/94v/8esvv/jwlbgYP/GNL9/9m3/9bwUlAE9+6Nm7t9853L/rgyHZP/d9T/zkj3z+nTvHN24cPfr4I3/jv/rbAJ66UH3g+U+88NLX0GAZVyXTqBj90PMfP739wmtv33n3aAVgfnJixGpOwWaWcs/xPhcch92l34RypXwO+M/irY3NhNYbxmC0uU0NL9L6H9v47/8r6zeJe3+WYCfNsnBw4h955JGL53fefPedw6OjhIKH/l1EwqyqSVU1JRbpJRcH9zLGYKrEEEDIYoqLMGdxhavEWYgdgIIVhlIIZqYRSQEqfUGeu7YBULOkbslRnXDXLguQxE4ImtSAiWMAQkSpYeDcthtVxfHxYlzVz77vcS968+5+7pH+sec+kNpm//C01fDMQzsnp80btw8unpu9/7I7vNXUOxWA+dGybG9viZ6b2eWtcuzLsvLCPKqLsnAAUteFkEIXIoJHcuZcrnnPFYjDYDo2mHFurcnGpEYWug59bTgIJDASgIjZGLQWDqa1ahqBQIVDjOpYWLG/f/LCywdPX5lsTwoAulNVEy2kshTfvnF3Npt6nBgSU/vAtjucA4CrfEocLcYY944X3ap8aGc8K7SutImrUVUCmE7FOe+qonT8cKimk/gDH9kOKRQVR43ipgCcgJyYhpg65jQ67+E9MQMuAwqMJsWiqlPhYoPOieM8D2jg7AA5fh/E4tZS/33FXxsTgKRUiI8W7h6FsuJ//vsec2UxT80q2P7h8mBhAJz4kBBi8uZMnIaQYiQzLpk7AVBErWJkx8kkqZolglqKPcCfAgCBJLWQgloC8+k8hHgcebxqoxnnlm6UlMSlGENMnozJjI3B886aw0UyAxCCmpQhpqQAczIXmAwEV1AM9yyyfpUbEam5YW2frd28lNdqAvZeQkpyzJYVOIEsGZn9jXjWPoWGZz2U/J9tPr3R4BmQrRUy+ytkWVUYEbFZLtQ2ApjIEROTalzvWLkyY53mMDViFnFE0udeQDnuF5YuNtl1ibEVcd4VyOWmfdH4hv9yRvnDxqdfUwCRyIIGQm7om7dQl8w0BKOoKcG5zQJA04QhYL53OABDZ4m5AOB96U2TdVAlROq6H/zE1X/hk0+xLjSFackA2rYtCnIul9VnMY3c4NhyZhJA0iReAAizkcWgZT1WcdaxtUpUAzDvJCUmUq9JTCBGKYvQZYzbOc/EA97bzxliyiDCgKlxPwYZaMujRM7qMonFmgDsTHZPukrCCmZTkS5YaM1tC3nfLVeRDYCMWLoVyA/kTQZoQxm952few5IiAHAOKMueO8mcCrayuBViU0hROS3EjapWSAelC3ISolb9noCh6mh9lY0BonteJa9qlFwBoJme/90X7v7j3/jdH/j0E5/60c/9nV/8zcXtO89/8MJf+gufPVi1dxcJwAMzkwGhy1NoECTYuPuzD3M2vSR1tSPAgysAwvWsnPGlR91D7+OjO9MU3M6V5Jw/Opjv3QHw9d//xicfreBl4LCeRQD03qvQ+lYA5Nqo9T1lFrkIO+9iCz+CafKMoFTMytySp4jVklZmJCJgznwHs8SKfgjuPcPXzaUdk4nkhJCqhZhSjAA0KhRd13FRGETEmq4BUJVuVLvEYsYp9fGGMIGRrJdHF2HvvBU9lJtUiasmrlaqrWYUNIGETWAsUWpHc+Di+S2uptPdcr5/BGB+fPTu3u2m1Xrkn3niytVL2+PSa2x1tRqUGPPI5MwHLOc3c/b0ng/b+1HrxnnDqUNDbPVe/2Y99Gd8T2zslwQA3nMXV/OmKYr68QdGAKaTR9++c3p773SWdKvePmlx0M0ZUvEoGjUUAPjsBCUiYSJSUdNkmhKRp2KY2lnPSz0XBCKWFk0IrWcPZPWkyMRc2OHhQVEUznvnPeDMLKYUY5b4ZJh1MXZtm0JKMcQYvXMEq6uiSQsAlXcwS8nYiRn5quzayMxQTSkCmE1n4/G46UJKKaZI7BSa/7hG7TngdOam5idkudvb+pS696n2gpXWM4nXCfGMVDoeqrfX3o4NelScj5szEJNgum5j8x7e/X27b/ftvn0v2NrX76v21l4dACa2M2elP5OwzvHovRtfH0fk1NMmWkC07iVrfQRJPQhBMvRCzHR5AGbKMM0/M1MokxgiE1vW3kJ0zjsxImYR7yQlU9XMBOnhSBKj1EOMtJZHOksTbnwZYuMBv8xpvzTUjjJAZHVZcURMnSaQkBiTKABFCgGAW61W1XihYaUp7kzLhy/tXtmRB3fOAdjd2Tm/c53o9vb2ztTXh0d7R/NTEZSF355ui3gAr9+4szsd7d09nB/ftWS1d2wphBUhukJyLNSFAKJgybGAaBVjimZS1mU/Ck3XOefqqjg8Xsyb1dZ27f1s0ZzUyxV5BjkAXZeE
WZkSJCR1Zc2+iDFWZcnOeWEAoeuYUNfTxrI6XIJlJWVW7Rmny6bZmo1INYJUSYHt8dg5F7qu7SKAECMxT6fQKHVdmYFZQtOCuCrLoqwABOOynlAbgSX6Y5VMk4KIIJKFCykl1RgUiqhDw8012jxMR8ovmpklVSJiozXeI+Ysh415moAAYhJNqWCfK8dVIwt1becFly7UV89NJoX/+CefvzlPD+3WP/4TPwhgdG577+XX/sv/4n9++dXr/9qf//6TLn7xt37nL/zpD144d+6g7SwcA3j6gebKQ93Tn95dzN+xqvqhz21dff7S/OtffubJ2l0cz569AKCztihGszgmqis/O/jD13bi4489/MSdm3u7WxMA/+SFb40fjDuPlKzzYjxb3X7nATxQU6CR23lwvjw1AMf731wexmrr0hNPXPzq7ZMC8dLFcm91/bOfvnrz1dXi4OsAvL+7PL5Vj44rdyfGO3XtL185/4Vf/eVX39rfn1/72IUxgLdvLW9cu/2zP/vTj3xqev6BIrYdxo5XDwghC6i24SQdHrYLfejhmtru4d1q/+itz3/+x/+3L/zxP/jN383Z5SuXH/z4cx9oDr/TtPTyW/sTyM62Ozfy105Pn7g8e/XWHMDNW0Enk2g+KUxT0uzC8yZySCBmJjALMzMRZ726tdLRe+HA99Yw/l+BjkPZ4QZcORRe31PcYu/5LQz3Zhiy7n0dc35BRAAmCyDb2tqKMR7c2WfvO7XUN3VRpr7dpjCZwQHOSV+2Y5nF4BKrqhaOUoqkqfLOiWPTpAExACirsu1axxzCKiWtnRcD0jKojgAANWxS2WxSVwV5X+3MpmQ1W3fncH88rj/xgSfy53/93Vuq3Z//zPMwevXtmzD7vmcfFQqvvHmDKQFQN09FfOrSCIQYw66nJ85f2N4avXlr70++8fZf+YkfBHDlsa1mfnc6nY1dCFXpU3DeWVLiAA0A2EIpVlQWU2ImoUSQszxU/1yj5OHNch0EQI0tUcq6rRiwJJGhIpP75rgZguShcJKJFm0ohEBISNNpcfF88eiVSdYedSOmRDFq06XtmUy3XNIgPnHH08ofL1sAnS18CUeYCZXFeFxX0ymV/vT5D0/PbXkpamTSCYPZCZml0DSn1dSzipSlA1a6AkDCUHDhvHoYyBdR4SBaVa4sAKjnaOOV3+W4X7qJ94UTYebUFzbmc1ap7wNnBCI22pCP9JL1uXRUltbRbCxBceX86Md/8NkvfPuVN24f7mzVCQ2Ak5WlGAvxoeuoMCKIFyJq23Z/aQAWJ6u9Ml598mE1xGQhphRiCpZiMtUQEgBNKSaoIRq6hJAQE7mRni478tUgm2COnXnVEEGkuVCWuUu26nqqq4G014ZlkqFR9ln3D2ADK8TQNyYTJzcLRdGXreWKNlsv7nXk3dMYyXotF8osUaL3YG3rv2kbLZuGvCht+AZr4ceznjMDE9vBKSVFyvwyyk6JcSS+ZyfpTy0ASDGBWEmJKWYEEOTgOHFkI3h2GJTwKKXc07kH2Ohs08I6hTvsgRs4JUBAyQJTmMJSBu/IsSY1S21UGCyR5UOWesgmD8r6rmkdYBPMxFIu8EfQzrNVDB/Scw9P/9XPftBrI6mpnJpGAIWDL33XRTajfvtjkuGOAQCOJQDGEtpQF+LUhWAOrNJAPbqQh47LknyxWpzU1kFGMDXNnE0PoPQVkWRV8R5PXpcz07o3tMAIQwWoQYg9szfhUTPXlQH46guv/7kffnBCOIh2sL8/np2bkIgljYkBiAdgUihH388BHoQjs0zQANVs2iAMp4irZPPUi11aa86xt6rkAqZNFy2FEKOG3r0pREwkL/t+oM9q+zdatmDjPCMC4BwpuRUbgBpuFVbtUYuAUF14d79ZvPjOg1ObLzl2Kq4C4GrLVaM5PTZcKNOs7tElGXhbgy4es4WOnU/MAA5CqiI17x5/5ct/9MAzzz76xJXf+51vXazKqw/vPPTwhwC89taXP/7YDlMAEclAWyYaarLOHPJ1i7d86bzmzgpvGY6QRMSX1fln0qqLauaLeockLGm5BHD7OE1ryf4DKOsrqJEaIUtY9BN+iFeA4XhnEKFwmb9mKcYuhJRCSK1CQwpQcYwY21XoAIgj75iJ1QRmvaA51BCdlEOAwSLCfSACNiLvO0OjlnIBNcixs0RqNuK60bsV0Ueefu7gzrvfee3t16/fABCbZqsuH31499EHdmfTMqUUYluSVSWW6Z7mQwNddg0Wrvv79eGbDeth8JMy7rsBPW84VBvIOoRp46c5UdQj5UrOudSFbtnMvasAXJiUs+r81Us733nj3cOTmxOpR9PZURvm6YS49FYDYHCyTjWSQoiFKOePOCG3smnTksGei2iqUDWNsamKukttsFCxByBkqiBFp8GihhRcLMY8qkd1WZZlVQFo21VZFKumOTo6KkofmdQ0pDCdTNWUyQNo2lCVZUKykJquLdSEvA4hMYB6NC6q6nCxcN5RFnvW3hXUlNtcbwKRth6EXvbm7OGtE1NEa2J7n2jcCJhzyoaIjPrGiLmhZK9YojDrj7/MzEBmityTDblv9+2+3bfvIXMb51JfnEOggWC24eJubLbUH6738MI3w8wsyTS4SmRYtwwbEjiG3FGs90eIBn+jN6F+w88chsyiSGgBOLKqdCIppYjooYltOO1poMSvpS3P4pR+K1/LYfZUkwGh7GNBguVymNxMk4wVzJR5XKrJdA2DMgBm1ZSKskihPT7e15Bg2oU0rqqTxdF2eQzgmScejUmD4nS53D5XJNKYNClg4Xixl7XA90/m49HEl+X+3sK5USI7PT0RqgtPmlQcAzi+e9c7cSBTyx1fyrpSVyybZZXZkdGWsYvgBFbj01XXRPNOVsu5G9f5QqPCdTGVJbmqbmJkZiFu2m5xOieiw/1DAJ0qUyS0ISRxXde1jpHgg7ZQGyJJGo8nq8VJm/jcpIgaT1cLYSdEIg7AaDzqQrtoVojOOVazkJIBMUTn3Hy+BCDFaKseO4QjO+qWy1NxZKnzneUS3exaEalGqCiRWbJkYGIiFl5PuKzx1YevGZoGsaS1ODQxsXBKybKKEHovXE3brsnTNKVE7FJS0nRu+8JP/Ms/+fSVrZ3x6B//sy+9/e47t978FoDDP75269pdW8x3vH3rG1/RZjErTvff/rp2D/7Sr37pU594EMBHnr54fOvG8deWbpsCU7pzhJsT1cZbgcVJOuwAtOG0TeK2tjpyfoFXfv+P05G+fHhyvIpf2jsA8P2fvBju3pp99FmYS0c3xo/stIfLa++89ZnP/dmjo4PrX38VAFXHMiofetZ/+CMP/e7vvTgW99xzH769/9ZKt65+9Ik7L70C4HT/+OLD56cFsLr96KXTv/bvfuKRZz/1n/71/9EDujxZ7nwawDd+7bde/cbLP/f3/+39u6/u1ItI59u3bnzjt99Khi0nAL711Vc/+OjWf/Lv/+gHPvnMi1/8wh/94Td/5X/5rZV75Fd+6Vdeeftg6gDAFfPvfOnX/9Qnn2iPt7/5zX+ys+3Pjau3r33noZ0x/Oj
C7nkAXdwnlZAsabbExEMF9BnDpEckMzuSzrahIV3R9724VznrjBO0+d/13vUeKLJ/xxpa3PjRWQS2+V7avNA9Hp8zhpCE0DWrVbM8OjxMyQrvRfpwjgCGSb4lVTNlNmY2s5TaPMm9E0dIyaybeyEStaScSECsqjEA0NhWTJ7TxNPWtiMokT758MWnPvChrdoD+PpLr169tPXDH3/m7sHcEpVlHZc379w9WK1K56isTwCMquLpC1tqMa1ujCr/A89OxlVhelQU9tlPbntHAIzLyrEZhZQq5+KiOThebm/PPvrMzre/fX139wIAZ7dXR8sHRyNKikIdEZNqSr0kEmDodacKnxepgjYPiR5Wcm6IffNOr0ZQYRYZFDvUiLRnXWt/ktjwLwYUgAjTWpJZjJRg5lCP3dtHiyzsUBwtvbAZ1zUefaRoumMpIjETFh97duvDwQMISmVl4qORtl1XOTOdx3g4na0iUQwtgBDZFeJSY1XhSxExLollBvNw9WRcA6CiiquGxZOv56fNfNktQzjcO712cLi3dwLg7uHJwUl3tIirOV/YHnvnmfu+a9ZTyGCWQKDcSwBn6oX5kM6F2LORNE3Hjk9WsSyKLmpZFXsHq65F6arZuASwDCuO6rxLptpFMgw1s8SeAMBRcvTmS+8YAUJGZAwikAixZAYKuUKKUkgsxti1QRWel51CvPNFz+/TZJTx0x6zT0asEjV1secse1/EpBCs5ffUAEtmKpvVERtLjs44jGevbkAX0q9uGngjA0rIUmCgm6xxRtsAwtd7x7CrnPU9u2c7yLQVTSAGZy26jNv0jYASkkG1V7zNCpXGsDPXqG/YN1wFRiyW26+t9xVLJKQUUyAiZrW+x8c6V7pWKFnf2FnR7j0g2BpDApBVOwWWYqehBcAi0ESwUTHK6aCk2sMROKPd3bsp9hQ2L9wt5gCKYiR+muIhp3bC6a/8uR+4NJPFyXFdqqOoMQIoKgldVEVRFZZS7lwijkFClhvtwsRHqBMqXSIY1Gk9S4wQMap2YgkAqSxC5MqXbaIxz8BLIM/MDGlB2DFYyNHm7p5HggiS2XCOhselIGEhdsTiWGbT+q/+Wz8K4M7pyUk6eehS8VM/8rmtLXv95Rs3UtvFqvK8ENFoADygJkMjdQy1PTg7cGfFigAAIABJREFUWe6ZxbZ+wVKsx6PRzjkALYuCU3TnJzuLrgLMeXFeCu/JetBHE4Vkw81juMpwiffMU9qYrEHZiy8IwK5ajLQK4ahdjsYVbU1NUlFYqMct6xIMIIJSp1Majr5ehm497mdLjWgNSQKACiGhTeYqA6AFilrCcvmVP3j5idEjk0ce/Zs//0tXnf9LP/Nn/pWf/hEAYdm1WtYIwxnbo8bWdz8/Y2j2Z/IZymq6kQvM2k7EFFRuLdV4/OjTT736yluTunTV7tb2EsC3X37hE1emYFIFI1/BJC9IXV/DsF6k+Q9nVNn6lu6OmWBdDItVQ90JWM1SimoWuxhGZcbCWC0559UEak4cMQdonnrrc47X9MtcEk3WpBiiqvMAjJ2C1DQhdbETp2Nxv/NHX33t1Zf37uw/eHkHwKNPPHJxazQq4MWiNgwVT0IcFLzhmfTPB30UN0Q9GEggwDCGaR0XURbgP3Npej6Kne0t6+82I0CzPpIiAoOdVF5cF1IICiCkrvD+0sTNPvD40Wnz1o29m3f3z1XTCWZHq7ZJJwAinBA7dg7OTDsNCcnECir7gjZkpDGLX2ghtRSJjJw6hUXqgJxEMQnsvAdZ1NC1XRNW7sQ78Vnlhh2PR6OY4mKxqMtKhMuyjF1oQ+udL6saQGiFWFjIle7f/Kmf/uJXvvTKy6+xCIu03QqAggzknHeFL8oiI9sxpJAUNsgN61lyPK8j6gu5bB0qD6xfQFVz5Gu9csD6l1XBliCEnhQ/6EbC1m3W8oKkIRgfLm2bI3bf7tt9u2/fQ+b6PH9mO/bZdxvOg7Vt+FxnvLReqmPtiq1fP8sV20YSv//nDOC89xJn3zORExHnjJAsIREHU4tZqMmLdyIE7WIMGmKMCumP356oBxjZ0KKhdzQGJFStP5N7pJP6epChxoGsV+ZS9BKZZIY2dCJMwinFtgukwYkAEFLH1nVLYbKQmKiuR3f2TmtfjbkKsQVw9+DwrXduMSGEdrWIlkiKEqpBNYVEagBm4xLOnz8/Ozi8vkB04oSwWJyi9nVRNM0CgEZooEJk0XZOiMXPJqOuDc1q6ZkBpBQibLEKBAlR5+2plF6YSzeqysne6QqAK+FYuiY1pyfath1BQKlLalQ6SRoBlJX37EzNmVFUNa2rkfpxc9wC8M7nMUpGTUgGcsVIbblcLeuqLqs6OzS1K5tOmqZxzKu2AUhVq6pu2iUz53pMX5KCiEhIm059DIUvV6tFPRljKFxyzktMRgwWmIYUaVBIH7AqwLLHzkO+t/9Hh0EnOwtrqe9UoEQML82qzZ+ImMm4rkZdO7927fYv/fIvPL41unHr1mg0vXBu+safGIAU2nevL2cXx6fLxevv3vyBDz/qHy3OXdiZnXtge1x3hxHA+XMPl/XkO6++89CHr1x+Yne2JQnd9NzVr3zj7fd/4ooflQDOjSYnd46qShxxvWqfe2KGlXztleXFC1t1FQBQwqQeu7RqG3LdUdwuQnW5rKcn128fX6MLD58DcP6hy9XlR5ZJrjxjJbMifejJq7sTd7w/V9fZ+W0Ak+koFnx8Wy9tF1cuuQefe+JmGz700SeO77z6lT94XfFlAL/8q7//xEfP1faq7r17+0jJwvXXj9/39DMT/o1lowDOj/nf+dmf+uCTF/avvfb08w9tVeHa9a//3f/+7+6dxqtXqp/9z/8DAOHatbdf+pPPfeK5f/TbX37kQnH1gen2tl+uTj7x3GPfeuv6crkEIE7bmKKSmvV9AAEmAtZajBgI2HRPeNkH8v36NRtAhE3I8b3Wz4yB30TDD20z2B7ire+KRJy9j9bIxPr/ZyVtFDWUTli4S2bgsiohTromhwpZZJ0twwsRlmCSugRVAeXlzx05Ie9IqBPAe/hKRMR7LgsuvAewOx6DzAnqQi7tjJiQVJ96cGdnK666BYDzz4y2xnrn+jdXTTq3NXHmR+NweWeyOz3H3K7CEsB0WkzqcVpqGI/LslBVU3WOTbsUO196ABZjXLXOFVKIxaVItzPuUrvnRvWlK9NXXrsJ4JmHfVmVDI1OCi/WLXO6hgDL6khsFruUtByN+73XFMQYfG8A3NeeDgOkZlAi5OrlIXDS4fkTsxkZ92S3IdgCACXilGJIyhDEzhH9qQ9eaFqd1CWAcVmwi+zEF60vtGmWGoOAiO+iGCscAEu0jCnFkEiZMG/D1mhkhKKaNokn5Q6AmgolpiDRbAE6Vdu/FZbB3TlsmcP1/TsAktndw9Om6UIIJ8fz+el8sWyWC1rGTpC556qAZzijneqRjEXmibo+dXueHaFv3QUDtJdVg5lWAGJMCUZJx4VTJ+Px9De/9OYrb96dbU3atlu16xlLTbMScezMFFBLKRI0Z5WMXWLzk9xum3KbAVOYMh
4zntUzr3N4G1aVbwBMA0q1tSLQQRWlXY3xq70bJKnoHOa4VZvQs9z1kid3uC+5qcdo7S1j1UkFC0moUTALYqtZOUWXnWqlcwhdaaFaNiPEYcARAJpqdCu+j9TgYwpot3H2xyOl8ycTXX1DYyCQSgDGqjrdEOTQyLJtPOxXTSktjNkzZBANRZE+IgCgy34ZsR+v7w7nt0HQ5DL3W30vSYGvc7hMRiNpWDmzeP9fYLmDz11jf/7E++3xp51zueeftT+wCcElExYEvSxcJdE0a3f65zhV7Ensjh4qQE997ZpYgxxlFrfO+smUTL2RZUjQKLRIRAyABSpiZBxImszZk4FIjM5KSK3KUQql612YyZQpvbarUyNI9dvfzIxQtlwejsubuI0yhrztbxTalr0Q1C6KHSQQdzmHbW80bWTlbnC1rdfjJ812FFWydotIEUu3TIO6pHRxC1bm717+yBRFpP6nWatV6A17K+tEE+vwDB7PQSff1SYBpCk1RNy8BEbqo7Rdi+NN2aPXZw2gC4fbharJotKRqihXmgkk3VFCSMovPM6eR2Isa+CerXIcK6nYY284nOpXed5Ev/hJF30s3dA6BuQCxiapNIADAbz4np+Y9/dD7dKkLh5iEEAswsiIh0KjdeFJGY3KiIxWw+TW1aLIyc0CWtQMezICIbnAo2+fb6xvXfoqdK9s7zNED0wOaV67d2k2FD/Hk4Ho6H4+H4MzZ6OHJDGTuHRa4leR8oza7HwIxc/5KG1dR8vXcOW2HvpNc1hazX3I2bjJqtl191MTUlpKzSdqdh5jL01LK5Z1cny9Zl8QJwp//IfUTVsSBpc0598LNOAcDrwlTX69u/YPhGD2YTcCfurG0lm6v1XcAFx8RsTav12TTQWZNXjY3H88n2dnN4dnZWAWASsJhoJk4cVQrVVMS4ShZk6MYiCEtotWpaEgGDHUnhxODCuQXAEtzhHN1cXbP38tFkQkUEEMtC67ZxBQuDWEaO3LQ6HhNLqHMCMAVJjJZyNldQ4MDB2V0NhmxqAEYxquUmm4hnA3EwUNNmx1C+xmbWuMNg7AxiZ4BCdx87gzsmEE/a3MCsMWOMQJEpqBOAOltkMipcaJU9rVZq6pqRPBtnFwAZYt416q/vUjcNqL95w/12kLuZmWpWg4PMB4Hnc5N3iFY6gNp7NAvnPgoAsDw6LKZzFg5MQWh7hwBEqj/zkVcObn3uLc88ff/e4ukbbzh88aX9C7P9na0P/cHpv3HhKQBVdXxymuO8yJZPVysR+cpv+CqdvHz4uXa/LJcH9wBoOSsv7lWebzx+yaqbR5+/e/HqjhaFBNS9TDXq4+VP/81/9vqLd970de/8xz/7y6XjTz7yCQE++uGPTkEA9nemLz3/6b3Ijzx7dTwd/8nHX+IRaY2/97/9xN/6n/7yndcCgJsvHW7v14WwXZt/zXve9eN/9//yJt85uDfZ28LxPT2rAIxmO37/zJJCK89xPp+c3Tz7wM//hjggDuB4mQ5O7eZdPfr88ZNP7u3e2Prj5185OL4zn8n9ylpnADV8twzHi/qnP/Drt49WVDCEKpBSLELomqqH2zEEXpQBhru7EaIPS0QfWncLSK9F9kDi1b8bKGNUgpkSA+ZGHVsQUYRhUVjgxObsOSUonGgrSCEOgFxbz8lRCmYFRkURg+1sye58Pp8Uu/MSwLX98ZX98das3Ak8m8YgDmujoAw9YVmzV9Tm7JbVYGrteDZls9SmGBjsGPqAjB29qoSv81niTVYGAvX9q5vV1rFOBfplc+B/ee66b3HuLcTU20YMUXUQVye3KvXOMOMw1rZixirXpTA1meY7YW++kmLsjazOANgqFyVI1ZO1KEbTi8e1LE6qvVTMt8ZMSwCmlpKXVHzqk6/uP/3W49urG/ule12ORsYBgCm98vpq5/GwqqmIIae6VWvVp/OSAzcmAKazWSyL+kyZnEPozKoksJNUqsdVh5XbaBZv7BYqbVOtxpE33Fk6t0o7udtZlW7fSQ7/i2978nvefQWI3dWwplquDo+W97P74qj9ZH1y4/pjcL1/fPCJz3xuXpYAmDGKEOKsrqbk3iQ1Rmp8HINYAFAYoL5wboxrJ80WxCcTogiCESLQ06e0S7CFwAgFB2/NPXSdVgwhT2qpVUvuBCf4hqnRb1fmruqB4LAm5bbJ6gXJfD7Z/eFf/ASA518+1hBCWea6dSAylWXBzOZw9E413qX/bgBGRaSeW0rmntQAJNVs5rHDUYhDCCH0cKYNnfA9ULrmLTnBDS5mY3ZYArAVrZxwjlxOpncPGoclVacQYiSIagbQNA0N+LzDXeHIYADa2b7MpFRtmtxE4QDEMkwuz16+3bSa84CVuFn3SCn10cVa3mCzdjvG4xKN3j5cnK4wDphP47L2qlrFELu9UsDMDmJY6uAOIva+OdTR7S9dawTBJTSpCQ6QkDBJeenSIwAODu42GUjqYNAAAXvvYb6GDja1luERNhsqY92M6Ex3Basqj6PfX5zdO7jLwgru5KfrNj0Ae9H5COVPTUC7hJlZtJOpAwzMLGDqFiNfL0A+iNAAAmewWUeNsggdm5XcvPWZR77mLY8BOFzUO4ENqo6RsKsicMHWew1KAJDNx/O91k6lmJouu4vD7rnNxfal148B4JXPVTvbRVPO+WySrFmuXv9Xv/rBexW+/F3v22nTb37wQwDm42J2eXTGvus+S0bCljMxw7T73uNxKcyO3JGh3AanRaIu58dQuegzewY5uRCEs5MEOqobAG+aTsrReBTGb3r80fk8vvrZzwLI2YSllAFnJOLAA/P4/AK0vhXDwnTuXgE4q+rt/WJeCIAStjsfnd3X45PF/p5f2t5FbtpmNYss4KpeAZhOyYoJ1n0EAxiJPgQaNtBB4XgdkpmNQkwsDYBEhXiUk+qRAOcpp4TaRswwr6s6dDbfIVRJI3Xy7RjanQZQ/sG4/xyWBRIm9VAKRADMA2LIydTZS1oFbn0ST1pnj714dySHrq8YsQ+8vc3FG/65gb7cwRxyrojQ6USjdVMtC16t2pmGj53Uj05nd4/ur+7Xjz92qX3uMQAntRL1jVQgBgX0yE9ftl+LNg1NzRg2441iDItAJDudNtYmc9W6Pi0E4MKlYY8AUqsniyaKskiTUwgoyr6Pijx2fiPuFkMwzU191rY5FPLotUcvzWezMta5BVBrLshKkEggwiiwmqtZd2+zOtDb/6zvdoednYuWCFi36XbSH32Q4H0sNeRjm9lp56FGAGu+CPpUimhIjbqbxt73G2FwANzEZOtH7BwWyV1TeX9bB3STNr1yTVIiLgMzk5mZOrkJ6cV5nI8KAJd25/eOV7funYSWCy9PcwOKrcJAeWhqUteIIBS0o8oTunrEcH+78/XNTB4i/17VYY3iUp+3drNSgBhDbrNBOxZMRwTdne/kDM3q7oCGEADkNHB+3foPJ5hp09Q5ZcBG04lm1SoDyJpDiGauasK8PhmcO7+huLBm75x/Cvsfzt2BLqsFrV0dHkKSD8fD8XD8GRxrOHII4TZ1rXOv+pL8b1r/0f3jHG+
/F/pG3481BDbee8T0IU9Xg+03VDfvS5ed82cwN3WHp4jCyYhCV7dkkg5qZArZjBgEJ1iv6bM+mf4YfSXR12jH+WCyOyk8uE89uJx3DAlzNzXukrJO0LEH3ahNYPKAIqmLREI8Wba1k3GxqBsAkXilDRcFUaxaNQsEjItyWS1mcdQJ6qj6SKJRACtIsmZiM3NnYQhxASADasYJal5nz65GaBXz8bRrQCtGZZtPc3Jmye7sCFJk9dPVqs5tF87cPzmbjMfMLCE0bW6TEpyIs3HSnqyVuzDGqE6+UiOCL6u6bTul5HUs4t0NdSNH3TRMJMytWdMb/hkzArGbJPdAnJMHkWalTUDfO6aw5CDOkNQatw3IhQhO2TedNX1DPjGDDdZnFn3tdpMJBAnMTJoBuGrHUjHdeCl2H2TeTUtzd2fekCO9l/0mAsNV+bSttE0TVs/5yUtPANifbd2iW3sXHnv91L/qHV8Zo1597Mr9w1vTOFZN4wvXAdx+9f4y5bIlJzQ5P37tspRhUbej6WQynmapAJyM55NL+3Ocjba29Wj0c//8d7/9my5NxkdhNjtZrQBcuzj98Id+LR/lb/22N04md1576VMQSlUajePE/a/8d38FwFf8hedCuj3R5Wzreppf+x+///2/+OO/MgvhJGtd3tifFQB+4af/6Du/710ndz/86BNNNb8WJ360cDldLZo0EXzwV38PwNO7T33q47f/ta+4unfx8tFri1O2Me8989yFxn9PSABUOS0q33/kxkd/648vzcpXXzi4cyZ7159d6GeyAckAhIlUrp999c5YYIFj4KSSySVEaGcwA4nig7XlGpfw3nZ2k8F1eOSmIRkY+m42kWOvEpGWZCZwUndTcu8CMrY06YyXNRVs4wIcPQpHwWrVXNwfAbiyt7c3L3amYWcetydhFAkSrl+cXt6fR0GqlgDatmG4hFwwAY0mM9JkOG28EySSIPA2KepkRDQZBUOWKBwIuY/fmbybdBvSB4eeF7nJZ/vep+H798/Yua4raCc9BQI8AybcLUUifYsRCfdL7SD1RBTapmpzmu5sZ2UAVI7H23vp7J6gIEu3TjGZ7zXNPGc7ubW4PJ8BuLi1aqtERqGYcqt/8nuf+YXffOXwcPV93/WOrf1x1vsAsitLGcLWqzdP5k/Nlw07FyKtKw7vrwDMdmNSVxqrGtQkhAybjYq6sZvH7XOjCOC+Njm15Si0TXY3IoTIpqkoy7Y9HUsAsF3yTJy1ZcvjUSAzdDSLLwq5m5T29sq9vfnqzF588fPL2zfhJp23CY0C50YbEC1O8/6OSOFelJev7LT3jmIQAAqvW3W4sxDYzTSbEJqU5nMqogAQokRgkjKQGVWaVC3lUEo003GHw7oTdwR/d6ecPQltodDsOghDqlt2TcnarCTcpW7dgtplh51k6WQsJ2cNM8c4Vg3b84uv3ss/8osf+YNP3gEQRyNz1IsmFDIJgRwJIGSH25pN6M49DEO97r2amWX1pA4gmapaISxCCustpEIgIu0ssLHOHzEkt90P+cJu8c43PJPSCsDh4eFsPEEsy9F4f3ryyu3Dg+O2mBBBugeze9zNe+lXGj6WzEF+tGgBcFwV4kzEzNksqbKEKLkDa7tH27L3NTHzHqHoz2ddVgLgdW6De2s0pfL+WQWmq9curerPnyzzLAYA6oG4r16V5chz3mR3A4xI682H3ZUAykxGZCwNGMDh2dKApKwuHY2xe/QYnQHzsO9gvXY5vFdQ8HPbFxGDA7GwqBVycHBvuWopFiSi1hl8CdAOH0fnPvlP48M4QKbGRKrI2p8OqbtDlVptWSSp9dbagKmqOYAihDa3UUbZqggvTSPSs9fn737rjQ6enhSiUIWPYvScQqRWcwikRkVZLHMn/ThPdblI41Re8vqQ/YyE1CnG0Cp99r4C+MEf/RkOBPOTJAZ9+uqWuo0J41guWj1QAzAfyTgW0GWl+XayK8JEyiLwHo6UIMI83HlXRycLg062oBdD7LUJfN1mzQwmkHgoX3zhJoBH36LPXvBXDm7/jX/ww+96+tELl64AyGpCoQh9OGnELE7Oa7zsAYRnc5fPA8UEYD6ZgKiqGwCw8dOPXZzuyYWLe3E2fu9f/LaZLSe7o498+l6ryl0c3qadWej8noYgaH3fz7F3N/Og/x+JbiYxjACQnpXzyfYj0w+/fOup06PovGB56fWzAi2J1U0CgGSjogSRE3fh0BoB3bAv10//5ltDtE5UdGRMAMfJ9mm5NZ6EEFgKtKvSaRJpQhVL1b3FLHTA2oBq+rqg8AWpxHBJidjBAUYSpCvwMAuHmDQx8dLmDEyZJzFWyOV4VMQCwEji8HE8aEcOVZ4OG7V1n48B7J16tROgPMB+Blen7JSM4ZJaPThewhNoZFx1NdLF8UnKcFcyb7Pl3BAWxGyeC+GmqQG4e9u0qW6IbG9ndnl3a3dr5qqrdiWBABQBnjSZhYJL72zBPAMGD0KhbyYDTPsW5HOZGQ3I43D1zomXDGDr5hVY00K7CMoHkHKtsti9ZviIDvMcwEftxI3Q9z73GF/3rHWkkkGpcTjSOoscXjrUs7rzKUIgZjNvk7o5MQmD4JyyOgOYFmF6def6ha17R4tbd45HZ80JG4PMY+3WM3nNyXPS2kUINKynHddkSA666JN6aTAaVAL8gX6E/op1/ITgwdSz5hBYigigqlYhhJwTTMwRRJjgXVnJTGIEoMk0ZxBJCLEIRM6Bp8WkKMq2bdumdyaMRWibZKrCYmbrDWfzCPRt4961pa9LAzSgl7aWlBqezGE/XS8WD8fD8XA8HH/GRjj/w1o9coiyBuGPc6/ZrJ0+oELr1xANjMpz+QvWdpzDZ26Od76u613M6Nxx6aVzkDXXROak0rVeAgQ27zq1iEkwNHyjV2xaB4Y2KNFgSPZA504f2NTJNic0MOUe2MJBcMuqnSVbt9134VEyqxbVqCw6EcmJTFL2tkmLnBP3JnPs0dG6+UpbtywxwumsSm7hpGq7S8Mix4tTJigRE3LKRGyqOYiwdMV5yZSzdZ6oyTyjV55fVtVZVQMoytjpJ1ZVIyIlh1XbMnNulZiFBUBdNw5X1WwGsLurqZurKjGXsQRQ1Q0zu3sbrG2d2Oq0hIOE3HrrDB96n406Q/Vev89s3Q/NgCcgaWYiE2EWczd4sv4CK9B653nObk5QJnIO6qbaZ+bS92QYHJC4AZyH2YcBlySWACEig7s5Mataj4SBiHnojSBX6wOJB3H2bjqwOzEkYDIep4y6SnPG6eEhgP/kL33Ld7zn637ox36yteYNVy/Mxttb+29eHD5/uqx8FH771z8A4FM3D1LGfDQS0gmkro9/6n//me/43nfQpf2q9VuvVwCO4uzC/v7tP/nU5e3tqj1529Pbd+4fXL6kW04y2QJwWqUdhk5w6eLkbPHyD//of0HH4Qf+wb96/JlnvvbLL/zGB38RwJNvP37rV15fHh2pKM3zd/+n3/iBn/hlULND9Hf+1x/5qZ/7RwB+5/f/8Nd//Le/4S/sLhfL2y+/HKgwZAjVrS9azLYuAL
jwzKOP3mm0Dp/543t3D/KNr37La5892Lv0RGtcMgDslkjJuZi9++vf+cJHP35wmm88+bYXfvfTN898NptEd3TtLdY4k4qEEaWUhTh6m3J26p2qRbjJm2XCe7iNumbbwRWXCGDqSt1EZNJjbDaoAnW1BSXiVNVFEWLoeqCVNg8yygABiH1vK1zZm1/aHl/dmexOi92dy/vzEsDOLBSihWgZQTDNumiaIPDmbKWquQUg0DIiCDsV2mtNOoGRqQNqRyE2uSmllFkoRUbzkTcr54CUncNAPDBiITZa81lo4HDzkFyQ91E7dejRFyxMDgw92l31voixjDDLTbNW4AcILMPq64DDpM1IYNm9jLovzkd14lAmNBYXxq+/ePrB3/jY4a3TNuf/4a/9mwDMlCi4Nm1dH554nZBan8eYdVKdLBbLBYC6ae/ePYmTApiajeocYCQSPVsIAmC5bM9Om+l0Kx3Bk2ndZOZx4DsnKRvy8gTAVkm5XaE3EOpCcPaUXSUQ3vrcIwDKyR5R4/dfRT5xDkBLw06DB3YShIItZ4dMprMiVtrY1kyaJgFwqjMQYmmZA9XPPbG1aleTiaY4OWqPcpsB7O2UZeQ2gSS4+So2KpbMq8rmJYgKADVJcgqpTW6qbuZN8rOFwqIghJi7M4nCLp5UVzUWi+TkE0gsZTZhAGrO7HEetncFx5mCMBNbh7rDenE7gGnVpFAEorIs93VV/tNfeeFXP/zx0xaBCwC1ahAZT0eNqhH1U8jg5rZZ99yIhIiJOJm7kWsk25rwfDYFMJ+OxmVRjKev3Dx49e4imRUxZsBUY1FoqvsFc6geridkAO4dVrXak49dBiDBju4fe1p6KvZn88OSF0UILKCg5CQCoCjiqloBzkTCwiwsMZAX0jbLFsDB/dW05HEZg1pSy0qjEROZmzqs82wY+B80sCJ79k9HbelPzr2UqKpO7DTKqWohPN5bVK9kd+tQIeaqbpxJyqJtGiNikb7I0cuzOIj7+Mc0iAQQWJxEFbcOOhmWY5LYmqizQzohCXJn5k4osDvXQbV644DrnTvNQPR1U3VPRhRiAq0Oj8oQWzVAUwKAYjx2tJ1SyQOFYSJsLPtw7pcER5DQaapm09y2ANrUCItmJQkheDY3cwkswuLcrUptzm4qkoV9YmTaXN6R9371cxemRdQGwFjMREmIDaUERXaiKnkh4ayWVrYAXLr85E/8wkd+5oPPP/3UMzeuP17f+XQpqW7baYFyVCRxAFLwzu5WDRot6io18/39umoO6za1Xo7DLACA5raq4kTEVjSKI2RlIWI4U8oZAJpm2E7YvdMl7x4icvBQ0XQw+vvZ3QwGmGqSHEMxEgA82z0hroW3hXZnZa+Np8YgCUN9qNOO7EWFzsGRX0xhWocnw8/LZbW/PQEQivjRj7/2+gLf/NjF1aL68Z/+sTnTn//qL5uRsaJuMgAZS91RrjrZuU3YSg8cZTi0D7+ypK0QUwKg9cn1G2//j77zWivNdlH+e+/91u/+6q9ZLT/BRZlhUyEArVXjsRALr8Pljr/W7UX+4OHOTb0TCjvl1JJ2JzCdzbxK9ap99X6qK4bHg0pHyVZ1JaMAwFtQWlfLHNQ5aPdtqJ2/fIcPDbTifqPsGNtg8mwAKBau6m51XVdGIcg0htRYm3wyKTt9yVVf9ukG9zrM5yN/FqiDeQDHCB1NwTOTdxbenShTVl+1WkauE9UaNJuE6AgCA7BoqxAGyBsw87qu1dw8i4SOce6qi7Ydh+LyhQsXt2fb06Kq61EhwjKo2HtZcMlkaq07yEdBInOdNRALEYCzNkXpDHkGcVwH1mr/58Dcc21p6C211q0m6IDD80vcADw/uJNizeboEcnuvZtYecPP7YGwTVA+rERDwkWdGLb3VoR9jczRpW1qACRK/9ASQJazF8IARkytJoHduDS7fHHroGpffvngrNJkWnIRJ2MAKQUyt6yrtgG5DXlJT711NEkqIwAAIABJREFUByAU6NzRu9nnfWbna8QUQ9EM5G1qg4YQQ9aUVwqgjCMi8uwpZw4UmEHUyWeJ9F0Eo1GpGkwVRNLtBX3ngzFTKAKAYlTM5ltnZ0vTlQibbZwM/Dw6en6cW0g2q8DwaK6f0XUm/uAO8XA8HA/Hw/FnYzxgZdPDcV3xjIh7lPALC5cY3nN+m+/ffu5Xa+dinNvJ6NyetanmdL6qg7iGMzG6nbhLpNjJRKSToGKwOxjcFecGMUo3sg2c2B10rS42NLg4+tI2sN4Z/ZzG8PlfDWc2kBtSzqa54wCCoKoA2uxuWNWtaq6CH9YnbDaKRevmEnPKABKSOwUE0aB0lnJy4mZZFTEScZtaABwidXSGGIVZQogSMtzMzbWPC5mSuecOuKUu/ZIQNGUKAUDTJnEXYQeIuapXXEQmVuTI3DQNgBBDSpmYTNXYzS3lHGNRhlG2XLUNgCBsZlnVmGJRikhdr2IMWd2Hauuw5xuczL2IEYCZq1nnmi3CZtq2TYxjYnM3DpKbzEHU+m8kgR3mBidhYfLAICJhcVBPDBBmhzp3nBh6ED8cQjEnkKt76LJcOEUwcebe4S4b+q5WZuoB5a602fkgdazbTcjP8BBRBpECd0/yu776zW97/DqAl27dPLy/OLx5FEv6rd/9zbe97SvLEVb3Tqacn7zxyPOffAXA7YOzbHj3O97+/Cc/efv48Kknr1+5sne0rMN4fObhk//PPQC4wLPnP/PYFlHOr79666lH4gLNfDbXFJ/68qcBFD//R4f3mkevT7/s2Ufz8rW9py7u717/nqOv+6Vf+sg3/dtf98Z3rgDsPrfNbTUalx6a5fHzj77h67/rr3/nP/2BnwmFvvLa0T/58fcD+G/+2+/8yR/6J3/y4XL7sv7L9//f9VkiwhuffPqFP3pBOe/uTgHEvatPPXt0cngnFnLnaPHm2fxXP/ah2fbZjuSlFQCmRYEU//gPXtyqz+4d2te/+ys/e2frQx/8qZjBqbYQABTBDCQccsrOpCBXj0xCkgYqcRogkvWfHTEBnTQS1nAkec/MIoIFIiZYl112KxWrmTL5eIRJqZNRIUyEAHc3BaBq+/vboyKOy/DoxdmTl2a782J7UowKmUvoJPzavDxeVEw6HccyigOWztoMEHXt+QCEgzLcKXCKBYI7kAmMshMtArOOfHR/idOMqqqKW2dveXJXKBCJDfasBGfeLEUY+qnBmz+7FLCfyjysnz32MEgMgalbrhg8Knkydc3BTZP2E7fr/eFzi7WUWRet47VXb5oXADyl1dnxm954xdGWWaOm6qitF2l/Nv7s3dNbKwWgadEsaqlTKREpjoMYrFE1K4hy6ETo3FlClPJskVNDVeMSR83JcTkrLBmA28cLIaxWjVsmGpljHHHS5vls/u9+85u3L18CwHtayEJXKxKiWFrbSFfuiBHM40kBwNmXi0XZVmpalDJciuHSnsuXScFhknLIMptO6ldv3y+CNCQAdkJZLZpiKi1761kol4E1GWv0TOU4AGhzVssxcG1qSZEccAhlZsTY9cwiubeezRQOmJO12Q0tscUgq
dZufk4pBkJZBGEuM4hQbCUW3t4RAEU5yTm1FqYjJw4sQkLoXDeoB8Ccu35vK0IBmnzgD26+/wPPf+qknbM4ULsCGEEkoHIV8mSZWQqh1CYHiBEldItwwVIEFuGcqhjCbDzemsSdSbE3KwDsTON0FBatB5lTyDcPqjo3TsQCteaBRXYznAgCNrJPffqls+UFAJzrnBIzDg6PWz22zOPCqtxyEBHKOQOo68Q9G92zZldFViFrkYUDAGJ2RmsGhEZdzS5ubd25f2Rq2vkt9w8QcW+svM4gO15Lj0g6oJ0SXuCTVTUtaTybrdRZolnusFFFS8zOBKIQQ6obsPQdiw+gSwQgMEMzmERiUj9btu2tuwC8URJRZ7Ue8SL0XdC+yczPVdD6/mxjELuwiPa2b6ruSTU1jZZRFFXWWjOLUZgAqOv2i4Mw2hSD+11wU4zt0AhTImIKIcQQA4AyxkIKNyiDiOqUtK7dLLvCe6ULSCCztl5ujUc5nV0q9d1vefK5Z66ko8NJ4QBG4ivP7sEsZDMv4E4hxKY2Gk3VSgA/9SvP/9gv//692v/yD/7kD/3Vb3/bIxexuDUbBbWWrK0yAzCylY6M0OoiwZtMNUGB2qkAN10fDUNiXDWr+TQzWHOSYtSp5aakANpVDYeZrevO3nOnB2YcALKBi7upSoNowgwBTwCgbVoraUIIwgvjOQDqdM/PtVT2xbLzyOCaj/ZF4xy0YI5yPJqWBYCSKMQAcZIgZSllJJCW4za2S6M2RgA8JaX6HM7wBR84rH5fMMEAVxQjut+0AGaT7Vu3b//E+z/4nq9+6sk3Pf77H/vQx3/tU1/x9t13vlPGFjVHAO10Pra2A/eZ0asPw3v94s3E3TwL3dHnJN665WTBAEQD1c2Np9/0vu993zNXy7Hy+77nP777+uGlS5PWRgBy4EmpG+TsQcY1BlmIge7XdQkwQJ0vkeZewg/qua5ZOk55u1JZNKmtcruqzs4WrTYAdCMIc/52dH91B1LqXI0G0qkPxzP3wAygCEEktNkWVb3KbMlCMWUuJU5daDpiAFV1dlI3BCKGCMPAxCBleETZ1XKEZX9vvj+f70xHgb1N9SgKqZuqxH4nVTMTZwjcA4s5UjaAbDBeHBjM62yph9rO+133E4E2jdkdvPVgGekBQPv8b9c1zM3LOt7jEHGf60TyB6PwLl0EvsRF761s1q0tQ4t+92Lv1GXgrubZHTABceBOqVmzs2NExGqRpSwn5fWdVw8Wx5VbkNG8BJCyCHFgbo3cXVVT0pRy523d5WhNyuu0cwPVeV+iWEOrff7HYCJGTLkh5mw5dDspSUrGLKNx7PrjTM3NiKWIRcdhb+qmHJXMVNeNmsYYCKTw+mwB7rHCoiwmk1FTN2dmmrXvyaZemPVcsztc+4Z5pgGAXKvUbLLSzb3wQXfjIRr5cDwcD8efxcH/3y95OB6Oh+PheDgejofj4Xg4Ho6H4+F4OB6Oh+PheDgejofj4fj/Y4Se5o2hgDVIh/TFtfNw5Zpp3xGXvJNcsQeqMWvC+1BZ9TX3H2ulE0ZnYu2m1LWiOKQXdRysGzqJPxACuQUXc+fOTI2MCd6Z4qzVsXuJE+97t82jdGQH7U6HiTs9DuN1ae9cz0FfMd9QIzYF586UIAg7hGLXoMxA6JTC4BTYQcxFL7bPVDsAsax9OxkZuRiyiRKVQqwwKme5Y04VAUDvJshCYDXnENtOW/5ce3kyJ5Hsm2ojEZmTiWCwJjQiM2dmM6OiBKBqMUYQQhnRk09ZzSkWGQ4iiYU61BNA3MvuOAQiQkA2y6YkQTuj4ME8VXp7Yu4ar1X70jcPhT7N6oCEshdGR9DsJOIO4o2fO0EGRpCDYHCFsm0aULXr0O8omZaZebhnPWVsffOym0KZmQDqrgCoe33JXDe1BAkSkmYhkiJUyxU8lzE0beo/wwxoRViYxuX8+Pikbqprl0N1evp7H3sNQGjDuJy973v/8//jH/3DbBfKSxf+5Y///GNXJ7uXdy7vXJrunADYOqmup/or3/TnTm9/7tWX7+VEk/G8rrcJBzcuzd/yzkcBvHIKi5THT+ZPHcmxjraKYrrTtPne5z4/uwIAO5NAhOn2iNL9kq4e3jq5fql+yzsvfuQj/Pmz18flCQBqr7740ktXn94Ncgk+nhdHf/Wvf8cff/jTH/uNj+zP4g//vX8G4Muee/YrvvWb/vb3/+Df/u//6694x8Xf+PnndyfxW77uXctVeu1jn3nqwlUApwe37r1+q6HTL3/7V+XFp2///scff3o2v3Jj++re6vUlgJU2py3/5m995unt9j3/+hs+9bnw/f/z3795dzXZETVMYwGgKFU914skIbRqBTG7moMKhWU2AyDJWoA5MBVNSrGkIGXdtCGyqXcqnzIic2xzWafaRAMJaZ6NikWTJiMCACOH160/dmlrPB8VqXrzc8+88LmXb989LbI9/tg1ALOtMljTtgqma7vx7Ox4qxhVVi6a9rWUn3jyGoCc5YN/ePCet2+TSuZY82h7HkdxUje3zX3VGADLfGlbxpJaclUQJKdomcoitE0CMBoXi3Z5r/Y/fGH50Y8fPH5h583PXUt0JEZcgiwCcATKLVg9ELJzMYOwmZJWxNlRAkhxSrCCW0hXoAfQ+5usnRPITUPkZnV0f7U73/vcSyeXL+/eefX0sccue98ZqsSdgZMBRMyamctCF6uDhVncBjCN+0eLBUa5rY8jrn/iE58/qXIxpqP7KS+bfckA9udX6vz50YXto3vp7H59UJM1OD1eZUej41Im3cNqaVUdtt5U+ejzhZ3BirKEelvVGcB8FL7lm5/ZvV7F6QxpkSQXMjltq+JiceW5pwoLACovWVeIAg7ZILFMqlJEGBv5H73yEoCqDpPx1gUeX50TcnLw0HB2bnSkNhVwsmBlyaxgQIF9GQM4WxnGY1b1ZPNRWAFCTsFHUTnr7aMVgN1ZKQVDbWZYsS4WVo7HbNw2Z3cOqumoBjAtJEiaT+V+U+yOR8s2lWOTyp98ZK8Z68wbAD6Z2Hjc3rx31tLu1qV7h6/OLuSLT13B1jxEB3Da+mSyW3/i5bN2NaKCeUvbGt6EYpyyU9evQI0yB47Zwy/91ou/8nsvX9idf+O1uVmmWE4nYwAv3Dx8/c5yNhlb4CY5MSxl6j0HeoNjMkg0z1YnFKrHK61XVaD5xfkoeAsgqp+dJRZ67tJ0Z2vn5u3nyYuO4a7aEy/Mu7YDI6YQWLhgDgHJRU6r+u7hKYCozd4sjLevHh+/unv52urOnXqZAeS2ljgOEgFkNdVE5EzOTP1CTQwquh0g5mSBjGQ63S1GzfHBYY2ytiiRRiEm1W7DhUN6InYzMJs6xYKNDZRZjoS6znt7e9aerRZtlT9rDNNSU0csEaIQWOBk4LIcm2lPddvQzrhrRHQT80xObWNxMm9zo8szAEWcp9S6qRlJCG7eyeSquQQ5N0mHEIk7J41upxskddHbXzSqJNGUnAKxF3HSOe4AcPP6AU4cYdC2MFo3DzsNarNYt/G5V+1yfQ4rbZZeAwCHLkaLfScCEXLw
DECrelqKQVEvt0Xf+sYLX//Oi8u7h5engaMAWGYrQ1kwZ6TsTpmKItYZFY+29p/6wIdeBfB3/vmHdibllQlWi/zX/u6/+JHv/w+fvfQcHXyOSzS5iACAtkEcLTNLRjEpx6NYBlUhOjpqL++GggAgAVW7mhVxdVBzpIDYmgPkWYvIAL7/R392ce9QEFhJAIjXJFtF6ThFXA6tMDMSgtfuCniWEfFEeHXCbeFS2xiA4dT0iXKktWprldikypirSzGSVhQBQJxuI98xHfTW+7/WZmQQCXCgE5ZBp1DJIDpO1S7YOyFmdh3FdF9XmUYMArepTSmNyjG1Va8VmHg65oxeQJvB3cPSNeEanVNYGkTwALg7T6bCuRQB4LRUr6u7Ta6yb2199sR/62Mfu7J34x1aVLnlGQAETV0k7b16CBEIAx10Y6wBg1unoD6EyxOSxo2LOAdwmm/ORu6renX7uL12DUn3Lly6IOO9rWVcHQIIJreInyAFuYGNiN16cjNp7hroe8OsXpFQuyDPahEQR1MCIHDiaarihd1rr7509+rsyoxlf3uSVylr6nq5po2TFySAJbiJpM55ay0Y2T1ynYZtL2APJIdAPC+7Lo1YlpruJN1uzWxxNipKSOQ4lhgJ1vVpta20ZOMissGyEwkLOYmD6+XZeBwBXLywc21va2sc3LObCQPuxhASMwZQkJKoBCZfibDD1NAZMHYsYABFAG06vLo25C7jER66iDarw2CBbgiA8/m7BuuEcNZCErxeKAYXpg3bjtbeNWtmLA1vGnrfCJ2DKIZVSbifOf3xDBtCMrmAeSAoum3McYgQhdFniEzMAEyzu9UmoFwQVHPdWBTaKsw5Nad3AXg2l4AQsmaROArFdFKwlI5BIRJYLpadQVnK1raaUm5S03gTqQB6s3WQOHpBTQDZcoyjlBsCd1nnarkUjnFcar0aj8etq5qLCIvklFgYQAiiKQOIMfb7ETMRMfJqsWpbBZArEwkx8HhUZE1tazFGdyPmwGLab7imFoSJenosnM1dgvQittr9Z98uNlC31/2JD/mRD8fD8XD82RsPaEfSeVr/F+tYDLT+8+0q9ODiN5go/KnjXD/R8CnnNrpzh9m8YwNN9f9xri18SD/On8D6f87lKgCsb83YNA50y/i6p2rAW7/Uak4+HLd77XClBu+JL/Xt1v4c5zSeHe5DU2ovs9n98gE5Y8eDV8E3YQadZ+MTNh0aePDCrvsYHWvjtXNX5kHptfV9PwfPnrvbm6/2hVf7wS++ToToi371xZdn8w3Wx+o6/79o2g3feLhdX6h0s/meWOOa3RuIWQZsu5MGc3fqIgTiGKPD1bJrBhBimT2jyU8+8UTWdHbnjptOyEvns+ObEy0ArPIqX6BpQatF8+9/13v+/L/1jT/5D//FE88+uT3frVs/OskA2gV2t8Ord1989o1v/PinX5+OZ09f2Tt77ZXHv+rq6vhAlocALs32Hnv20XHwlOzGjQu//fuvPnK9euL69tZUJtMZgG/4mj/3h39w87lHLz1+oSy39PTouLr5abdLzz57ZYvbe6cM4P7hZxfU7OF6UExncZEapXvf+e987e/82kf2crq+LQD+q+/7X/7GD3zP3/yBv/WDf////C+/+z8YT4tnn3j0Tc/dGP/Sh91xVCqA27dMi9k0TD/+2y/PaNwEf/c3v6uaPTOb8VJbAGWLX/q5n33ff/btX/ZE+4/f/7Ff/82X791diaCMRAC0BgBjzZmdo8mIndyJXU2bVVX2AqAIgdxpWaciAkj5zJ3bAl4aZYDMAYxdUhF0sdgZw4pQghhxNB9fkInWNYDsPtmeT6fTkfDjF/c//cKnmrPDN16ZPXpx6+re3tXdKYAXX36Vy9HlrcmLN0+Ojhc39gvNNN6Zfebu7TsHeXt+BGBrNnn2qfGdRXzDha0/fuHgMzc/923vectrt1fqtL8/UTQAiKhqrYWNyhCZsyK1bRGDCGLXaWUphq2z5f2cOauuUmvggkcIhdmZdO7bXRLVzWAicyc3h7MwDOhkkra3QeT1fWganur+kdqsCyGEIlpDk3G5XLUnp/Xujp0tM8i6ogvIqBdG6h0qQpnpLHtLhRXdUi9K5OJWUC5z3dSLLJnzMp+ctvfP0p0zANDVfauq5uZx9HIc4nYwI7uwN6sXdybjax4cQDEp9q9eHsfFM49uX7tQXNRrbVoUwrasOuGna49fufrcM0lGmhZI9Wg2ckck5GX1sd/5w+nOBQD57OAtT25xMTJVIWEh9IKhTkxZDcCqauA1poZhNcWXWIU2hZAiijFnWJP0aNm0nAEkh4Mbs5Ssbn0KN2armzPEuFUe36sBlKq+1PmIRpM4mso8qnqxv79ba3vt8mg2LwDEUqZF8lHYSuNXPnVvb7tMy3TY1joLk8cvFNNtAI224+m8XqTVnVOjajSTRaXFM19WceOZAGxfurb4/Eu3q5qKosKxu5ZlsFWbWgjHVW0ATJglijchjpZN+w1f8YyUoc1NgBNLMgewu//Yh+2lOwcrjEswxxAzlEHuzt4rG8C8ahSuBNdIW+NA8PtHZ5rSla0AIF7YmmxNj86WVYOGJ8wEBRG5mTs6e270RtvmCs9knJlDhpWBQ4/XoW29anFta+dwdOexa4+eVvng7F4MwRRNqrrmOwkyncwcBvT+x4Pch2oIAGoncSOwE0AUYxgVkdmJyAYZFgK5m2o2U/4SG+6wjQ2CgqotVK1NzOwtkkFUATiRhODM5q6a1Sz2KNm5KYUO3nMS6iwvcm5zyppTd5w2tHX2vs5JwbrggWCe5cHI6ty8/aJDbPZr722Iu+TS0IsmA95X8PotbgM/ONIQDzzgJHHuAOelxHyImNz/X/beNMiS5DgP/NwjIjPfWVffPUdPz4W5gAEwIAYASRwEQPAAD0HkguKhXXJpMnFtZWtcLZdarVbSipKWIkWarXZFI7E8JBwGUSRFiCSIYwAMQHAADAgMMJjpubtn+u6q7q7jXZkZEe77IzJfveoBZfwpmnVYW9erV5mRkZGR4e6fu38ekTJzlUDCqgbeUgBAGmfT2hEPnNx2uP+eN9xdkNhOQIYyegCOQcZsl9PcIss4RN2egrsFF6vv+/3Pf+DhJwCsupyc3fSBKPMkf+8XPvjPf+ZvPHj/rXTmyWzZJTytyFwIGqnu5Hk52spNDFXMCFdmY0ATmptFziNN6yoCW2VcNhSjeh86mTHDIYC/8yPv+Gf/6sNMeZDSaxSRPDfIMrN8xOhKLQSARQyrSiAJKkpkYQuYbHlQ8eCo8QrgY//5oZ/86TtnlTDgMNgOMe9m41HtDXtmk9y99ZTUG2vauVTs1XVDnDVebcMgTuV0BLTGIi5OOwAgUTuVTGtPnS5pZqyF1EXejVJVsLUWANTEcT3bx66lcWwvIAKVVB46rXgkh2ujQ1M9rmO/W5kAYFpx4TqSYz1M1IR+0SsMRYks0YsEEQAd1pmGATNEmkTa5ORPNze/DpBoHBt4l2gyCZ0iBg2GMwCdQc+W7rlnzvynP/7c4Mi7b3nw7l/9hX+RI/tb/82Dr/7
BtwAgE1c8CIbATAYAszToGsiyB+YRCM2LbgAFhC0ZUKO9AwBnKjZGqo+/4tizlyof7be87tvNLNCgQbrGxggsiMAO7FSESKAWINGquZ95VaP0SpDaVMZHbEzREkoJ3GREm2UKGDYwzEx51ulaC2C4NK0mGyQQMAwRMTOr+HpW7ds33Lc0BLA67BaOQgyJdsYQSXvRxTLs19pArdmyqOu3jI5N5ZgWn+TdE9q3vM3CJm0Mot2qRLvKRXPoXotib1cN59Y1tgCRqnJ6JHtJKPcI5T3faFP/Zq+l1lpAi9cmlQYbN0wBMDDdPDdktsvqyvZm5T3IGkOFYwCZYyIiih1HQFCN4iutG6MxXWy5UxCxpmeqUOKUBV9VIfhQlgFAFbyPvkkfJxYTiKjIMh/r0KwZrrUMVWRVndWq6jIH1hBqVbVwwLzqD4iQgEhSJSZjzGA4TK9TjDra2qnqSgK6/Q4QQwiqIJGIFFUCEJg5xkRplUrBQgUSYxMcw9RyRu4+r2Yvv8Z6vt6ut+vtevtr0iyu2b520Ua0NCTXAGCLP+fVvHaPafng058T1naNnNK9IgiNsN0FOa/52zVj1ObXPdZp82nX3bYHoBNQ2sVpDtxRO7Y9dJbp7Jft6ImcaH6fifgaLYi2O7IWrlUscLgRUQrraN2bgEnXns9MqyDtwp06Z42Zj6HlXgYWkNV0dDNHDQN7qg7YRgi2Q5tfHEjaJeYPSNOYF5SUFsLcO8P6zX6FArTwUF+OJ/4XEcm9ZzXI42J/Or81NGZme8ieB0ecfLVzNDI9JgMAoaoJiC2bDFihQsxVCF3r8iwHMJuMEf2+1V6oqgMH1gbd/qlnToghF2Tt8NLrX/t6ANsb555+6dRDD31i/2rvjm+59dBg/L/93A9/7NNfuvnwgRsPDiblCMDywBHCV7/86Fve/OAdtyxJvdUZjCfrm6MLPCiiGQDAkRsGm2eeD52+j+Xg1pu+4/Dh7WdeoIhOf8gVAMxGEyGZ1fLimfH9d6242UR3jK+Xv/Lkmbtv6xy+aQjAry0f6JtOziKBs5WuWbr07Evv+p6bfvwnv+s3fvOjR3IG0FH+hf/913/5V45+z3f/wD/+J//UcuBO/aH/+JFvnDjxHW+8wQ5HAG574Ei10+9L8b5//dGBpff+jz/kC5QXt2IZOmwABKHz5yfHDw4Gdx3ZsRtfP/fEvgwdw7Zu65QD6sV6kSDgwEC/ZzrWwNKsiqIYDnsAhoPBS+uXVwduc7s8sN/tXxmGiE7By8Nu0dn31a88ASCSaEbayYeDbuFMPZn5jA8eXLVK33j8RQDDnvWzHZgwrXxwuP/2G44cu5HKK+sXt7oy7WYdADfesFr7YFTf+dpbzqxf3dwcT7xfW9lnsvzoYTsazwBYx3neXem68dSU2nnx3PThL5782nPrD9yx9J2H1ravzgD0CouAPHPi4TI2rW4uUC8CoMgzY/uFqzKD0Syev7KdcSFSGgnsTDIVjCqYyGWphEeqsGScg0qIVWKqmowmUeNw4LQMC9U6WjOleRMkeM9EXih4DZHYZiECHEkbOLIt3iCpCALsNLNU2HxjEjKpAWhGVRVAHUvjyMJgX8dZFdjJg6+6yWECYHXYlb6n6TSz+aWteqmT/+0fet2B/csD3uG8dDkB6C11bNEtuv7upUPW+VBktSelpDULgPF09tXHnun2B904PboswkSwbG0W47mz2/tdHwCmAcaqlxjFZjkksAJsUnw3N+ESgVis44Y9lvmbvfsKQJkhIGbVEBGjYjIV7jOAPkeFyZ3JMhNCCreBCO3vmHrguKwBHDjU667mqwPnvckG7tLG7IUXNvflxld+a6yH7joMwOxbzgtoVuhWNXlmfYVgGbXG2tHw0Nqp5y4COHTbsbPnr+xcmjjFZLpjEUizQHjiL54auD4AV3ae/NTjR0kP9vsGCkhVxz4ZJkvKHccAAkQEU9FO3v8bb3n1hz/9eGZCVVc9w1XtB7kDIAZH1gY721UF0ojAMcSY9vzM2V5RAMicE4lBvIoQlQzjS3VKwcetsQdwms2x/nA0q8u6zIfGGmgEM4UoCrGc0hFUyahJNcqJwAQGI3PWSB0SH6LIrEbR6dVCZy+eG422oo9MHCXJuITdazkZo7GkmJlNi3hSSNgbmME2HcFsDTGrJjMspAB5aw2TJSLRPcWrkwCchzcadhS8EkRFlEoFasRgBEgDlpi4CZE4spP5uFfsz0OKIMJMho1hYyxnVDS1uYhp6PIbx4/MAAAgAElEQVQYY1WHWVXFEAAYNpZZXq7jzMfa/ny5IhRipHn4U4Jjmg9sTMNNiTZVIJ1rGhN2F9RoEUmSRU/vXsiB2oJzjaNSxSAajQDe8oZ7X3nTUQ3lSldd2Bl0VBCcgctMXSegiEvvjWVYU9ailMXu8PIOf+CTj3zy6+f6hQMwI2TB5tZMp1UxWFrfXP+H/+9H/v7f/f7vvvsOP6WpYQCz4Hu9YQhl7WtVNWF2+w2rL53b+fIffPyxP/pU7gEgzOqdMtrM2BA7tQlWLRvnuKVxxJE7bzFFByWZjGwwUBXvf/G3/mRtqRcUsBkAm3czptxQYbmwNrOZy3JrsydOnDmaP72xNQJwYWP7d37jd6aZix6nT14QU8dp2SWqa6XhknSWAdDKMOusxBCgOg8YpAX2vgwBqhqjhKgiEGiKHcs6xEI5A5hmWW/Q2/jqmc0LF+66495pXXfZjmZaBx2Jbk4DgFjkMZKmIkU0DzBLrqa28EvzTJODux1A1xIHlzGAqdhZbWdRGKzUq6GRIIGiaLlT+zoFhRWFm4jaZrVT4mxOvOSUUcTuJruLaqlQt9dVk2c2zlQBTGpD1O+s2QOrrugObUHd/Xl/u65As+kOAMO5zxxRACIQGQqReY+iFgAn7XlBbycAyjFAKSbBlnKUKtD2zG9Ods5slt9aVP/xT/5zuVH/4A+8yd63BqCCMAcQAYEQm5rhCdiBayaOlNSoCrGk+kcxCIOUicgAIONSjTVn4MmZNuRNFDFKrRGAc3nuOiASUTAkhtlk0snsLTce7veyYTcH0HUcJUSJhogIPoo1lhasA04vYNpYG1/+3K/wTSpXJR2Ed+MJ9iyGFvxrbYe51rzHjptX1G6wSGoDHZs/aWuQoBGz16rae0gnd82G3YG2d0ALqvs8ZmKhH1Ld8wFpQasCsEyOjQqHiK1Zee7KjveRyRhrmTltjAxVleCDczZZqwxmw8xkmFNeiUitSlHVRwgpGwdmJXStAWxrVVGQlsdTlV2ekttm02oymQEQIGoG5mk1FSFVRTBEyszMlKJl59TDbZnxtuAapzAIADBExmVZ7ohNp1tMx7PxaGKMJSJfN7ofEbExPtZMDAUbq6og1SiJpNI6s4cnssWU5+Ev19v1dr1db3/tmsUuILUQWpi+UzS6eksxfQ0chb3+rgXJqIs42t5zFCkwMKnP8zDDBTBL267b3mTxgDb0bVEOC6C0p5+GA5tASrIgc/eaALSLdbYytx3iwtjbES/ezm6sjrT2wFxnvOaY9HUSGCnIg+eD3AX9CKlcOM1tCN
ImS2KO1NECINkYXkTE6dQW7Gz+yu3l9ygiuyPbnYLd299zzDy1/lr08ZrJmyPOf1U5uDjP10Y6tutQaP55QT/VOaQLqO7qMLR7F1Ck8FMQUfKcAnDOGdgQfIypFiQJYKzpWGegqXxp3nGxDIZQV9PppJhsXfAwLsYDK9lwefX8+ecAnL5UHj94/IkXXvyuH/6+/+fXP/xz/8P3f/9Pv/f9v//wn3z68Td/2z1HBhmAU1vbV0Zhujmy/OU7blgb+9nBFQkTj51tzbsXJzmA1a3CjOWxL5/aZPPuH3/FiadPnHxudMv+Dtdx8+ozACY2/5s/dn8m9qWRDE5sr64ubV4Z77t19YalpU9+4vkf+fEHAHQOHquq9U7OV8auo/a3/sm/e+GF9Xd/z1t/9me/b/nQDb/xy78FoDC1F/qpv/uP3v+bv/qqu+//+Kk/HfQP/t7H/uKmG7o//49+olOcBUBXN8rTYy72X13fOTurR9P6Kw8/82effeHS+nZlLABHvP/omrHLH/n3f/7lLz2+ZHW5RwdWu6vL3UG3IJMBUFWVstsxa4f7a4dXujllHAY5OMrWZj2ZKoCNK7N3vPXuYbf3kc999S0PHH3rg/dtb5anzqxntnPk5mOXnn8OwPKNBy9f2ayrcvXgvounTruOscFUV3Zizq+4/QCAPLc7s1kny4/dvO/QsRsnl8732Yjrr+4vev2Vs+uXAbz2+P5tX165slNHObDSn87K7TJ84ssn33L/DYcO3nb1wrMAyCytXz49uUKnr564/bZj3/4tB6uSZzGORfurS3HjKoCgYnJjnFFR44xjEsB70UjVTAEcWOuT6+Pk1b945tzGlrdLJgaTW1ItiZq6KyqRiGCcQG2WIwpIwYzY7LAAdiazWe2XVlYW6PUX3pD0a/KAWOejZ+tGk9pHNdYSNz5zUGzyqJr/NUSviiqo18g6AdDJeLicaQgai8pLzfSmt77yHUcOetZ+ZjK9DMCEINqFjb6OWtDttxwP+TIh8LQm3kkZxwZkjfE+nD6/sbTv4Pb5i7fc0EUUN1zqDj0AEn3m+fP33Hdb8OUNrpNCkNiZPMsQomUFIM6wc7FMuWSqEpkIhhmsQFLxYxOMRgQh5wDfvPELoeXtB01GobFhZTnfWrIA33znQQAdKpVMkXdV+MUXzjlWQswcR+YKMvMKYGb1hvuOT6ezwcoB0y0mG1+D4wjtFRQR3WofwJmtq5sT7/oHz37tqW7ld2ZxczuoiObuwvr4kS8+A+DdN9z6xDfOmbNbtx4tZkEliisc6zYq6fcsgG1f37h24I6V4vFnr7A6qFiGU2bHhlBXKXZPqlJM7+Cs6v7Shz57ZXvzphtWQVplLivcuc0RANdHZlkZzohGhKAMsIoFc4jVeAygFIkiDLWkY8BojWg7LEVhYR2AUR1L6njF1mSy3BlYC9RJ3IEN1RLTq03gZAOTIWLDZDTGqOK9sK8BOMNieGc6LUM4t36+8sFaZrCQMltyCkAgmbVKKSeOiZihBpEQE7pnYNho1Fj5OkIC8cyXzAYUFJosc2ZDBJUYomdNC701pFsdoBEPUFLEEFUoekQfgg/94YBTTQxEUUUUEKy1hGTf7hGFrQNPk7QRRRBRMBtuUvyijGc7UIoiUZqi6FBVWQhha9GBuRRL4jdlO+5Rmdo3PulFSSFplINGprXqiipRk6Vp9FpMMyVfpwstfLegOgDp4gJNHloGSJmFAbzrgbve9vpbL507e6Cfn37umXq21cmQoVtN6nlKL5NVjdOZEmedwfKLF8tf/N1PnL0Sio71xgAQNTNfdWMeM+oCl/PiapCf+zd/+NwPve1//vF3243nAHQtcRkoimGrhJWB+19+5kdmo3GtPC6jMRFARYRpWfRcHI8GvGktl5ELm0mY+aoEUEUtup3JtGZjRBGDdJkRcMtw2AGTNQAqP4IK+ShTiSGWAROFKt3uzNMnHnnbG24E4PSm8VQ21Nx/4NXjabU12kFdoZKPPvKkY+GlswC6gx7gi2JIRDCU8BJ2DJNKcqDI1DJ3rOlnttcpep280ymcc2UhA+4e2WcA1DUFhzPPrf/6b3/i7ZuF355NRB75ypNaZ3nOiQwn7/dE+nGwrBKhQhoJkphsVCS0T7FBslLkmQLAuBJrcyMlgCJzVd4fb49OnDj3I0v7i+5wojryLriOc/lMA4CJneTuqrXHIEjUBEkhNyl0Wbhdfe2ypUbZCvWUyBqiPAcAQQg1BbJ+XI+C9AJm0+iEuFMVw17zshGEI1sJLFGUE1BLCkUbmKwpgZdUEgyvqobTWhZtAocFrBG8NfV55+AU44kdLA960wmXMUtliXQag0YoVAMQRQNBJa303cRiqKbS4ak2fQOMKkzUJjqyqmpbV2VZR+MMgFBSW61x20cAFIVdQSKsoS5nwVcrg96hfaurS33HQTQAKIMYwHITIMHMqSbMPA2IGroq8C7VRMICaW9kbLt9NGGS2rofFlV7SlBeojBqbLAFY2auVGP3D81uP4+uoPmuM7ftCClyYg802gJg6ZSFIaftZn6oLsKbL7ulRr9vN04QYIhS8HkQcUxEvDkp17cmWzsT5/qgVkOiBhpnYjYUhJO6n6ZRFT6kdH/YtJEqjKqFsQCzAXGUAFCzrojUshILDADOXAhEoLVBMau7AIJS0e0T27Evow+1DxLj1tXtWTXJTJ5lGQBf+wZ/JFJl5rTJE4FiTEA8CEypGKD300kESCRmecbEvvaJWipGiRLZMAEhzj1oxIbTAaqyO2t7oN9rTLTr7Xq73q63vzbNauNGaxxYABZBxkUUbtF/hQUgsTnrL8GidC772l9aZDOZ3nqNR2eODwJ75d/C922RNwWlhK1Wld9VzYla1kulPaDX3hCIOYqHNnBydxCLLRUApr/sJr/ZGOfRi9qOWNt87wTgtTI//ZhDc3NDoim3u2ttzAPxm+DBBmxrAlYAknnWtib9ppmmPVhq6oXmHs6GdbJNlAHmbHUvw2X/kltdGPee9pefujtH2j6RXc8t9iyoPfkdaUYwHzEtnANuWJR0ntbHxKlbY5LSoQBEVVIOlJLNOGeexRqAL2eZoe2tEfVp30r/0uXJLSu9TLGxPvuWN9wDvwPgTz7x8P1/+833DLK3/8Dbtur637zv46/6wpnLZ69MfDSPhwOuAHD49lvefPzm247e8plPfvQ1r7tbhzuuSxfPrmfj/rE7jl7eyAB87eNPPvPU+te+evZ73/WaK89cWSu6d33Pt+hkp5bq9ttuBMC53Tr5/CBbvVy6z33sy2+4b23psLE8+q7veNVsY+tDH/0agHcqrQ5G8bYjRO75P/3GFz598u6b9sssfu4LX/2f/sF311cnAH7nNz5oWZc6+f/1D//B9733PXfed+/TzzyvEe/83u9Ye/DozpfPAIDTL3/x0uH+7A13HS19+NqnH/u13/7MxdPbzlInB4CtUo8VQ84OfuT9nzh1kZYy/tbXHP6xH3iwv2qGh4cxKoAO5xunTg1vHHZfccdoaE29Y6urjupyZ+okqy+MAYwvjHqdlc89cv6drzry2
jtWZXRyYOOlMxcqz0tL3ZfOTwC87tvvefDO4tEnv/Zd3/GWTzz06eeuXB30zPb2zsHeyo2HDgHY2NrhODGxWr+6YQrS8c4pPy1y+8yz6yfPXukUGYCDa687stwzSyiDRmgZ8eo7Dr20Pnnu9Ma+5ZWtMpWPt0+dnXzl0ade/+qDS8PlfSv5089c2p7UW2Mh02lih0W6nayqfYcNszHWOKezyrtOJ0VHwi3VQWZlGHSLN967fGipyKxGUc4y1ZhYqDTGwEyC2sugYHhvrJXgE19eSs8scgvLYNPU3ea9r04KZjCw7ELpo5IqZlXYGY1tzjAtfy9BuYluIBAxWe5FV2dDftWdR6zNARgy0e7TnbOljFy/f+ggrd200jmyajM1sxFfvQxAggTETocDiTX9KxWfPHGyrGcP3NrrHewv9yMAA6G6FhdOXhi9+tDtF7eeO37TIIxKt9IVZgAcYqxCVuSU5arCMYrEwNzvd6GqdQkghAANJs/IRIgSE1mrGkEWQOYMgF7HLvVdngdAQBk0zg2ha/cUYobxQWBKV5AhiqqDgysAisNDqAWc7Mz0/LrhGCVYRGvjZgiS7FzL3HEf++ijd9579+X17fLE2ZWeubI1mnlZ7ubiHIBTz5+fTDApsP7s5dceLoy1g4EzGUnmotJKzwIoQ6GaHzqQqQlLq2ujcjtY9VAyVmAB+EDWGGu4jsHH6JhyS4Xj0tfGEVkBUFdxaWlY0covfvjPnz69cfPBwda4dGwZtj/IRloBePur7/3Dzz1Wl9qzCiYjqkCsRJkiEEkAOKN5poB6wWrmrLGWXV3NlDTZXbWavDsETO1j7T0zpeByJhgmX4fd2QXAiT4uCAlUY1SwTYtTSL2Gly6cr8SrcQIbVAjKxAbROQPA2ix6FYKgSb5WkYjA0ER+6gygOitFt65EkclEtkfPx+gkJoiAgMRURiIqQWy2mNvRiMz0HtS+toY7nU6n1y2yzGVOiKDip2VMt2KiMcFHVQkNSACaW9dtt41+IymXNYp44QWdQSX6RHWWUlkTAZmqaOQFOPIal1srLrW94KKeQ4ASsaYUh10mawqNI63tsFV1rtkp9nSm2kKfTZvzLrelptHWIldFs4383scevvuoy6hcX79aiu/3M1/VY9Gs9kv9DoDaB7Koowi7Tm948uL0X37wM5cnobfcrUJDqluGKoMVpwiRi9jPs5ixjMf/8ncfCty544YDADSzXhVwmXG+BNfekfR7VYxXaWkALQEQmdiLVHjab41eBvVUWCWKSirX3su4k2Uc6+CTasXbo/rdb7/ngZtXTD2CCQBqT4aTmwNEDDIp9RiSg6tEHRK8dMhSZwByQsFrJ4ciYrI9ieV0Z1qnY8bjyayyPsZZFaaj2bSqZ97XIST+hHo88l68D1HUMheZ7eYuz+xm3csy470AGG9PtrfHJsbJ+s773/fBxAL00taZKCiUHv76OQCDTm77fdk3IChDHatjWFIDJUjf1gCY2BhiY41hk+iuiQaoauOWawCos6kxpr86/PLJy5966Ouj0y+sOlvN9PRXT9SVrvaXAPSXlsj6KnNQgahRZSiLQqEiiSoUjSdA0j+FiqgWCnGISigB1HFHtTvsrU0rX9de+yskPK1C8DaOCYDNM6k8YkSIJpHScwvRATHU8/VLSVNOfC+AEifonNukKxUrmo+ndlIZzrJaQi0Igk4WiGoAWeZATpmJc3AGJUCgjkCkfr6ZtdYIo/FzEMCGXJ2GJHF77GO2E2uU5SQzxjGH4OsYoEAUAIWxNu/GuppNp4XDzUcOrg56WWYyBiOi8Yo3pbolwhkunPNBm/LhjaZPTPM4OkrQXwvx7QYNtqpum6/dwng6NzNItc1mW7BpqD052RyNCj3nb0x2z8IJ7ffamiHa/lRK4ehtUGUTa5E8Jc2wd3tqPjRWRrsH7TpYdnX85jxqL89tFz7EVC+gClp66XaHSllMCd5tKLtIZFKX55OyZmJjuEF+E8FIctMmAscmiFgJAlFVdaSKJrBYUnUBYxJS6UO0IQJEbKjyAGLUSkMQ4l5/uLJqrR2PJ7NpKSEymxBiszhViVLuhTaRMATXvqIAiJkskVIIkkoD5EXW7RaqEBFrLYAYpK5qTRUPDLMxkKhQkdCYNolA/y+zzv4KRur1dr1db9fbf23NtmBhgoTmcvqv1L7ZcS0w2DKXzIPcFg7R3e++Ce6lC778OTa324mmKjhQbVMe5ggjt0TMyRACQUlYdy+VDJqXj3gXaN29yrVH7Zb8WXBD0d47W7AE5ulW805bA2QO9e6557mzdBGDpfkUoNERmoSW5AIktD7VFo5InRGI0fgR5/rMHkRyHnOorZqgi7hf60NdmIqXm//XftF2vufHN/nL4jgWgNImgPSbOfh241bQ0untamzpAAUJGiO3NbEI0qbvGaOAYaNGJQQoiElVHHNZ1XVZAWDSo4f6uTFnz1+djnbuf+Wxq2cv/ZNf/j8/8OE//fxDf3z3/XcAOHbI3HzfoRNfPPdH/+4Df+/v/OSvfeCTv/Sr71/uUIdQxf4TL5wB8LP/+r9/14PF84+du+vO/ePxxhvffJyGsdfvz7z1lJ986jSAL3zuBddxN90wfObkmW/8ixfvu3Pwnp98sJKtpX390y89CeDASm46OH3h2Vjvf+C+2w4eLc69+NjmOJ54lk+duHjv298E4Nd+5VM/8gPHb7O9zVMvPPrwU3fcfOD0pY3nfuvTt7/x9je967affO99AD7/seWT57aN1E9f1G/d3vo/fuUf/+g7fmytY15z9yu2Hz05vnwSwNqxN5by4qrli3V15OjRX/+9Ry5vjO9/4O6Xtkbnnz0HoDDmzIUz//ZDf7QzM0D9wLce/4mff8eRe0dBSjEzLT2A8088O+pu9m9erodUlgd0uvXSEyfufNVNncPD7TNbo8kWAKmmtkOnr2x+55teffbS6VuO2X039N6ElenE3nHLDVkHADa213/4R9+18dI3/v1v/T734oGV4YHlpROzqcZw9tJ5AJujmff1wZUDoyDrO1PZLqs46y71SqNXPYwPADyvSqyn07o37EbjVpZ6RV4cO1x86fGrjz75zCNPXgJww9H67uN3Xr0yvuf4YZX60tX6o488+/S5ya1H91m3XGQFAKtVN8spStbfZxwTKcUxOy6KfpYLAIk8nZYrw/x7XncThBHK0Wg9d5LnXUhN3FDvRUCEyoA+O6IazBoJRBolQQDlrJr5oNJr4Mi9/pNmN1QfxBBTFOn3ilfef8ugZ5e6M1CzDzaR1Y2PgkEmRqrrmrOs2NelQACqnZl1rFoFkUzQdd3ZlNaf2li/sl1V42971SqAzF3kyodIJNEa66fh3/72Q/0iv/fvv7NrxBgCIFW8fP7iviMibFxnIMSIAhFhk0JJRpsliRpnM3KcQWsCOx/rrVlFDIkRQLdjfF06Kcm4ZH+CYhQltUyckGXp0XDQ6XZKihW0BVTo5bsLQSKMJfHGIesYkIJNsW8JwGPPnOq7Xh2dHU03Nqc3FM5krJFI4s6ozAIBcAqQ3Vgv7+Z+NR4vDTqr
A3R7RUZkmFL5iNw5LQrq9XcUS/3uvgOrGxd3trd2rKNQjVNhknHIrozqm7tgyybvMe0wQh16G5NqPB0BOHwg/+Izp26670ARwbFkUsNKiEqofBTkAHqDoejw//7Q57/+7Ll9q92KtFv0QqzLOmyPy9IzgE6+PJ75KKh9Ek1MEpYO7uvmmbOcyqT4UMUQQgg2hFiGaVVnXJKKzbM6AkBmQNAYSZTq4BvRIMIAAy7L2iXIursJk4JIPCwXWV5kGYCy2vFRrly5Yo2Z1pTZjjOlDyH4CIKIBVD7AEFriFObxZ2wcwsgwkRfVbUIUBR5r29tUdTbE8PGWpugOWsNkRWFdU7a+mlpRSzIYmJmEa1rP5LRjB0bDT6IRB8bS7usQwQRGSS2MCZpeScWxBMl098gaTTMliEi0pZgYGc0QlQhAiVWYkPMVq1cQ+7VSLEmIHoOmy6KOwJUAIK0HMrUqgPc5pTvXfEpZ3gxybyRkikISWFax/LeLBaAk4dVG8bVRogyAfjGqUsf/OTX7jiQn7946c4jwyPL3b4tiozZ2TI05n3tPTsOZP7gkef/8PPPTqqY5aYumVhTLYteVgSNQaFBhWjJ2qtl1S0GeTV934f/6JbDKwDKQEZ8lvdJQ858dWuWd/K4OQkmUjVy1gMgZUssta9Fs0yFvTEGosracRHArJ6JD8F7HwIRjLVVkN7KwC0b1J6MB1DErHkETEQIClURiPdbzvVyrgBkOgvUi9WOq0ec9zKaQRCqWGQ5ZzxcTVUByZheiOnZCaHb0PViTmrUg6hGkRhFGipGAUZabJehyHMAncDjqEt9o4ZCWYghQ7RTTyU4X/ttzwCulOMz57f15KMqAokUhWKkGCECkYkXAAIEwIt6wIsGVQGGhSlns9lsAsBdrWfVo6Pzmx3WX/xff6GMoc/81NdP/9njv9QhPPzoSwAunB+sHna3vnaHiKy1We7yIufMkrVEiJltXgNSECnpnF2yoCChE6pgCw9geFhR2uULfR/tpXG54zuTsloytDUeFkfuAzBFR3qrShfBwiyU6lZSk+muEtrXhFJOOjWkH0quoxGs3NR0igHGmKwoha9OJzNLZLoUJYZqWnthB6AGmagUlUQ0RoIHpPFLz/H4BoEXNJ7phHkaBpm0zzARiTGWmXsZMTMTk1VIyCyLrwD4ssRkVPl60OscObi2b9izEIlRfCSrqSoREyPlPRFxqt+TvucGakyOEKbmQwsFzkMJr33neb6XNObIrmeOAWmY9nU3mZp2y9U0pFVt6MPcQtndoubmRhvxON9dmgj5XWyzxRJpHhTQbkkNjEm7qKvO643pnMWitWWojapsdPWo4pwDAOWoYDZKHJWMzTwoZU4wN6ClRgBCxvZ7eRtkqu28tKGarCl6WUXQOmFEFBSZUqgiAIgiagw+iCIrOrBgYFaVCZJf6nYis484e+bKuXCpDhWDY1RrLIETHGkSSki0ZyoAERVo9HWac2MMlOraK8S5TESqutYoMYS5NaOQKKIwMURmUhVjUhw8AQi7CXkpIn8u9/Ykvl1v19v1dr39NWoWaALtduPQdhGilx/fiJA93ywcT3Nq5L36r7b/LzrrtYXZ2tMbaIxamKyVkdpeWpufLQrZiNkFxG5XNDIhFc/bA2Xt5nM1qQx70b5FSbswBS3zR0Ntj2viGXZPW7hnnaN6jRFHSaC30ngh3wILwCPm/sTdtIkFtFHnlgwntuQGr1icyMbVypTYo7SF6FpNo535xVttHK7/RSCa5rO4e4svv4GXPdWXfdjTZ6M7taYa0vPl5sPChebay0LMx57w2ZiC0VpzTBWxHUVmOoBaa401SQlmwzFGHzwMG5sByNiv9jrf+YbXPvb081/80tPl5e07bj/Utzt33Xtg+6U154YA/uZPvOfcN75+yw03PvqZTz55+uwXHvnzTkau5zYn/s7Biq6NAFy4uCGzQTdeeOCBGycb5/PZ5rTXu+G+e5598kXNBlUcAghTznr5qY0rh1ddt9c7dmS13ri8MQq61O/xDMCZJy8Rdcm6y+e3Lr149eZ337fa63b2LW8cjDrpPXjPGoCLTx+fch5H9VOfefbMme2suzKqws7F0X2dWyjXfm8K4LY7733qxUd6fTNgeviTn3nTm75tsFxo6U6eeGFy/sLdbzgMAF19zZv246Wdc+PZk198Ybw9HY/D2c3NWSVNOqQzl6+Ehz72aXi86oEbf/7nf3jp2BbTlqvl/MlNhxzAiSd2brx9rXNwbevCpsu8r8qDNw7E6nQ62dm8Kn4C4KabB7R26423mp04PXxstaYdY+jYrbeMr4ZTE8q7HQBnnntu/dSBB15/4/KR43/42U9fevLM+sGrR/YfiFFOnjkH4ObD+7u9TkUymU5Xe4MrMU4qv5zvYxd3Zpf7TgE8e/L5A3cfLgoTNY53yljLyfM7vW73jpsP3nX7K77x0scBPPKVEzuTzVffc+fqwSHBrF+5+IpbDr3x/rVbbu5X3qfiO9OSL131k5mOzl21rMwynkzKOhX0kz8AACAASURBVBJdrUoCcG5jRAxnzNF91ljuuCLLQ+7szngy7OdpxRtSUa0FVS2acrUafZqjjwgBQD0LVR1SZeRFZKUlAyIAxIbAxjmXmcFKv3vTflNv+aubxEJtdGS7wRCRBVs2bCkGCWeePkm1BVCWaopw/KZudWmryFZrmHPnLv1//+ELo83xfffd8o5vfRCAXqUi72tAHG/W6n3uTCQjRMxgFQgAAzaMvEPGSIz1UsdJXQntUhFPS59ZU81KI2MZdrzCRa8+kMiBtd7+Q6sApNwmDSoepGQcgbyvYSyDQfA+ABiNpy6jmGnGULLQ0O4GezaIRuSwYQ5AnJSz6TSaLo3qCsBsu8oLt1PHfFJFr7ljEAgCZzJnKJUxNVZcrtZOJI/cXzro63rr0LC7nLMvYxACMBOeBdvpLBHYsAEbl2dpK7aiT78wBnDPW2VrPN7WeqU3qKNw9IWxJHphe9YxDOA2R+I0aO0K69hqqC2jLiuXFXWwjA4A2P2/+8kXvnLi3MH9ncwZAU1LL4jkeMhOZAbgU3/x9bKedAvUJIYQiUBxcmV9kmjtALTFIZLRXWTOawxRLWPgckMCgFXOnDtT+gBQ7T0AIkrlU2IUMrvhdNTsvc0CU4X3MTMmBgDYmWjmNPpAjDqEpS6zEoGcM8Y5azMAPkSTmWSots5ETaR1FAGg6PR6xZI1xuQuEs0qqaX2IUQRo9qMSkSj1yZfYS5CgAU6NoLmLg/Bk8LXtTriFM/EBiIJOrJZ5owVbcrXa4y7igV2X70Ub2TJRngQmKyYGNQn5jJjLEVNkZGGmGDm/jDaoyS1wEJjUO5ikdcIR+dYWnUBUN4V97Inq6OFhlNvi50olOdxWQtm8TUnGxVDTfgVgaRNWAGwstavXediLfsOrZ0dz/YtW5Ax3c6kpqqKAIZF14e68vVnvnHhIw8/AwW6haGsmlZBSU0iPw2WLROKpcF4PKOuMYaojjYv8lrPXh4DOLC6MpmM66rss1/qF+fPXH3x5IVjK1YrDwalIlQ
+snExZoXJ1M9gvWETkdmMxc8AZL6Cau1DUWRl8CHWIRYexF2rhn1gAC5LeGHKzW3wFYb23KFYJ1gPlBXGWFGxNgdcFYeZZdTeOq4jigTM1WUUlpa6k1RJGlApeZXIegJB1GiiXG30kIJHa2rBAYA1eccHJzNmlv7AaM1k1jBjsUJWpAPAZp1IHYNCJaqoxogoKhFRJEqqzxNVo2odtY7ig0bVqKBooD7V3c1i8OpMbijro/RU8MWdMQUjtS9jNd2ZAqjq+uy52ejyN9L7Q4bIMLiBVyNn7fvFIFKwglRJVW3mV1xWsTUcAUy2J9VUzl8xWxQ/+cGPPPGlJynAsPvSf3ron5Y1gAtfPPH61xw/PFiifEWLZTWOQTA2cTa5xGuc9gUVaEQMiBEiIUYFsSoZAyAq4Nh0bLT41OcevvFVr5yOVV1nhumF7TqJwcwFZQIDDGJRBNIgFBmITVFyVVUS1YYfNu3/gDDACUbsZjzs0srSyqBTOIOy8tOqLrLMGEKsJ1uXAUwq34HcfGT/6lIvz4zhCFWQUNoS5raDkiFiNgSug1rDIN1FkFKgI80X5jwNec/mpou/7Oq/TcWr+QFpD0ygpKq0+kMTAQrZUwWHaNd421XXqd0JCHNEUlN4CTXbCrAHbpsji3M4M80otaGOu8Dmwg67eEbqPXUsbTwBMVhJiRQUoqpGYThriUihKVrWGENqCGnK0TB0NAhvo0VxgiNbQy8ZQCIaQkRjZiV7qskwUIWPapmCD7PpDOwAuBwaVYE8Y2esRh+jEIEMgVM6BxKNQgpuJVFwerikho1jtgxARK01QCKFBBGbVPTGWed2q58VRdYb9EOQ8fY4xODLCqREnJJpTAR0oaJ5ms8mi/+b21nX2/V2vV1v/5W3ZgdsPTp74UaaC7sFG3CParvnMy0AfnNRfM31GolGe3q69qAFLHL+K0gbza/BHZM9KuBroonmJ2mi1cFcH9e5RtpetiVq1Gvv7GUfiHdlaYue7d7QLgKnaJO+ZRc+1QanTew7hKa4dou6zq80j5Dcc/fzqWvZi9u/zYnmmiBDbm9kr33yMvnUBn00GeJpkNfGT+w9oxnIIk3VNTPWTMA1IhIArk093fPnhaFq68+8xiu8+GsL8CbmyD3p8zqP32kXq8wjJYMEjWqMYWKAmGGNEYnigye4zAAoR2Oo7+T0ipv2y3h66fxI7NLBG/e/547B6trww+/7AwCrqwdeePLFV/7g6z5b5l/44plTz76UG+crU9h46ezJfYNlAA///h981wM/aXLV0fpdd/dkfHW8cYFufMNo26uai1MC8JVnLtyfH5YqPPPceQbuvaNz13j1yUev6F9sv+3NxwF0uzaOzGzr8rEbb+aN7a0zL64e7Z946nTHrb3te+8ZX3oWwDTUdrDq+kvPnR6V2/X5jQtrB5a7uT178iLb4X/42MMAjh88+N/+1N96329+sN/Nz6yXn/3M5/Os9/y5c7MwWzl2w/qlSwBWLspNS1M51HnPO1/zC//qU+PxbHV/9/S5i+OpxlQNEOg7o5WpUb37h9544Hi9fvHxbHNffWXr4tObd9+2AuDW/cXy2tBvjLeevGqy9eLA8trxm6LJQ4i1Hd/8muMALr5wfvb4OQtdXbMHDmaxDBfObE1G9LkvnTl56fGLFyYAXnP/HflwRS6/9Lq7bv/i1/qPvDR5TXd5MFwzlq5u7gA4sraCoghRdi6PL1+9PK1x67E7yA0urH/dR8AIgEsX1i/f1HcSnDdnLu0899Lm+k713rc/+PpX37UxzVaX1wDsX9o8f/byXbcd2Jx1JjpaHRQ37+se2H/k1KVzv/+xP6vHHsCgI6QUhVb27evkPOiZpeXB4YIkhNWlHoCl5aVJtdnJusR5FcWEkYlTUBx2o0Rl0wAoHlzXsaolRLUSVQxEjGWVJkFybWXQi553fQa7hLHY3WLYGlPvTJwzwdcbLzyXZ3Fy5dLRIwda44ZUUyIPNdmI5DMrUNra9jplACGyj1FDNIGnJQdX+LzbYWtdt5PZTT8D4EzRC1yHwJQR2auj2XR7mmfoFqaqtlN4RpYxGVJWY6LGcthxk1nFIOe9hghgbf+gv88sdYqhZuycFsXWuZ3Rdum6var247oGYGaVcxmyrtRliMEVOWkgZgSAKUYBMCt9WVaAU+K5IwkL+++8iQo3W6j4EImp18ttkTXDzQpSk3ch3Ty3FEUsVCWurfYuj0YAwCaoiYEub5bTqfAAXsQ6W3Ssy62qBXDhwg7V0a3a9fWZ3hZirGahFoPa5Oc2dhwIgK+qe285sDyLY8/dwpKVca0rsyvf9po7VvoDAL2e/NR/9+7J1782CiNyHWd5FPxqz21NxbhOb3gIwENfWv/Ax59Y62YgVFXpOGdrRKFkpgLvawCPPfOCcxIFbIgtZUIJ7VZVbvw5UGVRa9nk1plMyMHXFdQUeVfKHQBR4sWNC13TUaTyNcrgkOL9glhpy9c0rq9k1BFAJuPS11AiNQAccdcVhnVUVvvWBjGgnOzkLiuyLMSYchsdmcTXiOQnkjl1iSbquNFkZzqKElSNRCXvVYnYGiKw4fY9YCEhYvCcMbl5W4gwXyKOrSJojKGuNaq1LAJjG048oClQTIooEURIzI/UAP/tOmvuN6COoWYilxmWlC9OAHJnorJyiFFEogJsQJZZjCC0nczFdSObFqXzHsnYIMeiIFFBYgjThBqTtW5PPzQXx7vpj4s4hraZ4PPe2x9zuLOZsUbja6vebW1XX33iqZz8SmHvO36kDOELp9YjuXxpqZd3AJQXtlTCEy+e//ITZzyZgjsUwgQ+6+SxDKmSVd9lVYy1kz53NmTaU2M05J3eOMZQ2ywrAFDNNkqeFSGMLu3ACR47tX70wBFrSmJTeg+gsFSSscaKUOwMyY/JezImQlINHyPh0NrS0y9cjpwxsWFMR9NhJ4/Rk9TCBoC2ifMMBsg0qoqghnP1LFXFrYOliTOmNL0OZjlAJi+GRfCesxyGARhnNSDLYjOPrb7XPkyoJNK+RjNU5kgkxDwt86X9qdIdBS9l2en2ImXwJGY5eDFMrEwqqAUA2bqclX0bQCBW2CZYNq1ex80AkNjE06skAKj00um6egoALrcTJgp1EWfGTGPU4wdlpr2eDZXlbn4UwHjW7VImdU1p7YlCI6JojBqlVg9AhGJE5VHVWtUaAmLUmgrZ3AH74CMAX/GoCs7mP/3et4636tNXd+769gf99PJoNl5/6lEArtCHP/bZx53ND20U+wadXreX51mew1gwu8EakPLP2RhnTWaJDJShnazWCANNBXyiJwM78f1pHS5ubF/8zJfGlza2Nrel9J956ItnXjoDYF9mH1xZ42XD/Q6vLKvOCB5gBshX7YumUFFpfDUpuSbWKi1gaQ2cM91Op98bKFVkrVfNnIP4ne2t8uplAEswh285tNwtupmBRI2RmNia1s5oXlNDZNgw8a6toosGSwpoJOzGbjbvMbXaubao3zXtGguiPZxo15ybf9ks0vnhBOw9uVnUezHDhY7bSilzS4GA3US39jRdOL45SF
VKzsPLSbHFqNeT+Nm8+hCv7EE4OCbFps0/dLT373v3mOvnD31yV94B5rd4ZVn/TlflnbX264HkEh76dGTrh+fudg9eOzQxsPL/9efPnLvve/mJG0Qra1lAF566bFf/NlPPf3y01dePpE7tDpyZOeuOMF8QmUjAVCU2fX7Zoe+WNnMW0l8yy23PvPkd54/vW6tP7xn52buAbQadthfevx8CciS9zftnj24e28vK08vn957XecAzwAg57/09afefNPcGw/EA9dD1kBn0BPMNK1pu0aTAaxuxO32AtNQEHGzgzgyXnyW5Ubz9S6A2SR2pUKlOd3szM5vXroKKWGj3KkRNdwEoH4Qe0qlLDnXslmaadZNk04VwswJOw9gkIvzmpemnRITkVEAooLgpAQsjFqMti83NILy7KtLvfvu3lesLrnu0LYYgJL4uNk9taYXe2WJjaGLGvMXz2YPPvnkYFPXyyGANHnlTXfumJlpXrhQrhfL62uDd9zxhujo9Sub/eaOQ1/8+lcAZN4rYAhp3CKCV09xVAAkfP7K+czFAOLI3LRz9tTKi9JeMo11HfaiOMqz0vqhjT0ASjeubma7JdZSkE5RxJwwJZpCyDZMkJfGOauFKuCsJdWQeiljZwyh2YjCGhHFBqooKY6bEKmWurDUjxgRCo4BWIgRqJDEsTQ8m7Dombg78GkisnkxtUzGUNzxUpalTDeipaIEMNuwzseb/dLYpNN0lzY3Z1q2bWhns2lEW9QCcOfRg2kyZ3fFR951m3RXjEibG5tDRbmxo3MIKQG4sr7WK/NhvtQxyiXFbDZoGMvC2ubl3BUAkhndKIrd3P7m0y9dvHjGx4Ydur01Y8hwJdeCaERwVDHERbVwHoAxlo2NjQHQ7jSVSMkIDNgSR2xsng2Losiyfpb1AZRFAQVDhanINSRbywsPJvhQgwIUc5sIHi0LENhwK7KGLSmcrZbhOI5I4coSXqAaGZuXGbyKE+dKAKXoKKaURKb0PrI8N2Xj2Kj6QLJuNWIbRylps5OUZHbtmD19bsln7tjNB9549AiAuSlpzRlY7TSi1Ci8j+NG2uzV+2wF71RJD6pHX0Jy9QoPiFcvCJk3pVAlKFgpHt2ZQsuk8vCDuhoCCFNhOGQXZnC9CQp7ZXgDYu9FQCJcxwUREECGghJssYAq4EZRVm58BU/yyNmMJih0tcvtAVXRlNQBohKpEqtjrtj90Bi28t7DaJAENWjITwLAMkMYolHEgDpXRkbKkp23UWxDis/SOXamGXVKWCZb5r2V4cUmmgw6c+YsNxMAeUZZmTnoIMuUiDkigpAvR8QmpVDRocKqCV5kNATBbmQECABDXxpVLw7WKrRUxEmDPAMQ5sim0IIJLcNaFs0o6qoWNjYmSowBMCj9P/ncg5/90mM/+bHv/eQH7kjcarl2sZkOSV1eDgBQ3Eg55qKnBsZwwUWipRTkgIjyyERAEDITTALJJIpEIi568JsbFh4cBaMkI2FFuWbRhoty8nGzoWXZL3wzpdbCFACzuDfXmKmgyDAziExsQFARYyMApYJNsBgrYxhQYph8vT3f/sg/eDeA/NmXiSPfattI1RcxW1AJLkkJ3gY+J5MIubI9QzNtaZqEMgOIkVIh1srYgp1sChVTbthG/yoPAAy6JxqLuyTvxxZeJWJWMMAgQ1zjMvCiahir4jt7djVmpwdOYmPSxEKk3AJxjSEoAFEc55I39k19//ffCuCtu+Pp+Z3UaGpeupCzUeC9eq/eIawzKromUewKHW7mmxvnXnxldt/M1FwD5VBCFI4qTcsYL2NyaLLvGdbIdwD81ZdffuSLz69v9tKmEakSgiMQiStADEowURq14shY8bj+wMI/+Om3R7yBbICAsKpAggkpKqwqKmJMYVKroCZHADqJ+lgJYpNIXBEZAyUCs4aSQgLAiY+pZkaGy2s1SIZGMXpXvReVI1HRPE3IXAuoesuknhkgVqpxtfBdzTgcj7zZCoX5+hsdgZ3VT6vRVKoscFRukQCwMCM0O8T9QzksBVRN5VFVeiOpnr1UUJkqQUUroVEVnhphnFyToolIlMNAjbQelXsIAGATKPccvKtwcSJVCklu4DRUnWRTZQFVMtaLFxLDFFY/BnlR7xVQa41CBWRCeU8VaxhA6TUyNV9VA0AMgMQgzLHwB1C5lYa5cEVvY6MsChGJvQOQba7FUdpO4w2UIiDlXPOEUhFl2IoYYWCMFe9FlUhFfRgYV3oAIUAYru6zkoiYNNTuq/aJuiZheJrqlIiK0oOZmUsvo4HORQxRnNLKpcu//c//RdpIrbUmJi0Qp1FFsXydJWK7bbfttt3+8212HI5BxaSvAkdUhXcmDq6WObomqjQOgNUfTICH4yNGfx2HEjWgZ5iIiE2cpY6ujcDBEclikqI38dOAv6GO1r32nDTx3y1BxbG3s+XIsZ8R/J+aewiM5Q2jvo4Yj8GDHH8+IjiNbrkyoXQkht7adKJXYau69imMuzKhcsZ4nEaOzqSCApigkcKEjOBcGTCB1TnJ8gCoyrAplXkTWB7VGULSRkYooyhc1yw0VenSKpE5AjgbbnSEF9fA5Cj+q1WviRm8dTOttCChGx5+5JfWt1n1zLlSxBPDGDCp98EwCH21VGMYgIpX75yopjZ2zgVKQjONn3v51Gf/+qtZ3n3mxJn9O+zZc8Ud72y++MSK+pVj+6YAfPrH3vNXD7x44sTGkRvveO5bf16sF/v3tZYvDzz7l9fLl567AOC+j/29rLhy/MTjdy3ePLU4hcHw7R+74bmvnPnAR9//1S986fY7jwBozsSGsntvu37Q3yRgEM8kvBpPxdH+G088cBLA8Qtnv2e/OXzHwbn9Cz5eU9Xu6npGjbd88M2nnnz6ya99F8ALZ1Z/6JN3P/DVF/rdXjacutotLi2tXNksTp1dPnP8uf3vaAOw67kpu0Njn/7Gc4s3LOy+07ztB+85/PtHHnnkZNtYiQDgM3/yFwd3L6yfefHHP/rBT3zsh/7J7/yfZzI0k0S9IyoBHDm2OH10t1+/WvRtsn9HFqVtDAbx3OBqtpv87p0RgGHaau1O918/8+qVlSP33LRw181Z/+ow62Pv4T3XLTqXAIjIzk8fu3y1e+utt9y2d/H5l04WQrt2po89s+Q9vvKlvwBw57FDexejh59a73YH7ZgHw2y1N7xlcdfy+pq4IYCd0+ladwAyM83WdKuRJFGudO/NB5c3N3p5HscEYJhFKwNz602HT547FRltpM2VK5eHeb6xXtqI33rrPgD79x84d3HF8lBt1Jlq55lLDLusmG5P+WywZ/cMgCzrK0fNtilyf/LM0u5Ds1OdyCZRGieUA3cHZ6YAACAASURBVIDPS8mKrJD5RmOw0Z+am0c54DiOoKyxeA8Asa4PvfElQLMQ9pmxCcCNRlwUfW40ART9fKWfT+1Q7z2zAQjGGIX38KE4BUDRAGLjzh50rzz6zLMvPnvlc/m37rp1ii2FCxGTijhXFsNi0POwJimGq8v9oejlje7RY/sBvOkNBy9fPN3akZoZHl70JdsbD+2B8u
wgO3Mlu3ylD8BYcgonzvm+sdYYjq2N45hMdGm1uyudAtBqRIallZBlYiBKY3GIksjAavC/xDRMxCUhabpBzztv08g7V4qkcJMLetBwhRAMoOPVdGIJCI7OeNcZpbEdHzURLKsdjtHHLs/ybt6ZifqrzqkkMZf50FhOjOk7TSMBQAJV97437U92zS42Fg6yXDx7orTa82UKyXwGoJDhhdOno4GsP3/i3gNNGO73B57Itlsr6yuX1noAboijy1e7c87GxraYY0HESak953MfWQCrGcq+6SwePH360YTTVqcxdkVGCz0RgSJTRaEm/gEIpAaA864oS+e8d1IW5TB3haeYRSREvarV3hoYA2ZKm8xh/Q7kDQsAEgSiIhyqxYDVqfdl6XMn2oxCMJI9ZQoVUWPRSG0SYzo1cWwaadzppAA6U0mrnSQNy5YTmwDaakSL861OJ2YjSWIBJImZn5uLIsnyrN2ZwsKRz/5v3zj38oWf+KkfuPDqCwCO3ThF8bDX77Zb0GJACpXcuzpJce2+6wiODLNHRAWsoTMaerV16kxMJd361/oQCYNbSV/DLKrIR5NWwGt+O2Yf1aylyWOqgOU19s6EwaQT+1e1QRkiZib1DE5QKRhJPQLht2IQaS2YVgJx7eiSMaO64UxkjS1ybSSxiHhxQIW1ioopupy0YZqdmQV3sQvWCytXVRhAPy9LL7nzXkRhiHwIxmIic9qoN5jQZuh4oKpiSgq1bK21ZeGSKM7LohE3MpdRyEoYzgwN2IHU4EvTJFkxCHWfRItO2jw3yP/H/+PzDz7x3Q+/+60/+NY3DtfPcveSaTYAUCkcgaJGZmyqGpOWCjKGhJ0XTxmARlI/vdq2DZPHKhJDHAqOccwUC6Xqk1BkCIWw2lZkxfX7WQFgmnwcsb/modVcVqCW+9SPqXrWBDDYko0tABcbWAM4ZYiQYQazwoAIWpGqHHFJPH9gUZsNb4z3AdkzagjWkHNbJu8YCVZR4XZ60+17ARQbGy04skSGWD24Ts5INErvHiqKZwo71bHtBsU2TUkBV5SjGs5Eo8uM1ds2EmU36K01Wgrg4vpy0vSWW5aYdRhYhBQxwFTVw2GAphQRG3XkkKaL09owIjkbppG1ThTMSWgVpzdevbNeKW43AbznPUfvWWxZa3LnWE0p4pzkIbtx6QKv2YtonjmiYb/s9otG4k+9cnL/rjROIjMxZyu/o7IuiTQqXU4skRUA1kRQOCVrgCSqbceKu1nLokfVhOqo+RZbnMaPZ+Lzyru6xjmZzD1E4xRN1zhZr20j2JMq2sGIYql1EgmMvInJN5RGKf+rZbSeThNVQCsMUxHYGuObouC2cfVVnXjxNWzOCSdmtD9P9C58cq0HdM0pdHKGS/AhpOIQQ1mo6ixGqKuo8OhNByJjguuiVRKbyUiQBupMGP8RtmoV3knpPEFD1l0rcOxNTNMZFMxp0pnfnRWuLP2g2w/7jsuzYdlLTAolp2IoAmlgWWCUSYC5fiIMUhQyvjRPFFYlVDkrwyP0XnzdZyBuR1TPW1H1hRMn3ntLFcKK7bbdttt2+7vW7FgtAKC2OK6tMA1gErjDhLC5RhKrPZkAgCd/rbXsNvy6lmBX5xpZ5K/BHMe/HzEYq01/lBVljBnSKCNzfTat4b6Jc4e7vhbbG1nWo46M6JFjZ7fa68J5CaohRl1LC3RkZITTVCq3MLRaCXEIVTJpJal3cR1doPZ9RnjfaMzqDZlrcuF4qCrI8TWDFqzh6p4rfsbY5KtvfGRfjuirqgiFPiukTwmiQbgWHLOaQUmkREqV7RLC00qgKqN+JcVSgUYmUgp1g6pRGrNZ61NpdYpgrVCVkWf0fLdkrZRqnpEHCDqKp7tw0yrivVMiERBVpXUluEfGhHRhChXxIhoZDHNpJIFDIY128tTxUx7asnTibJkofuef/W7f4/3vu+G/+PsfA/Cx97357vvf8aGf+B+ee3Z15/W3a/uBnQu7iv7pQqg91N/93DcA/Lc33f7Tv/TRHX+1vHzudHPXojHS2S1d0+tvyKXLV450pwHcsH/Hjl0zcdJa7a61iM30XJStlInIXOeeD9wD4MYZ2lzeWDu7Xiy0Lp688MEf+eETV05886+/8PMffc/C/sPPnHgMwMkXLj7/2IkPffJd7cYLLz/3yu7DB06trDx39sp8p3XDTUe62QaA7mp/bvfBW+69KTaDzq4400venPv0L7z3ledeHQ5RJgmAB77x8JTqb/32L68Wa5u23TSxZScQdmhbANjVTpdPnD/74jNCdOv+uQjDK+eutOcXz58bPP/M8tycBxDP25WLzfnZ/Q+9+OSH7ntDeW697C05E3V5Z8tMN5EBeOILj//rz/zphbP5b/7ap1gTj8Z1C7tOv7Ly4MMPb3q97fBOAPff88aHnnrhyWdend41m+cuzgZ7FubnpqYtm5DPsdPkYY5S3eLsXL+XXb54iZk2s6GJ4EUbaQPAjvnps1f6F5eO52Vx9/U7BnlXMrdrYfF7r9v39cefeuHMMoB2q3P9gfnLS8uWW2SNjTeRZcb7ODKwnJc5gEYabWz0z5xbve0Nhx57YWPXxuBt9+4oSjc1tXNpdRWAlPmexnRio6LQZ19cumGxPTOTWMB75Vab+psA4H3DqStNP5eZmNNmykYgPu93M2c7zd0AEmwsX+oe3N9hm5MJasxQjGa8dhbeRZFJ4F69sPylb5+dm0pKl99wdA9st3A5gCQ2lsgyeYhRbaWx9ga3H5m+965jv/lvH7r9jYcA3Hf/Xb//O8fve/vBlbX++optJcNudzDdaj7+zAtPv3I5eGsiJMoeVHoxrogjYwNCStHKxqBFCYA4osi6+anIhGTA8Bwn/V7RIHKDEoBNO1NRojmhLE3ahGQAyFBqI0dq42rpH3kr48V8IhtatfjVi0Dwp6ugT5W0f8K9RJVNsnYrxpvQnp0z03uavfaOqOOaF3PP/ShhiCN2EbNRC4CZrMHxV5f2xkvL54dTi1k61SqtUVawlgwAl1Y3N9Zde26629M4bnCURqkbiA6d54ZPbAlA1A36ZTzV7JVl4vNhmeVqd3HJNhoWDIDiqax39dkTS+v9vDE/N8yHVbxHQiGUkTeoxdBXgFuFn9Q7QeCUM4iIDRsQkTExvJeQONMQhUxVNYMdBLWJwvkKuBINuQ1tYikycTuJY06TqJHY2BrLBqrqdW6+DSCKDQjGsjVoNaPFndPNRpRwZC03mlFnKgHQnIqjlBBBAKveeUekxjLUubJQeAA2MpQPC+OMGcRJUdrZtJXu2b23s2/H2W99A8Ce3a5p16wMc28UnsEWrGhWu8Nok1aBiCrUu+CDVm7xSBk6hoKqzY20qhm7ZaMM+1uAMUVFUJfrCZMTEnxhlQnwYGxybG1Uz2WaPA71vjau2wBAg+R96xyFAuRZk6CIBxE0MZVL7aRCZ0YCBqrd9DE3KRhIrOK9KJh5o/DKJiJIXpoIAJzoUITjlIzPBn7X7t1rffPAUy81Y6wsrYX5KVHkBaJGwESsxGFSmrFBAiAU66tw4pDsT0fGVhj1GmMh4
rIsYhs55+I0lnLE+RIloTD1AamV/GUxZGLhCACl5IjShqVi+OCzZ7757JmHnrzrZ3/8XYd3z5jhJQCgYX84TFNn48SVpXowsbVcQJk05WuehtZgDYhUSBx75TLctVPLFBmbCBQckYcImWhsMzmC0aJ63DzqfrDvamCS6uIl9XQgQi4lUyisDTKhuJCSqKmgWAjYELOx4TETi2HSZurTpCQiT6xGoSIoCdaMcWEdmasAgNJ5SqJDd9wEYOmhx72UJmhoDAgOFToSVuygF/GiwgvzncUdptkoXAkYURS+jNLEjAok11caI1Js4D0bplYEoLnYKDFIIKLCGtLsah2+r6aDAkbywqtaa6amqJWwNWXhUcGeFZkg1OQBVaYlk6MogVopcwB758rG9GonbRUlxY1iNOsBC4qqF4HgZMZQzByB2ImLp5vOFSJlqHxYbSaoEiOGSkQCE8UWXPayYZicaRJH7bR0Tli9SFCOh7zE6g3CSAVDX2s3papNUq8s9d3xFneCJukFtSH+n8IbJ96512s6+m4SaaxDK6Mv6jwuVD0aGmGVNR45eXD1Go9ImqjGa4RMV06bji5X2/H19KhJJnWCxoklClWxVA1G+fi6o7u4dnUdrXCi1YyUajGHr8r2VcaAasidv0X4rICIaIWF8rgWjo55hIoqZS8AhXIUpe22cUk+6KvLAVhRURdTBKslyqnpqeuv2+PZKHFWlJGxAMpCLy1d7vcHZekGw6x0znkvKuxZIKwMwGhEBGJmhpOAdlcDrBMjDCK2cVDchcqcRGCusl9KjiBPZGYihYiqkKoXDRW6AXhst+223bbb36Vm6bXg46TFTuMPJrKiVNbWpAE+8aNJD3J8pvG3IxIhKcLuMcGODB9da9pPAHQTtt5oq6vr6oy3Q5rcZWni6AmLefISE2ZTjdBVJ6qxzIo5GtyA+loKoCIDgqnqUqiHSbQFla3+XyoLtTqhjiy8LUHKkeVH41u5ZohrS0RxzdNDDTiOOqPjT1BbS5VuohrQkZcT4MgqlVHY3wVQqmrAjcNuBFAVaQ83zwRhAqTWZRA8Qu5xVBjsSOw14kiGLFjVXlzjpLoluDeeLCAETqewIqQAkJrvCIVwFNX3FnpBzMYaC8CDRYS5yj9JqipeRfI8SyLTjA2APC+TGLZpdk/Fq2uD2U60a/fOnTtmT75y4T988eXjD/wzADffe8cnf+FTn/jkx37qx37lX/3e//I9H3zPX/7Zl3U6bmtKzWz3bATgn/7T3+p2r/zDX33/2rc+G58pNtXle1c+/Om3/vqn//JSDydOnQdw+ODcylpPbLZycTDdniZRG8eN+c7QRMlcC8Due47sXs8X15I/+g/PJ829t77nTb5xeOa6qW8/8d0733jo7h9/JwA7/fhmv9/cZZY2rpw7eenQ9Xu9wf75dhxz3uvunEkBuDI6dyr75tNPUr7y6V/5yOryWfvM3777/W83n/mvfu7T/2uaEYCBNQN1D53a+Ic/9ZEvfOHhVzdK2zK5d20yjZgAfOvJ72bFxVtv0NvvOnruO88szkVrlzebrTuVVt7zzsPtg/sA/OG/+euHvvBkGlvEiOPZz//hXy5f6X/k732Issaf/NZ/vGlxL4Drbz744Q+9/2tf//rTJ1795kMrL529uH+hJZcvlMyGzFvf/k4AR2/Y8fDxq6XTO47d8vDgu1c2Bu1OkhUSpSbYfAxKYoo56nSSqysbTx0/ceOhmamkubLZ7RBSYwCcW90sesW5pZXrF+dv2HPQJsUg78XJtHBzOMT+vU0A5TD3g8E3Hnz5lUv9T3/k2L5DLQEfPLgjB1LyZBnAsChfPXn59JXhTOtqK7G333Jdq13agk+fWTEmBsBSxs22rG6IMJOBsoJLL1Fq3XAtpBQrc0mn5tHadfLcqZ1TNkrSzOSJOJumnTh54dFTAJ56cf2JFy+UXfnRT+4AKUIsP+RIJRNyFzCmvbGZ9t7+fQd+6EfflAGpzwf5VRj43AFQD06ttYSIbStqJHHmtU08O9ucTuyVpasANleu5sythQ6gh/Yf+Nq3T1xY7k115h9+8uRGoQHDKjxrtdgpgVTgvRR5YcAMvbS8CmB3xxR+2EyZpPTCXiROY1i/vtSdmZ4FcPbEecmL/YudgWirYZXAxhILWwI0DC+EatpftbAKSGlUcrRe7lVVqGJt8HgBJ8KYVgNS9UAoYFZTIOqdxBXlandjeeOy7S/t6A3SjjgpjRZkUnEy3UoBKLGwyZ0WgqlOI7a8MVw1RdlOjEJLH+o6WxKnpnFlrW/cjJZenYuVhl0i4znoYW1zUNrSc1m6xCYlJx0QNB8quwIAZqen/uwbz73wysXeel70y2YU1c4GE5jZAKCQ/dKWpIAqSxXOCWGjIPqOLUWREpwomJAkJokTsiaOTbMZdUJRl6mk3U7TZhRFdmYmSmPTajfSZmzUR/AA0maaTnc6HY1iy9aEgsCGSEUhWmYlABtHpN4Ps6zXs6C41QRRlnWhYkiDII5K0sILVAkiYIhX8gSoVPsCoIbVNNWUCROVvSS/dNNu+/t/+9KPXLy0c3oegJMEkiZMpVdrGYDPvUn8VjMENbKoyhrEGlXdF67TFTLpOP3yOOoXstHVyGA1u0K5gZARRVWpJo4RcRj81ydU1oG01xScqqdj7X9zhaXWM7be/2kCoaiNDAIH2TmkInBVaalrCXA4c32VMeQZEr5IVdXKgxlsuM0SRXGWe89qIgsATlpR4j3HnZ28Y/effP67//Frz55f7WWe+2osxwBKH/ZIENc5EEmrk2PE0AnliMLd6zgl26jjE6HZvCxIdZANjOHS54ZIQ65Ab1SUR1BNReohE1svyPMhABMZY+LSlRw3IpsOy/xz33zi69/67jvuu/mnf/BeAEcO7+90L+TDC9GgVyQJotiUpWaFja03PFAB0Jp8fJVIQ0lVvUA9GQ+AlCyISEk9oSwVQMwMJedUg3LcMg9cHkdJZS9RjZTUeEmdu3YEEhGIwHAiltVDAHgosYqERJ4WykqmBqxs+JllgDVX8eIdsQXYGiaIF4jqZO3ELZMOhnxBGjUMAG5GxEpgIu8N1fIXqWhZVQ0WzyLRjhmeaXkRnxdE1gvASrFBPoFpBEXt2HK1ljgyNlcBcP09t/ReOslsxZcwSbAYw7sPHYGaCjsdqeP5BZ6eIhvlToyNSDTM+eotpbD7VbLiHvJmygaEfAhA/NBR4fqZpVgraLg2WyeeMhk7HFIURaWU1GnFvMcbMnUehuoSwRmQsGwr29jnjp02GwZATjk31FtNkrgA4L2qGIYlMpVpCyjyooQgaIMrcxaT3kzAqrcsH6Mvr1k4qsNGSN7/F0C55cFXJ6zDHhNEjVplX8GHkyzuLb8fu04TgmrF6Gx0zX3XgKeOat0AYelFQAmrflB9XFUPZwIpnXQcJ/HZrX7QhJciIgTWsZMWil6GFMY8SmLgvRjDqrW2j0KaJrBhY5jqBPQqIatx9cy42jIo1BhNG82WMZuQVSkB+HyYNDvtqam02RxmGeAunX0ZRMbGNk5C/uAoaS5MJztmm+12
J0mbxAagjV4vy4brGxurK6sAupu9oiwhzOASLjYxtHKAFFWqlmAPOOt0xPhUMmwg1QFKTkSVCJEhIqEQkDTOOVW61p3fbtttu223vwutzlY9hnXqv9aRRmCSxYca7RltKjWxboLpIlJlYtZgi1+DdgbTpDZX63Do6OzXbtev3bar/XTivFTvfq+nfa5uqwJer9VB6Pjr+pOxna+VNR4Y8FIH+8YH1P2ZCAFWHBYineBoqI46HqKDVKNy4wAyJg+m1+7Wkwr50a+22C2vi01WRspEfxVEcB41G5GYQDy6JKEO8KuMEuozVUXRR15HnSkn3LIPvj+PbI2Kq8Q1gxITz21kF261uiqYUqovRsWAdCIWW0OZkwBuMLOK3AFqDFsbpsMYygmjraoqwhy8BjCTMyZiBKt2vZv/wLvvvGH/3HceefjwoQONqR3ZyuUf/Nj37l9s/e5v/t4V3g3g0aee/NIPP7bYsEsD/aWf++WdOxtxJ24059a7m4vN6Te/+S4Am//3l//0X//JPbftftvd97z60pO7rsN0RtK64Z3vu/tLf/Xt7tUhgIudbM7qfMvtnIkbsRmeXPJYntrdmmnE/aIA0DeGB/1vPXDq6W+/+PKr/Rs/97cf+f5jb7n/2K5msXLGH0sHAA7uMhseUTqcn0+6Wf/sxSso/NR0gzyvn9ncExkADz7bf+HlJ1xv/ad+8UMp2ZY0fFSuXXj6XR+6/wd/5O1f+OyDAKynIkr/59/4zL7ZnQNwoZIU1E7TOHGDrgK484Ybf+JTH7h86dG1S2vDvu936Pq986fOXPrKXzy/+7p9al4CcOzIjZfe4L/0lQf372v80b/5k43u8NZ9cy98+Tsv/LHft2PPY0+fBJD79LHnXrr52K0LM7NPPfZcXpRvu/8tx26//VtPH3/0yT968fhxAGef7U3P7rzlDYeodCsrVxCZwhXOG5E4LzIAw8KrUl56V3Lajo5MN0RQCLxQb5Ct5gUAJY2Nmeu09++aSlI0mzMXl5dWe6/eecstu2ZsfzAA0Ng/e2n56i1Hdx064HctdK5euiSmderyYFfUpk40tWMRwNTK0ko8fO99e9tpcoc1hoSMKYsCZIKER9WKall6BXeHbr1XNDq2NZX6UuAV3ADAJj71Sn5l9ey3jl/or7h3vOMw67BUmxpz4eLVrzx+BUBR0NGDC9cfmK2tfpUKvScoAmmoEMOcNztl1KKeexWqQws2GcVTlCkA77wBvHgwmo3IlbrcK6zx6fpa0jBlvwfgkUee2Hdd03JGzN3M5x5kkvOrw8ypA2VFBEBZKlkfU6DQlIV3pU8iGxvT7fYA7Gy3zi6vHVh0qiVBbWwAaXSam2c3Lm0sAVjelJREnNpmfHWtG0W6MNcqxTmPOIk9FIAxFjJSMQXnoSJdjRYMIFQM17owyHhZ07GwO/yextGjmvQSwJH+IE8oidLIFFG7FXsMotjAU5H1rK1dN4Y1pvTUStK1qxs8gzLP2w0eahnF3EpjAI0k6VJZFqUyFREDZRzT0GuDbKPZXFkrAcxjuigsfN5plF7ykqTULInnNwr3wnNnAXzwntnvPHl20HM7OnGZSzYcUr0IGxKuSXDMiGNYy3FskzRqNKI0jeLYWEvUiAHMtNMdc42pThLHJorjVqvRajZmd86ChY0zVgAkCUWJjZPIWMuxg6FSxCmseL+5CUC9iztshIRYCepK9aIQUoV4Ha4BcCVbMsVgIL0MZMq8S1Fi4dQLAcIWABsDYmYDY0oZWjamyoscxNUA4AtKWtbmfbFt7228vvLGY/Pveut1f/Dvvjg7FQNY3HediRQgJmYTAyKGJ1iH9d5Y5QJUIqOmIihVdKo6gwrqLRa1Mz4CMa/BNjES6alWjEUi1DYM1WqR+h5eF5oc72PjsF99jQpkqEUeFDjB48vWdwEAmiTEpRAAY5hgpJriNrJ1wbwJBg3qP8MNO2FLUAYbZqPQYamuzKyJm3HsQmpMaqSdnSrRgy+tfOnBv/2bB19wXmETLcUm8bBwAKQS8xPAMrKOalXL6JII8G7VRw5B2rFtGMZKVYmc87ExZVFGaarOR4ANqLGwk7BLk6FRNj0FWYFPkyYAiBvmmeXIks1kaIydarXzXvezDz37zNMnAXzk4+/6+N379+6YzoYriS/hRC2rsVWVIDWoba+RuUpUZT7ocNTiFNoCQv0vw0aYIWRTyWDjIUUNdpSrdwqgFLI88fyDQgQ1KImRcheoKsErMcBkI+a61LpTFWLHFgZMRI4tEZGomnC3AJw4D0nTqIiMCNSFpHdsjDG2KrU0MQMwmqONmPJCi8IBIMvGmiAfsYYhZYVEEY9mkQFUvRoURaGKJInImNKJMYCKbCHvjjCy8B4JLPIsKzgCkOxYpLNXNXNsU++LLe/C6FcgLfMijtszbeokuRM1nlkrihtQIWc6CmcrgDSOC1dEKMKbYNP21IGD/vx5a1yWM9fXobo4YeiauCKGsZGBEWpYYo0NealROVHlEMoisJJASZwbGo6gauImgMu9ftJJ7HRDxVg2Il7EkwqrwimcAlCvRhWi8KoQkjpzQ0V/JABc1Sqh0ctD9Vs8Ij6MyLX1tKqRv9d3ZbY0mlycakizEjkBAKR2e0bHhNlJo3c3ZG4cvdoTJWCYQ5758NPqKVUckuDiEAWybW3Sj+UKIfBT8cJrffaYHzExsaqTjjwhmujQVlcwGPPeexFfhz0UCuVqGwgZXasPFT6k6WSI96qKio5cnbQWNtPktZjIMIlzKjDWNuI4iiyA3FHSiGdnO95ENrGJtQxqJMmwcGAj1QlyUR32817e7bMFc6fVmY7jVru1c3rKHLoOQOl9t9/rD4bOOxHtZ7l3vihdnhVFVoY314kTFS2UCExs2Fi2HOibqgC8egDMJlBExIsXEUdJO/HeO1fD9Nttu2237fZ3p22nmdhu2227bbfttt2223bbbtttu2237bbdttt2227bbbv9/9TsOAw3EbMaE1EmtEg68S+uoeVNHoPqx2NaQx3AH4XPJuOfFaV/kveCiu5YUSX12tO/rpQhxBtHIqn/BElygqpZ8+rqE05E4kbnqXXZ4a88PmZMVZCKFgAaczNCOLjuYC2hGHWGg14INbMDoIn7neBMbCVjjO9cJ0b6mlGY/JO2oM0TvFEN+jQwaqV2zZEkAHWm6BEBM9AJDcHR+Bp1dDSU2GOp0khPUJuqeuLXJpRhHsVBmSfGP8RsvUJIJx9uPQwEAF4wpluOZe8EsHjUfFOug7dV7khXFq4ojDHivYkiw+wBJY7ZOychR/+UhRsObzk4u/Ty1MBg8+KZHc1IuxfMnP/xT7/3tnffB+BvPv/go195/symzp5feubs6uB8z7S4Xa74YTmIi7vvuhnAo488dfqlpT/437/0vo/9fTnTMiZBHPvsPDTf0Wn38gGAYnN97ug+djY10fKl/Orj586vnDt0tHGU59R5AGuX1pKcjt249/lnC9Huqw8/92fnj3/fz9zX2Ldr47vfxKV9AObaeujGI1lrfveNN0xPPffS8TNgOnrD4oVLK19/+JXDh+4
FsLjn8MHrdn75q1+7XHSyB55eXT638/qpuRvjzXLll3/5E0/8zXcAnNj0CZmFyP73R6Nh/QAAIABJREFUv/TriEmYSpDrZ1O1LOzJR58++saFN/3I0Zc+/+WD+6Zmb94rK6Vs6JkXzrteHs2lAD7xkz98cVm++ZWHptPWyZeX3/SWfXfdc0e2Nnz21PGb73vTqVeuAPiNX/vd7lAWFuz3vf2tN951/dLSlQOHdiy0N1bOneo7UOEAcGf+2edO/MAH3n3l6iZKpFEy356ZmWqUxXC13wNQeE2tFRHnvSudqvQKjWwaxdHF0xeDpnY6baTTTVvQnUcP79l7YG1ldXklP720ubr6cMK6lOcAHj958cJyfv+d1+2dQ6EF+aKbNc+vFjsOp/2sbKYewPlLGycvd99w9JDz+Z69M5c2Lu0o24lNe91eXgqAuRkL76w1WV52s/LEGbewMyEWURe1WoP1HMDmSvbnXzq1upq1ppOl1b5GkXhqsIobtmboZ3/mDgCt2UWUWZ+GrlwmQlDMAkZVRRCmBDdVS+edRjbNZDNqzJIfsDEwjgNVswzlJ5UsMcfkuRlHbLhwLm5GNx6YAZBYk0WSlLlKvrax0urg4uUrQ79UEjUs5ZYAeGFSUSGEqhrEgVtWOpdLGRsCQIaWNga7d0RE8PCRMa50nDZtZNbWcgAHb9pZrHdNI4qnW3mZl0VWDPvcjOM4MjbyZQkAxgTKdS0P0/+XvTcNtuW6zsO+tfbe3X2mO9/7ZrwR08MDQYADOIDiIAmUSIWaIkaSraIpVUWuSFFil0tOpZLIieMqu0q243JccmyJIqV40EhRFCWRBCmOAAiAIIAHYnp4I954353vGbp7771WfnT3uec+QpW/VhU2hjr3TL27e5+91vrW+r4FUWKqijJ3iqHrGrtGLWPXDtf8qUDNHh3vNlr1MgVgWm0TKC9CT6UsfGfWlTEyG9vKBnHkSAGU3islQZKh51aWwuTOxIRyRIlBnU0BrPfl/Gp+zwG+uu5TSdt1RYpJk07aWXz6YgBwt07PtKeLYnnaACNkSZe8CWutqfYxK6sA1s9eSxjHb5tP2tzvj7KeSxJut5OpXqc31Z6eaQPodltpmkzPwFputVy7nfSm0l4vyVqJTezIWgBJZk3LiNVSghjAWhiHuA4xEqxW1XCqhiPbkrjwwRFRmRcUopEyDlYA5MNcQy/LOmQMkZUQNUTVQBqhVacr+DIIsXpQMMZloIxcx/v1qnLG1b1TVCUigoTZZI3oGI2L0gBYwzHvKzstiqQ7HcoRzPZHPvqOT/67J77y6HMA3v/wHUyQGIUTzaNYsoQJYzhRFViT6kC1TMiuRSFjA12vkQlThlsHERFzpSlcFSzuyCE3R60qcBpvYZdt4jEjYjw37FqYlWOwUzdXc6Dplt5F1WdNUEjNBDYMVVO3zxYGN4farVI5poPIWNXBODK2HI3IOGr1SkqjmrTdAZCmU+fOrn/is499/fEz2x6UJEEiiAeGAVPpS5KCyICrKiiQVrsAQiP1VtMedOyHUGjoDmMufCVSCCjAxjpRZeeiqmVLqs6lqK9qRREhY0xqnVFirUjU4n0E0Gu1HLXyshRlYzKUo8JHStKu6dwYDAD8y0987vf/g/nxH37X3/3om60rTdknDAZF0U6YyNiq6UvSlMkKiBWiYCGImuARXHVVLRsNpLnGhJjh2pE7QQ1CaYowqnrMw3XGfWQmaiQxdpPr8tvxXkYNSYQBklA1DBZWOAWH+gMwpLBgC1hiBSCskVVViJQNMxk2Rok0iijEfM8qHjuDHKPGNHUAhrmXVpsUpAy20pCSqlWuTbdDARLiUpSY2VklgqiB9VHIjGt8q/o+be4+lAovSg6mYp9YKRlp1Xwm6VQl0dBYd52qVf/UZCVlRlMMY2BmS1ajgMz4N1dziETHXQ8dOo4HEB9gAUSx2fT85qVLhvqZ69XTq61D8yMUlDCJM2BKZxe53dIY2JgQmg7e1Y6xcwcrNziSIAixbQEoqLVc6F3790U1hgMRqYgEH/My5j7mHoAvgqtqLX1s2E+1EgM1Vd1EO/48dioHJ5fQuChyXEmNetfZsWYT2+juQbRjEUXHQcmOioSpllFzpKq8Tsa7E2lVgd30467qRXXc8ElYG072LQeuf/7UxGrAREvSxqLzOPzZKTOfCF4arRZthHAn/quiMMbOjwiGrWpUlRB8paBaickAKlQVhLJpNCNJUdG3VRBCEFUVkcgVr9kZEyQ2e2q1bBrRLWYwi0gIZYix6saeZZ12pxNDNFFS55g0aoyREsPUhEqlSuaSzNqgaoxlNhLKYTlSy6p173IikxC1p9rWOmYjiWU2xjooFz4MhzmAwXBYlGU5GoQYi6IsRmWel77wRVFW6pnOJcwcQyh9sIbZVBor7POSiKoeOG9oR74x3hhvjL9Zw6KWTNotyQhgwjBM0JHrV8adXV7Hwa8/OxE87DyoULpGy2kC3JowvZOjYlfrGP4DdpGVGphKd780CZuOY5GGVrNDc26wyNqN/mtORGsX7tbndx+r6cNSkU0qvRZpDsyNGos2LvgOh2g8T8IuT2Xc7GfcZqY64i5dTR3PgCYeafNmpjGVfuywNoxxqkSbKn04qp/biXAq97P2ESptFqMxYCfQqpHAqsVe5ZGoQhGpgUINMwiiccfNqH2uprce7YSblftZzc6QmVw5tEMCQwVfVE9PrBcFiA1ESETVh2rq1hnDBECj1xgrPRuVKISyKEXR7bUcRdSdQGi607rnjv23z771u5fNP/3Xf3rgvYdu21PunWnNzW4hfw3AnH/p7/zd+xfe945iOX76U6c/+RufWt2S9e3cGdpzeO9gkAGAS9oGzz353fPXOpg6vBW3Rq/dXNw/5+VmTGV5EwDaa9jYkr23LZw7faZ/Y+u3/uipd92398zpftZ+2bY6ADav+MV9h89dv7nVXy62/IUL8cCp+88+e/Hke5fueN8Hz7/wAoB73vaWAF+su8HF/nq/2BiOfuiH3/m2ew4Nhtfe9vEfffHPHwPwZ3/0jZ/7pb/90I//0P/7b//gA++9/cC0pDOzZZ72Lq8kU0uHThwEcPGpSybqtjVWhUTUGCZkXRw6cfv6K+cBbK71L1248Q5zV2ehPX9gaW19a2r20Ma3w7HDe1dX1n7sv/oQgEe/ePp3/+1vpS2Cx/HD8z/wgw9cXV1JZsMHPnK7tspjx1oAjj3fe+bqVi+zX/r2E9//3g+cuG36333ysx/7uR87c20girNXVwB0pkuBu3lz5fza9sa2J/IWJmG7UZRDHwAQ2yx1LGVeDDrW5aPhVt8PvUz1sqk0a8/OANje3syLcHh/zyQ6Gm0NRzdbiX3onsPX1pZnZrrFxhCARh4Mw9kLa/ffuRRVRz7hYDkQI3FGL164AeDVS5sHFqdW1ra/+ty5D7/3IJOPnsrt2N8aaekBuCSbmSoj1FjzlpMHtpbX253Eh9KlptxYdmkbAHfCz//sndOzM8q+3TYG18sWF4U3aTrVhmANwGBjtZNZ9puwc41aKlWcVCau/GRBSNotjc
PSixMkwkIuKkF8BUcaYVFhZ5A4CUg63VmON7eKzlQvD6GAAdByrdfO3XjXm9qv3Rxc3RQBvvHU80jSxJrBMNhMAaBS02KKUonqcRV0xJCPgm+1UwAx0nQ3nZtqG8ulwqoQO3I2a9nbT80BMPv2XX3xlejEJGZ239xwfW04Kmdme5RmACplwAoYqMMYUuZKwk+gHjvqHRM7WANy1IkeGhum6h8ek/6qJ5sUEvqlQI0yh1La1qnPjbGocF+DEAMA4yxMdmW12Nv3o7XN/tXrR/cbMu20nZTqVKcAJNmS7br54/tPvfve9dT1i+H+qfncDE+/tLYkS2+978MAXnjqBhUuWVzauHnxQHfTSHjs+Y0vfuY35xYXHJUARqs3f/nDd73r4XviHnJpkmTMlsmycayGKKlieiJDQUdNcFrfgqAUIFGGAHJGFGVRJlIPeCPKiR1C2USu2oKQECIkqLIW4g3gopoi6MqmXxkBSBPXcjOBS2cYxAwSEY2IIpBAlAFwFCSSZacsMQizSgg2MCI1ISi04ryzwoChUBJBTZBjVPqSIFGrPhrQyIRtg4zICw/nu2xyBdBtt8jafIhOmmhEIJjUafSTQXelIlC1IRhDQFqxtw01PbWZVBrFkyYDSWNou/rYGKOkigVb/eZ2HA2t1tkuMz8xxlarlhYh1OBkY89iPb3KfDbgOQHM1ard0T0df3+IiU0ciCExEsNWtGbEEFIbKkXHxvOgCbdMAZCxZExQJWLj7Nbadih1z+Le7cAj0z633Afwh3/xjUcff3ltvSjZmsQEIW9IA4yz3ufGJahkV5kqS1vpJzCxYQ5aQziNhqUqawVWMKFp+CN1K3AIqVa+GzFBxFgOZWHTRKHGEQBSGZVRpCJIusQYB+QqFLxVTaruEHkfUMNWYwGoNUyIniDDAWcpgB61RkX5iT/5+pe/9PjP/tfvf9+DRw5OJ2m5gtxr28BZAKqhXjkqqoZqbUJlTsTNm9k9AGiqw6kO/XAUM89Za3vt+unro35x9wHH4qjudmUQS5gEE/esQVHGT1RuZXVvZJw5b+AlgC0lLSYHRFLKK/eSyZI1MNAAwBALtPReE6eGYViZIygoTfS6/t6hZRS4pGpmnQuCS6yoiLVMIKMKrXo6EUDVNWEFgzhpZV5RikaFkHGGJ3bgeqVPQkow7IvQ7mRcqYBSFFu1Hw4SK8C2EgRv2pVVGKZAhdillq34CIWxKZyLeX/nJ9aoOlbKrbkvmMlZYykBEL2KZbYVgsjj33Ljy9c/zSy1iBEslHWRpChHYE2SdOd0eAwho0LcrcmkKFzLeSQAphZPbLy0edN3Dh5dmurGLMuYOO8Ph+tb+eZ2vrEFQAcj1gojJpWgY3+ZqLnzoHFOrAFO/7pQZ1eAtfPM7njgde75xDlVog1a+9lciUtRrYZVv7eytzoOhqrfehNIKFV7K0illjXAGI8cn8WY+t2cTYODjmMt1KlDqWnTND6HiZOfPKOxHH/1znp64wuyQwYHVJSJK1F4VfXBS4xGmZjJgmvVKVJFFAFQjvIYgwLOWWYGw1nHaSK111BdNAkhSBRmFmfVpZZIovfBCxkA3anu7PTsYJBz6iKEyDhrvUSRyGSYLQAytghBVY21UcRLrMQqE7ZaoZwNnZwYgI9SYghlVmOYbZtNq8UA5ltdVbj2bYYNG6Ogwvvt4WgwGpZlAWDQH4rEwWi0vdkv87IovMRKYFudc9Y5AO71l8sb443xxnhj/Bc6rNaWYyy01IR3AHZbx50+a9jRN9ltJCtPDBNSKON0GQBU/UyAxv41MFqFf409/O/FOGubOPF0jc3JbqhyZ34TmBduffHW5yaxSN1tK3c8lokTmhjNFZmw1M05ayP81PSRRNONQI1SDVneOrPme14fHd2tvNJcqAm4cVxJ0sykaiQtOtGhtp4Eu0Y2dAdyFm1a702eIhExMStH3ILKVtdexl44ULejaD6mxCpBGgSa6yrI8US5mezYHVIZh7iTiLaiauQNQ7aqKa2Eq6rce4Wo11/MxEQxSoyVyEwEwBBVWBYY46w1bHzioiiiDaSLWQZgc1jume5Mp6RLU3eY+UNT9sM/8oHQGT711HOHb9vb7a4CyPbvz2a37ebFQvALH3/76vVLjz/69LWbG0U/nn7ptf/xf/1nABzgEpvEcP3c2sorV6cX4gM/cueXPvOde06deOAHP/jSJz8NoB90O9izX39me81bldawOHjbwelpT1NuamoBwG/95td+6EdPzBzcf+HMl6+tFu8+9fYf+sj7zjz9ObK3f/kr3/jiZ54C8B//7OG1ixf/6Nf/5Pc+9WSvS0duW3rP299i/Wt5Nhs3Xry6fBbA3/vVn9hzzx6emqLRu+954OSrzz3SRti6Ts++8LLlqxfPXQJgiAKpixrTjKJSKJI0pTJvdRc39ByA2+459I73nmpZv+/oHLrZHucuvLL87//Nl26/++TBB08++rXTAP70L742KmkQ5C2Hlt7+4O1Zm2cH8cCxhdkjCzTcl5/tAzh7das/0pUhOi3+y7/44ql7jj329KXv+/4wv++w46eHgxzAtQvXfuDDP/zsmVeKpF2WhJSffvnVpcWpVtsOSwHgTDbb615dv5FoDLDbRdjsj44sTm/3hw7oZhmAl164+MC9++87sv/cpauPfeeVDzxwx+L8wnY+uv/U8RCdsWsAHv/O5efP3bx+afuhuw7OTHcfefTCxsrqSh6O37mwf761tLQIYHF2/Z1vv9sX4V33D9M2FaIGXGyPZtrJvsOzAKam0mtX1/qDfGZxttdNbCysIUH0Idr2HAULYGHaUdoexFGrHbwaA3Hei2mRD0zRmABALMGyI7YuhQZF3dGCjQGZqqdtQa2+B9torEtcxnnwUsJOg0ZVw1BjjQ9ChoNqMSynZ1PJWROXdlqaC9sMwMLSni/91QvLa6X47ql7jp49tzw7O399e0uGpenYCpf3wVgGqUhUKCBCrEIm4Ri8VCJ0eeGPH5hfXEzIGmVGIECDSNpyIh6AsZhe6Ej0CmVn2910VCiRUe/JJOAUgMThOHVCtYg7VF8ns18FSK8nFjneIcYafTo2TGNxvq0iPv7M+bc/uOcbT716+wMzALGxbLPRMKoibRkAtpOJ2g+9520Ld53akyg2rl44/fXRhl+Y3/v8hfXzj54H0Jk/ePjEoUe++FUeDC6oX19efWXj0rMvbZz2z7aXnju4OA9gqZvdsac3yjedxcpm/9w1f+TUqV95J3eS7lynBNCWPnTzwvKFg7fvTRdaMRgwgUkZ0CixBEARShCJdcknV6V0VfknZdYB0BgFkZUIrKAYAR9YpkACFpgAQI1EIS9WIjkWFN4pUCBs5nEYAdi0p67tQ1BjLCfKVVWeqYPIUACISjGIS1hTlrwg5hACUxoVKmKr3juRgjJAVm1kImaqkdWJ3m0EhhPbbhtPNqz1ebqdMa0PeDDVYgCqZeSQpT5oYcBeojMJ1f09xiEwKVewCqAe0giG1kaPADApotRmZMLMNo/G9q55XCnPydgsYbyqUMuf/f+ACNWD8SMd99Ktp
k7VuTe4ed2IVyeOgsqQJb25bKrLzmlegJjarQpgLbb6yJd3yqd2KkZrvwIAGYa1FCJbBpm8jH3uyrWtf/QbX86LfJATgPPXc5MlkmbGWg+KUcmrSFBEl5iqk3JQqcQto8QYq1VklJl3cBXUWVeqMQaD2GSox9WRQhJJVWKs8E0tlESkUGaikAPgzBCLRCgMgy1Xks4h+rKVtR0YQKlRVEBEhlKT9cthl60BpUm7X2UmRkXaak2rvZn7f/6pL/zBH9v/7qfe/NCDB7IsiVENAoAqubkjdY7GDaI49DkGDkDiEmsNp+lUa9FNLV48n37qs1/k5eKXP37f/FKXPABkIuIqsH3XahirmI85QXVmFnXCPiWGMeQSALHVkpnpAWspMSq5UBKpUUCIvaCqoIwFZJROdUKSlMrBB4mixGoMOYumQHNiqda+eyRr0zQMCgCcJNxuSxm5dpwM6gY21QQDAKgXiYNB3pqbA6EsAwyrAtawKu060ARLSRW2RRKih0YAKCWwczoqiJmbUrdb3FUoFK4YlL0yZDNtZQ8FIMPBRpa45qY0CYPm3yTJBRS8sFUAxlERxGaZFkFN7WZDgbrGr3GDRwNAuJsAtJ1HAmz0adqmOD6jXVgXSEMwCAUxtvoewId+8m+9/+d+6duXzg5H16bmy6npaWa3eX11/doyJ8tVBgvR22FUQgwqzMJS7XbjWKH67vEVGP92adfT1dvqqYxDor9uu7l10K2Pxmq7VSDWQImNdVQCYcxPGiOHEzhiVVwpDEjVdBzATu0gsGtizSa5E4nsmlSdMdEGkZxQsZ8ka916QmO21OS3EaIGAojYGnY2ARBjKOIoxJCZFqkyaFwcKY1osSEiNmzYWQMigTbds3a6jcYY86L03hui4By4zIxhy6QwLgWQdaemZxfYDYRZozeQUX9UFKVL0m631YTGIXMV3RCrm5ujoliam0vTFBJUtGroRmzYWkfWGDv2dIhAlXoz1VEbQKwjDRQDlNiSme2ms1PtCsgkS51211obQ1SwiIxGw7zI+1vDre2t9c11ACt4Y7wx3hhvjL9JwzbFZ02xY8UjrrTGiQQC1In8HVe0arAoleGTimqhVcQEYGyjmkcTYB9V1XNEDBorZVeNDnfmtJNMFKLxlzYJ0MotqlyduuxwByadsIgiE4euzq5Cpho3qSJ4VMQKYgLHWAKoG5zVpTXUIK9j00g64ZmpKjWNDmtgrP4I2cZlFamrjBoTCNFQkxPGdRU7V6ualNYur+7kNdGAxrqDGFYXYRxVVZrSPE77lcE3kWADADYheu3AVhabqgCSJyajKhj3qhOVivpAzV0j0pokoESMiCZ3SQbNkcAENoqd6shbGg2Jb9ZZfdeVq7oMDaxsKrSSazq4REJQ5VKUpKoBYa4uvpKKVGiFQmJVLuOYFFRVCZmEUyZrnRB8lCiBmFNnyzz30W9zBFAqbm6Prm/5WOLG1rXu4nwoN4/ccXdPYjqXfnf5EoBT9x2wSau8/Pz2as8emOvG/l1756enpy/c3B6u9KtYIo/S9qoGj33hmZWLFz7w4RNluG1f69XZu9+2ePAb1VrJstmUe/1BBKOd8pyxm4EP3rZwYH+pB+4FkMy+8B//0+f/4a99bJvtbUcPnjp5+4UzN757xr/t2rWDi/bBd70TwP/59/+QR6/99Ic/+OL9W2e/e35h8fC//+SnDyzNLRxN3v7RN/3Qz78XwNWvvfbHv3nuvne87frFmzdv/NVP/ewHN64+d/X0+aXFE66377/9+M8A+PrTL/7Jl79trAle1VHHphTL7tz0tQvPb4UA4MSdd902P3XmqQsnTnbXrl2hzh1mNBCJT5x+9WdO3PaH33oUwOLBqfPn1wJw5pVX7rxz/j0HT/3V18+9dH71vQ8daU1RURE6vXnXA3dsr1+9cnUrBJlqu/d84MEvfeObzzxxOleemp4FAB6defnbEoJkRWSVPIhNBiO/sr7ZTQ2AlcFgdm7h6nJxaE+6trk5GOlcr9PJuhJ04MLlq5cBtBJ64NQ71lfPP39u/YF7D+3bc/gLX/nydCdN+LbM+q3UAlgfFQszvR9538nlPHK/uOvgrBxbeuHMxeHG9vS+wxeefhXAhUuDd96Tc7Z14vhCTIeW0nKruNkf2HZiei0A2xvromKt3V4ZXHhtfd/exfVCZlrGCqn30BwAWQV8N2EAzimMhYoxo7osQR2AxABRLLehPkokIrJOVIKAdmQQ+m1rrLFBSo5JoDJq9GG1Y6fIFgBiGJgY1KsPbIwiGWivR+VQiqCpOTDdAbB/ptOd6fRaqVHqX7m5ueH7m8sbpTeE1FR1AGDxGqBVrRuDRYwqE43KkALBC4C1teGN9eHREyKMxFoBcfA0XN8q/OzsHICtK/0Xnlk5sq/bk002xoyKLEvj2oBaVmcYyAEE70Uka2VEFGPVTJyM4eqaNAVGSohVezCCsgWgIoIablKgxkXUc+ToLENYEpAJbdsBTQHYv2dueO+eQyePJ8XaUFZ6towhBJemyZZnrG8mAJ7+Rn/1ha/o9NKzX/22GZUHZ/rd68NXyitXL2zdvJGvvHQTAHeTduLmsuT4Qnva8vwd+9Y2Bj9yavEX9u6Z7Uz7uA2AWMhH8dMrG2HfieML2dk0HSK1xdaq0w6AEAcqZjofrp/d2jebuUjqlY0x1sTYFNiwAcEJEStElIIahk05axe5r1mfAMFIvaMKsdoUXrfTVlaOioqPl6RpCIGiptaQdeQjhShFP8Qi+hJA2s5AkhlLxKqRrSW2MRTkPXwhgQFwBBVBYWBcGUOKxICCBpugGJbqMwCJsYZyNVoEcDKVWNKQixiIJSLiCICcRO9dd2vkqSUth5EUgva0QZKrAnC2JT51DmAzLKiVdAH1MbpE/Kh0DgCGoZ25xGOL8pikVkVUqja+2lROgqAgExk+xNQazUeFsONArESurIT+GRoz64yG3HgaqbZro9+UczEIGqNG0USgFIXTfgy5ltamU3YKQJtsLjmTbS8c0mJ1bXM7FnnLKVFUadpNj8vEuK6ONEZIlZRSZyVWxzKlD+1OeyUUn/v8q6rxzQ8cJ+X/53e+cmixC+AX/5t7yDmo1Mtjp3uqqiYhlgCMtSBLCIUv0WpfzluPfuOla9u8uT1a3QwVnz1xKJQKHygKG1ZRIbUpq4pEUVYAKRklEcCxOCICjArFOJSYEAEog0+MUdHUJTHIdvRtQq6aAJmx40orhjgLl6KXWmNh2JS5X9kKMcAPNwEMtw2TcRIvSpEFDJevqmgaBWT8cJhDAVhiqDIFAm9KH0CIjkU8mbQMAKIi5rmoZQrzCc1be2T/iYxaRq/nJTRJAaSxiARorP4zFYQtlEiYa/dePHcZQEsuzp2Y8dnMY5//8nQ33dqUXpd9aSlLhzGn2gvSXJKkobJW2Q4iRtNGJaLcAVsqF46VGP1gM3JZygDWR+vcORGXjnSmW6Vskep0e4lEBpdWvvB//4c9SQBw99tPxMz0C00cIyqsFSJVNYZFI6zBrrHjUxm25aAgW7PuUylGWTsn14nbHiVzq18os+mQ
q5zhRPv56ijuZ82yWBQu0SgB7ERVOJLdgdor/LlezkD0Xigr2NtUAHhJjApzoDTLvUkUUO8pg2piI4CQRxjLpaRuK9ioos5QUfg0zZI2djmH0tRw1nbPGATDpioJDzFLyfbDyJL10UGijQQj5Cj34igAMOBosyAxUyKWjiU2HD2TBnDd5rsqvawagINr9pNpz0Rf2qwEoFOd1sx9D84dPXPzSR83O9lcy9rpIwvtbusK+bIYANBQFp6K4SgETxEQriIFUXENzoVGkKDCIQkkpERGRLjmFwMihoQglpq02m7wHAqlug2LNDFQZQPGS6EKCqRSgBp3qalv3o7jTU3d+qhSAAAgAElEQVS76ebGjqOZOgBhCk2lIrGyNMipNdJEXhBVqRNWUMAxi9bdq8dRYoV5Kkkde0EbU0XNm2rslmBq1JQAQGLdSoerRkugSIaZOUY2FJRs4orh0I9GAPLBUCS4zCAYkyZMGoKnWmGGJQoAZk67XevccLsfhwOGmm5brSXxrHWvJ8Mc8y3VyK1WBFTVR6+BbJrZKtnlR16iS1Io2KXqi5vLF26ure49cft8t2PYATCGRsVwe3Nr8/qNjZtXud3zveleK1UJuamjRGcsjAuhHPY3hnmxtLhHbMLG2hicaiEKgDnttlojgSHNIDmRMzYDBjF3CQNIkl7uR/DR2LTVbsPHhek9RTFKT0yBOAQP4JN4Y7wx3hhvjL9Jo2ks+j25uIaxu/NqAxI2hq2ynbuUjMY4ot76xO6DfG/53w5zYgLHRMXt/mvJKQ3CN5Fpe5237lQSfM+ru5QxRZtahomTxa6u1LtyxLu+Xsczr9BEKIEbEJGbU9KKdvzXVT82h6i/Vnc/A+y4FbsKQhupxVsvKZQnrurrTXvyCDq+HIrvWQ2ocq1jqlsFW1NdDqC1Z0EkFYQMgJpWuJMA6NhZacKqGtps5jMGoifZ8TWVvKqkjFUpFVGlKNMsPiKCNRSjCChWCnTGOMOVGzAcFc5Q9GKtjTEOR2W7lbWSbD3vO5Af1NN77Mnn5maTD33/uz79+b/ULHvx4o3j64dC0mobOXr8JIAnn7iQ9XJZGUSH7dFLZy5cWL28FZJ0fXldiporOsU+z3gwoK9/7fT/9U//5yvXv8mvXVxdHuzJZSpUNQlY31xf2c568+04UigOv+126eRxffDUtf69P/UuAG9+y/Fn1wdxmBw6svDit8//zm+fv/OrB7eL0Q//+H2H7jr4vr/9MIDH/s3nLp1bvhwGD7z51JOPPa9xcOXy8vLF5Td1Tq5vd1946gkApohXN27ctfzaicX9z597efX8zXB16+RdB+3tx7/8h08dPX4PACzu+8yXniJjnMs0eO9HJrVzc0vXz51xlgC8+szL3z556NqlC8CS8tTVp5/umKV8FK7fXP7Pn/ncyTe9FcD+6Wyq8+r1i1eX17zXxG/z+e8uP/vM2ScfPfvwB9927qXrAN718KmlqWNnz4aHHvq+/vrVH3znycP33/+p//S1z33pO7MWL7x0FsBdJw+vra35mHeBucWZm8vri0sHtzZvBB9euLgMILXcTm8MR4VLWpL40B/u3Xf4rmMHnzr90pkr69YIgH2zc1yuXlhZOXl8ZiYNVy6fv74dcmSjYpjZziAPAA4uZXPd1r6F9HPffOHht+zt9uyhI/NH9iRffOK729e219dGAOYXkpXhzYWuAg6OOEmXrw1LMYlLtooIoO1ct8uD0aD0kZnZWuPIGtbgx7+xZp9sCvy0kVfdta1RxVgGk6GqXA4kxIaIqOIcGVgCRZHS+4SctYmFiSI+z003A6CZs6UzKLsJSVGgUFibF+XG2tqeXkYYAFhfv/z2e6dX1i8e3Vu+sjI4cWphajZZWJgfbgy2ljeXOg5ASZw4zhLOHBMhRi2jxKgu6zoCmw6Arz/+wvkrG+9484wxVigIpWTYFz5NmJIEwM3LN3yJ7cEwdhJDPGvZx5gkKaJK7mEIgLXGlyoxEnPFmKNqD6q2rSpVwYAwk9SEXKpTZVrniMa1FMq2jGIkpows5E78wta6O/uVKwDc0u1b1/Cvfv3Tdy64vDMIsvnQnXMbZZ6VrZdeu37llQjgtcKH1esLc73bpztHFqaSrmt3Wume7hzyQ+88wjPTAKLmxnC+PUgdQcLNsmxlLvV+mtfDjctV/amWQwGbTtslQkaL/jDJI7UlZdY8AjAyhOk4iQxjxIrG3dvweIscS/hKHVCKVGz2XXv/xPYMKITK3DNbm1gAEqJCk5YLZZCiSA3rKI+Ft2xanRSAhBH7jBwBUIGoGJWqgadG2VEUqydXZQeVAGdtPtxKHcMwAI0lWVWFtZlxicSylhEkJVtTUIs8Z+66GAxxDJS12s54NQaqiWUAaTvRkJaIeakwlixijMbZ0g+TdrvKWJooEUxwSTuDlFXz1/oS1N1yq+oXtsRSldaZJINQ0h2UIeWYOgYgpCIuFtGQwnIHTmMdTleGiyfYjuAMJGUuiOJ9aVt2ZqkLwA/LQnukvHVlXXnYbqV5njvRlqXANHY5qgxhXeFKlbIJmKtCHkKFpzMJoT/oPfrtSwwzv2/v0ty+v/rWufuOzQFIWw/pYKv2qxoxhxp7NXAVSujLxKj4wiXOZOnqZr7Sl9sO3vbqq9edsf0KfXZkKfQSE0pPgBACUJnvpnE3gh/rtREDrFLl9HqGZmdSAJ3evBc76A9vbmxlNrz1ruMjgiPqtlKCVLNanOqAkGZORUWok6XdLFVQp9VLrLt0YxnAdqTTzzxz49rqr/zEj73zve9+6it/furkifkDtxkhEV85ANv9YmFmam6qfXl5hWHYuVI9F77c3Fo8dgzAta3Rf/7Eb5+86+jHfvaj+2Z6syy+f+mRr33zzW/u7VmY1krz0ZYTt4LGv7F+JE46qgmAInilTCkJMN5joZvdyLaWi2hCbBHnPgBgkdTRLtTl1tHI6DWZkuqgrYTLEAZeANg09SubT372j+79wINYcH5l9NqZJzppkpj25ZfXDtzRBWAZAx/SCu7khuEfpUnc7lIE3zUXVZtl8ASgDx50227/sc7MXFn2kyDMWRscQ6DrN89+9ZsAThztxLatzA2YFQakgAGpNrKoTf6fdrmHopbByllqAKgkfaLSJMOSWkwlAAizlkVp4QFAjSEPdg0ohyZv3vy/QuAEyvWDWnw0Ng41MQA2jDoPbommhUVDSZakKBzEIwUAm5jYV8v1seoyCwZzLeYwzrmj8bMJxlAIyoZUCgAaNhSDPPSHw+19czOdZNYZNygp6y3M7s/zYQTgC42DlVCWYlm91qRoJdbdxr2qDpiIEF5nARFudeon5CSws5eoIUilkIB6m5oISaq6RmoUUHaOV2ucKBTEpLEiE1W4Iyk1Qk3fywi/lTVXxW7NGY0h6nrCpKQMNC3Sd/z9qkCTdgDJOm4YB287S2ts0ScQzKbadhy1jCtAdx5gQl6aDHNzBeDLYEREtCj9YKuvEqYMuU6HiSeuuDYkDUPEIfiRL0Gmm2UCBVD6EKMYNsw6GhWrqytbw6I7PeOcjb7o9TIAwxi3V9f
OnbvoWm7+4MHV1Y0iz6dPzBW+TGDSGAE4okLl0tnLl149Q51sfmkPSxRQsBwhSUX6JgxjSImUuShDYK5Y21mSOWdQBe3GGGJrbTtx1Eo3hsWV1873puf2792Xj4a33sU3xhvjjfHG+C9+2MpvUmncGh0rFb6uyzU2kGMjOAkF1oDeLSHTzse0IfhUf+zIOt0KvY29OigmHjZvbujV48h0/Pr3onS34JAVjbv5dI03SgOiTZjLnQkp3cKS3nVeNFGp2HiqCqDOxNaXqpnLhIdH48O/zqUaX0vShmohO/iwjj2N+n0TivgTs3wdxscEfKf15Op7BpoAScf+xMTlBMGAdhyA6jzGVZbVS1Vw2rwqIo10euVm6I4bseu7dxDJ6k8e19o2ThXVgGgNbrJWQVR10xUgCiECxNawNapQkbIMKiWAIJLYFERFCN4HMgRCEcqoFBWcEIAk5a1B+bt/8Ojv/v4TTHR4n3PFwWuXV69ffuXOA91kMASQ9uzMvgMzbz/UmTk+2mx933dunnnmzHOXVxPCMGgRA4CP//c/oyKf/cTvPfmdFz7+C3//Qz9x78nDh28/cvzVP//qE1/95jvfdCeA5y+8GrU92NqQUbk1jB/8iYePHIVZX734ldOf/kefANDqtd70jsN/+bkvzBhXen992Thz+cZG/NRvffbBBw8cmHsJwOVrr+U+zvLo8y88G4DphYVDB5eNmQY5FPn+xRkAx9760JGlc9995vwLz13OV2+8/OyZV7977iP/w3ta06PvfOOZb+FlAH/nFz9++76p8zf6xKVHdNa0WhzKUVDMpVUyNnz29z7787/wg7pd3va2+SSZ58HC5uiRzRJmffTRhx4CkIWNqcW9j/Qfeenm9Zsb+mv/+NOnHzvb3jv34E/+ZKtt7jT7AHz0++7+e7/yL1defe3973hI51u333N7yPTFCxfuu/vIU89fOHXXbQD+wT/41T/7/d9/4uXTKhQhRQkGhsPtEIMkDGDgZSvmpkVkkneceuDzX/7qBx588MXXLq5tbN15/LbFKQBY2dh67NkX3n3fkaMHj3CxFor1xcX0zqNH1lc37jixX29cBrAwnb12bXtxYfDTP/yOLJXnn3tJskvnzq62O+3+cnzp7AaAB9+8rzMVJUZlb1NnO25lYwPtNre7gxABtDstHo3YsCoJSLWSAXNxONrl3XMtUaQqpDVvcyc6BpS47m1RiTkxqYioWmJUzbIA9aTEsDDOEBPEFmv9cqu0s0ls9wC4NJN+ceX8yrQ6+Hjx7JVtk4zK6NW//b5O2h4CiK38nrfNDKX8gTuPv4enyFluJa323GvPnNt+9crRhSkAUdlYcq7hOisAA2KNSenMN06vA9haK3JvFVacy706IthE4YvCt8shgCzV+988326R2TMt2/31q+tZpx1j5AiRwvQ6AIxDIBURruAZxxQVABmu4TVU8muEqr2NqkLAzc7NEzsNEFSsYYUOt/uvvrj52unhZsFr51cBLOxd3btn/sS9hw7MkV8/J5lhR13mJHGHj08f32sBdGa6QE6IPgSiDaXpVS16c1NZVmpYk/U1ADEfRUb0kHZGvWl1LgPi8k0phxQDrAfg84KMNZ2kJIIzgRnWDoYxS12lYxujGpJAXBL3g3SqCnUBoo7TTY3Am0JVJBo2MUaQSIh/LRACAJRmreBDTWCslh4TCDGUBK+ciIgWwsJptwNAEajcgukCRslIFJLAKlDEQJX6McUJmegq2CRSX6ZGJZbVCmbLgBqbqmurkihs0lKJEnyM0aYGQJqkMbZHm8ut6SlKMwkSgxgmRi0UaA1ArpAsscGygjWSVTJplpaFqAQASSeNZeEoRlHDDBmjAApBBXOTIAavwkwwNpEYzl9bP3rHHjszndi8GCqApGUgU7K+Hvo3yJg48Jy4Oh6uqpa4aosFEh2VLCqJSwSFKeXspZVHHr8CIJb68Hvu/8KTrz7yyPPve/eJn3n43nYrJS2jBjY0jq6r5ILhirZQ7REVJcNUmIIQC3EE1GymiVndzLdyHJ2Zcow0SQFQ4nS4K4NIDSmYtIyIANJ2qgGcJuocG/vgfSeO773bpDN/8flvEVHH2uo3kjGnKtwibjwAY+reJlUBT9qxAAhsmawx1tS8iUjIWj0A0lq07cX+6jK/+LTL0hMnjkwtLBX5KGNSH6o+K512ws6kWSajfq8zZdgORqNuu3PyrrtilPKJbwM48+Wv33fX4Usz6Q+889Rn/vKLXZUf++C7b6yvlZJMd1NjLQCQ3btngUj2TLW8VwOfmHQ70qETe3oiAJZvlnM/97M/+dEfja2ktXwtz30cYWpvZ252Qf3WAAmAbp27oDrL2SjShCgofaWXq2QsO68SNfpCZKoNGBiE6lJXkLcIyURSdVxF1jxT/QabPjHV4/o9TFw5u+ySpNW1N/p2u7DzrTSOzj59eqrNB4+f9H1f5AGAEDrtVjREzHUjRSbSii+PBlx6nSFVTW/uATjnyCZnv/V8f8Mv3HVw6c5DVx591ud5tnfvgbn5lasDAMf3dLiXRENqDKyAVEVBVqGguCNiVHccaorugMQmiGygwUcArp2glQXwwsHjw9k5PyhIR9YmCXEYbQMonj9Lr57rHD0iACwrk4JhGFzpLlTaslXvH4FUWFx1Y0SFlLhyp0mYlMhaSMLF0CYxhiExeQNjrCUHAKUHyBgD5ioJQAAZJjI7qQsomKrGi41rL2SJ2SXwAOLgat671Peh3XIUTNrupbY7CmrssLe4Z7g5BJBvDXVz4IMX76VUsGoESHkyma7jgKPG2erlNuGYj9/9PcEQGr346jqhsgy1K9GgqtUOXIvmKqRCKEmJqJERGKdAK7+e6uZfBNQMsKanJmmT12r6/dS4JcYBwcQiJ9ZxTlDHXw/UaPPklxMgzcvjLaz6zjGhfHzOk+9o3r/zHgLGMizNoahqFKOiYGUmruFIABDVrf6wjIPR9nYi1MpaiUtEJEmcIVOxvZiQJokxPD0zY9mdv3R+uL1ts1Z3dqY6PS9CRIlL1rdWr169JqoLe/exwaA/KPIy+gAgEM6ceWX/nn3TexavXb8SVZPEXl+7uX9mUQXpVAfAtetXz587P4q52zvXVpKyWFzao86Rtao188z7qKB8Y8212i5tJcwaQyC2Ls2LAoDl0Ekzn/epO5WmhmMs+xvT073jB/enLkWZ443xxnhjvDH+pg07tjM0oc64g5014NgEpjbmdWtVITeJgI3/mQyadlmbCcmkMQComOx43eRjx9Z5xynamd3Ot6BxMHcb8olzaCx2bZrHJ8m101MzvXahjBNTmCBSN0/fwjoeA2bYNYMmm0y3vLSjRDUBlX6Pd1LjGWON59owU3NjdnyYCXhQ6yRf88dYNGZ8+pi802P6TeMtjCOeXcBq9WWVL8xjQtyEE6JaNUisqO+Vc6eAQBWmdgzoe058vBhoR/ermeiu7LLW56tceeYMsKhKVSu0c8kqJ4pqyUzmOtsOpIyyiMaoAsYSBIO8sElou8RNuWKwAaClOn948eD+vWfPnN+82b9+w//ZF7+TzC
4eu+/+ubv6Zi4FsDR7aHv12vTi1KhYbc3un5pKNge+0+rNTtHq1tpCjwD83Md/4LaT+x7+yff+6sf+4bdeW+4+8vwv/fKPHpG1w/dMUzj6j/+nfw7A5fra5fPOGmHq5/T1b55tTx999DOPn9yzz+0pAWTz5vibTv2zX3+kzGk74mO/+NOW88HK6ve95+4uD8stBtCa77z/Qx9oT8++8q8+d6DXOrZnYdTv3HH8RMvG/s3Ltx+5DQCtp5/5g2/92Mc+dOXa1qEjR7/17QuPffXisQdPJbP9Y8cO3Xv/nQDKwbXOdNeujHwIymSsceCNm8tOdagE4O333LnvzqW1G9cPPXAsWThVPnv203/42Lm1AGNXN+P//r/8CwB/6yPvG422V/xIDT35xAtXrl/fyPGRe29/67ve48+/8i/+4HcA3P7Oo1N75i6fPvNP/vUn3v2++zdGV976vvffvHLVS0uILq+sAPiNT/3xM48/fujgbK/VXl6+7BjOTM10Fl69fG4wEAB7ZrOWYR/l9Itnr1zf2OpvPf7sc9tlwaD3veWeA3tmAfzpX31lodXZOz+/NL+0vTx85uV1dXZpfu75S1cWF268fOEqgMTixkZ+6fqoDNcffv/JpbluFFzdzO88sP/Spe3NfgCw78h8MVxNhtFYYUtIUl9ier6dJNkolABcy4762+1ehuiikA8afEWXgtFY/5KYmQjMxKo7QOQYjqx+m6xMRBxFmJhUQwxVkS8xBe8BtLKWQKJ4qEQhCmG4VshGiYx7aQvAxmq/uLox2C6zWTN7cHqf690+t5esodGIJZQyApAtTsFJyiUnXQoujEbFaKXcXG21i9AOYgoALRNqxY4YUeUT2BC7OMAoxiuXBwAwkmNHDqbzqWRxczvMtRKQpu3WtmyvX78JwBjXnusVW8tttMlgMCp6c1NGWfoDtsRJBgDsSSJbQ0zKjDrmJJBMbqOiXKUzKopr3YaTd4TQqk3COacShHw6kx7e7xZHZmZhv0mmAKhuFLKaECMOQ2fAczZEMkJw0ptJKjXMUAxiOXKJ5SgahVxeErjVduBYxJIJgC3h2kkePFESIpWBMzE9l4AYmat2M0eGIgBYBKYIEinzzCnyvmYZAA2jEA2YDLlWu0f9TahCBMyIuzJeVdQZQzSGG2KcMo9jx4kETjM0wNokeh99rR0iKhSjy1KKEouQki2LwEpIEgCciIZRHDEcm1amEuE9goePiFXrbJBok/GpQ0gi1RjAlSWJAKBCxpBJNCq5qqZJATUJ+xB9qOVcrPWt6YxaSVlEx0Jkgqhh40kADPNRInnadlAOEguyntJezxXDjXR6uiwHAEKxrcSwDDUaARJloorzzHXLWCVlQ0QmQEEcbWu1v26uDM9dvbIxCPffdweAp5/77ne+c+NH33vy1NEWlwEubaB3HSt6MREYREgtgg8+SOEjG26lKaQE0M8DkKVJZ2tLDNlukqz2t5KUTaja2SmASrnPkjKpIUKlDNZ0vWn+YBBFhU0zx9nG9nqUFNYpEDUAKCk4roVVxn1mK78JEK0E/BC8jy5LofL4ky9+8/SqHUg/0Mljs/vnWrZS52ylRrWdwrV7zrBlThx3Epcl1hlOrAGQJAIRhhiGZWa2IBKlmB7/7T/+MoAnnn0mTbNiOLLCsu1PP/FkZ+/eYT4ajoZGqGqkYBK2xsSgUQooO5swkyHufeWbULm2ehPAyo2VUbGZmPC//R//ZH2odxw/+mu/syY+J1WFhBAAWLatLI0SSh9EEb1PWmli0qxlR9euAyg3RtJNTl99rYN0eflKYJnS/Jd+/vudU0TplAUArVgiO3lbrf9NWwGWyQIYeCOSOHaWWYMOSlXRBCb6QFCq+NHOjLxPnVGtsUhURXvjn2PTsUgxdsMISsMQuhnN9ToALkfyZEKpZQnjKUsW9rbaacZes1KsVwsAzpb5piEmJmgFXROREpuKRDu5700+GAzLdqvtOhaAM6bT7rgbF4rnrrW6mTu2f+s7Z2RtK9yd2/fs40gAVNI8qmVWY9RUVbogss3Fqr0qHSsE1d46fIjWwIMlAIA1NhgjQvZm/+Jm1ut1Xa97bW0DiTu4dARAn24OtvUYU1SoIWWK/x977xltyXWdiX17n1Ppppdf54gG0EhEagQGEAwiFcAhR0u0KFLUyKOhomXB8iiOZMnSeNmSRpZGssKMguU1ChQJkhJFMYugCBAAARCBALrRQOf8crixqk7Y/lFV993X4HjZa/mHtNbbq9fr++6rW7dOhXP2/vb+vg0PTY4Kodpi8iKBUAGn+co31KosH6dClocgJMxEobrxgNGh9mlqPSul8px7AiB/9RyHFlpDMZSiUj7IgbnwBsv4xEsBpw1r9rwDuTwIBIDtX7b2SieFVhTGDaaol7o0tV5Iq6A11gTQrsWuUctN5rOMcu+NB4M8QOw3RRMydHFHo4bR6KLIT2yCtUcVSmkkQCojsREHuSwvLG5Hoqr150YyseoAN6y/KLDKqkl6AZcXUVpVMCBA0aKqPOlFb57S2S6OZqNrdtXtZujOFA+IVHtQoGHZ63DnVXkkpFLcquT0aSR6lGGAiHLDYgGqtmCGp42nDYKy1nwEEy3hShKo3FtlbOxFK2WdAxMV2scEAQZZLt2+d8h6/TQdJGXrmzIcY6UICIMwDgJmdpAsNeKdMbbf7wMYpIN6rV5vNhYWltJu2hof7+eZWZhPRO3ZvefyyhKAo8eOjYXxrv37F9eXOxfmVldWZnZsa4w1Qs8eEtRjAN6Y4y8c/caxF+48cvdth64f9AfkrdJsianeAOB766v9znNHX4hUeMvhw+eWVvbuO3j97EQtSTxkfX0FW7ZlW7Zl/9xMVxrFVQ0eULlawBDBK8v6q1q4AtLzvqqhq/DGanndzFQYJtAqKLBypqVMEo6QCjDcGZXr8QYTe/jfBhQpmz87JDgU30Kl41EdjgyXsZH1vlxPRQpa8VU2rMPc7F5s9io2DaHSvS/42kMAlDbKXUBFpeNr6iKHJ2w4lvLlVdsNocKqld7GCRqOtDzXQyzy/4ttPqWj/1+FE1bHIlLEgKOscapipSKiE6nolVJdWRQxW3mVqPJJirEX/VmLr6hCCFCZeS5adgoPm14IAHgvXDTYKwQlCwHpguhKBO+sc77U8fHeiWeiUNnMJUECYKXfv/u6/b/xvz5oe/0vPX789//wD4+e6iSPnvjv775zuW3bS3MA+ulF1Q9yt75t78yliye+/PULT718IWzUcyeKVH1yAkBrLF3tHn3dncnv/N7/9Ou/+edPffWlX/iZhz74o2+45fDuu+6flFoBxNBaxzfqbG2ein/ki8+/883Xvu6e62+57tre/ByAl14+v7Qex0xPPP/K/d9y6w+8747Hjy0rN9ixd/Yb//jYJz/8EIAHPvjGsyuXmh2VjO2Zmzv5teefv3hpybdfyKxM3lIbzHcBfPQj//DKU8d2zoS1yGta7KxdrjVVs5d+7svHv/Odb+91FgAM+u3OyrLyQhFYYNK8EwRjjcBnea9rAXzj3PFved99f/IHT3UpOfDm9
37h4U/83l8+0gp0EmlnxeYWwB9//AtaURJRItKWzrWH9zTGpl565Klf/v6fUTPj/c4SgAvPnFq9dGrPoZ1HXz63+5a7vuOD7/jop//xzIl5F0QcqF7qAfzNZ79UZ2Rm/rbbb0oaMbMMfN+RasTNvs4AdNP0/FL37usPIIjmL1/at2tqfnHu8DW7TTs5+sqptf5BAItLvff8y/teOPHMRJPm2+sXF029zhfOXrm4snYw7QeKAKTWzTTi112/89L88vLK4pm55XqtLs4vtTsLC51De+sAgjEvcW1pcbCjyPF7n4TcaiQ5IQ4UgECprkhjvGF6LESstHXOO6eYxdqN6mwGcRFzAKpiqDGTImEGAOaylMOWlFEv8FJpNJWttcU4S8pFAQAWZmu8cr4eREWtQ6fX3X1gZmq6xQrU0HmQhwHn4nNOBwvL4VgMIKyZgbFRI/RZ14BtYBst9qlJ6tPqyqLK1gFQow6AIEzOi3hPcACEVTberHc7bQCpc7Mz9QENaqGfaHlFOuv1o6jRbCTriymAei2mrOfhTHtNUzQzVVcQMaa3tB7EQTGg2lRARRlLoB3EwQp5ZtqIEIvZxFe5F2ISX2C95L3IUGd/yPasI2kAACAASURBVMkT702Q1Btjdaf7eZ5GmQfgsaqMgDSxDWuSk9cBQUXwgUYo/S4ADpgVMcg5YTAFyI23FoEoyRA2IgAgsS5MIicmDeqB1oDLjMkCpTOD3GoACozUNevknBIKnKh+Rtp6Z6RW9ArhEIj6/cx1cjcwygt8AfBVbLPipmEiEmedNXkU6wJ2g/dF29xq9t1YCosX3ln2bDPDWgEIorAQGXPOa6XhjDjpd9KxRlyAuaSVOEr7PRWHQV0TwxpDeSq5hVeF/rJ4KZrYSgmfeYAoquVZJ4xicAAgtzYKAlHKZDaKAwXlvIFYHSqI6HoTAMIaa5VnzhBHjdBnGbxTWoFAgQKg6qFGlAkN2u0wVoibbOoLC/nE1I4zRy9H0wmA2Ym6cg4GTjSRk2IBLGAbuDK9xshzG2t4gWeEYy0KlheW04cfP7O60L/phtcBmLvce+zxM3ffcd2tSR0mdZGwrVCeahUjYhYvQKytaGSOc1ZrXcos5TkDePHE3OvfYHIVCqFrPGmlNTN79g4jwm2KhCGKQAWVkdRmP6QMpkUkH7hOv1sLgqmxYGVp3TiYQvbBU5kKlFKGsjxQktyVKJiDhIkGPCm69cbdwslqtz8zM91qjR2Y0pozAN08a4YRcS46JBHxTqyD9+Kcs2U1qM8dk2MYwIkX49gYbT03dgWgCMD5ZTsZDpqhRLVIaX1moe2WBqlzHe+brByKJw5EBOe1ViJwXrx4TaSJFDhUAmD7WGNxte/Fd5zs2rPz+MW51X6akM7ECqArmICErTgjvsahMDm4gTXbVFBwEXZNzsriykv/8Ig3EqmAyWii716Pp6PUmKBZqB96CyKwwEsJiohAxCuX2k6UOACd3KytLtQlGa9RYJE6rK71A4sakzifZTmA3Aozyo49Uvb9Iy9Qw/RuCWwQFT0WS4AmSqK0nxVXEkwqCJyVZqtBOrC58Z6c4zRzNiv3o5T2nhSTFCnzYjrgwgWqagmr+2fkRkOzXnfeGyEAbedtFES1OLCoJ3XHY61kIghzFdeVjlUpJoqs5nVB02YGK/IA6aJUm8uppmwauAEyFeq9UTQYpBTEAKwTToJxHT378LGHP/nKtz34XdM37PzkL/+fksr3/uK/AjA+O1tPAmHynjwrx6TAosRTUR1ZxgMQlBBhhRuTUsXbpWA9wVmxxJr05SVvomB6++xKOzPL3Zj1nn17ANhLa66zHkSBIobSSulS7JAYrKo1RcAiUsy9EAGcZaUZ0FoAnD55lJOpLJqKtO4PVkNdd445cHaQ5rYfxApArZVka4nqdylg0sSavJMyyHAbzvhwgr7K4d9QGiprDLhSvqo4xyNutxuy1jb79BvVulWu/ypWW5VjKb+64HIVCf+qKoGoynCVIUp1bMPSRQC8KYojLoM3ERRK7pCSyTUcXeH4DIdPfmNo5W1VxpJFZFYtZkRc6jyRoFSeLOKGEhSvoEsqniYps2TFLVSEpNVgSxlgeC9OSKAUa1IoyowrSSgC4MRb5/qDzEDlmXXOa1ZhEKphZxxiJsqMadbH+rX1tU7be0RB4KyxxodBDMA7h0DNL68QeKzZEkUuN/2VDnuZ3ja5cOEsABoMJvfsai8vr5270Jie7A3SE+fOXsM025qKdLi2vgZg4fKVkydPwTk4652dGh+PQp2La9brywtLAJ47s3DpwoWLZ88cOHRtfaxx8bnn33zk7pXO2hMvHl1YXl5rtwHg3diyLduyLftnZNpXNSYjaS0M1y+iSnVko4BQKlitgN6GIRSKN2RjT+XyOJrBlWGCsnynQDRHkLQSrpQhkvlNQLsqzh/9jIz8TVBEtGVmbZhMGyVd05AqueEWbNo/hkslRpa4qzcZHuUQe6jGP3yvWPerViuboc2rwcLqE5t3/9qNaeO4rzKpNFOGCPGmwW3aM+RqiPabfm31VtlEu/pDgR4WuGL5xkilrAy7HBWlkuUFKdyhSol7eNBD36dCk0UKZf3yVis8UhIGl35KFbMXnyfAeSHyrFixQGDhjfXWOgBxHNajKLPOOGetI+JWPaIgcC7XlpViADUnc+fn/+Hzz75w+uzZV0/PnR2MaXzpqy987qsvHAyhBAB6XjH5kGXdqrZ3TeZtgaz12hkH0DR3cQXA8eN0/e34yl997L573/z+B+59/vEX/8vfPvrS155+07vvf/0db0u2zwDonJ0HZ8sdGxOPx/Tssy99/qM777il+Ruf+MS//JZ7AHQX6MXnv9Zb6r/ze977P/7Mh4699MQXP/bQD//UBz754Y8/9pVXuqkB8CO3vK97Qf3dX319z/S255552VgdOeUpuu7GsetuOvClz70A4M4jtzRyc92hsV2vPxQu9A7uH3viGxdMFGhgfnnx0UefBPCe9x+5+/6bPv7xZ23mRGsVKCWinbEBDQYCYHk97yynNreLxy79xa/9xcf/8ku7Q4Vx9Np53VHGBWYBOJI+mk2lrEStyf3XHDz5/NGnXnyFlXvDHbMAPvqpz+YLK21Sh2899NAf/tG5M6e+9LmHc1Hife69GAtgPA6VUlme5kZClXTy3vGXnsoy75yLYwbQajYOHLjh2gM71todN1g8ecWSyZ999ezi4tpMwy62zwGAlbGpenTKvXjy7Mzk2DvuOXDszOX19cUgULu2H3hTFAJIs7VHnzx3eXX54J7WiTMXXr3Uu/WaZGoyft0tO8+fv3TvkUkA0G2bQQUAtAiJTZOIGjW1kqVBpAHAGw5VVEvyvlFaB2EkMvBeiItSYQFQhJHlbFVorFNpIC6KPkBErFEoYhGIoTTDSRWnMIA8t9a7wDoxgLIIIqfA2q+uD8JmB0V77pmaD326vl4X0XCCUNUSSOi7PkoIQJqlWrOyLjccxkGtluhQd/qr3nnvlC7ucnLF41aSBD2Jh4io8VnT67eaEYB9+yfEDBpxpMgGYWSERJE4
k6fWDCyAxrhKu30J2fVSFXLUrPfXB5yaqBYqxSj0+3LnS0Js0aqm4GJzQU3byD4RExdQDEg0SMg7KUJKckBZTWGMhFEYWguxzBSRiMmLWc05pYTBygoUEDUiA7FOEmUBWzWZJQrIsjjFAchTbqyBt5qEi04FAMQ4R9Y7n9v67HTkvM+yTu6mx+LQuyRWAEQoD5nisJ+mqZWBp7FWTQcBk/UEAFo54vG8d4V8nsS5z4W8gDyRL7NUw9mV4azNB3mcREXZuS8XtQ388eoX3gk8K9IF11XgHECUZ56DUAOus97pZeNTYwYOgBZWQY0GbWcdoACBsZJmPvdMUUVrBYjKtB6X83jqfdJs5rkFQgAqUbnkRMwRe5+pQMMVPZis9aKDGgBYZANktYlBmqftdHyyYQ0izbmxcbleOVEiHGTEuZFXvnb6Yx95+cYjN7zrgXse/J8/9W9/4tsB7Lh/Fv1eJmxy0bEnKmHVssiniH+9j+sMAUFyIWecdzJRT6aSWpsGhhlAHPJkS+tahMCIMrnvhdQadSFK4eWinYc3Fk4pRQSleJCZ01dWAdjcN+IoYbbFSsrC7LQ3Qcggv7HQltqRZc0SiqY2UropAJhIMTEhXblw8NpJ5iR03Xx98APfff+1+8cAqKxfnvlqSS/AChFoVfIVhCpVVTFBEE9P1VUtu+f+G9ZXkPUW88EVAAGs77guUY250H4WLywe3pMryzmN0UXRq3XWWnGO4TWRWr+yutjvAdAMhpvdvuvQrbc1wvjyx/921ThLNJY0s3xArAF4T0kckzWZtwDFSZ1EMjfI8rwe8vh4EwC0+bGf/HeL7c5v/cbvRFfa2geNIDJsmtQcmJ5iBcCJA6ge1ISciCjiPM/iIM69UyoE8NO/8Iuf+fu/WXjkEa3gRBjsRC6ev3Br6AMWXytSN7knUuyhpPJYBSLa67SX7z+4F8Deem21vxg3o3u+daIWttbX5TNPvtLyzGHgTMqBBqCcjbTKvBBEuIC0CBB4YDjJjyZyq4dVmVyiZlEjzFrbdidMwpXF1bhBDR4sG1MP1RSR9j4sdiPEcSLD/Wy4urxxf1aLigz9WBC80uyNAEBOnpREkdIACQ2sV0pzP6sJDHReVgCrJIaQAhRoCHUqEJNgowyTSiyIKhdPJwEp9laK2lJhpZOAwxipSzzFxBPjrZkkjDV2T7cArM7Nd0xaY/YCKgRnCVTovhIVVdhVSEHiBVw2beEhG4cYgHdEDEfsEVz8xjkem945PfX1R176zEf/4drd4z/5S/8tABeCrAuIoBhKkw7EWiIUMijDEUGEvC+nDYF1EiiCCp3rAjhz6hxHxydvvCPU9UAZkrbWivO29b3MmbIwP2AVBqw1KSbFojyYqzP0/+DjD5HG0telKtcAXDWjj9REDsHAq2wjkKLyA8OwoHKfaWPb8sAKt3mUwsZV3FTWCxShG1WZPoyUq5R/3+jnVJVtb8QtUrntrgj/ijQMIFIS8VGxy6+KM6QayGtGKYB4EQWSQoaj+DjzCABaHo2IeO+G6Kj34gEvLKQ8yPmityjCMOCiyRoAIWaltI6jmMmtt9eYAyZVtR8ro0WBGOs8OIjiIjFKQdKIdRBFAIIk7vYH1vs4jsW54ts7nV5/kF177fVREAIIgnC13e2laXN2pjE+mafp+RMns3anftfrg4i//vyLAGw2aExPtGzt7PkLHMR33vQ6VhQ3anm/85Unvgpgtd259cbrCFkAObB9231veuOJK1eee/brSb15zx2379u1B8CvfLObZcu2bMu27J+sldWRQlKkXyseClX0gmKz0W4xG4m28ucIAjUsRZTy50alQbWjURBxw+OiCsmiMhIdBfaoWqc2r9UbIOSm/tPlPovO2qXXLtVreC9ccWrK3GCJjo0s25uxxw0IcXQUmzeWjU0xFFMZshuKUVfewCaPdfSXkU+RbJzDkeADG0exMaTNWVd6zfGPHvDm60nlyaGNUAnA0AEchYIJEHFDTR8qKhjLXKVwmdndqO8pil+pAKoK8udofr38sUEbL4Xzhme11DMv91RGb1ywNct60KGvUGRoa1HgxTtrjckZUIpqkVZJiCK0dIhYJVo7b7LckFJhEKUBUW7b3QEALTj20vlf/vnfzkXGA77nlm0cBd92zb50LTv5jRMuywC0e3YulT3N4K5De1wcnTp+YXmlO7CwLDXlRQuAf/uvHvyhX/7x7//Av3ns45949sljHhgDzi3kx/7o8/+FvoAoBKA1JbUmsr6xhlliRw99+B8C//bG2L6/+cSzAM6dvTyzfebRrx2/Q038+9/4v04ee+J7vvctNx655vOf3xvX7Qc+cAjApZfO/cEffO5t99ya+5X7bztw5823v9w8fe8t19Qby1/99CPv/q4HAPzxH301mQpndk8ELX/y4qXpwxPv2KHjVP/wh96e7ty30m8DmL32jne/78aPfey5ehAOwtAbk/bMWi5vuPt1q3NrAPbsmFk5+8qNO3feevvbfu63/+TMKq6ZbqjYNbTh1L/lgbcBeNu9d8/Prf/Wb/3e1OQuzYNXXjy6dOHEoduve/Ct3/K7/+H36vVpACdePpn33b3vOHLi1KUs5M9/+hGnFYWUGxuGWnkBkBsHY6ebdOHkqbybWsagLzdef9gxdXprABq18PC1B8+cPdNdWrnQTt9+7+vJZM8cexHMS4N1t9IDcGB3/ZWTR2+9Zmdcq9casXL2xMXL9bp60/4b+r08zwBgbtm8eqm755BMTDZndrQmanp8Yuyxo+cl4Td8y2GFJQBiu60kSQMRFSJQXro6kiD0kmalnqNnW+RfmIm5QNQUa+9T1lXLiQ0e0Ub4UfWdLzUlSTGYQaw0vPcCsCqgQRGBUgQgjALk8J1Bv9s1UGP74rAZxJG3YdhsxQAksplPg7FEZ4O824maUWpz0kGep7mxlBedlCNvcpfnQb3lyJm+1RSHGi4fGHKSAIBzxQTiiUHMYCYRL2R7/uir7W97650A7roT7cVT5LabAamkma33tGY7sCb3XES0OtaByji3/Uz5TI3FxRhVPSFxhcQsiRMHZyw0e0WkCJ7AnkhvTGDCRAXztag6ISp1jouKmXICJEgY14m9WGescURKA5IW4UaoBVr3B1aHrBVlmYuJhVXXWc3W66J+0HEO6yTgIB/YMEy2NcJY6QVjxyYaLvYAxDodiXIxQ0G4vdTTBs3pCTXbZNODHQBwXmLR8IPeek+yPDcSTDWtb0OzDxIAEgSaxw2tkiGf+bKHLBU1SMNRFxOut8bmaeZcAtYg+A1GQjUtVwUgxZvOpFEtEYGzRZMSZa2wDqKkAUfKuW6770EUR0XJK+nIK06iwUBQdqtw3lsjBtDDakEP4jLNVdIAJBobo0BFYWP1ygqAtD2Y3V5XnDrnpFD0ZWHWpEVyU0yeX//62b/77KX3f+jNC/OLp146/4Hvv68+3vCQuKaboQYQaHbOKQ3xXsc1cdmFE4sHb76VdSvMmbgOwIA0eR/phoYnDc8EL+QKyTkqYGOSzGRRECjNKrfiLJNSgkE/7+VGxQEApwlM1iE
k71g1WUtVall2dSAusgQQWGHv8igK0et753qDfK2bAZiaGAuy/uKVxb6AvB/kaRyAnROoaDOhlqr9gQisCRDvN6BnVpqZmW656Ybr3nJYhYFbnI9b/IbvPIg8B2BOPqU21KSH/wkBZARhUbrL3hit4EBEVrMVI8vnV3/i333snjv3/fB37wegu4sIVOzgLTRRAcgwVWqsJABi1YOOQAFs5MTDG9ZekW2b9typiwBYeCn3/bNXzp29pIBUEDD6Dki74/WiFzfWerkZmJgxocQ5ZdKOFZewckQmz/trywCaY9EXP/WpvpOJgIhz5ZmcDa0PVUriovI5YAdonwJQhEDQ16SFaqFEkQbwx//xF23fHKrriakWdJIoHiwtJqrzlw9fvvPA1O7ZGEBzlj0RKyFIgUgWSCyIwlqgWhEA2rFt0minaHxmWtV2NAai63+d92zXuRqJYgagAi0uB3FZEsZSQfVS5awrEtHQa6ncaaW1IAdgrQmS6JYH3pjFoVNErZmd97whqTe0D2sTtSgMAHiLjslaFWRd+Jeli7tRpIYK/9nIdwkFkvdzEgBJHGoymclcSO08awTgkD371Li6ClRQKISqyLscXECQ1YTCXGryKFTuWTFBDfXZBWxNFocJc7lN25k8dSoMfCbkOQqikNiKXzYA4EjXQpBWALNWCBSKNJITFLqkqPxmL8wigmE1KBRBCKrM20EYSoECK7rBLUomx8andoeNCaX6HAJwQjWQ0hqshBW48BML1vbwJFZ+rAwvF8MZiOMwAtBo1AYQTWSttRy0+0tRoHrpSpr3FAfpIAcg1pJS4ApNZAYXBX2bMTXa4INX6HV58TZdx02i7JvCDVy1w9GpZdP7BepHG0KQGFZLFK9HWtgMff1K/h3DeQVDLx1Dp3yj6JKGiGVVTFkqeWyqSyk+qqogwldx4LBehTbBtpvV+Tf2MoyBRk8IbWhHlrFQAZ6WWpPee1eS3wUAKyZR4lnAZfEtiIm891TBmsWXW+uMsYPUCCl476zAl0+XWCciQaHAC25NbbPO9fppI4wUfLFWzmzb6a1Z67RzL7k1EWsi3r53985de8fGZxbbXQDcXAtVEE3Vc2fESbPRXFmcP3ny9IFrb5qcrRd3xvj4WNd628kHve7Fy5e0Dq67/oam4s98+jNX5q8AuO+Nb52fu3ju/MVrrr32pdPnJsYmV+aWZrdvf/DHHpzduevzf/uJb3qrbNmWbdmW/VM2XbIvCpGpqpa/8oA21rpNK2Xpt2yC5aTiDhdeMw3rGmVjy4oafXVSTEb4JxvgJmRI2SiXu+FnMbqj1xwhIFXjuWFF4sjGFQGbSm1zKV9uCvNGjm3zgf5XPIPRAY0c23D5ZNlAWjGyyv6/sGrgI8SHcq9Xfc1Vu7tq2Jucm9KplSHbfbjgXzWo0d1VDgBV4y1aa8NDmEu4VCoXgzF0USoPvWJ3DHHOonC18p1GSf4VL6P8RaqD9kVfISmFaco7zo96ROLhnS/jyUqMzRlnoUg5skQUKun1B2vtXuA9lNckAFLBux6498EHv70zd6FO9WzJ9Pz5O95zDxpT/UG6uDYA4Naz5Qvzrt3eceM1k4f3XXj06aOPHtV67BNfPvb158/WGQDmV/3v/NRv+6V//X3veetNBw8ceddbj33++WOnF06fPn9mzbVCAbDcyUPrw0C7zE5PNn/l937lU3/90LnlwS/+8o8++CM/A2BscuIH3v+u8xcuNjQe/8Qnvu+HPvjWd9/6zGe/vLvevfsDd0/tmgJw7uL8D7z/PbfeffAv/+Ajx9rzDz/99KPPnHndDfuu2TZz8KYb8x07ANx6ZDq74sLZCY/xsXHHveXmVHDqzGD39XuT/WM51wE8/8i53W+6QSslROK8YqWYOungpkMHXsY8gAjKGTozv/TIH3/kyvxanXBurX3X9BT7ZK69+M433AbgjnsPP/mVU1EU1iISqo2r7rZWuGfHzMx4BC1vfsf7AKjgcy+cOP/+7/mBn/3VX1pp+7DGOhev/LSWoObTvgdw8OZ9u6anzs6f4bQzNwApyrxb6nV3zU732wRgaWHl6ZdePnPqhGt3rz98cLLZ8hbn55YardZ8p7eysAZgx67xcxcuYdC49/ZJYYw3Zo7ccjgzSquxP/37LwRBie4142B2elzBN1u1wI2ZjtS1FlGdzIaSA4iSsDOQlEhUyFFguAPlWXsocd4BcKA0swIU3ZOy3EQepLQ1NohUIRg/QiSqZAcqAhAq8bhS0YAJoGIeZgaReC9VI104eKUUgUzfei0URvF4HOXOGz1YWQYQ16E5tjkgGhJbaRCJdYp0ZIUaUVB8Z9bPIvEU1JXWXof9lBHUIdZWvSygFJwz1ov1SmtWCmBmnD+zOFULc7EATKwuX1kT2xRV6/q81ogxsP31vrViMw9AckuNBmxbsTKegqSu+iZgtdruTYzXylbg2UCcNdaSZSjNZYtVKVX0h0uCJ3gi4nJZKcTFiLFR2S0A9QZ5GHhFQRDVLIsYSGopKk5dSkFcC8LeINUhxYo1CEAQNjLpBEkNANiTNmSMrtWkZlRzfOncPFZ7faUmm6GoAYBAMcj3LCVB2O1l/a5tRWE8UXMNraMxyCwAZzxLza+l3rsk2jEwc0vpWM+Fi2vtAQmApc6alnzQtztmk2vVdCxXNggGo3CkQCDeWmOMt44CJoI476no17JpahyuXGGsvMvnLy6RCgHsPnSAwsQMcmINpcW280EWJZoCDlUIAFFivddRzDlXOT2B995DbZr+y5C6mGIJeO6F08+8uvq933nvs0cvATh1cfnffPAu71MPo5gQMDyBrDU5a8VJE8Ag9Z3FzKa1mhqjnGq1Zi9dbsQJaR4IAega1wgQRAGIBl5xVE+CJIkisI6hcpMD0HVCb9XpMZ9nFDaGa0exipfzPXsfJk4cM8dJAgk1+Xanf2l5XcG51XUAWT9ncGDgU0A1864NCgCsqmuUEWdGSIKATe6VeJflB3Y0f/DdRwC04mau+Z333frOt943XeuK7mfdtK49eVF6g1Nf0gm5iqKZR9KCxZrKShEzdTP3dx9+iCm+7bY94Wr8H37+b8ebDODXf+pfSHuliq83L+U6okJf0tooqTvTz9NBfWqSaNBzPpkY7yz3e5lHLQTg2hkFHApBNQvGC3kHL4ADeVeus0qHTYpnEorRz1xn3WRt67KQln7iXx8B4HhCm8BmWc+mOtbnrixu23NgbGL3Ey8cW7h8Zml9HcCt190swPkLp9a6q1rX9mzbPTkxbp1XwCDtr7RXAayuDFauXMrht49Pcr22fdfUtm2z48THTx2fnWgurrcBbBsfG2/U5lZW6nHErEww67MlI2ai1ZiIQwCXLl1ZnV9859vue8d9t7lcWKkzp5/af8vh3/8/1v76b1/96R95IwBxXSmq7bhC9SAAsoEkUf0bz70KYOHpVw/srjtrL3ePemlqcVNBJEpCIjB3+imAJrE3rsBxK3SwdHs21CWGHt6IR5hZwSBvBBrAsvO2nkweOULOmHwljKJrb7jBqwRL3f3/+KilPgBnpdaIK3
XuzY/hZqiqUj8qIWrvSVf+YOpte5CNXbeP9PjK5HhiNe/ck9Sl3ZxIwc2IAICtjWyptidcCgqSAnzRVh3DO67AvodwHmlxGdiHHACwznrxulYzoFw4DxoDrmepd12rJQCAuL7i9FjBB9eaCjll8YJCwHGY+QCRUNEMq8rLV48PFyMlz8RMrLtWpjh2xuXO5c6IikQpABNBYBST0mC14fsygVlshYQVo2CGL552Ya1gnRhPCAAoFZx8+dTRuZ73XE+aO2Ymbrv9Oidp1u8GKrHdAYA6q95GZSoRYaT/4kjPnKvgydd4/VdHWYLXSjmNoIHA5rXxqtuj/CsVHkeFl4/cPa/RsqeN2W74FmGYH3ttcDGsmhwWoAzJdCM7lY29j7zYHChhhKVdHgDzxk6kHARKPF5k5ERsOmfVgRWPpnjxqMpQlFYBAi+BCYz41PminZLyw0QxikCRuKiQ1GKD0HvnbCGVIwCsM8baJAqsM0Gc7N69D0rpICbhY8eeX+20ATQnt+swmpqctMDK+locxlqHYUBTO3d5L4f37gMAcavr7ZjJZjmixEXR9v1794Xh5Pj4RK25Z9t2ABcun+90B1GojDE2z+IorDWag2xw6syZHTPjANbWOwvr7ampqd3TU2cuXr6y0h6b2q7ba965J7/8pc9++WEAuO2/wZZt2ZZt2T8f+6826duyLduyLduyLduyLduyLduyLduyLduyLduyLduyLfv/13SVuSq5QFTQssrX4qpGNwUXZFPZGg3/MszQQrAh/FXokxT/NpKFQ/EWKgpeBAApwPuyZH4k1zakbZcJ36o8bzR5VqXchg3QhvvnjfRc9Y0A4Lmq7yMpc18i8CLiqlLBkQxc2ceQxI+kJzfnNwsCgvhSpJdBoAAAIABJREFUh5GYmAnkZJjYLtRKINWICv3skmZQUsPK3KoXxkiNYXkdir2UbXk2KhuxKcO5UWRTSrEVRU3DC1Vcyo2zMbxGVArpS6nFVhWgeuKihMLDky/6WqPsLEMbnxUGMYnzQiKqkucpFFwUiUjgS/q3EBExe4HzAkARqsLJKp1bHCm7Yc6YVFEOVOQ92TMDYOfhvXg/vMQMIkckJtBCEC/kvXib5cYBUBQQJPVWORUFutCSIe85VJP12IcAYBc6e/aP75pdnafLs53Wn33i6R23XDexcNGtvrRj/82ThTrM3mz2xu0XnlxYX/jKeGPbrmZn+w2RDQ61km1L83/RXncAmgMHop/81T+7fGHhJ376XW/Zv3DLXrezdvjRl6/83IO/udgrOLOxM9I2rpHos6u9bzx/7K57Xv+ffvO3z3bdeOgB3Hb7zV8/ubBv3+63v+ctE/snjty6+ysP/blP4uvu3DeD1Yc+9RyAZx87/t/9D+8lWT+0M3g2iu66666vPXdivdN78nT3DTff0DIdAEfuO7z6rP27v370unt23njb+Jkzr173+iMHQnR6Kc4PFrs5gMf/8dNvqU9FsAzue9hISe4SJqUg7fMAnjp15fBN97/3h77rx3/sP19Zylsxv/FNt+3bvfeTD31y197GjmYA4OWvPvOHf/THl9fzmw5FWS55k9Yze+P1u5cunX7rt7/lCw9/FsDc+WPnLnS+90M/nA+8jgLvOYnyPWPx5K6ZyanZ5fkrAMbHa2Q6O1vjB2478vQjX1voDGa371hfXwwDXJi7AMCJW1vvzM8P9u4Mu+srp8+c5frUWsd49EHJer8PYGVxbezGQ888c3Fyx77ZXZNHnz912+23nnjp5Avnvl4bSy6eWwZw0+HdvX7ez02ark21/cmludhHvXZ/+eh56SX73rwPABDXl7vJno6S3GfwUVRPIqKk69KWOAChD5XXPakxydq6O/byy+94zw2plzDMKagXDW+9OBYCNCsF8tY6Tc5rFiGNEKxRktWchkApEu99pgPFIS9eWp7ZNuOMA8DOko5zZkpqWW7AAYUxx2FyeXm97wDUuM4xQkO9fuYUNWzbRWP1UBnvF8Un3gJQ7Xar1exYgTZsPalQOSBo9myXBFZCAFrJets0opCYWcOwDxutK2fb/ZzqWi+8chLA5NjO0yf6i/O0/WBd1KJJJ1TYqyeNpW4nqocASHXE9IKQ2UTcrBuXGZuGcTwZ1bnRzMIIAHM7yJF7i8xwUO/ZZCwOjRsM8ixUMSQCoFwehi5LB5ZjgmZYL8SBQtbPU1erNwA4Mc504yYpo20eeJ9E0su0Rz2i1gSAwAz6zgdRmERabM7cEkSeLRCJz41tA+DxlqgEYWAjjuoBRMQyOi4YT9S2hncNAFZHAXxkBrk0JY4o7VpVO83jYSdcbfv2ahfA8tpKu706ruLHHj99ZnDNQx99dC1lKOoZU6xbAZAopdj/+E3fYXPn+rkDuBEDzN6rOAHQ7/VVrQYW0zdKRRyF1Nqlxvfw2kvCs1wcTO90SL0U2oKjKEwz1wiMoTDQ0ru0kIRNAGd7y5cvcV5Thw/s+Ks/+JtvfdtNO5uyvdGAZ9SaACRU6Pasj6NaBKQwRqyQJEqRH3RZaQDQsYhT4r1OuBnlnfUoaVw4f+nihSxTNYcYwGLXQXvOc+ehAuQiURR4A1I6s90wcgDYsogT9kuDaJ0CF/UTtqrOAUUxEQBjrWlpyvqKVYYwz/rWpmm/Wwu9sjaIagDQtylP150gIk+eGPAMkHMiXg2rTAN4tpaZvc1zIz3TqzeD2+7dpzTEtwHcdnh6247JWw4o0QYm00kow4W/YImKwHkRES+BojyXWi1p91NFetfO8cmJ7QAirjmRRdNs2mCpt256diJIPKcevmxRVSxOZddcLkVHiiJJHcJblAzUDMqQNu2B+8azF72j3Xvquw7d+szTrx7cWQdgA2KtRLigeG9oS4uAUUjvhVqLS1UQJjoymQkD7xQLZ7U6h75gx0IC7qYUMRH6Wgew4qzTij0safSKtak+y+HUhaX+uXOXjr+68KbX7W7VEh3mLs1vmi6aNS1TUO/aUBBeXlzlXY2br9371Asn0O/unJoerC4CuGZm26mFU7OJuC4k7fP6+eltodJhb2VuulVDGgE4OXeGw9CLDpRXZqAH3i22fS1sSIdN2rQeAGUubthJnQYhuh2/PDfXTztRoFoyDR8DmE7y6T1j47wkC99gCJzU+nMxHdqxu/7c0+uXl9YB7Faob5uA9MUBokDegQNxhvyA0G8rAJfPn9kze81Sxk985cV0ub93z/SOZjDfseKMtVSLNYBAgtwziYEnLwzRxKroTVZebgcwM1PBBRcADiQIQBz6QY8AkLDrpfkTj6vdh1ZdOtuyS0vzY5Pb446eW+zNNg2AgJ3XmYYVEeeJSTSJJyWiAGHWG7dW+bN8ock4HZFNAYSksyyduO2O5I4bZiwFwSB513VE1AKZ5QtF8ygfhkYSaoqXmFinzjATxDPFQGCMBRCF7LN+mi63ml7p6fW8BkikKKm1cmeyogGUr4e1scjkze1jM4ejrL8UpF2ZVsE4Ly6tAdg1U+8pqCgUFVitgiTI81x0SLmD0iUvu3KR4aXkGxfapmGeg5VXAIQjRJzHOrKu2zO50
T0XBkFIqe+2TS0cA9DJbEAOxE7yKATgCQqIhD1K79pDvIgVcYXvC5IAsXCf4zjzHsBN+7Z3l/zL7ezMmbOHbr7+1eML11y3r93p9trtiHvpoA9ArA8kZ2HiEMqIEqVIPGxueTPXasgngqBs1cwyvGpSEoJdSUEqKRMblbBARYwuHvqqRzYA3mizXiohDcMDt1HaWAZHHiUZyYxUBjINQw0QwUGN6Cr5qk+m8LAtNpWH7ysSuGaNQu6qUpH0ZV0ilIcIPIQrilv1dbCAQLxH+W0kTKII8HAouz4prRRrdplYKwGTwMIJC3kSRwAUh1YGEpBwyZtSSgkRswIKGVtInjuGVqpoMBUGobiCPgUnBYkKrELwAAILqIDIFzRzY0lUFAHwmYQ68kCoozRbfeaFo0pHzVYrVtRbW5ucngTgbWpVcHDvtbNTMwvtdZf2z5w60RnkLjO2riuqoZ5oTWXwrIMgVE1da23bsWfHTmMtaTjSAOq1CSuh7a5FzbGk0ZqdnpqthVd6q+MTE4X+dH99ccfMTLRn7wB+9crZs08++fo3vmlAmlXt4a8+nuY5tmzLtmzL/rmZruQHh/hPiQSWeGTZcW2DerIBO2JTmT1Q7QcbJf70Tf883EaGG8pwra34vOUbVzeKK/cim3/F6FJdvb3pfUKpjb1pRyOIXnUGRppDV5tcNZTXkC3KNzda1YmM0Ny/mY2wJ4ZnVTa9Uexz069UtBooNyqhYYFQ2WdlhATxmkN8jZpkQYSveBBli/FNdOnhz6pLOACUjDOAy7NVeRNeuPKZfEX6K0j6XMRiQ7mZsoVQcZ6GetabzmQFZZc+1FBIUyDMBO+HkGd5jYkE4uCIRCliYvHiudC6LLwW50UYEHjjrQgFgbCSbVORha8TA1hjuKyjws62MVptq+U1bBNMH9gxcHUh59ABwOlF27f7X7/PzTdkvqe7tX985uwLVx5f6OQJy6/+7q8A+M9/+tdffez4/pr+/T/79MmTx/7jr//o4186dfuRgzdPzXznd7zpI597DMBSJw3iWDGRICT/C7/zJ+978z2/9Bu/9iMP/vy9R/YD+OyXHj5+cun2Nxw+P//KE08+dtdbrnn7A/eHO0SwM5t7/gcPfiuA31xZe/HpFw7vH7vrppuffqX/yOe+vKvOn/jsF3Y26p00f//33Qlg/vSr22ZmD16X7795m9TSG26/++yzqyefP3f6zEJzvHH92B4AtV2zy2demKzrC13rdCSpJCrwYt543z218TqAxx/+wlu//b4vPHHqzNm12Yb6X37r58fyzp99+OPWeIb604c+A+DkS8eto2978+0//KEPfvqjH1l+8bIY12pOnFme/5mf/blf+9//FMBzzz7DIS0s27GWDh0tDbIf/MF3fuvdtz/11GOf/+JTu7ZPAkC6lFk0avH6yrpjy7GwilhoaaVzZSEHsGMyHpuoxUkyMxW1ktYXH//6WtsOIJm49fZKKwGAm2+55YEjd//u0Y8ohZ17Dh772qmf/fd//sC335/F42dfPt7OMgCvnppncmnuF7rZ7gO7G1ewkusXTq89cWzx/TPXHShmpUF7/tLczFRdbNsOGmF9KlT56hXn2+qFc8sA3nnfnsDpK692P/X5F4+fuJz27V137d/VGrdZFGipXPyh9oUverGOaD746rV4L94Z8QEzQCxeiMkLjLVFuxvb6RINJDf1sWaQDbK1tSxPrTdJFCFLAdjU+tWOS8UMbG2iaW3q2XgRKBWGKohCAPBCgYa1g74JQx82oAK23kaxXl/Pd9QiAFD64uXVvTvRmpkkygJx8D4fmEirtc5gvp0DuLh66cYDzdmdk8ZkiAN2TosGB8bS7GwDgGg7SG0SxT30s15vZvv+QbvrWXSzjlpStnsesFKIGgkUfBRoYiHSYVwLXaBCn5eixs77IAw9tM2dUAQO2u12HIS1cU47AwCeI60n0zzzkrfGCeQHWb7adZM7wnW/BmCiFjbjGECaIjV6wK3ZG96UGsfra1m922geAMCTsyb3lNvl5fX2er8xtvtidt60p770yNzap05Y4wCsd1LlpLO4Xq/Vk1p87vKizbFgTcDoExIHAI16mMCM1Vsk9u//7lNvuv2aqVarn6azk60jt1wPQAI91gxfPPpKMhlpPzCDXtho+DTVUUREkvUBwDvyNiSlc+p3TEfb7NLaWjc7eOvOL33m6KUzlwB834fu7ufdqEZi8kDYe0fGikXGKpMoaUwDWD8591f/6em733rj7pmdn/rUy2++8/aJ669PV+e889zvAYgHGXc7FIbeGphccu/SjHKnSKHUjwPEe/KKiRWzUkoRM4ehanjyujawAQDlFcIG7FqYhKQlogAiFEbIBtYYXUTDzGk7d4jXV1Z9J4/j8V6Wu54wOAgZQD0JvBBrHWutXGaznkl0I3Z5e2Fy33hNGQCcJGoAeJ8r0kOWqgcpoMpuQsQ6D/GsNSktniyrG+648cgDe6yfU10DwNMO5klz+ZLtn6egZVYHUTJEeSqjcs1SSrMTaxwTkeITZxZPPXoGQLfj3/3WWz7z3Isf/vjj73jLNT/1PXfToE8QzYo22oCUrauGC11Fgrx6RRZipVFrhMZSELCICxTiRoJC/ozKc1jsb+QYqRK7JEjVJgvEirRmxQi0IiGbpsVi2qxFjiK2qVhDxCrQAqWicG6hXeBczbEdz748f+ZS+9TpK5qji+vhYG71zde1OsgCeABpltnMSthKB/aLT116+xtuP3nhysc++dmDt96urS1W/xScLS1t275jdb2XId8106xFNMj7xmWU69xmALRWThAEgcsHFjDOM3Mvd8a53oB7eQ5gNqqN16I07b56YXlheW2QqUD7sXrQ7ec2MwD2zcTkJM2M5qLRhPfOQRxInHODNAXgfFRluyt/AQygBom8FGAus3bQpHlyIklqutFMzi62u/2MiBRRqcTqvOZhdw7awH/gSz1Arub2Cnwpvs0zsk5fcwSgEQRRr//k5545cKQ3ee22zpw5/rkvNxRfs/dav7Lu6jEA71l6crWTK8Ps+8Y9c5XvlIt4gWYGMNaoh1GUri4zL+iJKeedxiBkOGh4J0UvG/GqXvPDmweb/ukgBAC4/iATr86+dGbXflKNBAh6Jm+F2thBEtYBCMIVqCzNXvfebzvwne/xhtRE873/208RSEdNAHT6uI8SYoJiKe5YVkIMFlIYRgMkXjwRkVSKCUVBAKFsRi9eERGYiaml1y4sn76W9iiTYix0AQ96SwDiMYU2SFHZKG4oEASQBlB2ZBQQRMEXA4cxVfcbY4undtv2mU6Lrly5NHfuXIRQpd3VC5dJw5JWngCsr/cIOrdwVtiBBIoJiqDYu9FWjVdfpuJiXhUiDEMwGtlqJIYZjQtGXlWOvYeULdCLv4yKBYx+L0rM87V/+iZbjgYkG4e1IXJcsvtLScqRAIRGjmujyAOF0G8lRlnFTNV0KEXkNBxUWawgw28PlHaAsdY5V5xhX4rbXH3wFWu8uK8kz40nydJceym6gltvAw4KYjcAhmjmOI7q9TpBestrG2N+zbmq
xcn6YD23vp+nPs94MGiNOwCKFXl5/NlnQVSr1eth0O8PkkY90IFz3jgLwHk5tHdf0mhmPm+3V+cuza13utfsDVlpJi7KR1rN1tTkJNvtYS3pdPvgyDhuTsy894F/8dgzzwBoNZvd3KwvL09OTykd7tm7f7Xbc2Gcph0VkPX26iPesi3bsi37J2+6QhevYm1XqGM1q1+N0mGjSm/0Y1IpvRSfLT9Er13UqoThN9NrLFN4tFFe+H+z96bRtmVXedg351prN6e9/X39q/deVb3qC0mlFklQCEkEYUIjAsFgA3acZGAcGxwG2E4G8cCYBOPIBAdnBBsTsE0UIQSSaIWQqlTqqlSdqn19f++7/Wl3s5qZH3ufc8+rKv7jMd76UXXePfusvXY75/zmnN9HNDVq9W8noGmYzlkhazN++oRrsS5dnKB4xK9Z8wQtACYw2a3fVj97Y+AVk19SfZLqs3bLLm4FC2Xm5/tVDpXnNWt7b/kNYeYizGzzxqDnvgje686uvJ6Wpp6S6q/3/1IDJTPM1wHEVT5WkRBClT4VEQ+phOhCkKrysSr2BFHwHrOnpRYfBABwvRyh6QbVxWCaILpEwjQBcAkEX916Vew549mAySlWRmuREMhDgpNaWCNJtHNe+RCCuCDMSBQB5GxhWRKOATgHNoTEKZnb4ZWXz96MlvnbcZ8zkXVFFAGAlEShiMrrA5A5cPTiy+f++InLl64Nro3tkSPRwoIC8M53fcMXv/iit7oVRZ95/OL/9sufvHD+7IWtZOPcpXZDv/m+owCePnNNWPd72cD5RqoWoV554fkL7373UkJff+YCgAhYmG9+4Jv+2rf9g+96+tzauRdfPnnXyeL5nXF67ckv3ji+lAD4vh/+ziuvvvIv/+XHv+MD7z2wtPR7175630N3ZHl5c2trc238B7/5NIC5RXqVnrr/nadaSx27h+xm/tu//ngYeM/hbtPU3RJA0aIPfvPDT3zuKzsvbzOcDXqQ5xHoM4892dEawNu+40Ny76NP/cqnrtrwv/z9Hzt5YuVf/+pHr15eTxI9d+DIww+9GYDsrN0c++//3kdP3XnwzNqFly/l733LHZ/4Tx/73DObT18YvvTicwA2t23SjXQU+gO30uG3PnD00ffe/+KLz146d6XTSExrDkCsKIG/ePHaXScPeIpCcNm4V476rfmlxbkGgN29cUnqv/zgo73drb3+CA3dNYyMx+OyFSNpxQBeuXIV+hvf9paHLq9FL37qhceeeoGVbi8ddkYrurjUiQHc6A/J+cgiH5bjndFWrucPnUzMpcKHxYMrkYkADDc3r93YWp6PMp9pbnrrXr68+9iTrwbIbq8AMBi4RuxeuToocvvOBw6NbdEyRcIFtJ86yZPa5koanivxx/phEFRyuMQML2VZBLFpIyagqoISUoULaTMFsHd1ON9tkIiMR6yUiUyUqGI0LEVMEgNIWNndUX/PUpyajt7eKbsHG9DGe5c2ksb8AgBXljbARBzKXGnlvXXF2HpuNxa2dt2WKgFI5L1w3E4oCt55pVnGuQbGNuwOy/vvWwWwuwczHmZ5LzFJlpdKRDll92xWBm4mADhyMs6gG9JwHADvizJkeTm/2I1jz/AALGvh0Gg3wWSZOMCLKBURXJGPq7AwjiPvtS0DJJiI4QYS2guLq2AHN0oPLQEoJA5F0W62BBo5xnu5arX9qoSlAxwYwIVRubVdjHK33J0b9vI//KOXe//23HhcLDXNO48fOnNpHcBmb0RO+v0hBbm2tXd0ubGzm6WtBjnb7DZbaQSg24q6semePtKIDUX0lvSkCe7EHavwNmk3mC2AOOhOjAa1n3nx4kKSjGxI2xHtrC+vNF2xAaDdXbDjvY319fvvS5K87xnsXWktBa/ipH7/ucIXCO2mEjenaLA9+K3/+LXnz27/8i98+OXHXn314jqAv/23vrmwW+Vev1AhirTWynpi7SOtIuecBwAnPG9io2PSMQv1cx9C0K5woSCrAQzzMZdl0m47Gzxr9iTOswCkSBugqF+vxMRCWkNXuFhI0nh7Z90GpaMmgCILIppMKraEOKLIWQ9DHEddamajDMC4sI2lFDJYnRNZiYtsV7TnmFkjSAAQaXZRw5EZ+h5sfuLhE7/wSw+2F2Ovs//mH37P4YMdAIPe2XaU5DkHNTEZlaaCyEQXGBApRVRAKEutVBrHDcb1Vy5e+fwL2zu7b3/XNwB48aVXX/jq2gc+cO/dxxNf2HSuEcqiNibTLCWBhMASQiBmpVgb026EcVFVUEExSFESRZlDO42T2PR7ZRxJlMSYUaqZpuD2YQCq3gb7ZjwQIBKlmhx8aV0IwpIwxSAALnijGFKrZ9elTfuAQW0pIQSuRJ+YFGstZZEDapCXkTEAQtLwzvtyrGLtxCkCM5eI8rG9tu3vPX0UwKuXd554duOe+0/j7Fow8ShdeemZM+994HhMN0rrAKQNNcoxzO3Gbu6CZ5jHvvrieBAakR4Ps+oQCzcsrB3l/ftPHT53dY1YJ1HkvLVOdCmVzKBSHKet8bh0zhMo1hFEpUY306hwksYKwHK3vbmd5aVf6c5v7mbEYh16o/K4NloLgMLabBzuPsLgyi/w1jpIpXgVRlkJIISoJspkQZ2WFIiwFHkxZiUAWCkXmElJoLJ0AJlIK83OeyIYNWECDVO/UuqmGkil+gUAwtW/KipgCQATEWnI1MMuShtAKVHElJVO6agRXARxblwWlnQTgI+ToMY0aZGZdZNFpt1Brwdh0DR6UFIQDcAJlYLBsy9sPf+lu9/2Jn7wvq0vPOnXr87ffU/r7lNIYwCeSFEZUJGZ8gSIZFSZb2EAmcssUdqYH26Pe+Z65/RqEWIXVOlBoioIu3TgJM7HqhQfGt2m9qWEZPEIAJf3ARC5cSBoAumgNCkjAWCGImGZZhGqh1hCjatRlf8OHtAIHgCJIhAbRVDf/mPvvzbw3WV+6/tOv+WRA1aoeXAOQHGRlAKYoAlqyg+PgMA1C7tIABEjCIikumrsPJhJsWIApfMuuLnuwsk7DkeJfvapc5vrN1wx8oWAEvIMwHA8LgtFOmIThL2TYAMCOR8UzVywKq6QCQJXxSmT6IYwfY3JlA15Skj5xuyNM8mMMPk8y2D6BnUXk+VUS6FwK+oJ7Ida0/fKRHWGppqSr5mUIFIhjLfehZNvCQh1WxwRqNain8ymqKrWrFpDbtHAnIY9QapzgsrqeB+gODaR5zCSAkAIokhpVvWtOw2n6uijPjvMTKSZ2TtXWmoEXYUJFWcvAK2V1QqFWOcluCn0P21Nm/6XmJlrnmWeCCV5CQAUK8VqMBy50vX6w+BK4/yxRkMpZb0zygAgpXcH43Hgxfmu0fn19fUTp045ER+ksLaq/jm0vKLjmIiDYG9w2YsqrFCSrh4+ubq+BSBl3l2/MRyP71u+9+Diympn7qtnXhk7a+Lkvd/4Ta1WG8C5N7pkt8ftcXvcHn9lh57aQpJZ/6ZG4MJUjQG1KZskjChQuBVzk6m/va/lMq35w+T/t+CRr4ciJ1hk9XHGvMxgkTNtylOAbmJ
MZXaeWRuMEKpiun0x58l0mEKKb4RF3rqDqS8gM9tMsn+T1crs1PXGMqnxo5nZZjyMqRw3T2Z9/cmZYaQGRGiC4k29BboVstxPy74xbDmdthL8ueXyVJzXE6h1+ofJ7qom7qrXgkQCoJhqDd56gyp0peBD7RrUHpNMryEpro98Uk86qZfk6eGSVI0tQgyh6QZS1yjU2CwBIK6cPAkIIFGsCKS1ARAZzewh8MEXhWWmKIqIyLs85SgyGkDCmJ+PQNpK9xMfffLs1eFfW+wMb+wmJ47tbFy7cm0LwOpqorMbV66WjcYBUeMbm7vUMEdPrOyeXxvshLNfexVAsI5iDgisVayiX//dx+dZXbrwJ3/7v/uB7/zwBz/6Gx8D8IWnrzYSf+T4obWNLbEuUrK2l3/kf/8V5bwTAOh2TbvT/U+/8zs9ZQ8uHXjHO98218quNnZbtPT04y/9+ld/A8C//79/8tTx+3/lq3+mOpf6Ny+bjr4+ytvOmbmlj/3uE6cONgH8/L/6b5PhJV9YVt6aBMPxw8ePPf3C+tlLl33A+lYG4OjxlV23/PCDD/TKS0+fu+aJEBmbF8888/y3vue9AK5cuJFd32LfEmD7xhU3OvKP/unP/PSP/6/Xn33la8+eObLQAPB9H/7wb338j/+fj/7+Z574yqNveujBNze+/X2P/ML/8TtpvPmJT/1hHDEAMI0GtqXQ7tAH/4tH/uFPfHht4yZT+rVn1g4cit96+m4ABw7d8buf/kRDq/MXz/S2ejrSh4+devnro52twaEDqwBGo8ubW+PPPPFksbO3euQQxY3+qNdtN46tds9cvBK0AnDh4pX/65OPXTj3SjnyBw+fyrzuzCe/+bsf3b6ydfThO/c2NwAMiqzsy5devPqOt6yMRzh7dYxLZ65e2ezON9NGo8q9Uyu96933jsD65iDf2C5t78jR+LuWDq8cXKjC00tre8vsDy9q6HhheUFREJ8hlIEjCp5n3nUioXo0qycCqrp/a8UWZhbAhjDKSxUp1kxBjFJGKwUoEwHozLcYsM5m47K7Mo8Qdrb3BKHdSMkCgCJjs0yR1lGc7Qz7N7Mo5ebKgoBADO8B5A6udHFkWBsVmdJ5V2ReGAh5EYqMAAwHdnG5ES90y7wHOM2xLW2sSAHtpjl0ZAkDF1j4AAAgAElEQVRA5vYiIq1KZdoRN9gXMgy7W+NW23CTAZiYsswxla3UhNKjDFTY7nIrasZOxpERABm0D0WqAaUIFYsDC0WKDEeKdQHA5oX1SutYnPPOauPhO6+eoStbNMpTbwEgeL92o7fbv0BZuHnx5t5O//Sxg/3eeHvrBT0UACOXD4PPg18w5sB82xo/t5weaWgKcvbitSMH5wDcc/fSXCua68SZzdvNJE6DiVp7/fHBZlhebfmyBKCVEudDpODLfi9PGp3r567fcbQseoPYjBE5AK6Xu6y9s3Ht/Ktb3XsPv/LS1eZKev/RzvZgkA0cgEDlC2fWev1xs2XybBwrVeaZCDkuhWDSFDWsEYqQFqGlyqGP2il0x0SNpUOJidoFA/C7BRtGsNb5UOaEWFgFN7Y+cGmVCwBUkowM77rApIhotzcgq9kX48GgOd8BEJiSlRVmNkUuVhDAxMF7OEtgJPVjSwwogiZieBFybn1YDr1srN0ssxGAZqJQjB0JRw02uQStI7ZKibjSOjYC4P6H73zoHQ9HHYnuPKS/ZVWlHsEVwiRB14WJErw4gWduduN0bpHz7qjIFlabZ8+/ujQfA2h1YueQNE3ubYCapBdrabM6MhbS4DRuURCXi7NBCdlh/vUXrl640L/7nkcA3Lzpv/q1S295131JErliaF1Qdc5zonRXxc0MEiIFDSbmEDDOnQ9BsQDQmnzwHhIpREpZ5+JYRxFPqxQnj/+kihO161TNC55oMXEl0gajwqgoFaMRa1cWuZe8KADYsjRMCBDwVNsKkxRo3T7JJNXrRJiZiZnJN5rRO+85euTkQVEGQGmDUpoTLSFThiVAAKXMK5e390bUOXAKwEf/w+85l8YlgxXHBpnb6g0HPmgpQq09brKSm63OlZc2x0Xx5RdfOXfpfNSIrbdFORiMxqjU7XSjt9fvHFhOEjMsvAYpeO9lXPidwQCANuycY6WV0lrBCwJ4bCUvnQuy1GoAGBfFKCtBnBX5eFSIbjFCWY6DUKcRAyiLbDiyiiDiiAAEH3wIrnr3uqqqMUzrrwgiVb5UgggZJybSGoD3Pti83Uo1+eDghawLJIAPEkJeegD9nX5nLoKUVLvHUkMrxFJXw+n9vD1NUPKAQkSgrACA1VqIhGCDBBO5cWEkCKNgxaS1JQDalu3YSaikAmniHNZgzn5t2tTxnNxlIo4oUmSqo026HZb1rYtXmncf9SzpzppcuZTMt3DHkVHpAeTWNiJjiUEMUFVjXN/3qKHG8ShXcZKk3WbaHu1sz5OXoJuREl/qKC09A/AOEdP8fPPcn3z+zJcv3v+h98ydvuv53/5Ui/nAtz4CYHUuNYaEiZUWZmENBSEmzeAgwddPioiIEFe+Z5VjZgpaoGqWo6CIKCgWYRvz0mJjkOXJsU4rWfJBWTgA1DR2h6AJmkRPKJpEAomq0ggTdc06YV7BoMZRYAIqzXEvYW97Ow/xgaWlxbm5OMQ3b6xnWd8TCxrKKgCXz12VvFhopCmZTNgH4kqXZ1IkO3lIZcavrrPqE3ASk6ioeo5vdcGnkddsWcEMFolbv6v85TAT9dx6q1S/nsCjry2TmN1pHVgx6jQJY1/WdFLYMQFZpyHCRA+02oZr6bNARNMbl4lqvfbJRRACmEKgMNtWNVlYxYJFVUc5gSsQl1GViANgRZoUZk42apOAaZjFxJOnsUpdETExkatauyqoWoL3zlqHsvS1D37riasjy4mKJlcGtvpCtNIAFCsBJVEStJDWSpJiMCidZ1ZMwiYC4L089fUXCu/jJM5HQ/a50TqO4sI6xbryBvuj7Mjc0tz8wo31G9fW1o4cOaaUAqvSuWZ3HsCpg8cXlg/EHGDMxvZuc/Hg3OKO297Ii/LAkePHtrdfd0lvj9vj9rg9/qoPPeFnnK21BzCFACeI2sSESW0iZd8RkqlFmhi/CWYJTEsj601nXfTZ0r2ZOsH9Cd6gcrKe6zVfTD221yx/8nk/VVkXIQKgfbTgjUo0b53k9WWGt+x8Zv311jKDe04hT6LpWZT9M1PvSOQvcxL2wc9Zp6bSqcbrXI3ZhOrMRLeUt1JVm1EZYql5GWdKS3nmUu6b40CCCV+jQAiBpPbIFZFiAoEZzLVHQkoBxPWBTfKftYNFAFyN0GACTlc6e6GK5Carkf1UJ1HNgAlMtqDplERV4UAARGvNTKVYpfZnVlqpUCsXa8XEBJiGoWI0BnDfqaUHV2Lfo81z4z/7+JfvO7HwyIlFXTjNur2w2pg/BYC5KHZkPs5oV/Pu9vlXt5YOtjaH6PQa69eGv/unfwHAsQKE4TPLPkinEYH92sD/yZdfePcH33/g7nsBlO6T2pXH7js5LvpXrmeNOFIcjt1/l93dXLu+BWCnZztL1F049E
s/95Gew9efv3zXie6P/sS3DXbXOnNZGRyAp77yWDkajZy9fHVnd1gOxn777LqJ1b0PPbA0P7pwvQfg1XPbp5ca6+evLb3jrd0F/Wdf+MSl9d6VnWFvL1yPR4cOHgZw9sL22t6fdrU6dujAZ1+81EjTYH2nEV08f/U/bv4egPNXdy6ePX9tY2+F1Sc++qdbg733v+9DrzxzdiGOrvSKx595DsA9J088+ui3PvHSmZ0ba2XrjvvvPLZw4MTm5s5e7tpp5EoHoKlw5NjyYH3j4dPH3nr6ELqd0asXLzz7yt7Qrljz6c98AcCHv3fFjQYHl5d2BqOdSFvI1vpa1h+UWvUGewDGHmmq55YPf/3axgkTj8txI9IP3HsPKH7qxcscOQBRlHzlqa+1u4tjznfLkS5x/sWbDk48Db52dulwE0AkAqVfvXj9zXct7g7x+T8/S+3m4fn2HXd0FdGonwFY29w+fM/dDKytdxK7M9eWTpt1Yjz6BgbAXeT6m5Q21Y3eeMG0D91xsNzdCCTBW0V8C5VrqF84lXcOCYwpOg8mUYqVJuVL7WNmI0QAqygqvKQsAEIUeeugFEVR7kO+0xfCwuoi9Yej3AFwmm0p7cWObnV3z11XpQx742hUQLOJolHV12yiuJFGxtiRs16RUnEzdc4PbvaNUkmcAuiXo3YzJlhRiFhX2qfeBa1osd2gKAEwykrDMAohc8YwLNzAJiRqTkVxDiAvxrmzuhg20ha5sty8WeZF0j6QWdtII3EZAKUjG6yHE8TCpFkCRaXV5U7WiBWTAeCK0rQS1ioUUJ7G6sEX/+Klj/zaX4w2847iUeEBUEKNJJ7vpNTWbc3NjonG/UNtPn33Smc+AjA/1+w2m/NL86mOdLCtpZZucKSlHAy0L7xYAIWEphEicZ5hh9c3hpEMViIa3NxeSDukYgDlsGTnmXKYpLiWuThcOTOUgT+yFA+3RtTWAIyTwMo1ko1+eWmQb4+dDtRaXHn6xcvnL+0BWFrv3dgpVrtmkNnVpBmGPVt4FWutdeE8MwFQSZyN8+5ynOUhETPouYtrewn0eBjs2DkbAAyHRXNRxXHspUgUirHVRrEjZ9KxNI2PAOhilHgYF1TpUAoKcRKP8qA6c6YzB8BlmZlbklCAmdiDAkfibPClJ/G6pjaEMKAUKQUCGAFy/Mji0e8+ttTm9l0LAB44FhnkVrz3VIYy1hFpIwjOCzE357sAVlUrK9m0bDEYZP3cdBo6bgqSbrtVBZBeQOBmpLJIgeTCUy/8m1954oG33P2Df+PRf/erf/x3fuybALzr5FI2KDLrG4acnwSgFeF1QE1bIhAfnBAETmuCsUohTsgYMUjaBoDpRGmaiAgxnCYVYgplDRZUkIVM7AtVDGcchIi5kZhh4QsnAIaZA7MxBoJRVhpFnoUIo0Ka0aSGjTARJZ8kIglEAaQmTcOVg0BCxDY/eGShmRhyfr4Rf8u77zt2cB5AqgiewYEm7RMTd0s8JsK5qjaFASAm1pTGKsvLCxu7WazH4zaAVLEVQSgDRBkdrBcmbcyFG+NmqzUaBgDba6PllbkQdG9km1S0tc4ytz3KDrdMaatEIyWRts5d2tgLEs5e3xj3vCskFLxxY6McBACSS297p18OiiCtSDWa0d5gb5SPWJOiEOkaPw3Oex9ApI3W2qRxkhp1JQuRwbHVAwB6o92FbirMF85d0yYaWx8rxWz6w+LoQgpAGTXfSoyGdcEYkoq3blJRNfEgZtykWnUX8EGUhGDzYghAuTIWm2WUW3LwnUD9UWm0JhIbwlw3BWAiLdaxoYoih4LUjHmYVsIyKugDTERCUn3gYFWkcmsBKBGwyjMnIB+CFhgmTYiJRZDbAKC0PpSWQ1UKMHEmhSoskiYUN0JEtatWu2m5E0WGyQHQ4kNREqsoQamUK0rWjFhscE1NqaLqlhlZB1Qi2kzTAklSACqQJYAUIh90HKdFb0Nr1og1e5ASuLEFgGYUifONVKv+bv+5G3hPvrC4dPPZs3tFWH3XQwD8cnfoSrCw0VBKoMAMCCkSOJnAR6g9QKm9xco2UgjE1VMZiIlIFAv4if/38aWTR5tHTu68sHHuc1+aX0y/4XvfDSCiGEqJUqIQVNXlzQEcIHUPtUylyFFNWhfPahWckNLVc1QWthyPi9HwoEkSj+GgKK3Py7I71+it3wTwqd//7HKn/db77jyy2BFh1pqJXfBaBe+w73rPREbTMetq3+qZY78A4JYpZj/S7IQTxLLeiKkmRprR0K4J3zEh+ud6mnqScOsCp7MTgaUS76b9YIxeg0juU09NaGyr9ErtsIsIE1e1sPUPq6wGQQGhyiHV3jxV/PuKCICf8CEwg4mY4L1jpWzpnJeqiDUIrHcIBOwTqtKtHyoh9jBZbGVhq7w/TTMKqOMhqQvXZ1S8J/WqRAKh0tpKuZtIMZGvZqxfx0SkmHVpSyJPBGddmiQgSKgzZcpE2kRRIwrBaxMpOO9D6byAiDjLCgBPPvPs3MJZgdg8VyKMYIxmpcZF6UUAJK35o/NzywudyxcuPPn1F+84cVpFCRMTh1GeuzcuZr09bo/b4/b4Kz1m+JJk0lcwY6im382m+W7J9mH6eR9H208pvcYyTPymiVZNbTZfs6Yai5x8mmlgrmzqbBXjLXkxzFrVypmbzF+5bPuo4CzUWNtj4RlMc/b4/nIksl4e7WNi2CdCkX2IcgIoTioQafbIqt0x3hhp3V/MZHH72dDJzLecjluTovtLml2xTIowJ/nICSw4RUcndI3VqARxqHY2ZgBTqigjKxQSQQIztAIAH8BMIuRq0sbpnvdR1zoPOrntaOIEME98g+kxMKqUptSCPoQqyVkdZt0JV9EKcQiUxjExSmtrVqcKDCIhJmN0FcAyo5UmETxJAeC7v+3BOfDa1eZHfvWPtOC97zq+YwcL6kA/Dz4YUAIgoSjfay4aOffqxmc/c2N7p7i0MfDMZbDNufjy1U0Ao9y2mzptpm5rKJGCE+eCTqLPP/bc93/oR3TTAGjNmXxsn/z8F+PIdGMTRESrwXa/2NwiZgCH7lgqB73r2XDB4PBKev7Jpz/+OYpX7vjO73/LP/q5nzpy6DEAD76p8/knvrJ6MH75hZd2mJMgZGhY0tlzFyJ2FX8M7eUH33Y8LsY+yMYTV7782Pnre6ML14aRY4c48xrAhSvbj9739j/5zJ8WwzIGNIJoUyDs5LI73AOQtppffuaid77bkGZsxPFL5y8lnblTJ470nn12tBMAfPrP/+IH/uvv/zs/+r0Hllb+9IvP/08/+/Pv/uBzN24OQmCyvorN//Vv/cqy4R/9oR9/+zsfLcb26c++ko3CF588oxXlCvceXgSwN14LBirq3nfvqVfOfTbpplrprIDzMuz1ARiW7Z49enDl5s2lr5+/0G6bVjOO4/Ty2m7hqMgcgO1d177zwJ33P/ylxx6/sXaGghw73Cby1y6PPaNisjtx5x0P3XP/nSvm6OGF4NQ3PnLqxJ2rxw/EX79w5fnnzn7gvrcDiOYik+rgsXBwJdoskjiU2RDEhS2NagJIo
ngD4djx5fUXroWsJ2XcjPIiWFKOkNYeaPU6U0yTuiYmOB9YhCB14BJYaRijjQLEEwwE4hGxKq1FUQAQY3zphqOSlIqIO61IddogP9zqs4oAqCQuir7WyrRizbI4nzYPdkbjDJra891xVpHHCQR5XoJJR5EXQwJt3MharWhzewiggE3iVLxVAHMkRfAe1gcoPnJgTgUAsKUkDTPczVtzySjP4+C4jDqtdLff874AQEGvLM7lmovxaG9nlPsQp3pvOEy7q2WRR5QCaDZZUdARW9IiEsQ5ivr94vE/ePn977knqdoJUTpVRE1NLPnIffJjn3vmS9fefvLgyjuaK/NJp5MCmFvotFppnEoj0cjGJatEsiCWE0aZA7DekhrlMhDnkJfJuON2ynFmudXa3txL2zEA047zXh4RkYkgOByZrd18caG9jcDOVqbDk1cxeZsq3RgMssjSzpY9tRiPdvvr14qrWwbA+jj7nnc2d6xsDUdPn7kuZZi3+frVvSdf2shyC2B1eanVcoGAwunCEptIic2dSpm5YjuDUrS5vruwMvZZdvHqpp47OMqd8ZyYxsjBkwBYaEVO+gKGF1KRidk6h+CMtwIUzgLY2Nm5sr13ZLh349L5pE2ptuSGTtHC/NxgZAF4HacuFAFMRjOYwFqryEsQcsF7B4CUgJl09WYXZlZavefd37CxiZVVbEsPwPZuHyHnlBGYVURRLN4zK4L3xlQA0FNPfO33PnH2B3/8fTrfuXnh+qN/4z2FKg1TZvOqOjKwMQmTcO49VNJKG+V2YXQjbs6nzjfnlwC4oChpDMfDZkSieOoiVPZLJqZJGSWhclOUzQMJYqJgPSMQlQDIlz44ExkYxcF5L0pNzG8lrBFo3xgyEWkJcF5ckFFmSwsASUJsuHC+merYaAmBtSYOsVbgmsCrTsAyTRNo4FD/JUilMSFEwhBGZExbG8MsbLzg6trGdm8AQMJb6wKkquSOpKKcEZH9rhaGTOuTmEipEGCU2u4PO9l81QEArzWCLR2zAokosCIYHuTeJLhyZRPApcu7rXSRtH7+xbVmY/MdDz28tTM+c/bmyUe6jgoAgE9jMxyPbXDOU2+Ux1o6qbK9m4HN3aePAYhjdf9D9w48qWK9yAsvdPba1kLTNJJ0MCyNNgBAYxMZ630URcxwXtb2BolWbKI0CYEUgJ1hFpnSmCTSepD3RIwvQ5KorCgmOUjtQr47yquErg9VhykRGPvCGpApuCAVbwYgMA7FsLjvxDEA9x5c3VMBUft73ne6FdHW9o3nX7qIIjCRLVw5zgGE0lEUabGT+0Kopp6beMDCkFqrg4gxKX01BIaoIABiojDOPKjfG3TtAkrsZIhVWAWi4O04BxCxHukmV4dUJ3Nr70n2fdTXeOGYACdhcuN5QErHWSmKUxOlQceWjRdQVAtrECvFytfwlJr0oktVwWadBWC0IdICDkImjkRIRKxnAhGHSQ5bSxwPMNatNGoYZwmBGw1lnU2TBoCGjhVDGMpoEFciixVeRiQ+1GmPSX+6VD03AAIRMQtUqHJyQRGJKBLQMKcVrzUJD8ef+9RTp+7qvu2/ejeAAj5iJUqLYoo0QOBQiXaGaflhEARQCFXjNgKM0aF0wlppBuDysS1LsX7cH/eTwXAwMlGyvb6XNhqSY2+nD2C+aagstXdKJITgQ91iFGs1dpN6z+llon1/luoKhul9VDFc3UKYRbMX+Bb4cT+Hj9n/0+RdU1M51vqhtUTlJKiS/R/JpHkNszHEDFNFvchKZydMiidIUCVYJ8FF9UpCfadOdsB111NdcD5FHWcZJiufnRlE5Cc0kFT10QMg8rXeZi3uY4zWSo/zXCleWJgDUIbQH23lmYviCPuYbPWm5YptoLS5KE1MItXjqkSCC16zmkZmMoEiq8Bin/x/yvo7c7W01nOdbunhIaW3hXNKcXVCA6R0ruLgKcU5CSBSrMuiqHR1AkiYBcRKRybKRj2ttAuilGbiZtoA0Gl3ojhl8SHPGTDGsGKCpGmydvMmgCee+DftuU4zNsE7IgbT/PzCzbXrZema7bnO/Cpuj9vj9rg9/nMbuiJArk2DTN0fCLD/Uq4cucolmgJrxK+Za2rApnjT9C/TTWSSfpUp3jSBpma3m1qJ2b/sg3v7/xC8bsvpRvsWfWoGZ4FIqcxzVRlI04ObWtlbD+0vxSTDNH2GKrE7PZxZoJVowoZIE7Rv9txM0To128Iht/gQr8OBaZ90Z3aSW3FFTI57kuoTmil+rH693xEycZxmTzdNDo4DJk0SmEraVOs1iohIgleqbo8R74kJgXwIwsREIdQnZd/x8hV7DFcnqAojBFA8cyVQ+wR1vYNS9SeuvBQCwBAiEqkE/YSZkjhRikrrqlwopGokFxBixZVHqBWL84NBNhcTgKDK4ycf/MxXtj/5F2d+8FvuOXmoNe7taiUWidESaw+AZdDRZNfw1Se2zp3v7Q0zbWIXbDumvO9hDICiyJOmXmgt2EFmC2taUZaZcYZuO+0VJe95AMFo50lbGhQ2NgbOE0lqqIxpwTCAQmQ7yzpEi4c63ci07jr9kZ/5e7/wT//F//dr//4H/vpfH/g9AMsHHzh9p3zq488vdxsYjh3BKVo0PtsdUIxukwH85m///tW1+3/kp9496q3deO7M5RuD9e3SOcQxRm782S8/D2BzI3/7d6zcd/9bP/Z7n1SE3BXQKRygdE4CoGm90RGHYpi7E3cv/9Iv/IOeNS8/8WVpzTmP5fkugL/7T34ybF/96Z/4qfd9+Id01Fwr8KlPP1kG325Qfxz+2Ud+GsCbvmnx3/7cbyQlnbtwvt1I//DPvxQoyy0vrciho0cXljoAnnr6ac+qlURHDh5PlApZubK81DsxvHB5raIBbTUSIP/KM0/FjFY3LV3YHJZffOq5bJyx5kHmATQU9vb2nj17MRcXDB07sPr3/ubfunb9wqDMDszF/+FjfwQg28u6zeTVa5fOr99sO7q6vdOzw/c8+l40iq++eN2OcgBhoVl4D0i82LLjtGTvnY8UaU+ljwHIuNgb2Ln51h3H5lZbHIXSB9aslTJiJ2+oiRguiKmqFSYFqThtxXsPIBRlzJo1KUawltkQa7gQK+3yzOcFAB3HlJWRUc12Q5jdOIMrR6W1o6LZrGRqFANWwtiVUJJ2mhCMtnZLhPTEatUFrEUIbIlIrDKReBMKCwppmiQxb6+PAUQtiRMm8hRELEIRiiJ4kiRliIxv7gFYuzE8cfeCgpQ7BeVjEU2hkMCKnFJdAPBmZ5vmTh5UZtsP8tXjc2nTrG31O0pfePZGx2gAKycOxKmGImblSkfBB0Oj3G5vuL11f/HKVQAPPXKwu8qORnlWDnf9w29pv++eezqevGJWTJ4AlOVY80iyXSpiyUIcsfejssw4aSmOAVjmBiRyjr0wwZcFDMMkUM2mySp0CC7EjSiU3hZZMtd1ezZ2kg18yqa0xPkAgGdWSryweD+XusZccuJo5/Cdi3ne55EcW0wBHOHGTVc8/cpOmqTLnYW1nd0PvP2hM1duxqn+0LuOA7jj5NIr
F9aurg040oPeqK21CuwK70cFGaMCA1AAudC7Obp6dRCLRkmp6J2d/sWvPFf0RhIRAJePTOSD0UrMsF9ylAopbUiBS+eifATg5ENHfvpnD3UW4rIo/vk//r4jS2mxcx7iqSh08ACiubaKNI9DrBkWAQHihSSQF3gFDYCZ2CgymhSjaudmeurxp/7kC+s/8mPvevXMOoAzF6+fvv+hzA5bsSGtstISIYkicOyKjCIDoMwsRtY6iWJF4oVRFNn8vILkVQ7GV8GkViFSbLQDPKTnxbJWSo8CAJQmUS5fWJqTfEQTQj8KgIQJlTQgYCHNHDyUNiEXFmgfXFaWmbMWAKLAzoWiBMQbFmcM+XKaS6yta+3CoPQ+UhFpY7QiUDPRBxc1gHbaDsCRlfkHTt95/MC8dw7esWaO4hni64kFJYACCPuskVy3FcrErEoSvXphPY3MQ2894U28fmNnoZsC8CZSk45uQt1Zi32CHQJACpPgnVkxM1srcRyTcBKZSsmqzEkb0iEwx8FbVlThDEQyzmwURQB+4MPvVvPzpSm+97u/ea6lDx2f/5s//D33HMhcebNSQI5YrAQmkxWII/OT3/Xtc6unRmUZBltkUpYYgLODgANZEc6d74+hW2mzGGapiUvSw3xYZcHTOBo5pGlsi9z5MMpzo3VvMIwjvr69t9l7FUAjksVWCpG5uei4XxqMpbe712l1TFzn0edbbe8tK2htJDjvEQIRKQDBoyrn9D6EGanzuvWCKDaSGH7x3AUAO9v9h99+onQ7n37iSfLFaneZhAtvvUgSKacVgGaaSplJwoDMSmVM5DpQtW5IVTHGodZWCsgUax35kgHko9wsqLvvPpysLGgJtLyw9NA9DcVRd7U0ul/JISsZWjcvgjDJL+87d9PPFdgjM1AJjGLvbVndtKEQ70syg5LHmZpnnZcY5qTyMAdyEAA+SETGA1XBnGDaMcyAeF8CVZbAKFY+iNKx84IQPBuxNopq8M4HoSQpvC0gJcEopYwSQ7mAVHVTKh8UVY3MxLVLJ4GZRaDYTB67qjm3qiGoGgeAoACGKNQgL6CIwCNKCtVAmbNWiVJzDdVIOwAK04DSpDVpJhMRkXgwNMsEWqvikBDgA/kAHyBBQYsQKV1lCLxIWRShtMHLIHeZ9YPBLjs72O45S5WAj9FxYjDK8vF4bACt2HuxQTLvcSteeMuboIa3MB10C0sUzW6MyVW/NYiaKRSYacuSScKjeitUlaCV61FrVk48fZrcNKEOvGZ642RmETORFk8wyxngcebeI5kCkdUPiRAqEiip68KZRYIQTaRswjRQnEQUFfEtpo1rNQKqamcfiskHF4KPY9NqNQAIq9Fel+145gxWje8MkEjFJBDIsDKamQORYq5iAcUUAB8CgOCd814ApbRSatLD/Vq8HxBjIjfKSudKF0wUpy7qohgAACAASURBVGnqBv1KgsYTlDFps+E9hGAY5MrCWeuqCnANIIpjMFtntVbWO620l+BCEASCr8vclWaldCDNxtpcJY0C7EZ52mhWBiPSirRe39ppRObw6nKZDbNsbEws0Fo319Y3AOD4a5d+e9wet8ft8Vd56Npa7gOHMyJpU0tam41Z1GsfU5wOwYwkCU34AW/ZXe3t72eabsXFUBkyev13mCqw7eOFr+d6nNkNTxu0a/9zss9baCDr5dapMflLq9z/sh299vvJUVddYLPKOrPbTZmqMYtY1ofP0+YJqrUOwyTjPTvb69WzgVtQROAWQBkTf2//rE2bSQVhet5nl0r1/XCLb1V1UtRNMEBF564Vo+rL4NolcsFXfjRXzRJEVPWEE/HkaogL1XEyEdGEOofALFLR1kxR7Un7NrOq6iJRCdHNYKOa2XnnQwhBlFJGa6UUcwCgOWEi571IMFpXeKRRXMhOt63uObQA4PAchuPi07//5XmF++7rJInrdmln40b3jjdn46IR9wH0zry4/fzGU4+vP3d2eH2jv9cvWp1CgROt2LqdwQhAN9HzUOPt620Ct/hn/9n/fOFm+Pl//HMoSiWq8jhCEGV0XtjEaJcXnETKuY0b12OWKkDdvrxXEsbk3v62e0fbF669/PxTf/bsSmf+8b784q/9dlV4c1BTO3b33LucFTpttF+4fHO5xRKxg1tuJbq5CGD74rUzL/Z++V88xgHrZ9bXt/PM+php4MJ8Ix4O+wAyj3/1i7/YSHVrrmHmmru9YfBwTLkPLW0ADEKZeHAQJjp56sTjX3nl//z5XxwNXLSxzsDatW0An/vs0//9//CdH77ae+Xahae+eB7EhUKIKLekxB+9vwng7NN/+P5vOvmm+/7Jl774hf/xZ3704t7u3/2Bv+8yt/zgsYOrq2mjA2C48+rxu08hiGeTNKnMadjrxxGLgFgB2BzYZkQ7u7sJ2Fo/GCqrJMEoiskHOr4yD6CQwcZOFsIVV5aDXpBV+7XLZ//8D/7o3tOH73//Nz/85vsBPPbYVx9Y27xw7kpqzPd829vO37hCxo/s3vE7V67d2Ll2ZQPA0dXjZV6w5liVuhVnDjtboxVFutNxOQBsiywvqswOVu+Yb7MUeRYpDoFCEUjVkp3TDM7Uxyfims9BUBWgBRtCTJpIRKzzrDwzkwZXgbWzAKJ2SxQnhklAEhhwWdFqRL6d5JkDQP1Bakg1436eeVu2DIktF7qtkS+d89WNZZhsXjo2gSQEhKCjpGnLYZyaTkOjYQGYmCCexDNYHMSJCyJEovDcC1cfvO9OAPNJsrNXLKhu1utvXt87cWBxe3PXOt890HA+AXDjYv/f/eGFH/6hzskjqtlux90WGU6S0d71jcjKeDcDkC0MlAlKe4nq2ueokbKGeDaSXL8yBnD/w8ykAJ8001Q1Dt+bbD/+Shh5cVFpdaISAMqPlRbR3ZHNdVMxg7JUByJWNZFlnksedJRCCKXLStfqRIbKohyhHAorAJHy1gWB6ISt5KUuqWuCQSboLMSlRACEFTSs5SRB1syCsZhnNydJa+nEUkunTQBusDHyx5/++CsPnjry0F0P6ytPLt9x7AsvnT11tNFuKwDr21sHDzRfvrTjgo8byej6ZqMRR0r7cZmzizsdAKT1sQOLG1tb27vDRx5e3cvo7tPLG0sN1yjueXhxuXkMgI6CwBdOpa1uvjcmH+DLYMMIIZh0KUkBHDx9qn96TmU7enP7GjeLcY+jaPHQXNnfSzttANZnUgyobIBIAsMLESmtOFJggeJqMVWLJWr6QgEw7A/LvRFFjZITAONcfNLWzlX6BNA6iow4CwRhDiAAWrM1rBiepU8kOuk2IcEYHenIADBJHDj33sG7VIc8cFmGYIuWlrEtXZkDSNtLRW+EWOeBjOFJ64QgMMKk8FCEPRjiwaLYU9gr/eF2600PHF89dCBupAAOHF5+3ze+6dBi19o9pVlPBWAF+7nCiVekSYKzHMVgBYI2ugwC4MbG6I67/WYv+/zTrzSbJz70jUe1sIiId9ATme8p+rCfvZ11rvYzjkLsiZXWuQtFIB1HxhCUAhA1m9SvJOCkBi+mVDBTMKNiriNCVc6pWCk
W1i4E64J1AkDDEaCUgFgCmMlb65xjQpbbEyeOArjjLfeUe6NSLD9yv+J8mNmDrZUonC16F3RQAIxOnI8baYeZ+ln5ySee0fpsbzRIk6hRDG5sDAC8+c2nXz3zCqk4TtFNmk58IyFjWGutNfKsBNAfjcg0y7LCkkUpLqzNrXU2Xt8oCCWA2KjNNDt6ZOVQd+HQPUtXb248ubPrrB+Mi5euegBvPn40TtBMtdLGFc67inyGIeRFClthDRJCIAipGssFE5id8zA6aaQA9m7sZIXTCfUGfWULDsnhpc7l3rYtHTWlKCyAhvMCMtO0sMyi15P/hoBK6aImngEYkXXgIuUEFUmI1ht7g8UDYU6piIr5NFbjYFxiWk0b5wCkzJZl172BhyezWfzXu6VFoEixlCUAEyxZ11hZad55aLu50LS+7C5kqyuq3Q0ixnkAHKQUPZ1nypEjIALHkQHQH3iOFQEB4mwIYGOMDTrAMzPXuuTkrG8mKoIvBr4oSkfinJfS9UdjAHPoSBQDwqouDawcPareOFD1wdRFCVXnrBAgIVTVkRUGV7tHTBA6lvgiy1qdld5aWQawlyrFIikRK7AmpcCKuOo915WSHCq8KgiFAPZggAWByzxXJhFLVT9NFGmlqCjLMi+dU9eubT3zzEt20FfQb3vkzctzCwDe89A3ZPlmYuLgvZbgvfdWxAfcco3k9Zdp9uLtA5RvuNHrLvN+TDV5i0y9ipl4aD++uGWe6XXe90imotgzk878kiqg8dZbceZbyIRl97U7q+q/w2yHWP3UVH/x9YopEDEJQoVGVhUP05twsggCAGetEMWRBnBzZxsAWFlrU6bXnvIKWAcBaDTSxtxi3OiIdbvrhfUOMJHRIiAmVbGjaF0arbhC4eukwnTnswflgydQnudOSIhCmaPS2AGIYL2z3ivWcZImCuM+K62YuZGkFY+tdS5JEqWMiC/zUQjVmauyClxz3VYKoaR8EGIT2AQ2OmHWMZEGEGsdmShtNDk4rbjbaV3f3QExK4BFV7Kbt8ftcXvcHv9ZDV37TQh1I+20jq5KRdbufgCEKklC8lWuS01N2BQ2mrjvVcp9AvNVzQiVMyG6KkWUSc9R5XoEYKb+UGqdknq6KlysILN6A6q6OTApTKz/PD0qqpJts2Z0ivsx79dITkCBqkJfsaq2nLJBq4nvz1JREolMaB95WsQvoUJip+sjIsUqzIrF1dgiQCTAbIfPa7wQQagaLf5/9t4z2pLsKhP89j4m4vrnM1+6yspKU75UKiekKnmDJCSEDCBQN2IG0CAahmEYmm7UIKaZ7pkGtWBoYISR6Ma3sEII2ZFUKtnyJstlpTfPm+sj4pjdP+Le916WxHT/mbWatXKvXJkvb9x74ty478Y5e+/PCGJ5qbfmrnirfFnuYUbXOMrWxRNGaYNafo4lZ2N0aPymyu2WKKXL2ZfK+jSiNgkAYVJMihTFMdODhRSYxceSTQhrdeGLsr+tFBNL4ZwyOiIWhcdIqz/44K01Y1Oc0ZuJGMFYUoITKk0wa0k6LPIYYwgCy1tIUDWqiYZAEokUiJkhhCDBS0mJIYZiykGaoUDOF0VRaDYx+FgmJKqvFDsfRRDFMyKzuDxaj9tvvPpHvu8WAK2KeujLTyjb/9D7v6uXLUxO9w7cdng4Mw0MYVx7fRXA8YcuctvX987cs2826nTIlVo9bShJVaJM67HjCwAeOf5AUrFX755ePLU6MVHXw1PU8dfuUkFXd1199fnVdQCXnrmYM5E1IS9samMIULzac7/wvn9hh+sA/sNv/p4UoZlyU9HRGw7NT06cPPnlXXOVf/ML7943f+Snfuz9AP7tb3/in771+p/9mf9p8vrJ1Y36B//FL3/pa8/8k3d9d391WTUrr37T2wDc/5kv/Plf/uX3v/mf/A/v/e5v/OWX3/eT/8fNL7pjod2fq8fTJ0+LH+l+RqMHEEtJp9sWVmFYeMS0Win7/Fq0oiA+zu/Wb375S8+efMQKXvaW177oZa/9nV/+d82pSQCf//sv3/qiF7zxXd9h/+qxz3zu0TzERsJROC/yO+/cf2BeAZjMrJ0/+Lcnnybd+es/+8/X3Xb9rn2znXRt8fzK+vmFm267AUCQpIL0qVPHH3jokXxAE7OsE110gwWKXg7AOOiZhgwG8/uvOXPyWWUi2WQYkeVFvYapPXMA2sugWT/o9IxWNhVX+CeOP9Xv5c+cXnzo8ZOPPnYSQOH4C1/5akjt3tTe/9SJiVrzjruOgRUon5uorC5kAK4W5WJmdCu4YgCbEWbnWhT6YqtxUAAwrXqOrJbnSYhZXiQ26RauJoUQyEdYDYC8i3mhlIJNBEJ54WLGrEHsWBQXAFgChD1sNSV4T7Gfe0pUFUFC4YYCAPVdSvRQOoXXKhdfnZgNrqc6kULDDgcA8kEPNfhBlkttdV1m6t5NFpZT3UdGaTI+EaWp3Sy4Zrsm2koWfVCU9jpZr4iaBcDsfEt0Y/nicqtZ01wF8/r6giKOQU6t+5l+FYBjtCa0aVQGhWbph2Ba9bSIHsq6VANo54a78cwluvdLz77iRa3a/swpalbTpx9duP7aY53TKwDy5YGamWkXrml7DFUUJINuahqp8kyRehmAzeWl5tE5ZbUpMFBQQx4QsZKaQSgKQQGAdciLaLWvUoyeFVTMc4hQxKDvAFRMxvU0Bg+mzrCYaFQlcf0YG1W/suomp+oA1K6WMo6MdPNYr7U0uTPnN1qT6Upm91+z3+ghANICpgTc7Wg+3Q1VopZadIP9NQkrmy7bAIBo6q3uW99659L5xc/d99lvf/trBln76oP7HnvqpCorgKsX17P+q168b9esvvTMyqzxOTesDsFa1nlsXwAgyYFg9xrqT891kpDt2nfwxpuq9amJQzfvP3oD9RcvAnBLy9QY+AovbXRTW28FXSAp+utJMhHXablgAF//yOe++JUzN9966JZbX/jzP/Vrb/2eu9/0hvnB0kVTZW7OAChiNKhwqvJ2xxROXA7vIAgAWI3U63SE4ghh10eIUcAWoqsFxCQJiQWQhRGfMmOOLloL6BhZB2bJujFGAAnbYVbkUWwoTBRPykfLxkedkQQAUXEeVKViQCZ4GoZiZn5iV6vZd5tzu6YTUwUQY/SmTj6iPhPCABASgQQgYGTQDQBOV4f5oMqF8iHoqYZhN0AvK5YXlxAAwLW79z9y6vYXHfZCwdcMCaEAYVsjjUvuXfmRWhUBFBpOg8mjyEa6fhZkE3FBiWmKiRKNCGAjpNhZHBjhkagkHtqQ5ZxoKBVHTtlRcfDOVbXWFXXimeX1tjuUKCakCgDyQc6iBYHIETvNRGARTVBW5xIYgEQVRKLkytihzx0guq70Li8cKJYG6PDBCymqEAdRcEHbFE6xUtYN5NSZJQC/8b//zfTs/ttuuuGhJz7P4l585y0f/r2P/vD33v3CQzO+aANwRXDic6FqYrrOX1o6t7maDTbl1a+9KxifL3UB7Dtw46WFBZ2i6OTHz56/9egBKLu4OWxVKVXG6gggTavOSyXhIg+h8NBVg6wzHHbyHtgGCQ
By8ZVQW1nZgB9eN5HmPjPCgfRgkKvYA1AgT7ymIHnhbfAFc64pz0h88B6pcgBYhKnh0bEq8xmxUTHmSgJZ1sgcMYBeER1URwjkLUXF1PEiMYoo7/NGeScXYYPgCRQhQThACKQEumQ3l/z+EDxEkQgpjdL9hxRTGDgFQOsIyS9dWOFdudkb8rXOE/d9ta54//XUdn5f2a10akPVaiEWAQohhCDGWKNAcD7CFwDAikaGwiJRQAxi7wtPosgDCKLz6Iyye646RNUk6DyZnb66VVXTk0GZfskmVuQ5JOiKBB+NZhIfSOkQgmIHqgGoVpSmnuZdITK0FnjvIikRRSIhjJrGZA27YeYmJybuuIpr1X7u/Px+bzYHwxLab8W5aBAMiMVHMRyBIogFKmP3ym2gA0b4UwE4CFmtRsfyKJqt1ex406OqjAuoJj4g72VxUIx6vQXQ0ugyKR08E5N2LhAplghAIiITCbMQopIYY4wsSjHllJc7/iw0fe88u8QN+3l7Mwl0YNdePzU/Wa+moKK7DiCxsWHrEhCDFI6CQ4QDeZFQYjm3vv3lmyslj1BKJY2RzWV9mRlKGYSRKghdnhyMcpTxnWmckwBAjNuFs9FYMvpLM21lJSJb4wgETGpHMiMY28uAEEazBO+Q2S+LezJOPkqViJFIq4gwSjsxltHv+ziHQ8lixogmLVuYVwCWRSAxRhYqVStKRfsAt5VIJay8AjhGcVoClBImEvISQ3cIYOhDPsySimKmGENRiNLCzCBopSERQO5dEgvFUknTQWLioB8KxcyAE+FY5likgg9grtRqMYZeo5a11yBUKhMACFnXkLCmXJQQiFnyCMNpWhlmg7Lo6n1QSvn+YCigLLMlfDmIi95JLBUqFBNihKFWYxKEtc6m1ooJEjwnKScJACi2WgshKqLCa3HBZ9FHEBsiAHmEzgsvgbzT2pKwL4rcZbnzqYjSOxTYrsSVuBJX4h9J6BFxdqv4BwBjFeQdZO1yJaOttRD4pm7U9kNj9rJstcJGowCXEwNGbf4d5OXLWmwjpaQtHN9/rYv4Xz1anv8y9+lRZXK721cq9Gy9sVGnboeQ83aL8Vu1BHf893Iidvm3jLzd/uH5lxXauHWpL7+GWye8XM56fPlHXj3bG5lv0WH/FueE7PxEnj/q+I0xq6qlMrcsisLHqFUpDy5aaWbWShFR1KMtDBMrZiCOPvitKzeeX4yEkQ4NlT9ziYGTsFViLa27aYwKiQSRQFv813FnOQJKKaIACJQShkeMQMkvM0r5EIxSidExhhCEo3jnteC262an5jyAondxYtfgx7/vhqHrHrx218RVyTCRyu607zou2GRyD4Bb3zFnJCRhebjZFU7PXujsP7i3TvLolx4/fN3kHW86AKAnE27ZheH0b/yLj6aTcwfvupHSU4WPlvNf/IUfGLZzAL/4/g+cvLiJ6E5k6AyL/alVTClw6sknl3obAL7jra//3N99/I3ffnd74eSLXvmSE7smvvOdrzm7+OSRI9dsYs+kcQD2v/ylX3zwyX/2b6+T+ep8/fgPff+32XrzxXfddf/Xv/zoQ48eencAcOPPvPnWu4/8x7+5/51m8tSJE/e8+Jrj55YPHDr6C//yp9/3cz+TZ30AfOL0wPsQqbO2ISDSOmpltaXccWIBkO9757vAm15891V7ph//9PGhd/d+/t5zi6t7Z9P9h6YAKK7+yi9+8Of/zb/+1Q/9wanzm5XUFjFqjlrjXT/43cPOKoBTFzMsLX/ol/7kyO1HhItb7mjWksaJ9UWlKB+G1S8+AeCq/Y3TS4vveP3bFzZWPvG5e2em56bqtfVVBUMaAqAxV9+37+j9Dz3kvBROHHTMXSUxXEl6vWGJE5zbu//8pQsURYbOaLPe6Xefe26lV+w6tOfE2vIzp5cBvODm/a1a9cKZ56Q6M7drft/cbE35R+59jlXc7NPVB2sAigsdqXGMjlhXxahE54o7q9lURbX7DkDrqqvOf+1pZq1FXzyzfPV8q9GoiQ+Z+MoYrbBd3WAhhjihaFk0pKZCCD4DoNKEVRqi9l4rERJFoBgYzpNwaV2SZUCuo7Pd5YGuW81+cb23f646GLokEgATgjFVxaZ9cV0VGFCCoeDAbj88U99Y7Q4dgE3PGfqmcOvteN0bb83DcFAMtFBzotoeBN8RAJVB2KsTo/SZM+szLSdBrWxmV+9taqVaNjfwAM4trEuf+pV2a/dEeyOvJZ1qRSi1Qy9zU1MAAvqsMFFNn14e1IuJsFnYAzMr3SVJLHFMjQC41MkPze6K3dUiLyrGCEFbU+R5Yms+acZqAiBpTYiqZCE6X0AF5YcJBfJFCN4H0swAWKtEExBEsdaGWAcOg95wYldzolkDQLofA3TaRKTOM2fSZto4OK8lU/WZjfVBbFYA7D1yQChzLjfCSNKQLydk0mlbNzaXTlphALkrnJOqKerNSXVVhSrJ3oN7fbahh91aqvMhAKTzB796/Nlza927brrujpe95E8/+refU8lrXn7Xkyc2r9sbABTSOr+0dGFh+daJxuHDc2F5JQtFtEpXGgmp1UtrAC4un5msTfVVzMhUZnZ/9bGzf/bH9zdble/5yTd88k/vXX5uDcCv/NzLpdvTUeomrdWap55d/+wnnzh8aOJld+1ZPreR7J8DUMHU2sLx9elhfiBd6cfW9MzUbLXTGShUygzURBcoGlWTETZQABCzMQAgrmRrekggohiFJURERG+0GgxDFJQ8NR+FTCIOLkij1aQQnQQRRIlhbJurjZ6sV7RNp+pT3clV0ooN2bSSF1Q6rVtFbRe4VoPWDaWqV83/6Pe+ctCaabQm7771SGOiCSBGL1wYI04KYlPOeSQyM7aGEZG8iGm1kmf9WjU1kUL0SsVLKxtPXdx8mbcAlgd8ZrE9tEmlKdEPo48UaYTI2SnpTAKBdnDO60QJSybRKR20BbCw1t901qhoGBxhlRWAmHp51kp2sBCIxvxUAkFiripG4Irg2BoAMYQsxGq9wdoNCjq56Eh2T07sFpIgAJDWZlCs5P0umLW2MXqJYCIfJI7dnDUr0mAoldQTVCtRHnnwob/52O9fWu5ec8QVwQMw0QeBZgEpVuVuJChNIPExlLWDxeXe1ExKVBvmqFlLql6rJFZBYqlKiqwobJIo8d0sJyKtTTRFVN7FsLK+zuIBWJFePy+6xb5dk9NFMSAkipLUbg7yQVYkusQj6xB8FGFmUsqH4F3BrIxSPRulcABqaTqxa7eRYa/fzqOKIoX3VORWITEjN9vMhRBjYhHyXp0Tk0fLNoJZU1tZAEUshAPExRAUS1lFU0Qq91J4PdrQUuFFDAdhrY0BFZnzWYzOa8195wC0KObBV7SRsqQjpdZPiUcNACiUcjalpYem0W6TXfS5h3AsvyneSbVW96KJtSNLNmGiqknzQVi0AYCq2pZ4gRI2LFGYJXJwFBW8g1g/+rWSOGpGo9SsJK00kyLvAdiopivV80+cvf/vnnnp2+re1S498jDOX6wc3nvkLa+zhgFQltWccbE22uGVthxMQqwo9rIcQFZ4M
BmhSECkKABTCEhtBaFfXj2j9bCTV1N18Op5/dL56flJTuiet70hX9uoXtUAEGLutAVK72BNsCM1QjIQFrlcaVFkG8xAW6pQWzU4Lr9K187X2tI/WPGLtcrBG45Oz9fBVQBJtRpKZq8iUlzuRBGImBDLZEdIRhYtpX4CATolBGEypeIzKxV99HAhd6vLrsjUXHM6y4p6YlhCqR1rlcToJELiuFQnpSPQ85OC/5aQUUkO22jAUcq1haD8Flv+nYacO47LeBxhUNyWYxqhHcv9+EjYEdsZHcbF021gxbiwiMtTIRq/aqfCE2hMJKetLKSEoYBKEOpIvX+cEsmYgDX62IVKg5jRCZmVRCbNpIgCUSinImXBkcaTUiV8pRR7+uYcUTNvbG4ur7fbqx0J/ZY2Wqsid2nKxKq8VOWH16ikxw7sn5uafgDh6e6KSGCCDwGA9+KjsEdaMVqpKFEntggOEoP3pbtULIWVjVZsYI0b9vMsF8AaowkljoOJvPcuDLzL+72utTbGEgcj3vusyAE0641Go2mSxABriwtRhEEhRonBBw+gVqlGEaby+z5CxpSdLf7/Si6vxJW4Elfiv9/QJJdlzVu1RRkB8EePlbB6AKPltpRJvpyNvEN/ZEs8+bKFmbZXMZLtkt94SZTLX7E9KyHIjnMJyqrmZbW2b+JF71wPdz64Xd3bkYBtH/ymQcq1fEsEZavgOq7I7izm7Wj+7QzaIRqNbSMZ+hZzRmRQKC83lTniWJt8x9Bbzc7R3J93KYBt7crLqpPl8+myz2189LJlbKuEuiMzA0EUqRA9gCJERiQmQSlTvbWBwchgRsbbGipl8kiERErg586MT4SIiaSU6WcwsfDI6huARCESVZY2x230cqOvmMeCUCUZTjGgiFMVUm0TZSrGKkQArFXZM1SKFQMSFFE9qXbXOueGQzebAPDWzdxYbZD99OfPzL3kOn3kmPhB5h0kVFQlMZMAouSZ3whK+UZNK5471Co4rPT8qnfzti/VFEC2mK2cvmQmmze+9q4nHzzz53/1wNve/LJb7n7lvZ/8zL33Pf3qO+4AcPSmo4cP66eePv5DP/mec+3+//PBX59IEpPa3/7Lv3nt7bcC+OGf/9nJRvX1r7r54a//3UP3H59s6Ye+8MQXPveFW25fOi/zP/DTPwzgPT/wvR/8nU993+ve85Z3v/Wdb7tplSefeuSxXz+7tLKwmOX+l3/nswCmZ9NXvOZVh+YWf/CV77TDTqphsu7SE+u/98d/+Ku/8r7/7ad+CUC3U0y10s4gPzA3szzIOp1BLa0NhrmuVpO8CyCtyF1333rHjdc9+tiz//EP/uw73/C6p/w3rr/2pu96+w1nv/6Nz9/3LICP3ffgTIL3vvtH1wtdTTQoMmS6zhq0uLHwQpoDoHXzo3/9yTYXZx8/eea5U5cWLj57/Nmmpdnpen8wVDAADl29z3V69z7wsE2rCE6UWVheWl9auPPWF5w68TgAUWZpecEwilBM1FvnNnuaqZc5ifGq2cT31gAIFb31zeixluHwoWlHYWV5TVdYCMubQ+c1gMOHjk00uRY3XaTHn3zu8196ZLqWTFU4aZnZ2dZjj5wEMNXKpl94Va+zwOJrzRb3B9HnzUalLtImAEimm7axq9fvTkzMtnZleZHpEGPhrRKMpSyiiJSSfPBEIB3Zme5Ghl671qpK1ABgLXPqCcRJsU1pUQAAIABJREFUdL7fy0Ur62GIFXGWOwAtpcTW8oDOeu7Ws6S+qVs2kk1CLi4AyPIYByFNolt3cObUc+u7rp6dmTuweeECLlz0ugog09XG3snexposu/UTC8nBWWfqlURkk+CRsAIwO1GXQP2+73Xj+vrmYBCriepnIYZoU87dEMBmJ79u35RJE1uz8wernd5gYm4iabb8ejcrMgC6UW21KrvnJgIrsrV+JoDtQu0+tnd9faMhGYDo4VbbE9Ot4aCTdTOyJcWRch9Q1QePzgCotVJIJJVACSvEGGszqakoFm1IjSAiBCIBkwiRtgCHVAbFYKpO3LIAAmUxskpJIqW7qzxdpSp8VC4O5w5N6UoFwLDoAbniqFgVg1zPqwN7ZshTa246hqEfBgDWWkRBCORCapR36+KhQV64F52e2gPg+DNq6aJ99QvvaCatb5x+OiuKs4udxfZnaqAHnjwNIG2qW+Ynn1nrtlN0BuQ6Ayduuj4xbHeG7c3ZA3sAnL24+enPPHvTt81es3u6v9597IHTE9b0fHH02DVfm3rsYmcBwDLUZJUrJkoyI339wCMrZ0/1juyZOnN2eaHT8+fOAmi3B41mZaXbCZ1VBcSEMl0Y61yspt4CSMmAY3QuOqeCRwwypjJgXAeQWFDUxCrGUILlJfrCx6lG0ukMS+7C9EQdzLaSapLgCwizVmw0RFgrVW8AmJqefOOd19XqLQ9Xq5oYfH+YtVQ9rTQLDwDDIJV6VUkMxN2ksnR67YO/8/FjL73junsO/uGn7v/xmw4ByPIGmEhR9EHUiKQ3JhuMtigk0absiwGLFIEzR1nkSrVmjU2EbKUGwNiGOCkIrshk2COdoOxT0jh1H61cBJJCVSUMAlHQJk25cLK+sQLgaw9duO2elxVaDYNEEbJVyZXLes0E4DFEZaQaSTR2syFrRFD4wNbYtAqATCWJtLrWe+ZkZ7CZpYJPP/jkioTeZrRUAHjkqWevmliv1hIm5YPXxkIoBrKVpKDSxhYhOmNMUYTnnl1YWnaCFHXad6j+Yze++NChuosLAJT22lTzTBISxWVnMShDEchd0MYAGA6KLB9MVWRzbUXXqzVW6yvdbm+Q2NQPS6xxjNp750OMQTgxqU2Fm75SNZppZqIBwNTMrtmmshyd7Jpozk02iuC6w7xwrjfM/KgRzGqEVxtt55wPirkY9FSUTATAIC/aw+FkgoTNganJs4ureRADiSK5CwBKYV4XZNiPttbwmSmoQa3ZXhQxVJTicc4h5EQREcQsMUgMCD4osQkPVvoABsOsyjBKhTy0cyd6qAw7FVmrGApjFAD4qE1ZeWBmQijRXlGorNKBE1vuTcfKN1HAJIFZNerVzZ4HEBBEp8OYNilxLvYLKfIiqRjNipXKfA7AF33WeTHsSdUSl5wkFiGwZh6Z89HYbphKmz8wE4cQNBtKUgD9zjD3lFTslCVSmbWNFrNVHipmg6zw5T48UZQMxfhILkYB4AFEH6LiUOK5CsXMSjMpUIyiCawo9wwRYwxFBcAVubWJSdRzTz3zuQ99+aVvffWxG2/43Ef+aPHk8lt+7LsAzL3itqi8sAYxwRKZsX2xAtR2+QuxdNTZqlgxleyosZogl3xtIqWoXpms19NUNXbNvuvtb6ymMbUJgM1eJ1FETFAMoyCRWJW0mm0DRylFhkCREAjMUhKAwmguqmDngnd5GBYSu3CZD+LyvBBVxKJVVSiVT7xHJAmIpfpJ2RMRHvs1/0Oxc1cu4yxknDPtVI8cwwq2tN+/aajL8o2yNF7u20da6RhT0EY3tBHeAUJEEQIuM4JRujXiMW2VGrGV82znCZARh2t8w92xm+ex
XahslZZLhMRYeUrGk4lbOhsjgj6V7kZK0Y4LAyjSirSiEEceVCEG1oZKZ20XiZSXCFDpcE0jscztlCSGEIQECgIGxxh84YJzwVhmKTuaRuuMuTcYnLt4YXllaX2jjQBtFVNpfQlvWEgiifIuYbhskFRMonXI+xSDEQGgQXkIiDFKgCetGNYoIC9yZQyNvOzLMrCwIDW6IBIRZk6MiaSstgA6vR6I0R/2u31FbJQt5a600YkxAGZnZrXVwpRtrJYvB4RBQrylK3wlrsSVuBL/uOL5djRX4kpciStxJa7ElbgSV+JKXIkrcSWuxJW4ElfiSlyJK/H/U+iRz+M2TnALUFjSY8ufR11FlJC+sewJATstVXaiF7dAiPQ8EOWo0zZC8JXPxJauCr7pH6DsNNJ2MxFbMP3RdHfSmS+nHf8DpInyxPFyhvIYrSnbSEzZYfBSnrf0AKUdMD0Z9fZH0MgtHgIRb43/fMgk7bg2W49BAPAWkxmja7v1iUQR2sGC2J7SZW9zS9yyHIi25Na3qBa0DU6VHRf7+ZdqTGLYRn/GEPNY+FAKDkopZUkERVw4J3HEycDYrQUSQoi8NdxYQXILFRopxvExQSCKEkOQqFVCPAK9CISYtWalFQGlKWcJTiUeN80FACKCIIJAjIgQ4ZUandL7YI0mAiQqElZMiAowVtd2TeezewComVT1Q7yw/LqXXZVP2MwJq1pnvT/obM7tm46FB2CSQBTabZmYnDz/9Ilev3PdbbfyZPUlu14U3VpECsDUD7BqJ9ngHf/sbc88vPBDb/znxz//aGVCTdb0Rz/0R5ubfQC3Xnts796rP/v3912zf/ad73jNf/rgr2d9D8I+W/n7Bx4GMP/vf8v32p/Neq+7+4UbzbPX7Z/8+ANPDXo4fNt3nP3KF971ztcB+Piff6JqndnM//37P/K5P5trb3RT6P0Hj6GWvOruO1cGAPDh//sPH/rK1+666QVPPfAMMV5y11EM/Pd83/d/8f4vLZ29bnlxDUCV8cM/8u7f+M0Pd5ZWmpU0I7R7HVLWQAoiAHpIL7z5xiN7Js8+8XRj/tob3vxPpz/9tHJnTONw62h4x+E3AfjkJx+a2XvwkbMXiL1EgCMbCeC5mcqb3nTrzJ4WgIVT33jrW+5BZ31xg86fXdnbHdx886E3vPb1E6r3jQe+9tTpJQB7pqYXNnvPXnh2bd1pDmvrCw3bbDarlpHULIDp6fluf2CIKrbSr2Xc7rNWMStYMDs3d89dtwE4e/7S6QvnCyeVxPhBLyhSgqsPH+gPukMvcw0CUFXJdOJ609PLi6v5IF44tXKpbr7nFcduvevAvqP7vnbvIwDue3Tl7S+6PlYRTp+WqUkp+vm5JVtPaW7XdKMCwIskPiMdqnPVpDZ/8ZGnalGzIhdATBQJgETEIDEEBM9EDE9k11b6m5udqw/vrtQEQIZ+c6apWFwUzQRIotkoIpFEY5hHAMWgb2McZjFk6A6KS6vFkZsm+8NMDd2w5wD0B26+UR1sZCfO9Or1Wnd18PhTz95Tn5g1VWWaogTAgWvn88lGeqBp+NLipbW5yelKMw1rm73z2WZneKhZBVCp1s89d6ndzo9es+v8uXb0xZ65ulZydqM/iGHf4b0ABp96LFfJzKH9EzfMDc891+xbM9skU2kvrFQSBcAZsimvb6y0OUgzqbWoyPKBd4cOTK+sr1MKANPOLD5xev8NV9mUnXO2luTeK50ysta03P3ywwCQ9KKKhQhCVIShBDVRMdUEZMEaI1tM52LUTDEIMWljrU2bqKEiMAUA7/KkWvFFn1jt3t/UtbTI2h6ROcxcPSc+B5AXA8UhhiAi1piBi4mlSJ5Zhv2i2aoDKHInwt11Gq51k16mUzl7rn302GHbapEuLj2nALz/A3966JpJFKHrhr317tzcbFoplpfWdu1tFaEAYAt74tJaWjeNWkNXeqtDmZjQpPTapY09E0k2HACoTtbbAxeGymxk9z1xZqJp2zWzPhimpurtZKmG0dg7XZtVayfP2V7vwqXh4mZx6/X7D8xW98/NXzW9tjHIAQh83s1btfTE2WcUIbKpVhu+0hiQzod9ANaIJ8WFF1+KW4zuphIjA6URbfCegiPFAoEi0gzC/vnJ191pJ6ZalaUA4Or9cz4QfNCJYjYxxAgRiWCOQn7YB7C+2f6L+x7/zpvnq1Nuoz0QJY3J1HsnEpUCgMAMRmrYCzacwGjJfR4yW7NJqm3VAiDiyLoIMJWaODeC89B4hzJ6B5TEIiglsC5IUk2DSF7kuXPt7qBE1WWefQgFk6O0UueQZSAebUzGfD/hEZjHuh6sIZ34qA1TPuifvtAGMOiEqVprc2kh1dEqgJisMgE+OKPV9kClzvP4jysiMSVJOnR08dQSgG4PCxv+0WeXTj61qKrmpS/Z7dtnH/ji4rd/++1kA4DPfvLzd95Yu/mWg5OzrWGvjQibWPEQJoDKFZmJhNgk6tCRfddcW1W2+or6MJKnfh5Cni+fAKAhEEVKIB4KUCLwrEggWeGVsQCu2js7N1+Xij+yb1Irg4o6dGhXc3pSsKlUUb6hXGLVaM0skSdqjZuv/7bN9c20NXnEJivLmwCcsbaaGsLTp89VNNWsCSQSxGpdSZNeEQF4KRRziBJj4LHMHIHJmtjuFaIBcKTFi8ttI40Kt3t5EWMkWGvgfJYHAFGgmZgAZsmHzvOFjsyf3Xz6zNogkOYmgOauimlOBp8yAUgJsBDo2UZ1ctDXNx04DKBSpKYwttk4evCqs4vLjVprcT3riRdOWWtHAUD0gEJEIWCIFuaRCDqLsACIRSkYSsRjrBaTEJFKogdLADDMnE0qPhhxrkrUU9Tt55nVPdKOlCsCgOC4oCRlHZUuL4sXiMQoAI3c8FhYAgIBMRBJyUaOpAa9fvlNgSbRCqTJh2F3OI0qwWx283oMScWWW+bMSb+IFbJGkQCKFZmRTpMiDiXqFuyFfBQXfHASIilNXpyCLooicgIgQucF0tTopL63nnBaG3iqcdyreHetCkCRHUoIkFjukUsBnvJuI8KX+TKWu30ZA+kURnYzAEDMAkQQKf2l4+euutq4uXDp/KWP/MJv3Xx09geu/wkASUVHJigipaDGPjmKSI3sLSXKlseKlMylCE9kokBpKkU1rfJegnfI3DDfjC6EKAYwgI9BHADkrjTbEQmQGEcCiZdxirbQjZch1p632/5mRtaOp8j45ZcPsX3k+WNu3b22d//j4NIfUsrbpRB4pI449rdkUNxKDbayjFJPdyfBisAj+XdRRHGcIwhExnIZ2AKujz5Lki1S9vgNjD7j0n8+7qCSj3I3EYIi0Vxu4yOTIpKAqJhROgqQCCkfREY8ZR57IWGLX8VEipjIKK2RK1BgZq05BHHeiRYA1lir2bn80uJCnueDzV7VWgYVRbHllmaNJsXifIy+WqsFUIQURU4+jDMToRit0nmI3jsDic6RSKVSyVzhMwfAe6cVaWMFkocwwq6KhBgCRLMCEEMgEIidcxJjuRCBKAafFxmA02cv1Op1k5i8s1l+jYw21poYQwz
hCln7SlyJK/GPMTRKY5id9NyxTgmRjIVCtsgUUrqojX74Fne+cinbXhllB22hvPNe9rxxrawsmJVHaHuEHS/cnsPOs12+UP63SCWOZ0BjmSjaPoLLudvbY5ayJsC2+SDheecSjBU2MS7/0fYA36IwunOE7Z+3TbRli+kwrq3uYGsDW+v7NqVj/JTRBmP7+GUTkK0BQeP3dfl2o+RWj7wht6q0iOV+uFx6mYIIExNJUbhSFCZK5Dh+SWmNA4phTPLYYvDTFihXSlYFEzNEQbxEEUhwJTUbQCQZ1TiDjzGCQsl7K38HS1lKJgJBUWSKiiAxRDcMiBLy8uozsSucYooh5MEbrQzRICv6bd9oThShAsAXWrNixWuLq3b3TNFpVyemG1N702bLoQi+AyAMN6o1ZWsHAvShGyee+MaD506sHLz2WubApLNLHsDv/toXb73xiLRqv/cTv/4T7/vp//lf/fQvvv9Xbr/pwO6jx555+Mn7P/U5AN0bDimd/vZ//p1hv/tbv/qBv/jDD/7cL/7qM2cvRh3n2AD4u499ogI+v4uW27c++tWHOEn276k98thy/ZMPhiIWRRXARMX9v3/9Rz/3c+/5g499/csPPnZ+MLxrvvLyV7/Cc/v+r3+1YQ2AP/+Df7124vjqwP/2H/7a7/3+ny8cP3XjjfNr66e7w84iF5sXLgJ485vetC8Jr3ndPa9805sWVvOf/efvS5hIsQsu5gWAI7ff9o7veNuTz96/vLl53+/+8eOPPqy6ay+84+2T+w9M7a9/+ANPAnj197zr9pfc/t4f+V+U0qqSDDv9G284/Lqbj37qK1+OS6e/9KVlAJ/8i6+/592vfuebX/zVx5YtwsxUA2750smv1Q/NH7nuyFK3B+DQ3MQbrr3uw5/5wsPu7LAIxbC3OsiCUpvtU8euOQrg7MKl9bVup5BLSwvtbq9e0VnmmxVqWSoGm0sLFwCsrCwbY4YuTNSSQZbt2rOns9E/c/bc3ES90ZrodDsALiwtvPfdP/iv/s/fXG3nb3nL61lPdHrLL3vzbTppU6V922teAEDf99TXPvHgXe+8vT0/311Z9cpWJurrFzdn5nZLiACqTasmtLIzfmaOJmfpzHrfOVN4k1qRWLYuuGQ9lVwjAYAQffAxUebscytJDQCOvvggxEWfQYsiKCWaJbpCgYioWUsA+KzX72YxCDMd2jux2nfDQb57/+RGr5iabwDA+WWOsZLYman6LXe+sLe4vH5+7a/+09ePHGjNVvzNd+0FkCcTHalVlV5Yvbhn9kDnyU1b8yK9/oU85rQyzAGEpy744O+845qVhdV2z022qnMzzfMXl5lCJUFeeAD1auXqI1dvFv3Ueppv0koeW1WCgeG0bgGkNQ0jZPGGu/dNNCm4rLOwbgy7WMzsm4pnlwHUDeX9YfvMRTXdqLUSYQiJK9xEXTvn1rsdAJwO0sTCGmaW4DWi0SROBAECLu+Wilixip4oRhGwjoma2D/hJBbkAZAFlJCJICGjoTyBEmaRmOXd4D0ApTiwDiBlVC6xYtPSv1wUwdiidMTysVKx1tTPX1jut4e33bl/155qZzl/7OvPHj/f29zMAVy1r/WCY4cPHzr00InjDz6zBr0+0WplgrXCTxgC8IJr91xaXn7uTN9/4gR928FLazK/r8Y6TTQpFTcHbQB7jl2vmidjlmb1ehfm+juuffrxe1MXuqRi4VSrAmDqwOTQbUzP7/nKx888/Eg7d/SiVx5Ttd7ixXOp4esOzQNYWOoGuZDl4kTtbiWpVggVSNWkFpQDCAEmmUDut1SiBWAadd1GK3iICAEizIqUYu1J2UF/8PffOPHe190xzAOAh56++O3mGNgWxTDPs0qtahIbI/kQIoRUWRFgUlKtJdrGQR4hnpVirSpVU54uNbrtXCQR4rlEda1R1USxFLmvVXSRZwBsOhVIWJB5KCoLWCP1NyEZkfchylifDa1iieQHBcfQSHltvRPyMGVrAPa1JogpUTq1jdDZ8FFZjZGuyKgsK5A4sjw1Ps89uG4i/cVnn/jKo2d7vRyAjvjgv/vtwsU5xafPnvrIn+Z3XFu95qpmJVWyQ9dlrGxG5fQ0W2KBkDUqrRgAzvsbj8684Pq99f+x5lRQIavUDScJhyrbBEAMq9lwwEx5NtSJVYpdiFJWZ0JRracAxBXeBVNJQnBFWFGUxKWNoZJUp0qoamsAYhFjAKsQIzFFIQkiihEFQxessQDmmpP1ZstUW5OTe9uDfCgUKAzygsp+KMBkxHNnGIaFZ21PLy/e//BzSvjVr7l7bXnx7KVVADd9m7qwnDetb9V09HGtN2jW0npqiwL1pKoMALR768RMHJhYcWQi7wMRqhNT/aGLeQBQOOO9I0Z3ozi+2C0iWz3qzZapu0TxErVWOkmlHSdS/cTxi3/18Q9OzTVunms+8cjTAE4/U9HpSYoh1dCKK4lKU0Wp6W0Mvv5QLxuUd2m7fjzvg649cODWvdc/feb8Aw+d7Wf5iQvZ3J49/awLoJlMchVCOaBKmWswEQdwoFIlgM14Y4UICFHp820S1R/4ZqsFYKNWdYWrV2x3ZTnJqxOMdhEvDLM9SgRxROimSjTsvA9FoZUCI8i46156dABEcURLJWBklSQQbVIjUgAYDPrkCgu13gm3mSqHEIlgIFIM+53+yKdeBeuLYTAud0KEBJFZjetqUgDg4CkaQiQBg5kY4qNYH5mhiBQApaESHeGzYiibQUvRSnXMJAdtGgLAIWtSEskJqXFGEEUiRsqGW9qRACKV8o2jUpUtGdoYF3qlfL1i8aDq5IBqtXpscdw9oSoTKYDFSyaFkCJRBMUxRiISRRipE5Q3ChDAUmqLkwABsUQNsGYAyhofYig8Fd4NnXOeiTSTL7xlyrMCQKKZwFLyvmNp7hIwcrbcqem4vbve2sx/K4W/McRBsNVX2ZGXjTb9l0lrbXdPdmzvxznA8yugO55UFjdFyvpjOWKZFmyVPWnH2Uu1qXFtc6w3qcZFRoaUZGzZUf8ce23z1jhSFn+3JhGxLfoLlFK2gm2UCJEQiRAYERIhkVkTBBKZCcwAIusgXN6zlaKxSBiNS8Pl/YpLAETwkYUIpBUrZqVUiDJqY4UQQiDAGG21cr2eiHM+lm5WACCslQ0iBYWCcPNNN1abuzzge+tPP/pgNkJOIEisN2oJVCaRXN7bDF6Cdy7GaI0BYLSOIkEikUQJRVEwU71aDS5AMCy/lWmFmSLEWlvkw9JunBW74MvfGmOM1toFD4hRSikVJcYYmVhE+oPBN/1qXYkrcSWuxH/voUcFxzEYbbRO7kDGARDEsf+LbLlSb+Mit8B82zDAb2oP7mzjjVc62fE6Ir6s/yajNeXycbbLbpcPXj72TZXEbxVbspaCLZWoLbDnuJ44bg9i22Z8PFMaAQ+3dghbK/BWV/H59cctHenLpzqWU7n8vWzrUMaRCfm3UpjcsVmQLfjk+HKV3tyjkuhoklvd0x1p57jyO370srps+ZxtmUkSEEFvVQkl+IiECaChc2miRm3mKDtqjgSW4GVkVVTWHg
kBZlmSl1gmEToaib2VOsqzT8Xo7qjD3sWEADFrOxVrV63Kyu9kziVG7mtXu2ibK1bhZPvPeJoD5Pf0jh2ZOX271EtfqpXuatXIkyqVYyGDvnunHHz0KQGt7fW3JD2/cdrTubVkihBsEElFFz9QDAOcuLrYHw93TWiyrtRvtqSSPxirdG/3FC8vz44ESAQAZU5bmoZJesBTSoRBlep/bilKFdySzEKUKD3NrnFQESZAS8GxMnqQucwC0A8mAJLOU7GEyFwhSykvJpTAoHl+CWEgqxE2eAUVC6l5qK1GtV4oBnL66fGA+oHIQO+0HWtksx771jU4vNeH89OSwsGMLVjZbw3NvOD+edJIwDD/5oSPI3WqnXalndx7aB2Cs5M+e33jjnRebs1PHDu95442zm+9cO2vw+c//8vkrywD69o1mNVxYXOwl7v5jExeurty2r6qDmFy7J4IX37wK4A/+4JnP/fTHD9R9e3MDUTgAl3ePd1+7XG7OaTEb8A0AAQW51ZWx3cbj2tXr+/ZRYp2ySZBb+EGrbwH0s35n3fiaQlRe37gSClEj9HOuTEwkwyEAVw3MoL/eSsarqlSOOHMrK2vtzSQbJpylvcwBqAScZI7RH6uXBVjCMztvrUsMQ5ZLFQBcjsiv2dyboUWpGOJ2MvW2B8Ct8W0Hx1EqgZG4jjzDexA7iMKKjT/QJNwekbYWtLzd1BsduYCNPEhuaRgYcCQkSQnviLlQGcogiOMwTU0+HLYHJnE8XlOV6kTfRG+9twDgymrvJz9zsh4H3hmTp4uLm1Epnj8Qk5Le+iLO2jmvCnYktlDErfkBtuBIbCmARyP/CCkUINzMpC26h0yjaGeM0nBGe46wMrl9B3dMLiBGOcojOLK4MVvgmScmggdpIg2AWReKbBICo/ScLRKn3Dp+gZoxAUzMsGAJeAa5QAsANucokJ+4d0IGYayxtLTe6ydxFCute0NbjMiBVtNjMdPAWCGkF4KTPIeXmTWBEqYQSBqjpUqNBUMpWYoiF0emIjPniJnZo6Blii2uFVHPDsI40FJJmAAaQGpsp58JocFB4A27wCdWBrEUKZFNrcN2C7YwiiQijNBhb4s7uWNytDXHdCIQNIKIiRxGhCMLsFBiqynPYIL3uInkMjysBxwJJ4QnIiYH4ch3k4KqKaMYBuSgmMiyhVVRiDwnZzm3WZICIO/BPgwiTh33+q6flVRIaaJJCK1ZOgAiSe36mpSSJhppxtLaIAw4Z5ZwjgGMPqQkoaQgZXq219qcnW6smKGOxj/5wJ0Adu2ZrcxW1K5apap9q9Nd7LU7a6cu9Svjzd7GZm2PA3DwnuPwXmkhGCYzOqhfOn3t6ltvnPzoh2qT1WQpBXDx1bNTY/W1TePXkunJ+s/99Mcvv/SyLAdjR6dqDQvg/k8dyqoNaVrpIHvpSy9KKT/yQx9Zf+XGuXdfnNgdzAZlAGEckHMBO2gxf9ukYOHkcKysrl2+Ol8pn+8lAGpSVoLo9GunW6s3Dh67LWxOHrlv38bG5Xhm5ic+e7iGCgAuJWxuHNwXHN9z3A82/OJ5U1Zz0/K5Fy/4P/g6gOXWkm4Ek2lr0LqRLi/6ruzOLpZijf6ydv24PA5AVmR/8cba0mqaDF545a31fn+mMXbv/PhmNsyyZHczBiCUnKtmjtl45M7nzlvHznvvOTG+MDUynp0v+iTeMTvP5Nh7WMeuICtI5I5MKIWQME5LIaVY3th47t3TzWp1vb0GYLIexqLsBU03SkJQe2jYI3OwjiqRKocBgPXO0Pm8USkRkbGOhVvZ7CsptJSRVoFSAJQszGspMwTAs7fOZdZ5sHUMILfcT3ygEEjWihneemcZmfUB+R0Rl4IAEsIzbdvibz2Et1aDxapMbtlqbK0jtp4v8DtiUUf88mKbRZF/KZh80d7Z4p0Kdh6eBJzzUkgAu6ani8AmY03xp80QzAV9BEVq9tCmMhe99nDl2lKgdodxg+E0CWKpSCpddMJcZjJ2UDoYsBmkhiAlyV5/UAi/lRJSyaQ3YOYojMNSbFxOxo5srLQCgDyBtQJSB1FuXT3U1VBnaTrsdgcqLgVlACIIqqW4DLZEWlKWO6GDWlQmQCl85J6TADrtXhCFNsuH7I4fOkC6DJAQ1Gm31tZXAbxz5t242oh0wDYpl+Jet93rddfXN3q9npSytbEBANO4VbfqVt2qv0Ml/vO73Kpbdatu1a26VbfqVt2qW3WrbtWtulW36lbdqlt1q27V/xelqOiCwwGFCpmJMQo94w/opLZ71CONNvktbTawFR0z6hpvJals/ZK/mb7NlkYxJyPdUqGrIn9zHyGklIqIPHvvnRz5K+2gX25teDjeIlyOJM4jIsaWwftNN8ktXbTYcZQdbMSiF+6cI6U8+0BqAM5ZZp/nWSmuGmdIgplzmyuptCjoBrC53+JPyiKscXQVW/5Hngp3/kJ3LkDwRQB2QU8UI6V5ESEtvCvIfQCREEyCwc5bAEGgeRQZveV8DxCRLSLwiig6UbApffGOGJtLKYVU2xE0zF4JIYmEFlsklu0koFEP07Mj773jwhWHBZz3cFZRHgoKVACArbWAlAiU7ieZsTDWeoaUYBSSLoIQbHwYSGvJOi8IYaC8tcYZ7yMAUknP8OxjJXNrU+fKUaCInBTekzUAUIqkg08zDhT1Ob+84qaNdcyNajkaecPDOOdYWGfhVBRFnV7PElXiWqkcXV/dBDDMMx1T7sxYpRxXGxwM7r8dZNIk9TMTUWX6GIAgy0KZ790z19lcnT56NM0SpbmXJKW4wSIAQgAkpKZYkWOvve2KUMDmsQ4RRHC+1xsCOPPe9fse2B2G4/c/8rH1a6/ur2+2Kk2hhyqOhJ667UMNAGsb7UvvX/Dt1qd+4YmPJ93xfXMnn3gUPh8k3VDPAjj79Reef+tKKZh67alT776/gMrEs9986ZOP3Pv9P/r9M+9f+vqzbwAItU8zHwp96nK3ZR3/xpfuv2/uK3/4rUc+/tgv/vwvPPXk1wAwK5biz1944/KlMwf3HhiLo/s+fM/55eV/8Es/VY7j3/qDvwDwU48c/7V/8vP/fu+hf/6vfyfQLIf9k8cPK62fP3U6EAJAzoF3niGUlMLnnj1J8fzL7+yZLn3ie7/vS3/5FQBa2UB753Dn9DguJX/111f/3f/8f5/bMB+5Y+8f/sVv/Ksv/O8ATh6qPXHf8edefXnl2vX1G0sP3vvQt5987urFKw8efXxy1+zqygqALOktL2VBzcVaxrl/7tnXS7Xwej8NZVCqMIDM5DNTJRXpzc3NUhaUA+9KmoNwczMZJtAlDWCQ9Ib93uyeA/Nx+frZd6+0z+WKMHRaQ8Ad2rcfwFef+ubi8maj2dy7d9dYFYJjN+hlXCqXapeWz19eWATw6cdPTN67///6m1dm5m0zcrN6cOTYrkS0XGxjKQAEjaBW3X1j2W+sr11ePINIn/jQka//7VtBOFktVfZVIwAvLq4ury5FYXDjand+T+Wjd8/bKJscm82GPVmdee7rLwFYWvHDzAzW1z774x/WzVD0OtztQMU+c7VS
uBwIAEduu1M2K0nnWlCpUHMuXW2FpWq/Wzara9ASACpikG5SqZzrJoJeMMOdtc0SZkIVp9hsD7sABplQ1iatPNg1my6flVEYulK/M7SuUq2Kx+6NASy3M2pEgnSlTEE3JS3W22mopeXs+vJiY0wCaHcyRtyYKDcnKq7TNqUw6vbSlETnhnE9AMMNp2xWDnwixOGZOrUHzqg33h/ccWJWlLDc6gPYVQt0QCmMqmpZKvl+KkDW5dKBZABIAFZHlBsdBkw+M5wHQaBL6Kz119cDR1qGAMgzM0QQJEBZSuOT3Oq1tul3h5P7GgBkJIWoQUVpDhlJZZ10sgupyoGodK/0MgCpbqTT0zEvDa9QthmeP9vd2DiDwPss41ZHNaYB3KabuVDSotXZ2NuMGnPTd94x+7u///U75tTDx4+GygNY2hwK7tx9z7wMp7/958+HikjrtWF+8Vrrmy+9CWAoOSbrMpcl/uAdJ2fGu6vrywcnw7yqReX2J9/4GoCs5//+Lz7uhufTVrvWmNCakzTvUy1NwkZseqkAoDBAoNv5oNRsqMAOV9bI+wEF1mXkslhFAF4507t01p7cl3XXW4NeKsbrmXFZb+g9l4gBmGGa6liHmQtKRsUOLhub20iTdh6WhChXygDybEBKRR6m1Z+fmVQ+z2XojIwpElFYeFD2VaXHU+2hnx1vA2Ww8I6ZPQrLRJIABIRjxwznJcAEKy0skQtMIZnLh92I8j7DE0WUQYpeYqPCv1LcNPCj0VgN5x2NwqMLyezIipGIbMGUZEfOCu8FMzxbx9aruGCyeGIWEp6kXF/rsBB7p/bDurIafuShXQD231ivNTR7KKroSNQqiWOwMSIuKZMnSQogiANvTBAFTGydZxJ+FMUjJVJfJA5ZoXXg2AqCtxyqaDQ7gABk8SEvpNlEwjEXvpTMNDJnuznj2ebxCRp5ShIA73fOimg7dKWY85AAs2EoKWoohn9WQMIogZyQRiD3nEOlcFnhoGIhQkVakiByzjnlyTnhrYxLWbsHYJgMo0hNjvlSVfV62fRsbHJion6SemeTvAgT02EpkP18mGRW+0oIYWXirWP0hkkUBQAqlcrqepcgfCBBSikBloFuDCFsPlDQAFQgreeUTSCEd0Z69pa9ZyifUg6AhHv73XO9J+7RwaQSqbN9GeVFfogBsfQA2CsIb2GJvBCQ5IAAXngK7NA8/9ZVAIemS8Y6KQTYJyZvlmocukBqgbHMDcYPzQDQlCDLc3KShINUWgu2xuYCxlsLQAdl2FSrkB2D4PxAwMExEAki2+0AIFK5qGlDg2EeV6qBrJrWsmDBsfBuzCbrAERcspAq6eYde/lK5+D+CWM6QE666XpWDFoAUmghA3Zshlanm6Ia56xlQFr43nAAoERQXPYuFD43VXvlysbm0tKHP/WRiXKSW1uqVwAsnnlx77EZsakaEerNuo31laXrS+9fu+OBKSvLM5N1AD3OmjJiqxLTiqJy7+rK0//nlx/40NFKc9aQ6ycpgCjQcW3e+Yq20ftn3n7s537+rRdfX+t2Hr5ryqs2gMHgUnf5zOd+9oSPGsMra0/+zavf/OOvjO+b79joiR9//O2nvwwgDuX+Y7ebuL50oz053lQibLeToN4cc+3S/OwMpQACJTvZ5vE7blsaHnjlG8/Wg/MnPnz7+JypcqxuKNYGAGuRh6am80DU/uyP3370c/fM3TO+b3p590P3v/TXSwA+9emPBPfPiXRJyFqlNDcWlM+f7rmFxVOv9mca/PBDADDoxGdOvTtcWRdhsLy2NhaoI3MTsxNzresrV66v7burDkApsiHluSXm1LosJ8/IrZcQKOn11rCYxMPT0KREyrIIle6njnzqnM+cA6CVAof9NPcUDJgVSSXCzqBz6tyVAFyrlAHcf9fhmXpzuhL1hsnQJJ2NbtCoaRVd67b2zB1bvH4VQK9vGs2Sh8xzF5VV15auri5HoTg4Xdcyzs0QQMckcSiCQLMskZDOwfo0KyxNRZEtaYQSE+PN9Y12va6HTlxf26zVYjbcytKpSgggiGJjRClWCuQJ1uRCAoAWqp8lYSAAVKNSJ03Ysx4lR0kxoq7TdniUECPHXZBH4a3ABe9yxIwu1iWOWXoXhUoNTUZek/AsQJxZAyAKlCftWEhp+8NBmqVxHOTGCQgpVZZZAKEm77k+PhZWoloUVAU7C0cE7STIpIXuXnTZcW7zzC5dXSTjGnt26SBygzw3OQADNRhmQpYcvCQOoyhv9YXSxJQNB8KPA1A6tr7rpdMwnlSSuG5uDu67rdkcu3b9arksAFy6snHb4aPjlUbKVsHpONRSAewYcDQ9MQNgdmqWwcZxnudCqigMQcxsVKjuu/vu4gmc9NcSkoLEvbcfN47jIIpCGZBiGSwtLQHAcdyqW3WrbtXfoVIjwG6E5I20LDcly9+d2rhDt/3dCuud2F+xQTfhvuJ7ge1Z8+g3uDB2HGWubMuetoRPW1Prm6/yHa9KdPM8aWvnkRZ6JIn6QAD4zpPdTo1hQAghpSQijPJaIKWQkEVoQiilY++ZtdBgst4AkKRuHm5LlrCF2bL/AJ47EnkTqBiYQd+hb+fRRW/fhZHia1vUIJjhvStsKQURkRBiR/oQfeAuFfsAgPdEVFg6CiGFIElwfpQJeVNUt7UdBFoLCe+JvRQMsBcsBYHIOxhrAZASoQ6UEmlu+oPUek4z4wE9SvmG1AoMVkgLZQjgPCR7UUz5PQAM8yyQOpRys5sEgdBSChJpZgnseGQybbwgARKCxOiurrXyTq8zNWYmm3EYFhk+jr2Lw2iQZmaYhnFckdRPBgKmXgkAVHPZSYxzPsucd10yPF6JkathyTQnJgIZA5BhaPJNoSNnjBIMoT1lWoHRH2ysVZpNAFIGgOilRisRqlIvhyR9x4n9Z86fdWNxqTwO4NM/8qAXk2FYDai3dP381Uvv83Q/mJlXPlI+aZ25CuCv/vD1+x5/4shnfiBWZP0wHdqNzqBWwVJrebxZATCx6+DeaXr8oYd/88oXa3vKZ0+91dR47KGHH7xv/8Tk1NETDwL4rd/+HZYs47LIUw/XPLDvH/6LL9x57I9/4wu/dX559b03TwHoS1mJI2nyhZU06Z3+xV/6+5VK2Hm9O7+X53arf/K5zwCwSesf/uN/9vkvfP7pb9/50ounfvlXfsksXT90+OBb758f8rZn1sj1jUGOybLYf+LYieMnTr36XG4cgFjyoT2T3/PRj145s/TrX/gf3zmbQeLkrPqZn/qJM6cXX3nzFQC/9Wv/9KWXX50uz/3pc88+9thR9iZx5vTp935nfbU/aJ/67f8DwMd/6InahYsLFzbvPzE3fceRxc03bvTSzFMUIkkLSywflNTM5ASm/JVrV6tQ99x5x+X19pXFSxToXjcBEEgphL/83vmAkDm6bbf60R/+mcxkrzz/9bI0n/7kEwBOvfXu229/dfdMebZZPndx8fqljTzvPfLYgfWFy81QHX/gEIBWb/Po7Qd+7hceOvP+0szseK+TnruwND3VbA9NaW4aAE0kseSF1xbeudSfrUb
r7WR6ONw1U37j1IWpsfJGKwUwOVbZNzdVrYx/8/XVg3O79h065mT/7PXO5feXjLpwYHLYaQLpAAAgAElEQVQSwL13Hg1V9KVvPfv+1TV/OXWDTDDWVm40JqrLm0msQwCn37mRuPybL1374c/MO54yNBWVS4ExpjZjTQZA12NnMmqW5dgkalXh0nKjhlz47uZYc3Y47QDQ0L/x2qX6ayt33r53cs/R1mCwtjyIy5Xm/FR/caksAKBrxs9dXFm43hYhsfPsXSkMVKBKQg4HWZJZAGeutG8/ODkx1bi+1t41W4ryeJBH1nIbUVSNAQzNBmvZ73UaczP52trKRrK22l3eyD85W3fDhammBMDlyDLLWMsoMi4zwgWhygcu1p6HQw8NgGAdQwrntNXNckmZdHNpuJlEuRZB2ZsMAGshI6E8WQOKpHSIwqAcq4x4crwEwA83BCXZMCOlYMFeSh1E5VAYdKyJIw1g9UrHl3cNltM3X31jvGx33TlxW3mGlbXX20lP/94XXwRw9Wryb/7lT2NXMyr5d95eGGutrVydjCvVI8f3RiVlWx0AG+sDKaPdtVplqn7owOTrp27smZ386CdO/O6//Q9HjhwEUBbo5z4mihTefP2tx3/uiXwwe6O/oXRt2BEvPHUKwCc+c++4ueIHg2oQUactS3Su03/3XG+mXDM0LKDGNPe5YyVgNvt75qedFzE8e1GWYWZhrAZw5wP3trpnM10xeTbVGCvVamaYwMLlrl4pAbCZkUEwOTuVpV5JLSJdDcaCqBSVyqZFUSABkFJJYhUFWpUAErLExumQVFSBVpGSABCNKd1xxiuhc+dZAMIXI/B2lhqDtQhYeob03kIorYWlgGQklQSQZ6Zo2GE0Xjil5AeHqJ313Zl0o8nG1ni+9S9uTip0KJFbAD4dIBtmmQkCOnhgKjWOOSUpSbjKWAxgXo0R5SY3SqtOq10fq4RxiaR0wz4zK6UAOGMhpXeFNNGz9yxE4bZMWm0pG1kQrGMmhhAQzEyMwtpl1DgsHEeYPWE7vkZsmUJvR9lga+M7nFd25vnRBze2p14SVPj3hgRTpNIQHEMxKSIFL5lUkcgsA83OOW9YWNYQI92x4GGWpwZAqRxpGQQxkY5rcTlMs+VrXQES7Ccb5cWVPoBWJxlmLstcOVDlUDFExr4SBFoIdi4MNIB+fxgqts6mOU+NT3aTLLMutU4qJSSx4+Jpz0V6BsDek4B13iMrQUehAuAwuLjS+5V/9xefeODEw0dn55rT5NPhoA92Eacy6wMgFbP3Eg5wxPBSeigiBRlkggYJAWg2Sy7LTQYQqpFyvlwJtSDV6tHZ661P3D4LYDhIwoCU9UIqKeAz67xXEkTaUaHeJ+dIErwlQVpAwZPJLEkvpBRFW926s5evx+Ab6909U5nWyeZ62xIGxlvTKuJupGo/MncUkO1Ot9NO8jQNyyWTeJvnShvOUwBClqQ3cCScZ+td7lgpYZXXPsgYQN8GUQyd9n3c6K2oquKJ6SRfuzbstUqBKNDevYdmz796bn8vac4fWDh/bdfsLJfwC//Np+PdB//Xf/P79x37FICgGtokkZ7ictl6EYX0yIHmsYfvklGS9tZndk0DcIj/9o/feu3t1s9+5pPffPr9q4O/WLy20rp+dfyHP1yRQwCkWns/dWLY5c3LF9avrP7Qf/09niqm4379X/zeXw1aJ4+UAdSbs4PV/MXnn/7wAwfTvCLAC69dqO2aOHhoshPL3cf2AjjzjffPvX/t3kfund839YmPPySMT5Rs99qc9r716sKjn7gdgG3qiCnpY/FSa72bT0R06vR7s3efrCz2e/k7AGzcLylXnpmf3DWfL13MoSYm+tdbMz/8Kwedb7nMA1C96GtvXqywXOvmd997KO11mjPVNxdOKwqnm8HllQ6A+amKZ++sT6xznos/XS1kbn3WNdILADrQjuXKZnt+ZroWRkNjFbUsQ0nlIACkxnnrmCG16q0tO8tM4eT0dNTg7sZqLxkAuLy4CIixuJmz83C9POfuQAeBlLTZWU1d8VcZN+tloQPrNFiXtBqvqMTalc6wUaWcewCyFIOUpsbGQ829ZFiKdb0arHc4NxgrEQBrUY0lO4pDTYzOIIkCwbCpsUIhZQfAplmjUs6c9VKwBxRAlGbOSl8tB8VscLOfOe/HytoYVxBbIFiAPZi2ntI7Oi1brBYCMaQYOeqqwj9YkGASYmQvSYziCMUKzRMLYg9P7AUD3sPjZhIOF7inUkJGUoZhWFLkvXHwJAR5Nt4qKQE4ElWpr68tQ8eN8XFiaSHZ+kDrPEsAKPJsrRJ6fXU1SypeUGEtQwwIdIcDAIZko15fz/Kl9Y1avRnFqlaK+4N+OQ6T4fD68jKALMmF4yxNhBKCPARvr/loZIY8eqBXyyUXxySllLLYpVouy5lZAIFWiXOKhLMe3pVqjeX1ldyYYZY4ayaa47hVt+pW3aq/a6V4i0C4ZffEO2HG70AjbyZEA9+B7+1IcKatCOoRyW/ngsKPjBe3YUIGWDCTkFs0BxIkBEQxDd0etj6Amm2/EEYoCdHO+feOk932ldzymGJ88JJ25IMLKSAgSFhvvXcA2FkhpWBmdkKIgjUqAMfes0NBzCSx7WW5fQLM7G5ikVuvTwU7krxlgt9ym995Pn5H7g/dhGULFqcHM7z3zJ4ITIIEBMEVyxgAgBDwo4VY4eVccC+9IFEs+YSgItuO+eZK5ua7OnpfC6CS/ci9y0vBSgsp5DBxkkbTLKmE9zwYmiQ1xrMglGMdR9EIkCZkWZYbQ0DBdbXOEpEUAt4b4wEEGoatMbYS6TgIm7VSbi0Rcus9+yK+0DEFQighiMgxKxaZ4SQzue0lJpsaiwBUSyIztj9kqcwgyWqlsiZJsFnmRGHh71kpVa0Em5vDmfFAafbW7j/YGLKLKlGW5QBKpbrx/c3l1VpFxcJlkMZi4fTC0ZNHeoPO+uYqgD0HDq5e3xib2ZcZCPZry5v18WlhhirzkoLpfXcC0GpXP9dp3vWUeOvTttFxLg/UNFXNJl744qsAHvrIw/Mf+Uys0ck2laxF5agifZZcqZZ0IwwBbPSDs++vL1/9Vtrpkqe4XqZOvr78/pWz9GdffuZX/+kXAHzpi3+0sdHrDRIEYTOWX/+rb/xatf6jP/gpNOqvv3rKKAIQEfWtYeuhKInpxZfe+vGf/IHV5fQb33p9/vD4iTuPAPjy3z61du6lZ55+xiQDBl548TWd9G90OqXADzIx+iAWVF8WIHIsMi9d2FwZtF5buPG5z34fgKWzZ+87eqA7HP72v/wTx+LjDx584tEPbeZm7fI7f/MXf/1DH/sQgGAqbA3yI3uONiaa+/d9yNrs4/cdi+b2v/P8s72h6WQOwPmF672+KZfCbu521SoBY5hzPdKdnil8ISMlWq2+t/ajD90VsT97fjEKw+4g7VtfC0cfZO/9TAin8clPPLR79+HXX/zLM6deC+OwEYrF5Wywvgrg9IXT/dzHpVgYVy/Rw3cepzDzG+mFy72jR/dfXU0ArC6vBNHszEzcatfj+vTbCx
daw+yOh++cfO3a4soagA/HFb8ycJvJsbnq8dsO/u2zb6Xej++ZyFN/YXmpby2AB+46EJajN89c6fS6r72/kLq18blGrVT/0nMXmhOV4CMnAFw5N6z5zovvrJy52KkEPLAcV+O0M2ysDgd5sfzEi28+f70/FF69fnpjOV/IesNqXa5vdKphJUkBoFwnQhDWg3DSKagoqOVOu/V2udzrrrTX2grAHbefGLyy/NVn1q+7Mw987H4ZystvvxuVQxU0ltfG/TIBqMXl599f6qSmJBSD1vv5dGNsenZ3Z3Pz3UsbufMAUmNXWu5DpWnO47feub57hkNqVMvloNlsTiUAZufzfdONqTjA7tnVjY5o1mYbIUWbLYNKpRZPNwBAxYH0Fj5zHFUillYGQblOfGNpiH6gAwAiDHUYwFsNRqXCm3na6sakdSA4z4SOACTeB14KyksSIA0K04xaPTz79sree44DEEFGQBhpk0M6ThOkw6Q6FYhekmR5YXifeqNF+Z03h2eW+b/8B3eJ7iang9bqomll0/uP//zPPgLgV/+7P1u/3Np/aPfVhSvvvbMxPDY20R7umqlXguqVS9dPXVwDoHRoc+vQHrPm3fPXx6KoMVZ5/P6Tf/ynXw3KMYDMEw1dHigiWl5a//xvfVE5OzER7J+bevr1bxeZwz/yg3cMhgs+LXnhS0qlaTa2+1h0ZbONsWoQqmwZAOmAmFSeC5KChZBawsHp7nBYjiv91ANY6+ddkhvDdEKJUrWSZdZ5coLCUMdRCMBmGbSyHKiyllFEGpxHmZc5ByqoDm0hEUBOvm8i26a852OJlKX1G5J7xvvM5ABEPd3c6A56Gq7mYSElSO4wR9xuUlpJ5MBCkIdjVg7kSQutAeSZKbzIJIGEcGkSBKVinN1RO2cdH+x93uyBbmUd85bQg0fWyM7kzA6AkIKFUFpaa1QofZI7z1qVrM2KPLVSWbAfAg5QUmsdBqQDoOik8SgzxzlYz9oX6ctMHt6DQIKslYADINj7nDx7CBIE53NAA7SdsoMRdFqM4cEW7ZF2bOA/Achu/2QbhcTWPG40/eLRwl+BIgAEzxAE4zkHPMh5WJCFyIGcyQEgmbJIHVKWKUnAO87YWe8MgjgAECgNlkBgk2x5udvtmDhQjfHquFSDxFrjANRqJR0G1qPVSbRWk5EiYS+vDSthqKTI0hRAXAp6vT4xB4FQQbC+0bL9zTyzUSOWSrrMbt8ZMbK09pkzigLvnHGiFJUA1Gul/sb6qYX1C5ee+cvJ2r65yd1T9b2TY0d3N6fHauWJOQDshoIt+xQ2ZZf73BXzqlxlcRQW8pR2386MT3RT3WhOnDq3+OSrZ8hj0DU/9PjdKg6L3PNYBmxzw0awV14QIJQECeMcnAPAMiCW3jBYghmkvPNJwmFJWAgpNACbi2ur/YmStAZvLXT3TKnllk9s1hyrp/BLmzkA5f1D7eFGvzXMHJPodxMtAqkjD+I8cb02ABczUUqkhWOQBulAlwilpN2KwzoA6zgUqr14KYjTl15YCeDuvac+uHFtrF6lLJ2wAwCKMR5wbXaiR3JmcqqXOw6dC/q9y68/+tCMroQAyjLObM7SwYj2ZmtMh5LktRfeT/wg1FGGGgAa8uJCXqJoIKPHvufBU2eWm5H58X/2g3rtxpyqAFjOtawevv70kzNHD8/s2ru60XnlW88f2bP3kw8e3T0/3phJAPhyrALdjIk3e1FYuXbp+uVz6/rtxeTufXp+ftb3AczPzJQG/Y0LF8qttfPX+uP75nc9dPd4qbP65ump3eOlud0ABHpnz19Ug+zQwRONiT259vVSpOOqyoaHygBw5sXTd1Ub3/6zZ8Rmdt/ttVQBk3NRvTSQ/XhXM1keAmiOT8QT9TdeuWIt792/txEFWlV67eWJqto9Ud7oGwBZ6oVGoBRJ8sWThoQg2UvNRHnipdMLADY660pE7WRYq9p2r9sbDEBppFUUhmEoAUQBWt3Beq8H7a2HsT7NU93pSuHygQkCAcCClOIkz/rDtDPs5M4bn5nU1aLgzOLKnulJAI3KWKhd5hyBNzfb3rorm/2xcuCyTEodRCGAakS9YTocDkVNW+82e2mWGyEkETLHAJSm7iBba63FIe2OVbEIyjIniBjUSRyAspYgYbxPnA+E8OBSIIWEJ7J+ZHNfjhQzpbnTcivbiagwkd/GHpkBurlm3KZrCLG1vNwZwTn6dsT4EGBJI4qlHzErnCB21rMnScJ7TwQPByDPOVCBZBYEtib1hiUxKCZh2Rfeu0qFlFl4sOe4VGLIQOvBYEDOFvHcWoiELXk3Mzlt4QdZKhjOGOc8QEoKAFma9chFUUiCTJ5dWl0pR2qs3kjybHpivJf0AehAg8h674z1kqJiKCiWxFuZBIJGMaBCCAK8K94F1joolWIASkoBViStcQQOoyhQkr0PA93rtseajf/UYHGrbtWtulX/vy1V4IIFJfA/RiG8WTvzW/5f62aU9PZseXt+TBBipA8n2tJTj+THW9iY2NpkQOwIqNkevD74+jdxwG1UbpSm+Z17E1DkK+441R2VO1vAXgJEBRgGkp6lEILYwikSgDDsQEJLDcAWu9EOTHfruDtwRqbtoabAIOG3bvqOX7i592gNhS2e6o7rKgTXvIVUQhD8DhfpbcJl8V9SCu88gwW4YGsoUajj/ai9yFti+a3bA0BBSMADlr33XgonCYEOBCCpYKwgUBpg57yxTggp2EaBDsMgCIIiis4Ym+fGWEiCICellFIIQcUQW9wMLVW9XOoPBtVySZKYHCuvtgeevfPMjIInI0nEQRBqbbw3zjrvISiQgOBWL+8PcwATY1El0ky2HuuklXmbmwidgU8z560BEEZyo2P272p4ot2H57rDluTM5JuKpDUdIVMA3cFQy3b/+tnM9NXR46muyJA6rcG50zcm9kyee+1dAHsPnpzdPWf94L13LzbqUbMqSmHsrLvtjjsDPb40BICxcsnBOW47d6O5f6K1WjWyPBbYS98+//RTF773Jz8GYP6uo+ut8xthZbKisuFAReRcanyvEjp7bRPAv/29L08355/+5st3Hb3j7XefSTMONP7Dn379+KE3Lry//j/8q88DqJaDa0tGRqWMZSZEZsxv/v4fTcwc2H3i3rOXv1I052cPHCD40+9eJHCzWnf1xq/+yr96+JEP7zt4cG7X2IXn1wDsP3Hk3HPf3hya9SsrdaFOvfCSlviv7vvJ15p1vtYBAFV8srjg8XgSmce5d9586bXhE48+dvc99wB49q+/cuPiQifH/XftOrZ/7p6Td01V3I23XvvaV9/9b3/tv7/jZAzghee+3Yzo/QuvjE/Pr7c2zr3xxtzRvefOnVnqd1MRFIG86zeWj508sbx40XNw/vL1XpKCdOL0kD1lDkCk6ciB2dDZ199+b8/MgQce3P8nX3s+dWZqrDpe1WluAQiPe4/vlc4c2du4eOV0lqs8ay1c2Jibn7/ztpm/ffpbADq9zm2Hpx++957ZJm0Mlnbfc8frz7yIfPP0VXP73Xt7N64CuHQtv7pododj2g9fO
XXthZeuddbSTrezu1n55pdeAvB9j3/v2hV/7MhtC5fag8yl7Fdb3Ua9tn/3zJ985YXu5gDAPXfh7YvXLy0NltoDXS7t3bP3wPyB05eusRaw4kt//TSAtfYgjpQSBCDPReL4wKHdpl7OzbDbHtxz4h4A77x36f133itr/G9/9JTwiDw8IECBFkWabaCFN16AIMCOpSShVZKbUJMUSHMGUK0+I70Twr/28sU/+d1ngyjobnQ5Q3mi0mkNvGUA9WoZivMsJwXHjA2s3ei/c/qay3N2XITVCs9fffL0y9++OFUrLyxvanFmphHPzY9HVbpyrQ3AG7d4pv/V5xfLNZ0P08mpWqfrkr4Zq+tq7Bv1FQAkdeacCqR1HAVaMLQkY13ayRyz1hKACjUJWOMEc3uYra1kJw817rh9YnIqYgx1XAewstjbPdtgapOIhoOcWJEuD/NspZ2gXAYw7KXCBc4IzphyYTp87eLm/nlpN5ab1WDqzgkA07vHLn7xa7/5G0999nOP2V7HXu+b9bbmgc+xdP1KPH8HgM88fuev/95f/urf+9g3Xz4f1cKJRtyL4guvX7vz2OGVDT8zXgPw1IvXxsejtd7y9Fzt8Qc/9OtvfPX2mYnp2nCqESyevwDg4JEDixcudgbuxMnjD3/6wf/pX//7SplvP3FwzaRPvrb4y//4+wHceXTm9Nlrk5GfrFY3Wz1EUc51V4naOpplm1MNQBSV2DpIp6tVu7oRVptais4gCSp1z8p7C8AMGAjJy3q9NLR6ZXXDecpN6PrG+2w0cBF1+qtBELJ1BpzX915Z7b59Zom7y0V2QZ5lSkktA5RDWZaaLUVxFJiQfFQpRToAgPD/Ye9Ng21Lz/Kw55vWtKez9z7zOXc4d+zu2923u9WDWq2hWwhJIIQgCIyCkcGQOPnjxKRip8qUkyJlE1dSqVDEGLDBGGMMko0tJASaUbdaPc997+07D2ee9rzGb3jzY+197hXK8DdU9Vun6kxrf2uvdc76vu993ud5nzAKQk8FpnDwHABwx0o69VimByJOJlZCCiYIzoEY87kD9z0mBQCjrRsrNMgRccHFuF8M3SF7+KtRLmPjsuNkzaXbpx6DkuOVlE20zZyBC8aFAIe11cgXsgQZKU9TAJ7vOec4J5Cr1irGWJ0mUgVCCmM0nAHAQc46WDdGAJkj6xgnWKYtPEYABGfWQHAmGcCctZYLUVZRaVJaPdgH8ckSXC7M7kBcUl7duDrriDhj7vsaf9PBhuGOmwCAMUzsbhhj8BgzzmUER2Q5MwQLZsB1CUc6SjgyJmIiASVJG0POasPBw1oIII+LfidOR9rngjtang29IDDGjUZJblj52BaFDT1vZqoyGOVZYTmoWQuv78VJrgNP1iohgEyFjVyvbw1afqVen4rTYW4Kq42QTHoSowIAEWNCcFYyq0gI6YqCAUwgzXMAsCKs+b7nhBSrg+G1vUFdMubgCAuLzRMriwCOLbfb1ehQq77UXKpVlVLkYB1cKGFGSRj6ALb72a399fZ0+5tX1t9c2xtmWX8/XtvKPvoE8zyVjDIAPgdjQqEgZ6xD4XjocQgwCKEkANKWMQIcK/eZ1lrjtHa7WyPGuSkcgOVDLe3Y2n5aC2S71TRhVVTYOxdu7l/qZ7o4vTAHYLEls4x0SsZQEMJX3GkwqawzymN5ngMQniHmmASIQSruhSyouiL3Cw1BACJPuExTbuVg/Z4KhVM131q9mzsdxY0ZpS2AnTev1UPKXCZFFBCGu92VY0uBdLmgU++9F7IG4Np3ri3eO2VqGK7ubK5thidXpk6e+rPf+NoDj52+PNI3NtcA+MSe+pGP/cbv/sn6c8995APHH3rqaP/mrdHr56YELuc5gM1+mu1/HWwQOH3l1bfqrcYPPHEsatfnDqO5PLO+vwegIv3qbPs9P/zBnXd2//gPX52ba37ysz85uPzq1hs3djp6yxMAHnu02Tx2JLq5Wqv4vUvXPv/v3vzMz+498ORZSbhv5dCzf/YCgNNnV9omaMxV9pL08lq32s8f+qmHtm6uJVfWHrxnHkA2ezdXM9u3MtHXqxXdaqtjj8wuKvW533v20z/1wfahGgBw0Zpt5/YKY/Tdl99uR+HlcDftxtvB4OTxVljhAGJdUFE+YSQYy7RLC6ctpbmtNcOk0AC29pIosF6kNvY7CqzTHQjlatWgLasliS/Pi0w7ckpr4eArXyqf8izOirwiRWEYgMz41Up7Z5hGIghURgLEkRc6zmwSW+XVADAZ7vS2B1lWq+rt/dH2zn4YiDRHva7qtUY5K3BXWJfs9AfMC8BcnOXW0myzCofOMAaQG+N73DgTF+7Gts1zWwmk70lyCAJV5AUAzw8NKc9XTjvGpdZZXNgk01Xf99Q4y9EGkpXGURhL31hp8XkbYSxJk3dWUsYkCBpTNHhpVOlKDoabdLgaH8l5+ZUDgY9FY85Za7VVSpEjcs5ZA8BpwwNfQDmyxhmCDWXAOBfEmBJlDytjnNEuDIKMeGGIcyMdI6sZg9EGgHGuEtUGaZ5b7UcR14UuLCNIsFDwktRcn5piHNZY61w19EzgEWx/OPCMWZpuxDkAFMZ1Br2Z5rSUwjo9XprGzmyTdJkzEEr2BoFxwYWUjkgbzSUHYA0RZ8Q558xT0vM9JaUQHHCD3v4d2fe78W68G+/GX5uQY6Vzaek43g0feEHfnte+x0t6fNz3zHqTbGEMC05UyXTHNHtwGPGJiLokLkx2zRMd9xioHA/1V4yhJ1xDdsfp7jj/5O1+f+by/zlJG62ZlEJwfhvMJAcHx0Lf487SWGTFJOecSQCCo7B6PP7tXAIA3IS0OCZmlFxPmnAWGI3V3XfSPiepRvlNyV88uESCY0DJ9BijleOS7AFflCbY5filgnNGcM5yBl4ORMTKTT7cHW+D7szfnGPgxGEkHDHL4RgYIwuCnNhvGmOdJc4ZF5zIKSbAWKG1sVabMW8UgC+ZsWQcce7KfqHOWXJOOAlACBl4XppmnPFRmt/aGYzSXAme5pqIlR1kpBRVSN8LhHPSmaJI4YgzJjgT7KBXp7DgDHYUYzAkoiwM3PWN2DnypQBwaqniEbl41GgoNDB9bDHt7MiRdtpZfyjYNoBie9tD/1gtTXKQZFEVO1sbj7z33qja3k/TubklAErVMxdWpdg4d/PwUw8Od66GobDG7neHzB82pk4BEHq/0AXYfjra76zvtMOphZMnv/tHr37pS5d+4r/4dPP0GQBx0s2dWYiW4kwkGjVPZ+l+Fm+K0forX7oAYPXSjavZ9XQvO0dPr+9q7jNtUWmEsrY82463Nq8AyBOKIt8yZbO80AwyaAnzP/3qrypGh2brye4AwOattf1eEgleWJiM/9iPfmZvr8+L4u2vPNs5evJKLwQwy9W1rd5zv/GHgVBGEGP88QcOZ939eGtQ/hEZMZQCezaeLSwRWbfgq5vnXv7dV54FYBwFPqv7+Pu/9LO+ML/3z/9tuxZspW5ueWF5LkkoAODbGC3v9NmH3rj8zS/86Td++b/92d///BevrnYOH5/fWeu0TpwCcOb0
ka/8xTPkMlmr7L19KRVwaZFoxz1RPluZtp/42Mf2tjuZxZ9+5RuN6fl+UsxNyaNHZ7a2V5cX2gDed9/dUeA//e1nXn3htdSxe848vLd3RcB2uqPhoFtrNwA8eM+JcxevvHP1ZqM+Q+S/88qFC7d2OjW5u5X+2z/41qXNIYD91e3ADx57ODt6ovY7v/fczlbcPtSodbpaZ7n0Adxcr/Yudi0Lrq32rlzrnbverfvDu4/E+XD01Nn3/ObnvwRgvdNP83gvjlNNy+35ijf7hW+9+hfffePkoen5malsUwM4e3hxP9f5dnLy2KFePw6MvnZj59B8I8tMnpj7HnkUQBrV9LrRxfUAACAASURBVAvnc6AS8Ho1bASsEqiQs8y6LCsAVCo+iDEizgVZx5UUhSVYGYgsdZwDgHauEarCkWKsyAyBLxyaFZyMw0zVC6oRADhdCbxBbpTHyfFGTQrO00T3h7mvWLNeAbC22WvUfDibO3P3sZaU4bA36HX66LKSbVSbqmdDE1IR2SAbJANKbqwnzZqoVJTKRG+UA2BCk6U4N52hWWgHVrtMGwHmOMsLV60qAIxSAhnr4Gh9N240p7796m4YsrnZhdjoElzKbdDdK+ZmtU3yYWJ8L9re7Q/2iuXWTDYaAUg7Q5mM/KiaxzoudK2xFHf2dJRXZVX7VjYMAIW0oitHluavbwzzEfMdt0JmzpuaDbc7Rd7ZBfCJH3rPZrf4rX934cMPHSebLs+3Hjl6dH2NPf3GW8rLHllZAfDwI+LKZodpe3hm6VI/0sB0Pbq0devTn/7wf/rTFwH0ez0RiCwxD957RrUPZ9ZJxnv94cU3rpyeCT/1g3cDEFwEtZP1uubEmlNRPxlGXsO5bq1eLZLrCaYBjAYu1VoyhtglRfXazVxzPorZXr93fHa610sBXFzfW9+Mryh3adBf3S+MtuDwfGWN9QMFQEmhJJ9u+srz/YBVPZlVGrV6Y3Z+NmqyhdY0gEG/o7WpVquVhbZqKEVFSjxOBqEKhK96vQEA4/nK95XnCcGtS8AZyAIOZRu/CcSmk13SLrce1w5RqJoeA4NkjHMAzDmAkXOMkwVJJUvSGcAAN2mzeDvGvtrjwd0dNMkxCnnAkJzsakjwcXWzyIwkEY+K0JdKMe55THBbGBmEEBwA44LDlKipLQrOBRh3RpMpiBzHRBshLDOO0Vj2wUGOOAdxKM8jAEJw7bTHFcsyJwUJATJlQn37QthY0s5gAAClXS1xxt1ErM1KkeIYlCz3ROOa43iQieKRxoXMA5FKuf2QABg4MUXkOBeAI3IE42A4LDEzLnZCkvQIEkw6HlNoXahhiGujdQbAD9XMbLWoaMm4M4xxYXPt1SrDuGcMQn8MR9bbUVpYTwkpRJY532eLzeqlGzs0xiXQqjdckqRxsdCeEZ6vh4PA9wsDIivkhCIFBsb4+KKIO+EAxZRkTOscgC1ISYChr7VjTPqMwsAPI2eKzd7w2nfeBhBrHkk3FbHpRrg8Wzu2NHfqyMr8zIIfCaal81sArHTfPX+tspE9/86aFwgVNilgUYUyLhSRLwQAY7iHvNDCU4IBHhd5AUHGOQhPATAml5LIWsY8WE1OM+akYkVMwqGXWAAzhseZK7TtJmZ2dmbt+n5/mN3oDXf3cu3MmZXDALQzTqNRq8b7g2olVMqXUeDgGOOkTclRFYHPnSOl4IgEN2Ae4PTA5IUXWABQocVU6C/TcKPZtDx0FLaN1wQX6xcHPAsA6HQ6pB7vFEcfO3Ph+W/y/d7Cyfs5p72dvri2vpcUAP78j575pf/h4+Fs5C3NNSu+0DbmwUtvdVfu9T/xiz+5v70G4OJXvn7/3Y1f+rknb7z61vZWZ3G2At+LRmmlVT++MANgetnvXt+cOVztd7odnbGwJXjRu3HegASr9B0ADPeH9aUZ8sM3b+1v9bLHHmwXaWfA5OKDy3cfn6fZWQCwfO+1vl+bykfJ+07OnF1uVHxXPP30DGfgtXQvA/CXn3v5E5963Bad2ZU5ocI/+rPnVXM1793a7Q8eOjINYP/i+edefLq7mT15avrso8tmXjLRjWR1sJp+43Mv/uhnDwEw3hxy8nLidX9jcyQbemXpkI1YN9YFG5MQQiYtc2UJpZ/oODXWUT8tOoO42Ng3xgLgnDuGTJNOM85gwUwupGKdQVpYC0BrQw7GcueU05mFdQRyzDllhEzSBMD1m1vajHzO5mu1xSlPElzBiqQYjNzuYHDGAsDq3v7Gzi64HeS2P9JxWtRq9WatIQTf6iSSWwDksqwwnJi1jnFUo6Ao3N6gKApTktAF58YyxpBplxfaV9yBCy65gOTCUwoAMcSFMVYHyusneZHnwyyJfNmqRt0kYcYBUIKqAVNSOLjxRE1l16kDvjekuCNlO7CqZ2ULCypn8fLTmC9BY5HdWFE3fvEY+jdGW2utsUWhORfWWjCy1gCgIi2KQPuSMXJwAkxyIZmytlC+zDMNIDcaFsRgrAGXRa65NqUQzTkCkGtdqdRDwnAUyyCUQsgwtMZBF4xLU/a5J1eJqv00do6csyDr+SoviiTLFRW+4gB838tGceJHjWpkdOECS8RB7I75fDzdO1DpxyA4V0oaY2ksVivzwIPsmllr4iyx1u51O3Nz81la4N14N96Nd+OvW0hg3F54Mr3hoL7+vWAZgDt4juPf33nwHZPp5APjro63p1pGjhG7cxzG+AF/EGOB9tj8ZQzmHfSeugOZu91nEhPkEgfgXZm+3GYN3kmT5N+jTP6e5phSSsUVB5OcuxKI4czCOjjjiEEYMpocYwJgpgTLuCJ7x+z/vdLrye0Y5x50e3UtEcPJ5/LGjMXr476KZV2RwNzkLfIx24KXNNHydpEjzhwdSNrHl1RyJ4kxzjgEOGeYEE1KWdhYN0EOVJ7hDui50AUjLphjsIITEYxFnheMoKQohQnOUZplnqf4RMsOwBhLsHfkERCCczhi4JwzzojIgUAYc0/BeqM00UV3mHBgdacrhQiULLQrUyUAxFGJXINxpYTHRNVXzlprTaq1Y/BAABRnSvCL13u54f1R7ilM1zFKbbsWzLUVgLsOR0GtYinVVWFrxrbD3Y6568yhpNfvUZd6lwGYaze0SXxlmzOzAyf7u7e6OxvxzY1KMB0uTx1dbADoXr0xd/xEP7cVyJvnNnaSzYVDRzbffJ10YGS0+AN3AzBFXPVpe2dzquFdv7L95T9+aWie+9gTj5w9e+Lse2Y2O1cB3NhYO3bXof14IxBe3rvlO3Xl2bem56e1Tg6vHAbw8z/a/qe/9SUKcOj42YW7qkmKbzz7rG/F+bfekrJilQaQ8SAZxM2pugu9LE2FEtaKaiA9KrJh9tFP/jCAG7duPji7fO7NN7Y3t7a73f/lH/93+5ud1zLMzXxoay3+8pe/C+An/tZP/9L//I//zj/4hw0ykZT/zWd/ulajxx964J1bq1994WL5r8gYH4scx/91jkvHrevsx7MhAPzXf/vjxV63HuTHl4tR0vvxv/GDw4H39sWNJ3/5fVkwePFLLwFYXTd/51c+GZp89i9mNq7
xwDP/+S9+9jf+6W+GlZaczrZuXAZgyXRtgRz7W91qa37z2q3WTC1z1BtmzRAAtMVvfv7ff/ih+8jxd9b6aqN/aM7rD/XqjRuVVnhi6RAAMB5IX1t35db2Y489YHh2a307J6N3dmbb4crSCQCtWiuga9c3tlaW5ZmjKyvtxkUluFAnl1tS+pfXOwB8DzfWtpNs+CF19oknHvuD3//aE6dObOWu3ZSm3wHw3HPn06u7y0szPvfevLYXSi9O0o1eMnDm7vaRSi0AcGtneHg2evz+xWsb/TevXunlO9tb/ZXp9mPveXh3tLFimwB2+oOTR468uXUxgztz98rNazd397tvXUmW5usFuV3nALRn5yBVaslmlGcjWxH3PrxiSUspi6wAUK+HjMOR8yRjzpESsFCMBCPJUfo+eZ6MpFCcV/wwEHxUFJWaLwQCkBBM1CsAKskwtdDkjdK0NtVevbl5aK65NDfVL7JkmA0zB+DtK+H7Hj7FncgtVyr0sbMx7FeknJ6ubG4MARw9fPLCzdGFqzf/sx95YuPqO3GWjgrzntNz6bBXrQbSUwDI5EZjfbN/bbX78JkFsiZJ82oUha3G5avrNV8AsEkhGHdSppl78eLO+x6+u8L7yqf+bo9sklMMIKi2tnb3A8WrUb2ztxGGzJfq/IWLV65u/kJ0AgCPB4HTejQMZJDldq/bP37vypU3rueaHf3QIvX2AIgK2PLRJ35s/sqt7d7IaYbG9Myou1ur1Gx3NOjFAGbqvZ956v5/9Ot/9i//9et/+7/6GxGnf/21515+a23zxgYjvveJKoCnHj5Vqe3luZ6aObJ/6dJSu/LoXfPW5U888XClcRTAr/36vyCPKj77zhuvffDHfnS6zYVUN7Z3hcPhleb1S2sALr3Y7fbsN/NcSG/91ubNW5uP/1C9MnPsxsXtb5+/3JpSABRAgodhEChZr0ehpzJHyqsvLcyZwsowAvDg/Uvt9vDMca0Hu0c7g3azFlQrBswTogRQcks2z71qtTMqQi7q1WAtb3iB77ggsLhIADhu+6OB8BhSWehiuq76qe530+lmSzHK0gwA96eIkKa5sR6XBDjnHJwddxiZLLn5sGN7g2FfC0NybkHUZ62TljmpBAAq7AGOZgmCMUbuYJtwgGneub84WHcnR0yOKc87/sKRc3COyInbhjBEDL2BRpV5yivixKtWCQzCL0ezheFCgEvShXa5lL4QDMTgwGBL8qm1mgufGcNIlKoDOME4YAWDKRd9JxhZCMvi3b7xPf/IIlcWrCy2EcawphhfOL+t1GZMghifwJEOcrymEh+b+ZR1SHKTPdFBg5w7bs5YR8HHuwUSDHxcE4ZlZMEkgyUYIgvmMH6lcCXYyzkiJhlnlWzYW0v6OYBG6PuM+/WqTQtI5sCkYXC5Eix11lMSQG6cMdTpxpXA2+mnVqNwIstMI/TjnvHLIuJwr8LtwlQl7e35tajqi/7IkXXOFlIIMWGxgoE5diDfYFww6TTZkvAZ+k6qaJANuCPteL3icyUNTAGTMIeIAwiIMfCYKO2k6/vZSxd2OT/PBDiRZDwMSp8lk1s31WzujhzLSMU9ZpAZ0831fLPKw3kAXp4WReKJvnWOOc2YFlJqbXxGZfMZIrJGW22UZLAGzkopAp8TDMHNNGsAdEHaIrHu1cs7cs3eWF1XSjHBNAiCl6JawVCr1/v9TWcw027EaepXGGdQnu+yXHo+ACYUUDCpyFonwABnNLcj4XnlGh2L4OaVLLuW3n3i+OWbN+9Zvju67yzj9g/+t9+6/NLa9nYK4L//Rz8TheHC/Cl3YeOVl2788JP30KFl1+uEC6zT7y1PLwE40Zzaff266VF1cbZZCdON7TqFT33s8OFgtXjjP7JaDcDSdNw/9xfHKnOts4usQDLdnp055kZ9vbGdJB6AP/3z1z/2A2fSdMubaTz0oQdGO6PaKM4AHvqiMn/6TAggGwntm1gPPvLZj7z38W0ed6+89kpV1M95/D1z8zYZAjAB96d9083PP3erZujIVH3r6tr6KAlr6q6V6offdy+Ar37zzbV3bqlmunxkZSMePfDwe37nj18/EaW/8D/+SOBxANkzz33qMx+Kk/av/epvP/LjJ4WXXvrLSyMulivizPF2XrpUMXn+4nqizbGFu/c65zSwvLgY8MxY1U92h6kDUFjj+JhokBVmmBWCiXLOU9IvqeVC5Y2p6a3drTTXUaDCINzvxWaUDeLMWAIQhIGAStOccemFkRAyGSYMpIQgsDAMAaR5urU5klL3dnrB8fmcUPFVq+5GaZ7ldm+QAtiL+3GSa2v7mW5EtWrk1avTM1Ottf2NG1ud43NtAFGgQt81/Rox7knhSaZ1HmfpdnfIuQOw0IyEDK2LPaUER+SJ3GAQ5w6OcQgBAGmmmzW7tjuYqkZXtnuDOJuvhe1GtD8s9odJO1IAWrWqEC7TJpC8ZJSAEQfnjDiftH0cd5Vi4wWCMPb3/D6xNiMwTmAA50Ru7AowgSydc9ZaY6w2BqCiKKRS1jrr7LhOZc0gHjEpPE/CGUtUFIaTtIAtilEcA/CrYWryROfawAtU4aywJD0vLTJb5iEORZ4qJtJcJ3Gqi7QShoVz2hjle8L3AOR5HlWrWmvHeW5MliZe1K5WapJju7PrSwYgqtaajTYxyvLc2CIk64gzxwjMER97+BBzgCPHWdmv3xZ54ciN6zIAZ9yNbxlZa1yRp2kaeN5+r7ff6YziFO/Gu/FuvBt/3UKiRMhK9AsHiORkT/tX0bXbSOUd+QDd/mVJsx/rkjEZ72CUsgxEbGKyUm6pxyef/AQg54jITeDFO+FETBptjAc/wES/56DbhIzvAVVpjL2OeZcT4G4s8hJccsaIiDPOhQQgFXdkiCjVljNm4CwRLx1lxgyMyU1gEyySvu8dT65/TNaYWM9M7vYBADzBEscHgzgciDgrAUpBnJXuNOXyVXINx9nJWA2HAzrIZM3mnDNWFvksADgmOBeCGDEa1xZv/xnLqxEcnFlGjoHk2BKPUNrJsLHJj6dEXjBttMeVkMI5zVA23ZogVgyutItjEIILwa2zRORLxcVYpZ5rXRCcQ5LnFd/3pFJSaGeFGG8/AGhjC20Ka1nJA7XOWGuMHaXaWpQNOpVQR8LoyEx7EOvRqIBDXpgji9HxpdrpE9MArNevzFVtOK0ac9HyiUFOyw9OGzF0IF9nUTIC0N0bgnk7I712bvXUR5aDhnfk8OzV51efe/3Kj//cExdeeRnA5vnBXScOzx2d/9rnvv7xTz31gc88Ccjp4ycEj77zrRfz518EcN8jH8hN0UuKarN1eOWePx8+/fFH794b2u8+//YP//x7W/MtAC7Nk8wLAl734mDKjYrhsVPHr7x2/X0fvefm8AqAvu0dPjoVF/rk4blrPXZj8zpl1HEZk5EqijwGgEKnvqcykwkSHqfcgRiTsJYFnXQoK00AYbT/D/7e3x3k9Dd//JMF8U63WzZC2NgafPz99z6/ch3Al774DYqCthCO0cxs5aMf+sDXvvrFb33taZ0OJnl++a86rhYQOUvOt7
bZkp/6gaemm20AGO3G+d5HfujJ8y9+pzVb4eHpf/Vr/+aHfu5TRx+qTQ1rz45eBzDa2l598drhw8tVma3v5F985oUPvf9xr1mByfpbXcMZgPMXbjKGxUMLU81wLzG+r/qJBpjJae7IMoAi6ye95Mrq3rVbmxoIfC+oRVylPvfjQfzqa+cBtJocUSQU87Q8d+6KrKgg8v1aXXfim2v9ueVdAKNBvN2J773rSLvR9Lxoqz+I/LA1N7e73bn3wXvf2RgB2O8OH773aHtK1GXzen89L/J6vbXbMb1evzXlA3j226/eszK/OdDzM60PP7ly6fqtN965ZJQUUl28dTVNMgAVpQJZu+v0g+0X1155++rMQuXnP/sTaX84PVV95+Kbw2EGIHXO58pnamd108/M+k5POfhMbG0Ou93hF7/wRQC9TmpSQ4JxwSsMzboHbrjkcKJ8mhSxkhBWGgwzIiNkIJnvrC9L/hnqnmRCSCC3Lop8W+jC0OxUXSfJO7f2Th5TAKrhVAV2qzsaxYmqBGv7nfc/dtoVeac/OHxo+cKLFwE0qnJte7vVaGx28vtOHk/jwOjhiSPLu7ub5SzYS9O0MK+fu7Wy3Njf3+70s/tPLW1t7aSj/gJvdrcGAJSvej2jCyTa7A1HHidH2OwMVloRMVeMSxZWSM6U8j25l1gKrCdcrp2QUkh/GBsAQSsQVS9PByqo+koyFQyN8kUQNae9+gwAJVKTZc4XxBVTbjB0ranaje1k8fSJ2pHT/VsZAIUkSUYIopryZKYL2VCqGvmCBbVqfUTkAej3OpTyj3zsiX/z2//+Tz73dePSS2sdInZ81ss4/cF/fA7AV776yl1HZw4fm1/rr17b2J5ZqL69vmPjYvTOy6mxAJaOtK7f2KswuX715r/6P/956/DSjXNrXsi4hafEVA0Aciv8qlpEza/WTDao8LmHz568MXBHTy6ePfOhWroJwK9GnTiTRFONSrcfz842M2Nv9fvFKJlrtdb7ANBJM/I8OGrU2klnCLCwEm3s9lu1WpwSgF6cDTv9lbujUZY6xiuRJGelYLkxvrUWDIDwAuIjCL8wItbUnqpwackNnTVEqqTUWWsLo5M81076rOzvT46InGP8QFbMYG2gM5tm3ICKnHPJSEkpPCkBmKzgJW+QceMYcyZQCmPiyffb5pXoHAEgd7Dc3TlflQv7OOAcOceFLEE3LsX+5v5gUDQqAfOripwxufI92KzcHuRZ4XlK+r5zWnHujLHGCSEY42TN+F3ZAsYwLcalM+dIODBAWMG5zS0A4sxouL7eeOs61aNDc9PGWjAOxhmXYKUVOHEuAEbIgXHmTWTpwD6bJo7ZVDIcOcAnhcwJaZQxRwftX8oNhby95Rn/fDwgZ9IRI3aAXbJSHAmAyHLGeFk3BWMeSHFW9VtegFtbAEycF/2sHRrugSwZo4WSOsmbzchQvr06AhAXbDBMpRBCmM293Bq6v9kYZSaSCnC1yANQDNPrq9sSguDmhUmyYjDIGGNGa86VLH0dmCjLmw5gnHlCxWkBBxIsYCVjsciNDSu1dG/gSVaAFc7ZUZ7lJuCCcwGgsNYxZhmTEpwxzkvPCMpHORFRrAFYQiiw3e96nGty5AoHqgj+J8+8Mt8OCzUDoKFku16ZjVwUqTiNJWXKkSckWcvJAWDSc8YVaa7CUvfP4STnIhTpKNHNdgWAr6Iw8EcmF0ouzS+tr215oVRhbX93Rxeul6QA2q3AkRvFObMIZJBzY51mtgBX3KtavQ8AuREg7jMqrPDA4ZBntii45ye9EQDVmn77rWub72xVpmoj79CLm+JP/sX/8YFHT/V7Ji1suX+tthrK9K5f2Ln4yktP3r3QOnNsmMQNhumqX2tNB1kB4Mc/8dDG9k0eU5Fr024b4olJfupvPb6xs3lhY/94rVX+uYNIpL296YXF3U48K4mzyqtr+7W84ZwP4BvfeOuRHzzdbEazJ+dMMQxHo43LO42V5d7eXnrl5VY4BeB3vnDuMz/5gcLeaj5were78fRXX//IPWe29tk3v/ri7h7/+KeeAFAZbCWLhwa752dOnQh3dvdFbfqxMzNk/UAxK1xBAITmb7y0Pv9A/RCPrlxZe/TRD7///U29c04G9avPPgdA3+oPe9dfP/+2NGa47WpsLjpUXRK0IvaCpRldZACCZuvo4cXXXr6Wm9GRQ01udOh5vpLGqO3OlgIBMM45Yg4kOPelBNOxNsR4FFYKEs4JAEvL88ai0KbiqyzX1VojqvE86UlwqTiAwpI1WmsteTyyvBJVPY+7QpckAOUxABbUnJ71Pa3jZDM2R44emqrWsqRzzPdTDQ4DIMviTn8YKNWIooCzlNkwqm4PR8Ns1KyqdrUKQFtygBE+tPGVrzXzBDs0zZyzu/0RAOOYL1QBFgTeaJTmhV2cbTNtBnle9XgpPx/lhVSyF+eOQTJGoMyazc5wcy+OAjZV9QFYoB/nSnKYMalbcAjOSmRyku2Y2w33icAwyYzojpxkTDkZZ3xli96SXF2+kmAdGescgYiEELZkMjByzpWGYzKsGOsKY8lZRkYJnhvDuGESoyTp9voA5qKWttbAcsGd1YxMOiqqzWalWs/jBIBg3DrnyEklKtVqMrTGEhFT0pPKK7QGEAYVzoU1zq9FU/V6f3fLC8LFublOb18qCSpF39Y664BePPI8Qc5Z5zh3RAzMlXunkgojpWCcg2DJwRA4V3I81fODXpO8HKAYjkbgyLIiy/J07BX+brwb78a78dcpZLnrJJroYdhtRPKvZgATBPD/PthtIIxPOoQcGFzfZsy5icBqvBUekygJZoJ9sEnnxzF/kd3uGTI+zwTKvKOv5B044zjGUuTJd7fRTNxBtyy1yrdxVeOsNQVxOYY7udRkiJxzcCAhhBTcjFWrDIB2ZsK1KLXa37OO3tmB/g4B2eR+sNs51jiNAqMx05FKH+1SozARjnPGiI/JpQQYIktEYtIZit0mZ5boLnNEYiLtHvP8AT4G9hwD+JgbOb6TY6hRurJDVbk/EJwJMM5lNfTTLM+LAkC1ElVBcZI556QQBLJkC2tBKIEPwTkR4MAEE5wzzkxhHTnFBRizxgHIjXDGSY6KFzhHTLCs0JwzLspc0gHQBkmeD5LMEDE40oV15IhxRvOtar0SAliZn25ONZbnF9duXr2507OO9gb60UPB0iKfOX0YQFwZJF5oKnNTrdOpmao0PGt382LVq/dZ325duAGA5yJltSSXe+s7K2tdn09Hi42HH1+8dfObjbmpZmABVGtY0t3VV3ZCw6bb7UD6wpCam7UuWFmaV1EKQGEgVOXY0fsEeGWFTp+cbx1pvPjNtwSjq29cOjH/JIBbN/eOLYu5U1O9bM8lqE0tZDvu3Guvnz3cPTw9A+CVeHV5uj1bb7/21qsvv7Z97yOPIJC+RWLynHPuCIBXrQ6Hg5l6PVLRbm+fbBYwSVwZIQjsS5//IwAry/V/+E/+yfLcnHGEwpEVjtxj9x29ce3NL2euWN8B8Ob6bsMHE9ITKuvG77zz9ne++rX7H3uPL4IJ32bSfW38f
DNG1J6dXpirDff21q68AWD3Vuexx46ZZG1lYcaPWl/97r43O/9Tn75fbqUXnz//3uPHAahDJ177ynfTUw8Fcb9Vq7x16eZT7/2gHXaqx096t9bTJAfgpGs3WuvruzsdVRUYGuZLqa0DYwkpABpemvafefXSILYrLc+GorMzWJxb0JyJLO2NUgBGiiltdOqWTq50126u3xievOvE8uz0G923uOCtVh1Arl1a6ALFVt+IkO1v73CW7/b2Xj13MynM229fAzDXbv3QU+8f5cP9YfJnL7woKm5ze6/Ca51OMbPYBnDzjWuvXbKLU5WlpSnLir988fV2FDjwpvLE1BK58wAM0I/j1fW1wmrFMFeL8p1ra7udZ17qXby4PTU7BeDwYnt1e3OYjVIOZ9jMYuuelenFSvPN66t5nK1fvQJgK9YBp9g4w93S8anDy41RYVnBfWmsNgBsYQ1DAQJxRZwcD6QdadYtiqrHZyoKQDIyLBQiUKP+wJJ22iXDNLLOCXHu5qDqhQCueS5SIOlvDehrF260fX+1GyleJ+u6u2xtqwDASUvFNtd2ujEdqTFj1Pl3dppRaIedJM4BXNruXrw8XGqL85evRQLXN5LF2Xiu47IjkQAAIABJREFU1Xjn2o5X1/vDDIBIbJ5QlqMztLoAlxSnVjC+c2NblfwvQIUq16zQqDbq3VGhU9SWWkU/TghVpRxxAEVWLLXrcT8VMqxF1R0TDBGdPH5q6oi/kYcADvFaVOGptr7XCAQqwl9fHxgn5o8eYzlrTC8CwHDL7PWaUeuaI60qNJTb+y7XfndtwwvrJhkBuHL9+m6v2BqJ9owc9nqqah975N44zo+cnKrmHWtCAEPy97d2vvD117vJK77CYkv9p88/c2kz3SZa4gCwMBsEHBmzTGDjwkuzR45WFCmpCqfnjq7cvRICING6deNqc7ZRC6NzbyTHDketGrvSHRaBt3Z941g4AmAh1ged0Ggvml3t7HSzwfRUbX9Q5PvDY635ggyAkbO7w3TUHSilvZqXOZsYkzvXT3NZqQGIXZGSyLPUkSucM7ogcr6vABKCRZIA6LwQzjKtmbWc4ApHmhgoS1Lhy7I+lRttnBNKyMDj0G686Ns7rdo4g9XOBy97e6WOWV36J3uqhCOLgnNeavpAyHITqQAoJov195dGJ3KD/8cYL7oTVBI6N8wwAL4M9veGSWLJMcpSqx2kA0SR5X6tBkBZneeJEI4LEJEUAsSdNs46xojDAZBgZB2sYSUmyCZtmImrQFLJoCTAcT0Y9TZjZOYoWGEMYwxcgCZeqrzE2xiYm4CDAnCgscksAytXcoBPSp5jhPJ2YZgmmTubbLDG3FJGdKANd+UjozhnIDdmVjLCgTgahqAgGPM4d5asddpBcm6KZjMIPAA1TbS+nazv+xBcBAraWra3PwpDq5Sf5gZAnJOnxOxMvdPv33vUXbw1cAQpxCjVZJHlBYDcyFPHTjkhNm7dsA5SqSjy89w6azkfU/wE4465kp8PsMIZrngQhWS1kAwAGcEFa08vdpxL0kISs8YJ6UnmJYVzOQGQJCEADsucZI47Ds2dE0xCCc8YB8CTzBkruTTWgMNZLpXUNtve767v7H/n7c8BqDL4jN19z/LPffSRE0cXGDdMGDgDa8ZFWsphjdYgD2QFERgXzknGqJ9q0YsBtKqVnW7iJGqVamyIGLLcqEgyMOFQaAMgrIQUeEygGvmcK+UH2qSBYtkoC6YaZADAwQpOIJCxzIC0sbbgssIMGBcAaGR+7PH7//DG1r/83DfOfvDJhxeqG+cuf31755F7VqhS3b/aA5APsqHwXnz+5Q/cP7vY9vfXV9EXq1mhtNZIlmoDAIUXndvYe/j4kbDdpCC8tN01qZ1/YKl9aGGp3X3pC98GMHu8PXfP2XPPXxTwd0fR1CP3/8qv/O7lb7/zN//LT3VGfQAhZLu9VJvpcaagpfSrX/nLFz/q6qa775skum8ZwAfvP/P68xef+EAtj9dnj9Y+83Of/KPffO7cs9efOHPsm//7V5fTEMALb7x+/xP3p7urHhWnlpd39syX374w16o/es/K+TcvHjs0D6Ay137i3rPPrl/5/X/2tbMPLM3Ww3MXX/2xj5w0Xryztg8gieXGW1efePT4k2ceq7cOf/Nbl6vH5qoz9PaNN9oM9x0+A6DglSPHTk3VX9jZWj21uLi31RkNOtrz06JbZTKxOYC40MRZ5Clt3TDTo0w7B4B2+0NnnBMBgGq91e31PV9OTTU6vZGBarRqA5eaTAeeD8CrTMVxmueJkMwWRTzSUVQtCEwIMJDNAERSZOnIk1hohsM0a0Z+EAS9nl6cnuuNorX9HQBFXjSjqOpHYCzPXOgHI90hjUK70Sh/dXgDAOfi0FylWW8nuS40Sca1gSYKfTnfrAJYaNUy8L2Bq0WiycMkK1rVehqnJBRsnmQJAEecLOqRR8QXpxqGaHu/F/leP06b1Wq9EgMIPT/OtKe4cqZ0p5EMQjAlmBQQnAGIFLtNyThIxsZMlBKVKycxOrD8wpieTkSwk1nfOXKOGONgXChhtQUHJ+EALiUAJaTNCzgkecZJizAwZHKrfalyrdPRCEB3MKpFUTfnkniW5VaPirRIPTXXOuSmWgD2M505Cv0gCINqo1EUqTFOKcG0I+tgSmczwzk3cA3f55xbS+2ppmNic2u7TNYAFHZ0020eD6vcWj/0HBGf6MEPVOoAOYLHWennSUSMc8mZYMwYUz77JLiYdL/Kc90fjfI8749iKb0w+H9ZEN+Nd+PdeDf+fxqSwIiNxbasbDc0AcnooIkkZ6CJ/QmRc45xdru0xSab/rJBCMGVPPux+wUmVD6AE4Rg4yoZsQPTaIBTWXGEJVs2GhHgRERubHnDJiTDiSc0ytoROThy4xocDgiJYy9NPsENJw2SSXB5YCBdMjcnrRfJwXHOlK8OmjEW1hKNMwHFOePcjdXW4474kjFjzTgBmHS9HJsQlxbWADlLzJb+MxPAtmySVbY+c8CBq5qA8Msj2FiJYBmgJEfJ3ifniBg5XqKKZZnMFeOkhSQcs2QdOBdCCQFnQZN2+CVMSCBDhXEgU9r2SM4IEypLuS46Moy4gFIMgllHDGBkkyJ1jqqVCoCisERMCsG5yIqcDOXGKjHhqwDaEmPMAtIT1Wqt2T5y/dZFniXkHAnGVJkyBcbmIEOC9ePEKwQcD4JQ68yTnudJAI45be1ef1jx/fZUjXyVJFktCu89drgZhpWKB8AwPlWpFlkmhecJ3svyY3PRbEOGRxaLe+4BYHC4GU4luRvZwBOhNCPmdkBJhZgdyu6GAHDj0nB3b3cvdZ94YqVVXebtOvX4269unanPxNeGEVoAshlZO7K4FE6d3fYW7pl3W53Lb19ceWjaNubapCyzADpbb1PYFH5bCAQsbYQVj/NWRB9+5MRUAxE2AND+nq74O0Pu2ayqM1dcG3WWkrQ2yg6p0Q4A6+rNSr7aGca9bLYVLi4fid64vJv1pRIKjCkJQCCdCj2TdMOWVKbwCkOCp46CekvKsKMTAL/80z+/ud35
zovf+tVf//t//qfffvprL9Skf/rMibU3+4Klh86cArC6t9s4eTq7ej1PsyCs9O3oF/7uz7zn2Km/97/+tiQAKADnwK1xnBlIxqxPmoZ7ksdHzn70Ym8NwBNPnLhrYaa71zGuFtXr3/nyNz752Z/pd2s7V996+uvPx7kCMK/k1V7n6YtffPSh93/2Fx/4vT/4E3XoeGNpJYeIdQ6lACihRtoaJYlY1wpH6A5iIWUY+vEwBaCEl4koS/NKxI/d9wD3xPPPvLi7t1XkToa80a4AsJl1tVAP81GnRzKYPVTN8+H+UGVaN1qiXp0CkOSFMfAgXnr78hf+/KVm0z9xdLY72Lu504thZ+dnAex0O999/WpR0JWrbx2Jqmxm8fVXXz0fqeXl+UpQBXB08cRrr1089qHpPWP11vVGGGW62F0fukTPZ9wyB+DcxdWVY20rrid7PRCW5ldYPZT9RHnR8bsWPnD/AwAOHTp04cqFW5c3T60cT+KeMqlfpMbPl6bc1XY07I0ACEEUsJDY9JSMAk+ntiDtQLIS9p0FoGAlZ3FSgCQpyayLLfOU2O3nOvK48gB4rFBaR0Z193PpWLMVbuz2r2x2m/UoVPKVtQGAV19/gwkWVXxiojdMBv3i5QsbRheC0dJCO2QcwNKMFw95KIPtfPTOej8p8mKY3dzcmQqVVRUAPB85x2SlxomPiN19TBbOrXaHYU2laaEQAnBesLbdV7yWaKb5HPRQsswTdpSxwPN8CADGWsVJCDEapkw7H5wR971qUrigtlgrCEBnOBz4VVs9NuyPzl3TmQ5v3di48M51LeVnfjIAsHldJ3mlWmt1dvWNK1eHWZFri9C79Nbrl66z2eVSKhhrk8aJMIPNrTVRZ9X9UZGTbTdqKmy++OxzAH7kY0+ciXsFyz79gdNJnFWarUJNv3KhWJkP+OCF0ixoYWaG1NHtfvLChdEX/sMz3X2zsFD52OPT1UCM4gzAYKTXhzrLbW1K7MY2vXWldXR2f3MvdTISovSXb5qkl7B6Tjs2D1nBNCBz7RCS0khTGAB155nCJyXSnHkQjci53LaYS4RNXC6gAcxXmjc6u9ms5wuIPK5VvIp0gcs9rpS0AAiFrErhNRXruGyfWOS5rKHymhNSWKdLKTHzfJbZfp0cpQWxZuirvZwlyBqy6fMcgOEFp7wq2zbPyFMkFDEwZ2C1ZbzEwrgUins5eS4vYGVeqEBA2tiSr1UFgNRpkhoLbo3ljDzfG2VWyrK5x//F3nvHSpbdZ2Lf74QbK738Xr/XuWd6pidSJIfDIJJiEBWhXcnSrq014LUl2FoIlrFwgGXLAVoIxgZjDa21EGxLkFZay1RY2uIyaMkZzgxnhsPhRE7uHF6//CrfcNLPf1TV6+HCfxsQ0D9046Kq7jv31K1bdc79zheYZtBdAEsRXLBSKg4OHABL8JOJCQAOwhuXaKFBIQSwlxS0DCGUxkthpnZmSsvScaQjaxwLikWDHWkZhXriSqGTiILzgtjbSEoiqYkrCYC0LSZBK8Gj1IoJkJKsRRAR6cQUPhU1GwNAqRzFoZO2X7lWkWTslPSjwtZBJmkiUQPQWgWti9K2ojhEYlRBRQ1Xj5PETxKxHbcIBRjMk3tQQVMTGzIBYkKxDKQjDYYxNSQxfELScQzVEKSCLQBoLQPFjpUJIabYiZxtQapyruok6mBoAKSRlcTOFUr5RLHpDvzBlusfxlLSUAC4NZZra03Su5XXNDRRguB9Z6WtKbp86XBsGcA9G/OL7cbt3njkqWt4viF3BocUtBfCOsOhBDC3tPITn/0x7/mf/9HvN5XZqZHlaZZWMml2R5UMABDUZP6JVMdVWQhBSiWmqCWxJwlAkBr3Szu+3Jyfg9IciDzqynDgSCijPGbs0AlcwAQJksRCBbCaYCWTyR9J4eGFJhdC1kg4sIMuAwmJxmS6wmFg/XNv3HzhzZsbi3mjmTUbrUajeXpj7fzpDQCraTi+enJcblKiGrEe7R1mGkH4zSFqkld39wE43zjojYLkw/54Rd/KFNrz2cFgTwi2EhwKAOBaGE6jds29EIIzvbKkuJ0KqV2/OzAMQKVWtdqmb/cPRlkdz+UdoUiIqhoxpwqATsQLL7x9ZW+8urr0wpf/9a23j33mMx/rjfaf/v7rCjGrCMDrW6bVWf7wBzrfefW54pHmsfZiapL+sGjN5UvVws2LCQArirUTy5g/e/E71x/8ULq8vprV+7vXb8zPr3QFxasZgJVjZ69sqc0DPPCRB//ir566Hd169snvl5b+8t+89Ks//1kAzzSjypfZYMfstAeNM5de3frql944t7z0sUdbvf3+/u4WgI/82OmbN12S1c5oJ7PGQutn/qOf+zfP/2OD8gs/ufHcd58C8O7bvY88+tBjP/X54tZFu9Pdv3H90vf33zgYf+tPnrj/4x9K5gKArz/xxs6ZncG4jFuxGYfvXizK996Z/5lTEWPxdAvA4tlj9y0NTDXqlfrw1Ze++dRbjZcXFn/qkc1N3x0fXPVjAMSbX/vqM0VhpRdB1R/8wIYJYXNnK1OImA5GBoCRoklUMTNRnmRJwYNRUXujdKwjlSysA9jZvOGNgQ9lUZOpiYwZF2oyMc5aAILnUI2YZZQ2GnNNW4/YVc6H5ZVjddFzlQWgGykHU45w9v77hv3Da9evrc0vzrWWgwWFiDAG8MEHHtza3x8Mesut3NRUONre3NURRSSzRCZpBkBFWZ5nVdltx+3CVluj/mJ7vp0vZ9H8sC4AjOpuI1vxbr93OE41ZWnb1tpzMR73qspleQPA/ni8Pyyrsmq0c5s04XZa7XYcxwVk0pqzoglgr29v7faPrS8vNKLD7mBzZ7C+oPM4UhJJmupIAEikBoJll0UAO2udAMeRtCFIJQDUQbJH8EWSxt6j8kxMITALZe1IhADAkoQQtacQrCDFzpmqaDQzEqIaFjRJw6M44rp3uNfMcoauOUpk7LxT1AHVThGA/YPb7bmHW43lfnffeQ4hS1qps/agu8dukloWYigJOMvW1HXlms28d3A418wKb8kDQDUa1Y3CG29MuTvsJ3FsQ+gfdPO0GfxoZ3cXwPLyRpbGmzcvrywtWd+oDcVRIhRVtclU5LwFEJhJyMp4QURCeO+TKA7MjtlbDyCJNAsxLqoo0oq9ECLVaa837vd771x6975z9+Bu3a27dbf+upU64h1M+d/AkZPkEalxUu9ze/z/EE7d2e0O13D6kGcPJkTIH3COnO7Jd47G093ed7wfbH+2vcN5mEB9U7ahmPV1EiHzvk5PDxXe37vpzkwEmrgiEo4k1QjMPoTArEkFZprQBWlmwDhpd0LznMkJpm1P0quPTsQsG4hoAltOCZBHhMTJ+/i3nTpn/bxzRgngKeY7pVZO/x+dlQkmfKQGv7N53w530Ng7Lx8tUOKOpSVNpE0TxDewgmLwJIoOgqZ0Sw7eBwLHWpAQzvtJ8zaAAC3gStet+w9dWJbSvvfuW0pGJMLEQHocxolWzsJUDgGBwOwrW0WCS19PFHxKCyWEEpTEamOpSZJcWaZptNCO2dT
jsgCwczi45alb1qNhPSrN2nz8wKn2wkIc4kWnzgCI/H1Kt3OlsmAr2y3cXubGyd5w7/LBzvd3aCwBLK2u5knx/eev/bMvvvVTZWaf53/1xCuPPHDmYGvrJxt2fbEDYGtn94obFld2L167dfKtt1747quJpESsVdn2i3/xxkOfewzA6Y89WJX29af/tdD08Ol7r+xt/dDDx5tNcbPXb6YnSu4AIN3o7g3t2/H2xZfmlTt9T7L7Vu+NJ77zm997IWpHAI7NLb711o3g+NM/82Mvvn75W1/7ytg4oVS73YkTUw48AA/vg73n5Pqr79782Ic+8Ld/8W/9+n/73/nCwFWJlikpAP/ot39nY3HuD37vN459/P4PPnjix7/xwtCb7n73J3/5P82j1h//8R8AOHCid/U6G59HtHp6/Z//z793YnnhGw8/+sHHf/jpl64CsBMBvlaTb4Tw3nm7fs+ZlWb+4cc/cLD/CoDT66dOnuoMh/vezv/uH/3V65dun/zOc1/+k/9jNBosr6z8nZ//eQDVyL39zLMvv/7Ki6/8KWLZG4o//fK3r23tjq9dHRrheEKzMiJC7YIEkSQPlnEewKPKl7YHAOxcZWItm410a3dveWk+i9XYeIA0oy4NgFSpw0EtnO8X1VyeNfP04LB3+cYmucASN25uA4gimTf0wWgUvFuYy/pm/Mql2wuNvJM1ji0sbu8MANzaHr367rVPffDRqjq5P7w4ONjiWDrrr128HYwBcOEjj7979WpZ19999uIjDzz88OPrxXh/faGx2orW1lYv39gFMCzGj97zcG2dDa81O/LNGzfXWifH9XB+pTkYJUwGwAuvfc8bW9SmNkaA71lvS8FlaebyeGOh2asdgNVE39reSiO5MNdxPmwd9oUIIBgTJl9bb7xQwlk/9sGqIAUBYljZw1FFjEkmI3sfS3RC2hv6UTnsVvXeoXMmWF9uHZYLC4sAAlFA6FWl1okl9Gof1xUHvzDXylNphw5A1mi05pKLV/YbudCiWGo1e4fjG7cG2en5zlwLwLCoTmwsP/W9iytLcZ5GKqj+0NmQry21doL0cQxgIdFxHmLRjmNKo7wtSMlcBtOrjHPcL2oAg8qMKmMCBZUMRvapl69mbxnHVAcGwwQAMBwiSUvNuZpdksSFqx556MSPPv7gnzz1WlEqAI08a3cyJWC9ufC5Bzonj+1t3s6EazjyOULsAARnssbCmy9Rb7/7yP1rZItzaPT6Ax0JH8dDXwKQcZ1hOOj5hazN9fjJZ1++9/6HBTpSUGCqywGAsqf2y0rPtx99+PwTX/nO3/m5H/nkJ86yUgdbN0XSAnB649jeSP33v/3nz3737da8LCt0QpjrtMrBYFCVHDwAIUUEj+DZu1jKg14ZrOk0EwSXaBLBAyBfxxFS6TQCMWzpsrYSpVUCcDbTEkBhHDtjaqtaeq6Tk3PemkgLLTiOJhiWVQKx4kiJmllSUMQCIVKCHFtnAFRVUVe1YrBPlGBFnoB2Iw+weRK5MgJgpPLO11UdR81AbB3LSAkhRBRxlLgwsecTkYQiDko6iNr7hIRUMpY6EQZAaUZKeibWQhjnlZho0+KZA/UsvYADCOQ9y2nGiYAAmAITTS38hJJMU+MY9lN/sRAgBSsBAK4OVWmdDaPBeGEhJ4Cdp6l14wygAgGSiZiDs6wCAOGdJZjJ4qcUKjiJoCed08RsgoSXQg4K5FIBICdAUUDIYm0rV+71ZEfHgdNYBcFBSAAOxMbGkiy5qnCSpAoySQlwdT0CEBgqlkRiOnWiWZY3wMFpnUxHZD+LmxPkGBVTYHbGSApikv3gJRMZ71Npva8HlWlnCRFJihjQWgFwjAAv4ejqRb+/vXdrP1K6rdXe3qDqWwA6yxBHri+YldQtTyNKatstSYKE7g5GAHpNr+LgTFABKanLB66zmLDQWsVxmugkAdCZy//VN75R27C6uOBAEdnae6Ey770UcsY3RBzF3jvnnRAyhIlikkmw8Q6AFkRENvCo37cuACKwqF2YzF6CDwCEPEr7mch2ppNGH2brwwCREEKQgJRSsRdSMLEIUoZA4KnVRYAQHMfN4M1er7i1P46xR8C38KoGAOSRmmtGzTxZXGyfWlm8/8Txh+4/01ycfyi9/Mr33yoODgDsDoftTiQ0nT91JkmjB++70Gln/9fXnhqObCLlxPfNekeKlPZXbo27B5s64yyPS0cHg3KumbSSBQDdwr53afOBM8uaJJP0OjXFyI3yKCvj/hYA0e5EDXz6/mPPPHe5udI0blT7cd7qNLKsHBvdIgAPn954/pknzt6/uLJx7v5PfaTefs+LcM/D58p+9L//3pe+9uU3AHzs02d+6T/+WBr8C0+9prPqxL1nqrf6Nh5zo7UQRXt5BKA3Grz08rWnn3ntIx86v7Swtra+uLo8n8i5L/zkJ17dugRgPsZWlwNsq3Jf+sM//uyDD/9vf/Cfza3kxtXN1TObl64DePe7797/w4/KVsZ9IVT2pS/+1enj9/27//5nl1GcPN8+vj8EcOaNayfXSdS3VQflgdZx/Ou/+nNfffKF/n7/Rz9y9ivfehnA4c1h8sj5fJ3ueejsH//f33q7e/Mf/+qvVDvPhMGJteUFAMNq7qsvvnxhQc/fc/7Z7+2cO3vym3/56jczt7qA1Qce3DIxgP/pN/9pQ2BuVT+wfs+ppXS5pctivJiSDTR0ImnEAFLvOQgOnGplmRp5XFnjS583c50vQSgAzUZjWIxTr2TSDOOiv3vQWV7Mm9qY7iQM03gzrkxgpGnSSJNKcjE2kutqeMDeznXmAchIeDvQyo/rsRVyfqF12O8da8SWeX1pZaGOAbDxMnCiaWzLLE1D4ZRMyBsZq/Wls4oDgPduvuMKtbF8qrvfVZqK2gb2zK6wVTuNAQxEUg9vSYRASPNGFuesU1/3uv16cXGulScARqPB7mF5Ym1ZN1IpZAhsfVmZci6WZnx4qxgBeK8oW7GOia5tHR4Oxxc3D+Y766igE24rKooSwIGjPCZnfbdfN3I5l6pxbdlAKprdODHNpGw0o30LIuN8Zcz0K62U84E59AaHcdwhASWls46ESqJUyARAmsW98ZCFCkJ4Y5WPnHFlVZEoeqPRYFwDaKS62x96gEhIYtXIXFEWxbDRafTqCkCNkAhyzkqpe92e954ZUqnxuJRJNOEjS5Lz83O3bl8/2N5N0+yee85VjFG/3+q09/dHrfYCgMq4qraBfe24JRUB1rk8iiOljHWBA4AkVt57JYQPQRAxTWgnFGkdpokFIpBQsTZ1mTSz3n7PB6+jWEm53+0KcSck7W7drbt1t/66lLqDQ00hvB/IzT5yR59s6Gj6NkXlGO+TOr8vz3r25ATnpFk7NIPkZmrto5qFO0+6Mxt9JipjnjmsH2F90wYp3OH+TRs92odmfz873kyLfkfcNQUwj7iQxNOwTuLpiEjAJN0scCCeqJ9oFk1zBKHOsM+pVGqK6oWp9Ramw+rEIZMETSNspuzM90G+7xO2MWbYJAGYdWY2SvP0QZh6QYkZ/DgL8Z6d0Dt/Mvtkjk
7jUcAA3zn0TMQ+bYgnJzBML5Ap6BuCBwCP4MMkbNwHZj+9vwVhAiMSucAgwrjmNAq9QXdcjBt5UpdGzhwrvQtBcQgIljOVmGBJUiQJzErKydDrAqwjBgUfautAMpLU74/7eT8isHQApPTtPD9xYv5gd2xtOdeQrSYnOVMz82IJQJ6eMIGs9+xtbfeEeXd445Xu6xfffL13eLueQI0R21v7xZkza2c3Vk48eN+7b1w8eXrxb/3nv7B34731e1ZvPP8mABeplQ/eq2/XX//KLePFp3/i4bX1FSHYZUvnP8CHezsALjQ/Tn7xhz/7KcvkB/XxjcVma/nydv0Tj24snT0+cgmAuVMnGsItPfTQ/feS6do0FzuXzWM/+tlTx9SffPGvADx8opMvVT/x+U+98cZ7/W73+rgmhmCUrvZjG8cNAMEOW/PJP/it/+LNd3a+9eTTbribJ35QADZY9koIAKNRtVntyiy+8eb37n3g1I987MN//uyLTz/14md+/MdHZvTKd18FICC9sc57CkRl/8RCoyyH3/zqN9OmTCIFYOycFySlQGBJpKQki9AfFCTe+P4rFhLAX37tublG/NOf/8DrNwff//6VhsITTz7fq1gLvH5t89rtPwRASt+6cWgZTihiWbv6X37py20lDKC1DkwAjBNE5LzwTDIwh+Ck8t5TwMSfK5JCael86A1M5bfLchxHugrsTGCmYuwApG096JdRCMbVdlRvh30R2AWRyZBG6tbNTQBKS2Nsab1SYnml1QzZYFCszuWGnST0iz6ANJI3dncp1CRkNTSm8Atr7eBw//l7ji3NA3jitcvO2d2dgkamHA+kH1Z7Q2R21/njJ9Y+8uBDAP7ll598+/K7capNGY4fX7m5fft5aU2VwHjnAAAgAElEQVTAhbOrjaR4/fIVAEvtbKdvxgVHEVwQ1pgx+7mGSmIRa1lVFgBHihlSyOBpaIy3TokQOAwHdZ5GAAQj0bI2PjBrKQRRYASgOyjIzeyWAieKpFAyoVEVROnq4D2zYDbsrQsAvCRPYCLnnU7TvBUMiSzXNZs6CIrnAIzDwrVbh5evDr7wiQeW1xbNcH9psdPb6bYaTR3nAOaivHVm40t/9crGuRMPXTj10suv94fjpWPzvcIelOVh2QPQ0HW/6yj0r1/rRapI2QrB5BwIzNM5tSNmZqG1FgB4ebHdyeooiZSWeSTiNAEQ51Ej1nmaV7bIWrljFZKkQnzsYjQ3BwDrLSg/iLQvUHZVadPW0j2Z3bkxvj2cO7baG/UAFMMiyhKpMsHGj0dVcdiOo6wYjEdJ3ZozdjpEKGVEcHU1ZC6OL0fNNKQKitmB4pQAJHmcet8vi4N6XIz8PWcbgfcPtnrXb2ze/8GHALhqc6tnPvzoyYPdrb1+n7QYdnudVseFsD8a17UFIBJKREgUfGABTqR05TCSHVcXEblUSQCJ8AkZNuVcunSgk3YUxmWhJMWKNIdEEICyLiV8pLXURByC8+QtBedNqakJgLzRBAUj4SR78lbBimAiycE4qVIAWmulpJKAD+SCGZUQ2lclBEvPEgRAC+SxkoBgqEQ6Y8FgFsFbhPjIoN+CJHOkBRjWGgZqF6TwqRQARtZFka5q651FcJ4IEIGlFAIIUkoAUhA4KEmsRO3tdGSbjWozD0dHAsF7Yj9xLQmBrfWCQd5yEABsXQbvBfFoWC4sZHDWkVNak9ST+YafWieSAEkQEXvjwkSWy1A6AuCNJUnBWuutUEImWhKxrUAwtWq0MwAhsHc0rqCE2j0oqTBRR0sEWxaQlYgiAI6FNy6O48qoWAkChaqyUniEyb2l5pptIKmkFADN8oGIhAimUNF0HPeeBUkSgogkgoAJkAhF8BGQAlCRJHICXqsosJGmZoY3QQhR1yVQAoAvi1s39199rTEa0rDo9nl9vfPeVs+yDy4AOBZj99JOXfrxsNzaOzzzgdbaGd0GBvumV/qqYgDv3ugFrfa6w/s2VnrD+tp+f10jSymOJAlR1A5A5VyEKlCI83RUWYTSB9NoLHSHvdFoPJ13BR88IqGNK4WYEEJZKklTg7WJJ52wzpWV936idgkhsBBEREJOIgSn0X8CJBBoJvvxgSWJ6d07Yebbw0KIidsoCJOcRT9TlbKgwjpImeksEq6ySLWIiMEewIjk6LCgw6K4dhj4ake/vLp2rLPQ3ji2cnA40hOB//Z+f1w0cr2yqHZG4/WF+eDNibWl/V7hbXAgALbyw954MCgB+e7t/vnTS6Oxz6NkXI0TbffGBYCxw3tbAxfQbuq11XaciJEVzzx//b7zx+49vQoAoTw+l95q9x97fGPpxOnbO91vv/j23/6Fn724vbt95UpWWwBw43sXGk+9cCmfW3nhLy7D3XzgkdQeWxgcHt7eqf+b//HvATh3jzrYfnmpkf3SL/8NJ4cxids7haW5Osqe+4snw80dAB/9gD630Gp+6Kwvu8Juv/bMwc/97N989o33LnzgXLHXBPDSF1/i/n6htZ2b+8TDzdNLDp306195S0aZaOb9OgXw0Kn5W29fz5dWnnn66nqrY/ZH7Qvc7qiNY+sjuzuXFwDO/dgJ1GWvHjSzZdGsb+x2HwfKqq5d/e3vPvfau7cBVMqnUfrOpZtf+E9+/u8+fO43f+1fPPPGW5/8mQ+LQLXZAbDVvX3p6t5j5z4K0Xrr7Vt713rH7zv94Kc/f765l8xtvPnMmwDuP9sqiiJLs8cunJtLufRFrFRd1RbcyPMJaqyZfCQbgg4HxeGwTrLUU6idVd5oY3YOrgDIG+25+dXdG1c9d6M8lqOaoGtTZGnSyjIAh9ZmsXQcBLNxtWVmqYUoTDGSUsZCAUAI9aiIGhF5f3p19ertzb39waCqjq2vd5Jsd+8AQBQNskgfn1+P0rYLTL3t28VOEucr8ysG/trWTQDk3GLW2Bp1h73BieU2g+uqqmgYSenAALRKq8ivHMtvbu4rEfvxYBxEbYooEp1m21gPoC7D2nwrilKE0B/2SWemHjUb2Wg4pOAmK6MGxJG+vH2gpSDIxfnWrcPq1GI7k+j3q0AWwMbi0u39/n5/PJ/LjtIORCLUgdMj8sZkxJ3o3EAE8iGAuarr4EOaRABEFI9L08qzomyaCbga2HtOtCRFOpIARsOBqU2r08nj6KDaY2IPOJJbO1sOlEQpgGG/ZrG/fuLEuBgOx6O5KPGS8kYeRZEqCgCeSUrZWJi3lXHGxFKWdcVCFOOinSRCawAk1KgqiX0iJJEQOq4Ho7yZRZqsNZNJmjHhxs0by0sLzUQqrr3XQpB1znPQOvKBABRVJZWKk8SHIIVQUjkfhBAhhJnll3eeYy23d3Zt1eoNx93hKIkTIeRoPB6Wd6Ns7tbdult//eruQsrdult3627drbt1t+7W3bpbd+tu3a27dbfu1t26W3fr/6dSRxTFI9Xv+xNixBGFccZ8PFJRM44Uy7OEiyk5kN+nFMYP0CdnLuuYsiBZzOK8GWFKcGCmo+WwyZrYkaqYZ9rqmTpZCMGBA5iJecKNmORHEwRNIltmcu2ZSPn9QvGjtJvp1vOM3
TjlGE45gkBwTiDcQW/v0EAnoukZm3O6yA4CIOQsSofvvGnmKUMSd4yLpz2ZdE/cScGcKbzvfEJT9uSdtzSJpplGWk9U3DNBOIMD3fl4Zuf+ffTWH+BH0izPCBCCpk8xAocjobkPYcJIBYDgjfXe+0kDQiAEZjARTXIJpFLB+8JYD5jANzZv1tWYra2tzeMIE1cXJqWklKKqrSQoJYUSnoMEKaUqbwHkOpJCCIIA94bjVkNvLOU7+36xla0ttyxZABwoby4F18t1fH3zVrOJJBV5k/N5WFECkAKhNIId6TG5W+b6S+U779ZbJq3pgRNr33jtFoBWo7/XL1aPrX7osYdv9vcH5e7f/PlHq9jQAlRWH5R7AD74yQtKhmS+s76xfOzMxsJ5JrLFQbfZ7py8sFyONgA0s/tF6WK1oOL0cPBGJJOnX3xFCVpZ6PD+bn5qCcCVfj9aSNkd9nf3xmM7H60JEUmdf/QnPhlKD+Dm/m4rlpLikSlv37jFEBPrVs3eBdlqJAAKU1R12UL9+c8++r3nv/VP/tE/1Q2lVQiCfUDpAKAVpUNT/t7vfu1jjx8bXB8OD7cbwA2DL/75lz/w4Uc2VpcBvHF7PyGRSv1rf+8X4qp46c2Xf/qzn371yrUnnnouCQSAxuydI1I+eEFSKym0OnH2QsuNX/r205/+2MMAtm7sDxkrqyt/9tSTeyN/8uTaW5e3Op0kBIbmve0ugBGkzmIEnUbZYDSUWd4hVdW1jCMbApMA4IWnwCDBE3OEwELBIwA8ZUkLlcRJsG5CAO4eDCMEZhHFkbNmcm3mjWRjfW13Z38wGMzNNaRgCdEdDFKhP/johcX5OQDdcfnUs68uzs2tLWTWYOvm9snlxnBctXN9fXvPGAdgea5xerX98rvv9fZ20kayM7b5YJy347fefvPddxSAoihOri9s9fvrjWy+2Xj11cu90TjOw4mN5dffeStwAoADX7vVf/D+E3lCznsl5YjFPSur33z2xSj3/95nHgPQbLe+9sI73gcleXFej61pCd3Jk8VONteoJgLeqqoFicrwQbefRgKM0rJUEs7EMQMYlsZ5rYgCw3kGcxAklYwiRXLyMwYhIaQwDEWcaxWrJE9x4I0UebvT6FoLIBAJqSGo9iHVOmvE1nlSqmZjpGKKACBKbt7sCcUHVaWL8bg77BXVzqDa3u0fXOsB2N8ZtU9kDGxd3Xxy7/DwsNfI41ff2eQQskwlaQJgl30qVTPLl9v00JlTLekhAWeyrFZKT/yYTPAhBC914aKXX7u1sbrYzktSAggxeZIMQJKNpegf3NCRrN1QhMAZd0XDhhHcGEBiDLEBbBwHTUmovRdi1DOkufYuTLIh2Lu6IIrA3jujKKqMiOO2rZWQ+XzSACBM1K+DM1UQcjAeri0qUw+LsZJLqZkKrbDbHwNIIm36fjFvUia9qaTiYytp2skB2EFfoZCx/A9+8Wd/+3/9/SjN9g5HSoyEgJQ0EdXWdZkIxJK7g3FlXLsZtzLFg0EkW41MYVwCKMeVhkqlclUFD+koz6msXSSgQ3DDEkCoXaLQbseejQg2iUQUiVhDK1GPBgAiclmWaDYi1JKYwpQaqSh4gYkVo9JRq5lrzYmOvHfkoGMJXyVas60VHADJtUIt2JCvIBoy0j4EETi4wMIJnQIQQlqQdC5yRlgIX2gRDCkfSOkEQAgTQzDB3utIBm+11tYbsCK4ECb21pJDmETAEAIjMAcPJjBCoMnoP0mI9gYciMNERzAhSnJtEDSANEoEiSSSzTyGZ2ssCSU05CyOOYTZKMkQgsFcFn32aDRzeAcPAM6Fmq1OE6WFh3fW+eCISAjklPJoCCAIYU2t4JWQ45E7uN1fbCrABXZKg42YDs0iEsSCY1dTcFUgKRu5ByeKAAz3DtNMkpRByelvIzNICCFcWflMT6cLgUmSN5Mhm4OIGRRJFQQrlQHwYO+N96auC4bPImjhrHRC+LI6VMIB0Lb61p98Mzow59cbe2NFPly8OPLKr7bVcBQADLqmlSTbu/69q0Of4EOnj6t16Qc3amviaOpR+8bN/WOLuTXm2s7BqAqNRpxFqjTGWcPBCwoAauuk4pSocjbSqiSOBNSE1z2Lzw0crA1KiYmHD4OZgyDBzH6a/yBJCM9gVkwT2YkgAQjJNJHDgk1NRDRTzMwmVqwlKTV1zgEjhBCAgMBgIZg5eO8nR/ETei8zg7USUpBzrKDbzbjRyHqD/nBcAEgFV6wlhTRRkYrg/eXN227z1hMvvRUJqckBYHAiEAm88OomBKkAkjJpRIzgya+uLQKotdw1rjm3GqE33ttqL2z0B9tJ2joc3Yav+9YCWJhf/KlPPvKtFy69cWP4IZ00VzbmG43vX3nr2y9f+Qe/8TMA4rLIw1YTGCysP/vyO+urC8xVutT6oY9+9rnnrmwIBeDSpSuf+uj9b9ft8xfO/cNf/81jy52HLvycunE1LfH3f+WD1WAIoH1s2bVO7G33Lz3z3n0fuZeXom7hlRvSravbV6586r55AGPtTp1dHmxtZ7vD9ab45Oc+/vQzF//+r3xhPS7f2QEApDptbcyvRGEY25H3D8yX2eL197bQKyTZt968DuDx/+pzWECrGfnx0JTFv/PTHx2tNLbffmn5bH65t68WOgDSD91345vfts4uHV8I+6N7T7Z9MMeXWpfGwzxTn37sNIAbt8p7TyyfP9v2mzfqMm8udv7wd//F4gO/ceX221xdArB46vQPf/7j955Y/aOnn++NyuWzp37tv/67/Wvv9PvI48ONOAA4u7bcHw4uX95/6fXvnT3WbHUacZLrLNs/6JtxL400AC8jJaiy5rBX9MuqYh6URRQJ4tDt3qpGBkCzk6hIltZlSRLlbZ26ABG8UoomtypScDNPSutsNQ7FiNJmnqQcl4KQxELRCIAQTiiZUYQgneWlZnOf93xgtrXTjV7fAji1lh9bbEd6Lug8mKFWwddI5tqduc7u3k5RDABkSWxFKk3w8Nb54MKV7cPjiyHP40xFAKSK9cSxubStvF25vaIe3T4c3H/qVFNFW1UJYOzDUiPpj4dl2R9UWFg5IeSODTQqXTNVk5/6ZpwkMdWuLIxMYzXfbPRGw964PhzW7SRaW0wADCt3aas/qkwjbfZGbiy5lUpwcAFSAIAApl/76R0MKSUUpDLOlL6ehD5Fsra+srXWSVmVtq4FkXfeGBvHemL44IypqrqjIsfsA5dVJXTsSRZF6UhRUACECM7WznsfSOu4KIqJQVVRVWkUAajCaFBXiRAQ5IyVUkKIZqNxOC458NzCPAAbUI5HMnjjXHtpaTCuyLi544u5Sg77XcEWQDOPpYySJK08+7Ka05n3HkQBcM7PxHCcSOW9r60BKI6l5wlle7IPtBLOOSW52++//OYbH/vw4+1WS0mlpOqNhneTte/W3bpbfx1rFso8xfh4lvxCE+Tq/dHQU8tCTGHDIy3xDCLDrKkj2OwOSndUR4PLndDtowMcxUvf6REdHeD9zoo8TaGBIMkigEMAJiaPs/AdCCnv9H7S8gTemHVg0lWaOS0S
QfIdKHY6o5/cyIRAk2c4ECbhn3fQyBmmOcVrp8aTzFOoDiAhaAKPTrYzvTP/ICI5Oz+zM/R+z8g7nwBmwCHCHRtOMYveBohmMnLmic3kv9XG7EOgqUh9Ik+fYMvTo0tBHmCwDzzxWRIAkfDeSymnaCSz8877MBGu60jbqeWzqIwBIEhIKZ0DEbwHOLQbzZu3BkmsGFMVsGCeXB6BuLAmVZGrHRMSLX3wTBNxrowU2cDOe/Z+KY7uObW6vs7NubkoCpILAHVtOYyYelEk5htysZMxK8XsD2+Tfg2AW1sQUSu4yhTvZeFKrrwWDR+FkxvxtdvFmfVlAC++d217e2go/X+++uzJe+cfur/VXogiWdXD3ZuvvLF4cg5AujY/HJQ3n3n72e9eeuxTj8ggKtev6948NfMGfLQAAH7cyKJev5cJK+AP+tXpU8eO265W1CwGwm0B4FHhWznsld1L23I8uPHajc7c493dnTdffOX0qVUAf/HVJ6/d2HvmhXfu/dADKtHJqKqYpNYaggVvb28BqGp39vT89mA43P3OfvegW4SlVr7Ysbu9ikjoSbymjgH7O7//py+/uvrZT3+QqN+WovBhe+h+9JOf+7Pf/yIABe5b94s/9YkLi63nXnzpwtm119999/z5R174zvepqgEIyTKABFGgyWVNIbRbjYbIGwn2hgSgk+nOyvy7Nw+rih+6cI9oxvH1LTZGK22SOEQRAPIYGpclaQiQQncr04pU0HHMVPgw8y3lwCyE8CFAEAlBpoiYtY6MtQBM5RTFWsskUs4YQWFYewAUiZhwYn0RQJJGxlRxIpshzRtRqxGfWOrs7vfGRbm3vyvIAojirBiU42HRTRBJudbSK61GOzWNNFtabH3jOyUAIcOppbmFdt7Lmy+88+7SnLZGfPLxT16/dnscRgBccGwM3+TNm4dF6dpr6x/9ofOHN9/Z7fYl6cU5BnDu9FIjSVeWVmQc5c2mkvPvXLuys3fYSfQnHz49iRy5vnXpvc3bYLq9N/jIxzfqcaGELMq6i9EjZ1e+/dZtAOPCWRtK5jTRzTwyVV3ULpcxlJZRCoAFByJSAj54CAaYiYSUOmIp3eSu2wd4sAnLzVhbco47WQwokpGFeOfaJgDnWWCaVlnXtiysVDQYlRGh2+2achdA3dsqBoMojq7e2Nu7sRW1osORPbXWur3Xd0EAOLUxR43sgXMbN/cPHz15ik51Tm5sjLlupbp2IY5iAMOi20mTUEdV33Qa0HZY20rCHwxsrLVmAuDAJChw7DzbYIkoeCelFJKDr8ESgJDkjLckmllDWACGgpURxUS+sgAGvqp97aQd1HW2knTWBUGIzoKDd0HCxwDySChAkmfypANq4xm1ocP9gR+mplsC6G318pWIMUpTwXBXbo1PrHeqskBwANtRBUC2UwOfCVkcdtvrS6WQ1kMy9wpzoiwBxPCtVLjRcPX+BzuNuGLWjKo0iUIrjtMoA1CX+9P7JMeRVEXlm5mEG8PH5XCQKwUgX1hL59twXV/3pZgsv7EILlHKV3bUHQDQaRwJjiNi74NzgagqC0JIE3X99h4AW4eslcpQB1PHkWDvKLhIsBYspPS+BiDIS+mkJA5GUvC2iiKlhNdKe2OmcCQ5742AVRSqqo4bWW0sBwTHLriJjYf3IREKpCsY5sAIwpmYhBRMSQrAenbGJEoEwf2DvTQVOg7ELKRk54imuQRekA9ghhI+gAMzQmAEwTwNj6MgJAKcJmb2zAFg7zw5ryAmMDei1JlgHebm2+xrYzmJiQIhTP1oJAAmBA4+eHZCyKr01rgsTcvCTm4LCRiVg0wrncUQ0vsgmMDeOptIz84AgFCmLPf2qos3ioO+HQ3dYnc89jWT0dqzqwEEoWh+pdA6EiZOElvDCiZydX/ImgHMYVxWSRASSjJNTGUYQgQiBeHKAgAHR4KDhTc1ESklgg1CSy80RMMTAwiIfWDvjJDG2JCICMETuVE1igVncAC+8XtP7l8297bF1p4zSgYbFlKygUZ9Z6wAkLC+eWgvb5a1p5/8/IX8RIepH6/P++3tkxudpLUE4L2bvZfe3jm21nSoiwpFHQCRxfJ20VdSTWYgkZRVRYFZwAWABIIIVT1MI700P9/tH0wmLErpsSm1VmBIQcxCCnG0zCqlUJKkIBtmOYoIIBkY8GHi+SggMElCms7RprM8JUlKKYWaXJyTWVkIYfIjE5jBLASHcLSgy5NZkxKa2TN7Y0tnkUQcyQhAZeomICFrB+MZgWohrRStNHYugGMAgqgSwjAP6kJpoSwSFjRg5ZmZ37lxCKBaaa+v2XtWH7X+cOPc6liv3S7KFdto5nPFuHsw8ABW58TXn37rhbe3s0x+Jjotu/X+1YPtK7c+8pkPb+3vAagvvX321APcbN934tgzL7xy77kfylYOf/8Pv3jpxtZ6LpIAAAe39r5qR3s7+I3/87cS0A9/8nxFcSqWxocvRPHiM09cBPCp/GNqbb6xnD7/5HtbT1964AP379w4+NyPPSZPL/+Xn/ql8vJtAPXmrWx98V2kq2Hh3Ila5vKt17/36ceWxXg0OOgDsD5s7w2/9Jff+eQPn3/k8Ye4YFNf/YUvnJBpyjs7o88cBzC3oIp69Bd//K3Hf+ixxurCi5tbi92dcrTzwnNbD33qwX6RA/j+Vy5lPdor+yfOmyURyfUFHakzx9ZPLcwvri77cQ3gf3j6L3/n2td/5adPf+frF8OJx+45d6bTbvzJP/xnD51OP/aJjwK4cPbkfHXl5evXnn724gDVAqpWaZ586XvSRrd2uh/9yFkAezv95ZY6tpqvnVna3D0clOb4CpJUDyqzv1fMdRIA+dz88HB4MBz5mltZ2mjmLjhnbGCfJHI0EgC2bm5GKVY31jqdpd6o74uxUOlSZ/6wu3/YOwBAIngXEADJ3rnFRgTy4x4AHykgOAAeliKRz+ftuTbgFbwlcXJ5np0txnvtVANYaC03s6Zj4bhOtMpVy/rtRh4Xprh9sDkJw5ybWwoiSnzv+FKrrFx3XC03Gz6E/qiqzBBAI88zHd/cH3oHnS8WdeXHw6q0kUoTHVq5ApCn0a3DvtbRsLCeESm12e9CJq127ow7uboKgEjv7N5mXy0srfT6Q1db48xBf7jUTgaV464FcDA4OBhXsZb90hnrFxpaSBYcGkLSzGLqTgIpEBjWGhKSAxdlOTYGwHpjWUk16nZJKB9clsXeGee8kjCVy3MNQMcxacWCbAhKaW99vzuUMo50VlZVcB5AJAUH3xv0m3kzUbIwpajdXq+betdutQEIHSVKpzrqjcbsQyChhdBax0lsrJvcNzUajWJwGEJgz4sL8z4gjZNhMQ7S9oaDCThaVCaL09qF7mDseQzIONZSSkEieC5MBWBUlDRHviqLssyzfEHNB0BKKaSampGEYKwjrjutzmD0tnWu1Wz1i/HC/FzlXFHehSPv1t26W3/9agZHThfpaUYhnD79PuPFadgzHa1TA7hDWQTe50s4DVmZvEbvwyxxh1RIRHzUypSJiKPNFCOcwnVTjJGPdpg9FALMk9l
5mObazEDS2T+asjMnLzId4Z2YoodMs9Yc+CigG0dvHEQkiALPvDLDDHEEEIKfeELOTCnvQCrvh3IJgo+gxkn09pE/yh224vRV4AjfnD0zO6vvs9fExCdp4rM2hVCJwSABYmKAJ8SQIwPLH0Q+JxjjHWR2ihtP2ZETl5KA9+HBQGAIYAI7EgkfmAiBIZi89y5AEgK4thMWbWjnOolgHWnBSrFQpCQJJcFeT2y8HYJzBG40MykA5zgEHUVERCQmZitSkrEBBGN9okhEqlDZ/IXzIm+FqiswAqAQSKdRf45ob3W1xQH9ockHwV+5PmdjAKbcD/PHDAINL+vxTrlZXbvmb10f7jn/7pXD0gYAQ+Nq2ETxza2t+z68ePxkU+TEFFp5c1wXC8fvA+DLdOvFS2+/ePXc6rLfr7CtOCbJyeGt7uZ1GO0BHCtp50qv291bP3uyf9l968V3L9z/UGetOihE//LhxnIFoOiG9qJw491mkrSXVxrt5T/87edffPbtrWuvTr6K723ux7Ei9m98743CIQQkaVaX9W5Zx0qsn9oAcLi7b4fjP/uzr793+Z3h7ijryJ2D4dJKa4F5d1BLJQH0bRWrNOXy5Td22H5vff349d2LvX3z0uuv/+Iv/4oXAYBQ8njk97c3f+t/ef7YUut69+KoDI0nXtsc1ZykAIIPQkohpQaBOQQfGINBYVzv2uU3q9ID6I3Er/7Gf1iUxfWrf3XvQ/fkjbTdlAOHMWAsJ1EMIDijRDSuLbNjF7QP5CxpVfKE4wZMEpJC0FKGENQ0Pp5D4CwW02vGOna+qo2tBBA0CaWFAryzSSaTOAYwLitXFVGq81g4a9hLF0KqRT7XsEBNDEBJ4Sxt7/Tyhlxfay9AcYTVhg6erGQ9ST9g3xuOQObMuQu9/f5Xvv3GfY+eunHznc1hP48lAFtz8P7C8dXhfve5F74jm+kX/saPnGyFV194ZVjVK/kxACvznaub26UtbZAJNbrDXhjy6XOdC8fzY+tZS0UAROgcn6tuy+6p1U6wPlbK2BA857Ei8IR3F2slCJFUUirnGYIc4XBo0oStBwAlhPcIAkRSkCCS7B08RAC7MAn5qY1VhMzEvW6RS1kHnyjRHZvKHcCL3c0+AN3S1rskkTpStq6rqmq3Mh/gQkiaLSVHAHGPG4YAACAASURBVExRM8TZ46sfeuiU2T+I5/LRsDizsfrMi29trLYA3P/A+UGR394fPPTg6Q998Pw3nv3uCdQHh/smpYW5vNc/BPD/svdeP5ql95nY83vTSV+sXNXVcTpMHs5wSI5IkZRIShRF7lLcFaDFam1gr7xY2DDgi/ViLxY2DDgAhtMCXvvCXkmWVlgliqtIkSJFDtNwIif0TOfu6u6KX335pDf64lR1j+T1HyBgfheVvpPrhN953ifEKkp5KFxNzsDX2tbGWwgo4YlscyEIRtZ5rbUNjHMyxiAO3hsgxOrodhmC50wtxDxobRwTUgUWOCENvi0FAKytmUBIRAuVimTlNBciZ1ZRHAJ5IwBw62AbyMZ7OCm5ZXIwzH/y3kBE1a1bBwD+gr/3S7/6qJATbVwkhRK63cq6cVxXlba24bx78saVxsSvvnm5v7AQrHHO6rxsJSorcgDa1saj7cq1xD71+JmX3rglJYwLnDDaGzcef3EkR95FgnPwWRVkgBQUbEUwWaQiqQD85Or01//dd/7RVz5y8ZEYlAsZz+fziPMYZLRvfL4WlxMuxt5bKTkjoRg3oBAM56wZwspioRicrq02KubOeWcNvPPOeY8osgCsqX3Q1oELzoUgZhh389wIlTjPJzMLgAnfzyKyfjaYlqHob64IxpzxUdpySoIqAJKLg1FhPadWl9pEUbsmMAqA8YoA5FUV2bKqLaNy98b1c2dXOHPMcaaEqwwpCUC2GBw5T9ZBcRv8Q5kFec+DBUDecHjnjCA4o73zzHlX1t4YxlUwAUA5G03mNq/9eKazmINLgg02eJgm/ySEQAHw3ls3qnh/ocsEnw5LIXLvg2tcVl3wuppPprHPVJZwwQGypirmtWOhFUsAoba2tFLKQrvSei4T6SnyxAQ4yLMYwNzFGe+HeNmKgiUJpMskVYXtJEmWEID6cORdFLx11hFn1JT3xCiJEm81AKtrxkNwNlinlKDApRI8WCLmyIQwB+B8wYhUBIF9OCZEjFDXs7Eklqn25T96CcDWj3Z6mbIymU+rytluKgfadJUoJJXaA9gaFrd2c8VpabmVXVoT8dgU5bA47J5gB1sHh7MIwOZy6+W37yESmeL3DkoSgpHyAbUJXAnXBPjYoKLYI8znUyEb/2tPQUuRWB8eRNl470JwgCAQ4/xYNwNBBEAITiwoyawJRCwcY4gBzocjcQ+Bg+BDIAT/QPdCsN7DHUEJ3nsf0PSkjciFwYOIMe5Z8EdcWcYZ8z7UVidKJElmbFnZiksRpSmAMNYAqTQjLyodbG2E84qhso6x2AQHwDobscgzTzziFNVpMN6xQMTQFfL1m2MAb904+PqPrmf0HRtcO4tU0jJmfnFt4Vd+5tx+VXoIAJwpEYmf+8hmK2H7B4NUUD5xX/7sxWlUbmQbAG51OpWssiwKw8lzz65urDN/ubh3sLe+emILB6PRDMAPf/z2M5c2bt/Y25pVT6v46RPrg3tbh3cPTr/wvE+jC9eGAIYHRRy1O6cXP/vCuTd+/N74CnpLiVft7W+9vrnQ00WD4VJe+xMbp4aFH04Hq939a2/e+Ms/fumpiycvPX4GQC/l1eHOY2d6m6uqe6J1dfu+yOvJuzdOLKetjE1rB6D7yIWod/ajT621Fs72VvunhnfY9qvxoFZm6MaD1doD+Ppvfe+Tz26sLqXs1uDw+j3i2B8Oa1tHUrQWl/7y1e8B+NCjK1Esemsri3H0g/uDe2/f+NUvferlbv7mm7e/++0XAUSd7ptv57ev3Fo+uzF876ZPww9feYvztKpsV8nFTgrg85957oc/fvvUWu+jv/gP7rz18pVXXz84GHcWkkTIiNNglAMYVo5ZX9Y2UUx7x4BTi92DybwyfiGN+u0FAJUxg8EeOT0d3tNVVeRGZV7LSCmVV3MA5AMBUZRK4aGCUj644uSikpwb45qRJ5moaa4JTpFjrh7n07WVTrebTMb5zsGooySAXiueFoVScZyo2bRQXMRttbO/Nc1ra8OptVUA7baaTWfjfKKcmpdWMTY3rtsW7Dg6al7mFHVFmmj4ncO9967eORwePn5602q6m4+LqgCgmC8McZKGWK6rKzev6dr0+704krXwTex7K/JJJLyPGFFA6LSjurQxE6VxHnw1SwGYqji5lHGiViwSyawNk5luJdL6hhgNRoGHQOSdD84H74MOts7rWVnnRR2lCYCytlVtpZQBPE1VsNo5S5wLzkDemAqArnWqIl1WgjNjQ39hYXA4MaaSIlJx4moNoK5Kxm1dF3EUcan8fE6Mpa1MpmnjExpckEpkSWtwcGis44EC6RnmXEqvdX1ESORcirTd0oUJ3mvrpEz2d/dihLosonYGoKyKhW7HBJrOZ8aY5cVlIDhrGZ
XKVMzqXaFKPe08+ce+573wNAun5mfWMwGs0ndQpCRVXPpz7W1iA1PpICiG1bcCmkzlhndRnaJGJiIZw0RU0RgLNFr+itbW3WbbN/47BXDkRFNClSp01EMEQAF65UoIlQN2qCRF2Kaq4K12MDgG7ZrJnHDzVEx4VMCqiSqJCqdmxKvF/5mq72yKcVhk4tHPX0+65w6b7KqAfxIB7Eg/hrE5YNlaUdDNZUlAyT6LJu6rqtDA5291Q9gE9+6Olez7W+2Yxr3qftta1bR8u3X3qlBDtLdX7++TBgrdvGsA33PDxPwLCVqS5A3G3vc6WvIkGymwWxAUwT5be+9OXJ5MhaE6zN6o1Rmnw4RmKSFHxJZSIhUSLDMRA4aQBgjcsgmmU36vXWR67voNLO24aSmTcVgDCdhTt3ivNhbWj8jbfNwY39+bIO0UeJKeX1pQoxGSZOmk5XwdLJAlH5h1DsxTBFBRBCspYn08ni7o1vfeu5X/v1H9RappgGVV9FlbOGl0YRH4K1FveY1Zy8/D+cZVbFvXT8Onsq4HgZ3l1z/sBx45y4WK7wKc1SKflTSipdee19v/TYHgQCJs2GB7Iqhe3wmgy/dXqC3T07dYSMc9LqMwAYNdK0TZ/69LMAnvoHPxVHQqmka3XxxT+dLOopEEJyBetKPbpu2qPZLKNYeuraVvXF3U/O/yXHy4XVvVqVIx/fFAAQICYdGXVORmJz2dfABdvnemkpRYK6Xmn7fb+cHc0m58f8j3/55wBsrFvfLv/i+29f2z1Y1j5pwc55iIBEO6/MDpkiOS6AVH1fiOqk6emkJ9B7Fy908gU93VN0dee7xdG9hz4xp8AKre10QzOY/VdVfsQYqLDSwRogQ4ZpYzSQIowG1c7dRQR6A7dYtCm2SA5AUZRNOnVtx51bTy5NV+Xfx+9FA1ES1RAjAOPIx9CGECGlKY1NhliEVFmlwxFJ9b23KIchmNULJrAKqRBAhmOMbAyBssEicsV4linLJfvoTJ1wTwqiGx1ZyjDLU9zHOSesVEHzndX3ab7ubnQSrNAONdeVLAP43uF3qsVPXcp7DvvD4v1btrvG7K1y/7ffsx4+eZc1BY8tmFf/p13/O0a1Cafevs+Bjh8S77uo/mG98ZRcxvH3eOUCDQBkLJgoGUCUWJlVWUAGwRoJIav4J7hq92Axnx6OBqN6zjH42ntXVH3nkgGARdCUxBhjYMCqqsawNUWS6L3v3GPyZZwao6piQRoTiQAwogw11hCBiYIxMYmIqLGdoBUQNaoExyaPwmPpRkCzqEU+MiuIiYnJQMkappSSarTsIAEAsylKZy0XxtY8aBtvjThEoxKSApAIbqNIlMTKYAXBEJGoisTOIikRw6hqjG1+3qZEgFjHpXNVvwLQNK0PM02agpAlkdVYOJU5IQCsiRVeYoIxqApma0Q0ajb5UWgKITYNCkul5Z6jEKSpfeF42C8BRJdiFAKJChEZgVGERBGmaWM+W9k36mEgBZmi4GGPt9eGZ9erM2vFM5ceciYAMCzJp35hnBwGr5Vh5dFrtxZX7x46YwBUpINeOejbyQLOcGEg5OGYS7OYxawBUg7Xr71799r+UVI/dAXaUMGSBEFoEfPOPNZHvb4Oq3Jta00XR4PtS/MChelDpCgG/coCqNtBUY0DbmlCbGNR9Yxxdd0aJltaALt7y4O0zWbEtmHEyEbUAJqMtCl2rrhFacpkjZRgQVkO19WLJQ4pxeA7+WuQQAPwx//66t/+t34Ca+s0Ppqll9fMoCptzI/GVhe1ntnsb43X6nbpnN1c2wgxXj6zuTVeA9D6VBi3PhjNF1NYRuF8SoNhf93F2cHdN946BPD29ebM+fWnnjjbq4ZXLN86Wr51bTdRKzCioRoYADEpETlm3/U3VSgxiyZiWxoLoCqMia1q8r6ZamRiVSVi7WRVj3U3aPX6nmdkntfqurZIBVgNNUkrBjMvF4k5tBBXGQDRh2zybjhLv7IkGfRKZ0aGTXbWtgYhRPQNSHyU0ppRVVhX1ine2F8AmNUSQiqKorCM5M+v9YcVTxbJiNqKbx5NAMToh5bnTV0YUkmuV5SViKTSsavKpRcAoW2t67eRsltcpxq7GvXZiykxwMY5W5S9obI1NOr3z17YTFGavX0AbeuPhH0iIk5M//yP/qKypqDCI7UaC1MBmEU6c+7SzmTiY/QxRQGY2TgLY6mYL+YADu7cXWi9K/jAI5sf//BHq8H6q9fejr3Nb3z5T3d23n30ws8DuPTMU8Wd65NDf/tu/V/+2hf+8fknHv3QU5///Bvf++L3/+XnvgDg4QvjWLr+wFSh9/0XXyyGoyaGNSjGg4P9o3a2BMC+tqSSmgtnh4l6dxd3yDmJqZZ0azK/eGYAYOys1+gn6ekL67f3J7OWhku+vntztlyeGfalaQHsNPPKutY0IKMxLZftO1ffiUELa+LSAygvnNu9dW39zOLG7uzierF76F958XszTQ+dGX7w/OWlLAC8cXTnS89/d7NnfvUzH7u5e1S59PSVwe39+cceOffpH7my9HsADo/u9Hl/0B+q91e2euQM6qPprGVrW6X5bAagame2bs4OiqNgpjL6yb/59M6N71naBKUXb1wHMF3GsQ29Hl5/89pHHhlc3B6+8e5OCWlmTbgelgsAsFXv9s13a9ifefbJf/XNdmf/QB6m6we3j2Z7JWPcYwA3r9/AvKk36mhkbTQalO7G3am0dVWVo0200xrAwxeucH9wtPfypG431mi9HPbW7OF0miSGEAAQSjvsqUKTVSu+aQyUqMemCNhf1ksAzXIZVOPdO8HXZeFcNV4u2yCUVAybfn8IYGtzfLRzdTqdf+ypy1VBbNPQmulBk0z/kU/9aO/hxwGYqnBF8UFqd19/xX/r+83OQS28qNWxZtHSuplpbOI8Ce81jSe1EntBjSEbPb/19g6A29cP7xwFMq7s9ZZ1GzVQ9CF6QfJIXgwAZyyApKKSOuZG3ttpArq0fdI0b+Y0tQrtD/oqiZUI9tREzhAIcoo4r9JIZeUMuYIGV1I/3evjxX33lDp5YK10Sf7vxgPI8UE8iAfx/7Ow/V5Vla5ezBb1snRORbIylCtMiqGQAODd2zeqyi7q+aX1tcnukZ9Mh2trZy6ftbN2cTg/s7kJ4MLDF5HiYtlcu3ZXoJ1Yj3bYgHSMrtP+nAQ6logjBmW1oKQJxrSK737/xUFVRtMT6wxXLmkMLQBWtbCGKCAsY8sMRyTEztqQgiMDIAkKNqJCxGysKNeND7Gh4Hu2PFw2APZu72y8/dbwyWlv/dzuW2/uXL9563A2rX3TakRK2bFBTbY/TmkF3t2HGp2aUVY76hWRSqQoSgC1X7KxEv1Xvv71plk2SQ8W9XAwjOCQJG9sJCZmY5zJefbjzfnpv0/v00/4UPeAhKderXw3urm3w/zQuZADQKKs3JkXuicYZtZYZwJl/mXnR6LK9gRm0RXAvAKwOvgq82KOb4gScYZvTk/JxxP08SJbj+dxkELq1NbtP/sffh1A0Z89/mMfPHP5R1/55tffePNqUDHGRQkOXBW
uSQnA0Wy+XC6VrQDHcqSn7UTkJJNIx78YAJ9C3LrFxAo4giSQNUhrRTEq8dSjVwAM2Uzqua023rh56/BwaRxNJjOVNNoYDlzhBn0ATahV6JGzW8tvvxNaPVQVIyAWkChl9E66lmPqru2Hwmer1jx5eWpRc3w/70HH7vli1yFXvLpujXQPHpZBDj39xVXvOS0oed9hjbUiQkQheABVWSZfq/rzZ9Y2hoPk0+bFi2XhXr/6zs5URBnAvPHO2dVJdbXU0xNcEscruVNwvIq1SkyZ+Fw4I0RJ1TkCGzbCOTuQoESyujvHPbVLH6xulCG1BACOmIkCAE359zUpEWcsMlOHjwcPSDNFT1dYmhybb6zkHfPtOmmb+wC1U7fvfduaVsqVxwNkdQHH3Xg1gt6z677n7Ypa2t27Y6hT7+0Y7/3uCWB/6gLuifcDzDuIXyQ/29mYle+JZvsRw4xV1qlTej0+4yk0/DSkSHIfk/veZ0enlnjfXcgWTqusSSd2uOoFbJRI2UAlKROB2AGSUjQky2YJICQ/HBcq0noZVZGrgYRWJSZfc/LwAcDY9qZK6mwmkrPCEBOTagoSLVsc/zxC9nYXFQCGKKTIK9AfjLKwICSQaZWMKZ11xrUpBAkADNgaS6wEZTrZ2IgoQbN7TDe2qWtZ0QyPGkriTGamgzVw0v+dvTeL1SzLzoS+tdbeZ/inO8WYGZkZWWNmjZ66bbdt3Damu0FtCRoJqZkkHnjhBYkXELzAI2BBC6nhrQVCGFqCRtjddhtjl2W7XFV2uSqrKl1ZVTlGxngj4k7/dM7Zw1o8nPP/996oshvMoLYU+yHiv/f+Z59z9rj2t761vpSTc7IzLrX2jsgsN13njQB4ZnEpSI6kpglKYGZIRs7aU91hYIYYac4BZESsCeyka8MqNZPJDMBoVJycLLLlrDJMjc06StQLzIAAEYhQTJa1l7NiEdkqmW6y/QKmzOKclJWrhepSiFAVDkDlfRuimdVFJeIWT05XbSIuAO66tSIBiEmJUTI5n28cTK9O652R3L5eX98vbuxU9w4XAFJM07LwxAwj0qu7xVffevLF9749X4S+m6Jhd2dcVMVHd68RIzlaKrIlhr0wHe+MxgCA+vr18RvvvJmDkXazUsaFjUgd0YRGB7MSwLtvvf94wpN3y87JYrFG+ThpPD0Ni2Wz7vI69+Ib8uG9s9KUnBmwjo0ruWBWsuQYwD/83S//3te+zqxMylBVUx08HyRepBeQdd6xK4Qdg9l5d/fuyf6I2bjJsYoJgFDOxDt7sze//e6/8+/93Y997Nb9D+6tj07/xFFZlL0O9el8GVK+/YJO6/D45Mx5u3VFny6/9Pajk3uPjgAsu5gUPgtIfVm0sQlJk1U2/sh0fPuFYgHgY39p/+GdOw8ePTqL+ODOk+vXbn7qk9fvPnnj0fGpmYI7AFmNiXvGbwK0x7AJhfcM69XtxiOvbQydxhhiDFU1totutO2efg4HXFqaBkZ/yuIJgBJEwEyOqBJjIGUtzQCEtnFQYwhAxGVZCVPKSTeydwBUiWCOeVqXXVRmcgXXnpAG+/B4voyR2bOlWNfu5b26dLZT5IPS3Xm8/OjBPoAHZ8vcdl7IMbzwZCKTcdnG3HQdqZF6AJ606YKK22hTbHdhGIw3W5GZ5hiZSMig2oTmbLU2RZcygKBgQzRx7CDI3GRXtoApMfu2b7gu21mj5BKbEhsD3Dve1VQr7wGctsER/fSPfeI/+Y/+/RT4f/jv/t7h/Q9ffv3KUaJJTv/j//qPAfzo5z8FK1//xMv/4X/wr/6jX/31//I/+28e3b/z9v2WxD5xvQYg3ntoiHJ1dtCs3f2HRw5+Pl/AzDuJ6zmAw3X87Ms7//yP/+Vf/f0/eHr0KAPzVfvmu3deOth77YW9K/s1gNW6Wyybq7uTnJOuw/Wbk0UD14Sfvv3xWVF++/27AI4/eDxfd25U7N26paoH16+9+87bUoiaXr1xAKAe7ye7Y8GW88VTnf7UD70+wvoFCvuTKz/82me/d+9rALrrxb2nOpvI8fJkHdbHJ2F/Wl2Z1S5R052E1AF49LD9ic98MoBT0rYL8/W8nvDZSrssKax3RiWAH/70J7TaMT/54M7DX/ndP/6dL3zlhz/76t5Pf+q7d9+/83ABoJy6azcOXr63Opsf/9yP/+RrV/Dg4b2Hx3GN8OqPXL8+agG88fWz03kMSM4Xtw4q0/qr3/iTW7durRdNIrx6/SqAyYjnPic2L3z4ZA6LTbSDneokRGHtkAF0SXUZsrizef7g/vHBQbwym81Gs8LZarUC0HVBFynkzKhjyglKJEl8UvNlmYICWCVhUFFWXbM6WbZ1JwpKatH0pWs39g52AXShYeLZCLtVLS41qsT8uVeuvfzyS5/5xb9pn/85AET7gqbg+a03v3Qg8l//T7/2tAvLFaviyrUbAI7nCxOZjrFqWvhyuepcbupaFicnv/RL/+3ddz8EoOo7t1qtVlxOwmptjKxKJOI4EUEdABGn2bKpGkS5D0AxZAYR9SmWoWahaYjI+cKBg+WN0DZvzZ7hvKMD0aF33Q+pzs+959ud+XzaDuvTefrXrZnw54cjv69cNB7/X6z2eXlenpfn5f+n4rLqum261Rpmq9SIsJAQQ4mkcEUxBeBLRxZm3o3Y6p3JabNenDa1q2RWlqar9RGA5Znnarxsu2ntuy5uyHHDAWwLXgmLmSl0I4EI7qEDoo24MYHLqIGEF6ET9qbiiMisGKQAiMEgSjlX3keNppmISsdl6dgUQM7CgBmy6bLp2jZR6rzXWek7YKkA8ODJgt98szv4vasv3j99+6137z54MF+vYuqSBnBvXyZAibYaLHYRB6RLO9ClQgCgORsrACc+JTWTP/jGW5/9+G0ua581GoUYpaoGDkVWFtKUwG5rWtsWqDlHDWEXsKTNPvksbfGCDvI5zGRbXJIGUqdtqwZwQfptYGINgIttCGsmxFud36HOAUwcHpd6QHL7V9IBjR7699Lr9DcakNIh9HxAOaX0I/F2FgBImIxvfvTk/uN7b3zz+HRV1XXOAMPMYp97H8gpRdU+Tb1u77iF4TCAAtgAHxchyG1Esm1Avd5eqUsnLlPKTPJXf/Yn/ta/8S8BiB09PX700ic/cfzWm//Ff/XL73zwtBDKJEfL9l4l7z64C+ClK6Pl8aptbNnlDsTeBaXBiNmASZsjVd8jZucfLvfs+XDachs3YcObPv1+XOqZAWnD9LMtsEXEwCby97x9Bt6aYZDMMBjnbfde8uUaLkgd9cAZIaXoXVUXwtoKQXO3XK1L5J2SQ2YAhfdtThcqOR972IzPCzjGMM5jiOpdyrlNEQA56ZAX6y6o+ZQAI1IyNktbYt7wQBcq33wy0ixMALwjz6IFx6wGENjBespk3nodCBuypG3Anw0+uRnWAx7X26Ybuq6dj/vzbtr+PFR3yYI8h9ouS6Nf6l4bBuf2T5cOqxe/vx3J24Z4dlRc+PLWdbFdrvFM2bzU5cvPq1BVZu4ly4Ymss3KM/zmEhVgg0tuJ+rmsW2DHl9CZp99HA
A2yD5vnmGjvtL/zNRznM7xTjXtA+pTziAWmKp5K0LOrAkAk5qyo8LDYheatvMik3oEYpiBOgBl5aZUhKed5uSck36NViOQMG29DHzuCOmPLWrGquqEAGTLQhjXHrAmd140m0gv6KV5YKn4Xl5lEM7moRuHcWl5267EGAJSiYjEqRmznHv9TE1Tv4NpinVRekHbBUFXFwUA7/yCI4lbJ1t22m7wTyIm5hwTADMtREDUO6oYrGRkyjkuuzRargFU9Yi0X2WkfyQi4p6AvJmWRH2QLAmj8n22EFYlpl7cGDElAnvn1HJOabXOaugo1KXfn0zKXu8iZ+/c7rjen4zIT+8cLUMOlaOmW4espQMAzkyi+wfTj714/cqsfuXq/qdfvlp7sIV20cIcgP1JfW1np3RyRxbz+fr4LHjnimy3ZoWijzf0N5ws7z+a5+Yw8lLzChY0xTZ44sJ5AE2Tz1I+PDwrIJXjvTFf2ylfv3Hz2u7EaJAcebRoDp92b71/towZ0Zbd4em8Wbb9qLQmJQAKROJxNTWfzIzFha7TlMrJuJzNAKBt1sfNgEFls2y5l5eGFS5ulKx6kXJk6yMAMN6vZqNRzNpz+QGQWoYGw2g6fufdp298+8lEUBakGWyhEAAomRdBD5/eY3YhJSJ8lQ+TvZUzqr55PamBWMWzOCwW83Hl/v6v/Nbvfe1rUzc6XQUAk8nkvfffPlktavan8253787udPfh4ZOuacEURLbLwHbh60eIZh2PK3RtP2a8SCK2QWIaKfXuT96ufv1mraZ82bO1WfQIwLgejxytmpVmLQvErCljVBVlUYZEjgVA1zUgOHYCc95NRjXIYow9Nb5HAEUgRGQ0rfxiFTz70jtFDjEnzQCMtSjELOWYr16b7ZUudN2V2u1X+sLOQY/2fundHA6TL2w8qgnZez9xMi2KM6L5OvRdWRbVaq3n73O+SBoBRVkCUHC/KhNQsINpjPnJ42PnXQy9AhUzicAwpPTxSpI1Dykk+gdWjU10vuzRbYVq6gWoVFVn4xkAMHdnR80yfPmr3/zt3/vKH3/xS07w5p0vRHPTF648PXwE4P/48puV4ydrHP3qP1w8Pjw7ehiXaewMAJUHALrmLKVWmKJKStEM+/uTLunR6dIEd+8/BPCTn3/tRz/9kW+89V1Luru/17XLl67VOaXHJ4vXbu3fP1wA2N8pq9Iz4/Bkvgz45/7az7/006/V0xHKCZXFa1/4KoD13/3lw+OHxGQs85NT72tNJgISW6yX/bSdFKP16uzkOH/+owfX967deXL38MOzgucPPrw7GQmAqzvl9Vm6tjtC7m7uln/0YP6RG/t3n5w+PmsLb/uz3mMv795/cOvF2drW3/rwuPT02vX9j7z4Isu+Ki+PHwMoYbY65pF+4ub+3/jLn/kVe+tLv//uZz53fwTtY+pZ6LVbt2S03J+tnNSP5w/HI7vK4w9P8Aff+IZmA/D+4+WtF/dsvpqMndT1K3Wx6h48fvDkeN2+fHX3aHEKoE1yZVZLUZVFUY6Sz7rO6+UqZDP2ZdIFgHl7PI/F07NcFtM2xVUTidZOOMbQD2DyNB7NdL3UnJ1oTATL3To0bVMQE/f5XmKGuWpcjhOHcOXGrXt37jjHs2qk2sV2DqBZnglwff8gW1dAKWim1Fh8dPhw949/8yOfugVgaa9NR3XqutMnD3/ni19rui5lLFerEFIzUDW5KqoU27BuXDGbjBCa0KmVXfz9L34rNWsA1WS2OGu79dyXO9nAylDyvjJQp6iKCoCIJA0ZSqpgZ2YwUyQvjgg9MZ9Mi4qdOMs55szktJ8FGEIRmMHEINY8uO6tp/9f8Oxvzw8bm2SztF20njZhMpu4Lfw/KN938TYR1fPyvDwvz8tftOKW6zZrHJH3Dim0xEzg8biOcT2bTcalB0B55UlvvXjlY7dfno6nsVl/+OjRB3eeJIbsVs3hAsDju3frnSvEZYIReOPXHXR2L8BWvQk5HCG3B1G17NgDIMe9YOuklFXbAr6LXcqZCP1xrhBJqpYVYBeRvJATp1qnNjfJswFw5XSpjmAG7tRaVc4085UrqvHuLLoA4H7Tdu/debD6B2Uxbp48efvhg5N1ajJ3Sgl9mDUyoNDe+L/MQtzifd+//p8rUgdNAMBk7L1Qjnr30enZKmoW5VRUpRHFNmDYpEiz9nxRu1zf0EQYuHuXIcEL39k8lA1Q3Eb8+hLkQX1YZU95/IEBuRfwi4vwRK8P94OJeBcIln3CvnNgxC5iKxdxJ2CrJt2jZdvTBDNHaI4RQAjOT289+MoXfuO3vtI1sWPuFOIEltdtQ2AAwlz6whLMSDf37pv0HJ/afNhKRAMYwi56g2PLTzMDiGGOcl2VTuiFV16+9jP/AoAmlAechFfv/vHXlqt1XdKitbKqito/mbe//NtvAPjMS3uHR+vjZVIv5lxMxkXRxYwLnXEByx64p5fbfvvTM5D3Fjb6AX3w/bgkhiyTBgyA+oWe3TDnNpDw1jo6b5oNZvmss9UAQts2o7pOKfehgqELjvnq/kHWxUmnqxRHKELq1Chnco7Qpx1M53jT+ZNcuIWdP0M/jMl5R+Rj5jZEAJ22mW29ViOpMDixoUYZjEFuVS9UdfGVAIJpf+QTklJIHTFbiskMI1ewsIEiNPZh38wQEeacUp/Fr8eyt6D81jA1kKFn9dH3Z/a0Z7rsfPE4n7P9X3QzXbY9tT2yb8iT5003gHobY/cZ0HNb7fcrrV+68UXm7TPg9jMLx/ehpLbNdLm9fpvF4hwRPccEL0z8S8PgHJfsqaeXeQTbbuwvJNosulvQjUBD5s2hWmbuAbXhKlXVbDmbahpo02TJRCirlJwACPucVVNKGSklYyhkPBqDfdOsTBhAUXkoqWYhrUSCaoxRnOOLbpjzNzNgWOAoJyfoicFsqRa3W4mqotViZ7RucsiWNAtRZu7fPJn681agCy1nvQB6j48z0OMLzD1U4kg4Zeu9XELsC5nUvnK0XB4LJc/saz+ti3HtAQg7nqusE0VKKaXESVk19yvVkLGEzFgH6MPIoIWATffGfrVMi/kCACBEQkamtknvZLG+/AAAIABJREFUacO1m5R9ROSdA0zYVQWnrDEqKxVl2ROW121KOYnzhS+DqWY1447quhj78c54VAIonAjLpC4r7y1KUUwy2qiS1McUhrmXoQWi+Sxl4uqso7cfLM8W8+Vivjw9O1utARQsteNZVVjSUUHO2e0Xd6c7o1f3J7eu7gMoveu6rknp979153e//k5jFBnRsvX69f02wyBgVPmpK17Yqz9/a3Zl6m5fHTNzyjjuAMCpNsEKdvtjF6OVVRUdS5ed8yEin80BdKogSULjarJumxSy5uyI2i4XDACTaqrjiWbVmDQnmHn0aDuzHzJxp5A4qR/GBcCoi3K9Xq5CrEdV6Np+ZhTewViLcLWe7nV5seqKwjKEyLFFAJp5XJtlA9yoKlksW3SqYp5FATBZNmQjKLG58XRWCX9w98lb7z+RYK0CQDRMHBRgi15wdPa00ycO5J1YzjxyAKhPMJo15mybV
C85pX5yp2gA2ibkNvYC8QByUjMMaWQuzrTBSXsR3jzfVrz3haPYSoI6mDKJ91VZdssFxGlOAATmxI1Hddeicj6GAFYRFIVLMQkrAC9kqtOyiDFXpVSF12wsyNkcE4CDWd1Gi0G7FF+c1GMhNvZKlatefuXGk1UCcGs/Ha9098Z0PCpFSLomQ4NRm1Jet49PzgC05ks/jtZD9mbEffsMqyZ7DLmJh1+xMNQyJSEmk15p3cySajLjPgksgWAlFwpVswwDkFMWkdi2w9628ZYyjImaLgLY2Zs1Z/bFb73/O2/+nd2aSi6fxBxCqsr6wdGZdyWAysE0Pz56eu/wSQ2bjLzbvSJ2Ml+10+k+gBLSpa5IketxuP+gdjqZFA/ff/i5T338ww/unWYAePTo/tuT6uvfeWe3Givi7Zde/dd+5uNf+fo33318/P6Do72qBPDd08XBtBpX7v6jxpbhH//ql/7dX/ghvXGNsNNmeeUznwJg8+DN5uvOzxdxubo7P5qRJc1XdqbsRgDyYrnOXfc0v/hiPSnLL3/jy/NAoygtdTcPitVSAVRMP/yRV26+cOXh4UPAf/Tqumm7+Sosmvj2/fXfvP0KgLFfPjqck63PVvHmTF69vXs0b06PPnzhBXFhvjvyAJKmVUiOzojs4ZP3b7+A5bL+B7/6m7duX7s3XwPYV12u4vx0sWzi8dF76zrdvHLlYFIQua9/952iHgMoJuNRvXs2Xx+tWkk6LuUTt1988DQ+effD2cFu6RoApNmCAqFNmI7q6ch3a39UpO6syZ2OpnsAnp4062a9BnEOnmUdpA2N92G1WrnCA9jbvcpU1IqUtNDUpZZUwVTW9eLsuJACQFkWSbVL2Zh9Ueac2xyvT8e7e7vd8qSbLwFYTN6Bi2oZgnO8szOJIR3N08xheefe4jtvAph99mVYSVmPP/jek+OTpk2BxByccZ8RvvZeRGLia9dfUDe6d/ceiLqoXlwI2UUGEAOUdTQ96Nog5JGRQke+yllSTtO674LBU2YATGlwHpsjOOdNGYBpckSOXNCYUyzLkrT3c1vP7e+dawwOFnqLXW3A/uySKXXBVN+cEs30go2DreFCIP0BeeD/yeVPARy3acafNdifl+fleXle/ukvLhuBXDKOKbIr2ElKqS5KSu2k9CMvALomXLs6/cW//tPy8R/D6KouT1/vzo7/5M1/9JtfOHl8cuulVwAcL5qTxXJUl8EYDDberMF0fmDdnt4vw3gEMDENYauqCohrQhYpU1ZSU0tOxHMBoPAOObEmxxxdgsJ79jG8+Oqtl2e7H753H8CDo6dcTlnYCEYOTDBpM52ucjg8KVwEMJG4WHXp8Gwd1BkC2VmD1jgAUYccfxmqBO2FIZ5Z5i+cEC/+evuDsKT+SJYyjNWVRuGDw+O6nlDJqpmZUoyDqg+zplz6ImALWw14xwXq0AAunfvLzze/DcKwkdnYXmyX6FT9/5vo68HC5S2DakP+Gm4z3LH3ylO/q24Qio2Pb4NH0HmTbP80iNjo5dufN1LvXTSY9OQzDPhpNKlLqBiAX//N3/K39t7+nd+fL9q5mIbUJSPPlqywAW7MSU/W0chvMw8CpAbaaOZsMwVs790/sJ5z8c5/b4DBMqyAlaWfeqTFmZw8AVDvf4TAvD7+8Dvvh7YrakcpG9C1KTU5dacA6kIKJ37im+WqUw1ZUxu2mffPm+AcRboIQF2YFM8gk+fdbedRqHjmor7ZsQHKzm+xud9w7ZZqgQHYwjYYeft4f1ZmG0OfKY823Kecs3NlyEyJTk6X82VefXgY2iiWY86xjQDYC6Gwy7XbJnHo+RtcIIUYIEWpQDZue8msQMowZC+csg7JgLaA8uYqw/nnS1kzSdQigJyVnHlWgElYzUSMBQrWnPNwtTJYiAxZwH10z/D8wyxRPacDGwO6SYu77S3amKLbX9KA7D8DumFznH5mQm3a6rw/DIOKyzlOB4AGCue2GbeNeAmLxECWYVz8Zt/mz9q529kEM8Oz44EGYZ/hzWhYtTbgKYGIe2WizVF62AnOu3dryF9oCBsCkC+0kW3Xww2MftHuHv4l6tdSZurVX4yGltecNGcyNVWDqbAIqyHCSu96ADmYVUyuEGLKUHIuQcX7ohyZWQotALKcQ/DMBaH03HVtzllYnJN8ng9goCNtcUQCGVTAZBlA4WwyLkYF5WzBw5W1UGpCaLqoij5LnRATFLahQBJtHEgKWE/vhREUapZzIgJRYnJlXXsRzeqFAThSFohzIqjLsipkXPKkLISpl4YAUUJJWZktJaSoK805IZFZzjzIYZmRQeGYKKlpN/LFpHJiUQ2OMgAnLEzrOCw1G+i8v25zSFKr6kJzJjCLaI5dIk+o2fUAdFTOOQNdUXBKWU1JYm5DarvT47N+dNaV02xdm0NMICybDPFclt77NmOdEgDnZMTpyYOnh3efzEonoK4Lhfe+IFGIZACTuqicm1R+ty5eqaZVWXer09d3i5f32MsKQFVO3GSq0CujT54t26/deXjUoijK6LwxNitw4kZrwo7HT75y8M986lolzoFXzXJh6cPQAfBM3lEf+prZ1ovOOAVNbZebJq1SAuCr0aQonC+70DnnzAxMjiWZphAABFZw1phTTJYzGCIi4omJIJv1IYGgsJ6Oa9na464s2Ttv4vp0GQDAcOxz08UYmFzhnWer2BnJWRcAjAjE5AthciARJ+Aitmsy6VICUDAXImbsfWkgYl5r8pOdyjKyTJ0A0HVWYetCgJHjWpxPsW1Cp8RmKRiGlNWUsnYpG1LWHq6nZt1UTDlFACtNnFCABS5k1T6P4gaCPF80zOwZXwptDU0s27UxVDMTdUGLUkb1JKYQU6qreruGiHOq1rWduhRDms2qwlHhyJEwEgDP8Fw5wbKNN/bGLDxvg2MLKdfiAFAtlhtiLZy/Pi4OJgWPpetyJe4b37n/1oM5gJv7oxuFe3x4Mt4Z7e6XUkoLNKuQsu5NR0dLA9CsY6DozBmgIKOeb7xdR/vpr0Qws5yzKjEh5Sze5cHk6XFMIxm+XhSFELdd28PYWTMA77xqZsYgh9iTJocmJs0RwHJ1WswmoUmesOxCKxSTJl+knD15V1cAkGK2nJUKYa9KhNCchiYo+OzkCICW+tLupHKpqCdpuVyvu9sv7j+89/Dm/lV24y+98QaAu4d5fvL1vb29uiwXT57WnD/10vTx/fGD03Xh5bMfuw7gjfcPF42K0xv7xWc/d/u3vvjen/zGVz7xb//LKVd1tbtq3wTwdNn19HghlHVVtskTuk7He/sv3nwVwB9/5XdPO/74rfHedPf9h49enBQf+ej1FJq9eueHPv6R9fwQwEsvXh0XV4/PHghpTvn2jZ2PvPpRxve+uTicN93h0zWA2zf3Xrxy5eT45GBHCnEIXIFu3JgRrVWEXAEgBx5VhaQAi5+5Pv2jD7tP3N6Laff3/vCt116/AQC++KNvvrUKi9uv3v72Bw9+4rO3YqJv3f2gMPfy9b3J+AqA337jW0dHT3fGo8999Cbg1m2evXTluH0ohHFR+37/alZn
ZFfL3DTNw7NlMy0nxfhpxw+ezBfz1dVXXgEwS5PTk0MUrmsyOT5rO4Ghy5qLWgAg5SycyqosjAJSm63tVq1FX3oDmtgCqArHirZpKIYcw9mimVZSlc5y2BuXmhKAzrEawH6n2DtenF6rR8rr777/aPF02aT21mIOoMshdLFcLcNiTkRt0tN16jRahrQNAF841pAZSXzTxaqeZO0k5y6uszKZALA2mOOOieG9FE0IwaIQhUQxp/7koWrZ8saWMoCVAFi07KjsgyxUk6YEoayZxSvlPkckE29VBNSwOa1BMWzxAwK4PUBdsru2S9Gz544+VdH/B0TG84XxeXlenpfn5S9WcQDBOBmZSCYtxceYTueL2mO+mMdCAEw9X706wc5O+conm+qFIjZ6em/08P7UY1IgpBZA4d1kOp2fLhQgKjbSqgAzWb5ALRwCzi7ABGYgNk4aASRShiizKgrnkjaePdiICNYjKSxgR2ywZDpm55pYUvjX/61/88f/yk+985tfBPCf/ue/dJRNhZUkGTEERE3KXehg1FEEsGI7c21ZeSMf2rROydR35iI020Dx0z5xl/YkvAuI5P8F/1POOacMoPQ+MwU173wxkRgyQVPojNn5UkYVgBRTHiSeL+X1u/h5i148A4w+Y4fbuWfO+hgnGmIGt9+w/tyyScJGm3RnOpzeB+N3AAR6XISINgfLCwDIht9qprSJ9t0EP2+oZOcww8WXALBp1B6ktq2znyJCF3hcTAF8+83vvfMf/x2f16hdbkaL9WI0Kq9e2WtXi4xBbLgu/bSMR2GwwbFBT/qzC4Be0Bsb2OsCsKO2Ue1QMybqqzSg9AKgjTGYSrO043sAsHeLrcHjD9dHJ7X4k5x86cuibFfrddPt780A3Lp+8PB4cbpsHx+tCd75MgWlbdrNDVi4fZgNemMX4nDPaR5bY2fodHrG2njW9NjUdmG+2feZPRsuyaYX8Sy+uUn8uv29bS/cXFaWlZkycYh9rCsr9IN7h9Mir1ruOtLUMdEq5Cv7e4AAWHVtlzevaefM3D6fqV0G17ali8mYS+c2WX6g2TwLscLAjmBCCmTYhpmYN4fTi6Ts4djpihwSgBBSco4Ax4ATywaGMECkXnog3kwFmU09GUOZmEnVQBhieBi6feYeud8kN7j0KpdwxOFrl3qMzlt4G1a/QeU2l+vFOs5t64vVX6zJNg17wSretsjgW+BNTd/HiwS2OTHPF45nx1H/hUEIaND/GbxJgBkxMVFWvYA+20UYgS94dGh7Ch5Gwrmxvp3JPTL2A551qPmcK0nMfTribey8mjKsX8RATCwgNTLnSjIGEDW1WT00Gci4VqqKolmuV6vWCYkIgJj1+o0b2r2vlhnJEalwz72kLZq7AeD6pXHQ1BQiyv0GNyrddFR6Z0RaFqakIvBCgSFCuqF7G7INHiMeFi3qO3QYYLTJbqEKIpMhx6TBzMmgFKeaNaMLHQmN67py5gW1k0Ko8H2af0IqK+N5yMQppq5tcpehMB52DwgzZyLSsXdAYylfm/m6LE/OOgClGICy4Koqk7VE5JzYwEYxEGjr7SLqQlBVGBEoZoKBjcyo6zoAPcU0ZrVOiUxBMWrK3K1iIamfcU/OOgMKoJfiESdg16VExN75/mzpnCVzxlR6C0YC56siadKMqZO6KgBUpfOOEjRk7UeyOOc9SGDaAGhjrCS4Iu9eGf2Lv/Dp5gvuG+/dz1yZ+VUKpSsAOJFCmp2Kfuyj13/qk9evH4wRmVG4QuLq9IX9EQCZNxEkOS9DDhCQt3ZtjWVTU9nZ3QMwnu0q6XyxcM4lTWJYrhtiTMaTNgysRiaYwnQYA/12y0xAQdRnwAimyZLlpAaoZufH7FCyr+qxNwYQUlTNOUWY71LnJCmzVKWmbMTT8QgAZx/btRm7ssyZUiLnuKonqmbJ0GNWmoSLqq6btlXNmqMj1wUVX/UYQFl4YxXloMEVEjWnHEqnwhICel24lFXYsmpKOVoOKcLAwmZqyn2m4MpzwUIKM2OzaL0WGJ2vDr23xy7vnsOqMVidDCUjJ8RsMZEamVpoU1RMnGfLALJpVdU5B1VLWb2T6ahUjd7ZuK5MCwCa0qT0jml3UuyUPpHUXhZNk7NNywLAhKgidzxflV5evjq6ul9UnmJIZeFdXeztVABOmvbGzu6k9M6Lmd07WWchJjpbh3kM66YFYCoFuzQEjZD2fp3Nyw3bjSYRAUvOCZpZpM9yoKrb5N1kJgRTtawtEhOLc2SAaa/vQUSFFG23NjZ2BFiP+BIzmEOKAFJrcHAisQlSFLkQ4nrCWcAJerxcABiVHiQ1c8FxdvN6SqmN7biu0mIe1x0AX7gQz1y09nS9nJ+VAbfH4n749re+9Q3Z2+nf6Cc/9VLTrFn2To8fthH7vHzvne9NR3L75mQyru8dLQEc7Exms+rBk9MXroyr3fL1F6Zn77/jlyfF/kuUlBcLAN6TOKnLoluePj6NIx8VblrRdLb7ZH4MoE+qeGW6H2PLJFL5Fw/22Orv3DlcLhevXLsJoB5VafX0ycnjj77wibfev/O17z4p/P7VnZ1re0/ronjwdAHgYDJ67+xJ6WhKEkKYjmeT6dVkZN3623fvvf7qLQAlPEBduwbh+s0bP79z9Q8/+PClay+1Of7Rm98F8CM/9OmT0zWBp1dv3v/enUdHq/nZ3KIWO/Wrr3zmm9+9D+DB/bNRzTvT6em6q6gR9pOdWZSnwVAiTesCQD0u6vFkvngagmqydbI7J0/fvXd2smxIaPXOXQCmZTGaWNv5sgaxMedsSc2Z9HHNMcRUsxeXQwLpyBfaNd2y0RB1Y3/EnJouj0xEoaC6lqtXr6X1HLlNMPMVgLL087P5Yrk+Oj6KafnW/Xu3D3b2d+tSqrhI6w+PAVz98RBo4RdPnt578HC+UiHxJFanNlkMAJ4+Oow5zXZ2ynqa4yKHJiOnlImoLqt+O9Dlas3tbl1lhbElokRG8ArNZE0XABAk50TM/ULRTyAFTHPa0ITBYlmjJlUtZKQ2JOzRbd5qw5BFlvqUGX0lG5rjBmQ8J2dcMJUG2jGdH6f6fRsXnbB/7nLB1rfzNfF5eV6el+flL1hxahRTKMknTeO6NoDYtTk6zxbD3s4ugIrhKR/fvz/75MmY9yS3oVs+uvNwMp12T+dPnxwDyJmjm4obwdqYmYitD9TuEaoB/9hk0NuecDfHy5hDf+IWdqLUpDApRj1Tnnz/RUvaJ6o3Q4YmU2NwCnFEbsRa7JQHL83+sDkCIKF1bqIskTmrZrPCBhLBaaelFQDY26oNZQZ7hCYDPiVkTxmkG7GEnppiYFh+JlnkeXDrn+Lkcr6MaNE7ooWQVIVy19bewyBERHCOIgxABpEvw5bbdqEeOmcGbeKJLwTe/sByznQyHpiQG9bVlkawEaOkzW7a76MDUnQBLtuABCCFXkADbJtqENiQoy6nsdzAKJeQuO0zAgDxdkRgSDBqBsuttZyPpQDgUgqnR77gFsGDiDGdTL13pyFO6qpJGcDpfL3qklF5sYMuFdocYy4/3GVobwAH+9+FpJV
gFcIEePO9+x/9+psAXnrp80Vav/Hrv/Yn7907a5o1xJW+7ZomtKX3PeYozj09aw+P12ZYNLEaT7JEv4kwPb/hxjLa8s4uwUZbswYX2GDbn/4st+qf8adt323/3dgvW2RyQ6L8Uy4/j99VzURIOeecAXQxqdhxG88AFhqXrlMtSp9ge9ev9WFfT+7PQR6Xgdd/kvFEUTMBtXhf9PK7qjmBkM2cY3FCJpwNjK1SUDb1xHaO6p7jXixFRAMg5ZyyF4Iw90BI7NPmMXknfQSfKmCJDAxlIibjgY9rG0xVFa6fbHwBrbXLHfEMHGl/lmL5+dTedk9PhkubGmgbng0AQ+5AnI+M7bsOiTKIyOzZO9rQNpeu/TMGz/fTjy6U84mlG9kHDMRZztoDu9uazwewbdtrE7t0AUo95xFcvNGlh734Nhe+QYNPZLu+9TzCXsIGsD6xpBDnLkTmXPZxzaB1G1NoV51dcSV5T4UPTdes145pAJsLPxuN253p8elZzqEQx+TbnEzpnKw6OEIMveo1iJksGUFVM4DKFZNCPBSWS0YLM1W1TGSj0m28KdTGsH1X2oxkGoI2L7zsJva8D1HPoVODF+nv5cnKUcFA7d3+tGJLsCBss8pfGVUAJgWPCIvKHzcpWpg3mZqklgnMoNxHhRPY2DGNS2JRSnJ9Wq6jFqWf1T04j5zCZDarJ5PYrFxRqZlqzpaHPNGbJ07aMYtlzdkE8F5cURooxJ43Ks55gwLKvfeLqBr7nFPW3Kt0j53POWeFMxS+VDgDKXJWJeKSSgDODKTBYs5J+7lJREpexIx710g2eGPHVHgpizJkMrWYUtvE0cwBmO7OViiLGcUk117e/9s/VxLsq997zKzTgjknANR1hPAzr338b/3s58a103wMzmAZ+XEZV2WfO88FZiJKKomsSJQw2tfcFN6XUiZWAMvVSkRLyokKAF3oyLiNilXTO4TWSevaEztmRzBwHgjtzKFdkyiAFNusCjJ2jsWZWVmNy5qyMgw9xyerKZI3CWqVs5iSFx8Y5JxkuGwAjJ04p5a60IIKwAFS11VWQdcA0NRmiyxO1WJMvhRwGReNqwsKkTgDsLISos5zlWdkxGZMpBZBpE4TFEDM1hs0SXNEikkMJiLOe+TIQgDGo6pmrFerlNWLNHk79Gmj2v6nrFsX/TBmLJQyCHCeo6Jdd6JgsqIokXsvONVl2UStyoIdeyiRmWVHMipdyQwgxiBiBbkX9ifH826dwwsH026eY7S6EAA39maa7O1opcMrNyajmpKlerciWdzcnV3PUwDvv/e40rxbeycuw1Ztd++0o6wHY59XwzsJHMEJwgbAIN1sOUbYbEECckRilNSMiQEmQ5/jAD2ySLRRBecCaiDknAmqA4eSvCCbOGEWZurpzEZGzAQZiQIgkqSaoW5UeClC04I1uVFdcu4Ccw1ALVVmrJx9uZyf7c2uHOxdbffy8jsn3z47AvBproj8lRuzj3/u1m/9oS4eN1xPPzWdzoP/7oMHfeTI1SvjT7/wkdc//trf+9/+4Ltvf+tf+cVf0KN3TxfLl66M7j5Zh04BzEZVF5JjOjlqP/GyJC/vP3j0V04fN6O28nz/7YcA2rO2KF052zk9epKyVtPCuCqYTpvu6bvvAEhZb928umrPbr94NayOVNy7797Zn5TXCu9o7cwACE8OHz8U+LY7NvBO7d5878Mf/fStUekYMtupALz53v3DM/6J1w7E53a9ohUd7E3JuWXbLTsN6wygrGcptL7y67Qe5aWL6dUx7j/53s44uqwA3vjm99ZOJrvXP3b9xQffQsxyFsJIXehQeP/mB+8CGI3w4s2Dlw6mElNrqU1ddXSyPl5UgExne3sOQFqtp+Odp08P1fLutGoLc87E9z4/13QKIGqTuyB+ZmZN6lhYYxImJoxGUwCr5Ypj7Gx9tmg6M+/rrLpeBTSRNE/HFYBkMVMezXa69Xq57K7sjopqRHntKTvnuZoBWEVdhacf3H3kq4lFaLbVyKyk8aTsrP2NX/kNAD+3s/fxf/ZvfPN3//dvv/FuKUxCIWguypg5hg6A46oSVasSVc53XToOJAKhwlfVlKoCwGrdjSbT+apzDNUuE4sb+2Jq1qUUu9wB8ChVdRCdUzPehG4QJ9M0dLeXQlIMSRXCYk5Ns+nWcCGQ9PmUewBzmzZnY8Fg+8Mm3GnrSt56SYeFa2PV2OUMYP83y5/zCPC8PC/Py/PyT2dxCquqWow9O2bEGE2NwCmE6cT1R+HZbIaEfHqUTw/1+uur46Py5J6sH8/KuuQyawNA/Zgdm5l2rBaN1XrzKGvIsbeQKueTpv74RAM3h/vTFosZBAAMgVCwCzkABudyiCLi2GckAGfN0sNFZDWGcnCUzRrl5sH9o7e//ubX/xDACtFXoppT1t4WjqSipkaMoiEFoCkS+baTEcpWqQuRWBDY2KkjHeCqlEJSKJNssMcNEHnuNttAWMSbGOaenZB7F3Qf2WlAzNYb+mMvBzdvHj9+vG5X5CYASLN3btUF74u+WvQgwjZ6E+CLZ0/YlltGLP3+LUxemIm7EIjYOZdT470Dk9mACjKJmsUYe+0EszTQbZiNxOAAMAUQA2KmsGykxo6cc6Zq2sNPPXo4hEX26uNDWOIGCLU+FNpgnE3703JPniJAhAEkYghgmkyzKQN9jiP2lolWbdNXxI66BKgmUFnI6em8Wa+T5nVs+wxTJOyJMzuQZLWQFQYGgalXRO4ZW1ts6hw+YeHz8FLrCZIGy4Ss1plWZqksv3vn0d//7/9nAD//8Kjj8Zf/l1+7//h00aiUnrKllHf3dlmo6RKAR2fdeDaaPzpex6KuinUIvij70LPzUGhgi1tdiDa+aKn0A4pwgYPWQxO08c1uEKUtImxbiZXtd/p37Vm6GzzUNq8/OHWx4R1vPm8QR5htfeLnj735N2d2BQu7PkJWKOdcjEpJaW/GTWMFSdMGmB0dnvQAXOF8ypohGWSwvrsVBlDevP7WvtPN6zrAQOvQ9KljxXPVB9iaCDhEBNU2aVLJGyKoI1IjYVZV71zWBFDWDBilllgAuHInCWdLnsl7VlWz3IagCEU52t2dAfDOCfWsvwxATXNO1meWGOJYEaN2GSkkZnBRtk3XdevxdEoYRNO9K6PGGFrWkLLV40k1GjXr1rsCQM9AEXDW3HRNVU/UMnJum2VRVn40Xbft2JUAOoLlbJpWbVNVk3o0Pj07YaHVcllPpgDKarxeNyG0mrUoSzhKMTEzM6tBhxg+8KAePjCRbZMRcCvJbSCYDnzqXi/DFEBQE+7T/4GIUkpqRuLMwGA1wPIFbBGqqmr9afjiyHnmoxp6SnLPpiRmyhkAyqnkAAAgAElEQVTblLM2LINERODet9JnFGWCbZmYRJoSESlgql23mkymgHVtC0A1uR68ceLFESyGkFOoiklOeZ1z/0badZZjUZSdqEttoFyMCnbjs5NVL5pValiePrW24S7PJu6sbWKSnIjEyJGmAUkEoxAyy0jqnKQUvc8j4RIlgJ2SJp6UaJE0QbouZ6KonBPllKUoAB
iyA4nTnDUlTWTCYDYjU9uouisELE4c3LAYMIQM5GLmPsjXF+xABSAsBVc5LerCWbKxk5tXxgBiwms74+8dHjeKGNY5RWdxAp8URtyzjU2F2ErEUvK08iOm/YnX1Wrk5Yyk3LkBAFTdef97k/19V9eeXMpd1mSa1dRM+3wdmgmsltQURMIiJAXYG/s+/+wWRh5WL9UMAwkRD9onQAZB0I+pZAJAtQ8m75ewAecKSJOSkMUpI6UuN47oeI2JN+fGALxwRC7ExZSPFqtpQR5WuqIouZASAJGfzWqwFZUry8rLtb/tfuRo+Qf3H51IKs36rNDh537o9Z/9S7dm+yVYJEy7dYKa5EW7Wt8/aQCcrGKbE1FKbWwTdQ0p+8lkktWiqsY+KWHBIlIIpbWmmJEiUDkpi1Kc62dLr6WQcy6cL8oRMzchpJhj6nwWAJrFWIqisJRDiuycr0VYlEVjIFMADM5JbFRbXDSRPQmMKCTnnSHFXt4kxmwoylFZlDmrcI/TIsXWMwHITphJRJLGqi6c82HdRC8xRiL0/WA5sUjpBF4KJy4kVmqyxtRvMgZgse5kIgrLySmQU8pS+ESh66QqyBIAjgjW5RgSKaNgJpDPyQgYFjFVMqNNTog+6Jh7SIBs0JcniVlJLWQjlkLYNGVT5jqszxwYQBtDTqGWYmVKGf8ne2/SK1uSnIl9Zu5+hhhu3Pvm93J4yawhyayqBovdpBrioIFgN1oNLURx25C01lbQXhv9B7UECBIgCFq0BEjiQlCLYLe62UVRLA7FKlZVVmXl8OZ3x5jO4G5mWvg5EXFzIDdaNIFrm3dfxIkz+PHB/LPPPuuku1rFWRkmzpHGpAyAmevSHwX38qqdTcrS8OzF1XrbJ2iXPIA3F/3tu3dLx1MXb5+UfEwFRCQW1YRMdHkO4CsPHVLdd9ZH6TbNvCa3JhVST9thmQYZmUTPyLxIdWogM2e5vI2mfIxKMkqeCeahedBY4YLkco4sjl0mnzrnkklKNi8monFj4BAAlI5hQqyOedv1x4tFlG51taqqqWpKo/YFQKy5OZMPThTzqpgU6LRvug5AIY4DC3UFObKya7d902y67fS4wmUCcHrRPLxd3z4qli8u+9j2qT8uQqedaOquNrd8CeD9dx6zuP/qv/4fPjhb/urfehTPXlyuLq863cZ0f16mIwfg2WUjazue1Ozdy1fblvpnP+j+6Hf/9Jf+0193zatPf/YhgAvp1hZC6tetls5fLtsHb8y1aS4//elVKwAK5lenr+aFXr58/uvf+obz7dnq6mhavnUS7lb2+vUFgDtkq57u3qrMum9+7f6jRb3tt5eX60kxDb5/9/4tAB+l9PU3JrcWfrvqKk+dxdfbVyVj6o5/49vf2m62ACK2gj4wu8Rw4jm9dXfxKBzfu9guWwXwxz95cvpsG7d09s5ZK/bibDWf+WrqUr89Opp+7fFjAB80W09ydHsh2964p1Xzyatn56+fTZybOddtEoBnL88vLs4W84kSgWyC2IR6fmSxMwn1vJwD0ODWp+sXqxV7JvYVT/uiIUIvzavXpwBAWgS/ODruImLfJO0ohGRaV5MoTU8GwFGYVsXlcimxD861l+tNQc5xq9y0nFaXAFarto2hKjlSfTStC7TbPjbPNstPz3/q/NRfAnhy/t+883/+09c/e/rJB6dSgdk7l5q+C66wogRQVhM47iy9fP5Jwc5CVSOcx+3MiupoEYoAYHPUnNy7fXl63rVXSSvnWYG+3yjIUylJAUTfe1/IKNVCpuDsS5BIdHnLlkSNDMwutNIVRSkqSQXEgco8A/fSJUmEgnZOaqYvmA5V7PY0j9GdH1xz2lMKstcFHqIKA6Q5aMh/xkHKW+PM5MmckmF/OVyNriWYmTE5hY7x4Bu7sRu7sb9J5s2gBs8MUxFVGDMTEcglwaQ+BnC+WVfO3ui7i+cfLR58vED/+tnTH3z08Y+fXb0+b6fVDEBnpZCrguuazmykAI2gyB7t2EeDdrpjyJP4tYRu7Od8IjaCqAIynsYIKJ1XExQVIlSK//a/+yd/9off+ehnzwGgrASiIM2uaZZ8MwJYxksqHIzYfAILnJAM9BylUaR4oONfkyj+rA2hLxvXDWSCY77g4XF7jpIlla++8+hTij998jpKBFAWlaiIpBCKHXXo8LcYgar9rewQLFhOUsspcqLCuzRPJskSTfvVMK9bY362Yb9SjicdK3WMd7ATdqTrK+bBT4CMaeyzjMdvhisb9imbtv8NhjTl8cqg4ZODlHVTG/IUiaiLCjbAisrPZpPlqgGwWW56ZXGkRvv0inzyQwDwIBS5+78dfrr/LymHZNBSz/vUXsgy/gzAp8/OYLztm6uOpCrh+PbJ7SjCRGcX53HbAPhzkbZPqw7mKQsRpti7L2WW7VrrS6ln+5zcL2Et/hW983MX28GXRLtLfsE7zaf9q1h8UcTTEFoA4LwvHM/rsnbttPK3jqrF/PZPnjy5WHbrZpNBNyMQ+z0MOp7fjO2gAAzh2oOKUWaf7UaXgQgOhJ4oqSVRgzMa6Hj5KAUxOcr1ENkRkfLo+hEDyBr/EBOjqKZqBZeOg3dclfV0MgFQBOeJHJFIpBz2Vslw5Nh5TSSt2q43C9770iNFMr+Y1pttU+a00KIiVzVr0oi6qqkIgNbzUgWiMgm5YImJ6KSo6sksSYLIxquBbt1eRJm3mwZAcD6wi23f9p1pmk1rIt1uN3VZTssJABcKqzSlLvhg0GbbFWWZpzTisaaTjdGMQah3D2cfzLc0QPcEHQrRk9mh3GQORgzA9xig+WIbTz5m6GPPGDh4y2SZQU+OjLNCou0T4XeDdZjJdmn+h+a8yz2WmQf+qBkPFR8YI617kI9UVdUutVDNoREfgoNXExDU/KZpfBQj6pqWCEkB4Cj4KoQNpXoSvMPdxRGuGk0QoTZJUVUAUh8DOQMZeSPrU5oELr2y2t1bMwBVGXxZnK+bzbZXwdVmm8jHhGhgjAArTAGRnF8OJjazGE1gSlQ6Riai5uUNAjNic8bqi6xKXJUlgBCQVLokEyu2XRtjlxLuzkoltF0EMK+qqHL+ut22CV07gfSsG9mWjKiUkRoRNyvDpLA3TuYnFcemKZ1Vzl2t2qO6CqUHQK4oCt9s2nJabPpNki5KlEFzeShNpMbOkRobyODMHERJOuoj5Q2WQXeRolw4GuaMdnTT3Dmzr2BmzntJsouQMXHOqWciD+vbqKr3jqaL2dRMHJEaUrtlCIBWxHlH7Mi5BFpFuzfzrih9Ma2mCwAAi5lblD1LOQ+T4+rt2v7j33zvf/w/vn922oTaA/jNX3r37/87vzy9UwpM2s6BS/WyjVeb5ipGOAKg3nW9dNHWbeqEjILt9TNsiCeRqcEkM0qV2dUFe++d2+lCwkxFJfa9ppREvPcxJSIUoTTJipkejjNLtvBBCKbWSXKFBxDTAAQSU4xjlZT90M/LKwPoZavmXGJ1TkWZvOcA4533lWdZ5mG4EZHzQQ1sOs4ecMxMnPMdJWmMse+6thdyzI7aqAAu1hvvqrZPopoMfTIzExUic
kQaDcBm28BiyTwpK4NHH3chtN0qv2ugcTnfxdiGm8nFotnABM6sbccM1ih97MtqAmA+rYgQJQXvCk+pp2lVlp6TiCod1QFACC44rqrASWPS1aY/XzeWDA6wBGDbpm/Oil9++y3DKsy496mYuHpSSVGwip8wAFqRblzho0996Xza8vHaVrGNUZu+zzXmXVEIlX3sxgppu/qGtk/E3q+i2E3Xh7ZbvnN7FL5CarexYaZJKPoYARi0dJlna4Gt3a6ToK6PmClZl4MAxEwDLZvyzOqIL68u/GI+m0xZACDGZJoMpgyoXXaXFq1gd/zgwWxGAJ5++vHr1UbCz52+evH3fvXf/F//rz9ats2du3cvLj6Y1CU2EcCHTy9////+zr//b//mt+ZHzz/8Fxr6enGyaK+uts35pnn38V0A94L/6NOzi6vN/KjqOq3n7o6r/rf/6fefvXj25uNv/N6//GMAvVJUPX11GTuZlW4ymdShfvn6ohPKRHioC7G7fW/6a9/+2w9KW68vfutbv/Dofn3x9EnhRIMDcHm6eXBnfvzgflpdbq4ul8tVUfuXr8+fnjdvPyg+evIcwLqJv/gLv0CxDQiX63MSoqStNZ++vHj/aw9SowBCMTWeiCB53vbNpC5KLOpJ+cjwO7/yDQDbjb0++/T5evVi01UnR6GaNP3yR+frN+/PLhpXTTyAsrSuST/55NXX781dQbfvHmPro2oknK6etdkH3jSymMxmVVm4JkrXu0jJOXbMsW9PTu4AWKVkzmbVrOmbZHEjl47ZeVe6kJdMJqiBmNlxVYZcev5qu4nSB54mybIGJGZNasloVpQKXS5jUbqu7S7Xba5aVrmJml9vullaEXeOUU2rd994MzgpPN+bLwDEZvkHv/fH94/n0/vzZ6fnSsEA18VWaTqZAiCzbdu40jui1CWFbTROimldV2cXV1ESgHa7PYkni6PF82ZrRjkbTGEGVdPR6RhdaBt2N7k/G4BdKZmciD1uPkRkqEJn0EEt3MYCcl9uo+TM57645iCNLo0ZwOCRW3DtsP1Z9vRKAETX8rH3yi15n6WmdpCLcGM3dmM39jfIfJ6I1SwXaihd6Z2DJkva9fHs7AIAaxvE/+UHL59u/vC9rUyc+5Pv/Mllr8pFxFaTAPCV61JabbadJMueOIagT2Y94HBaHZaJwyoT45eDdzV+YRCYM87sOQCBXWDnmOqqTm26aJpofuKqp0/O+ovV4mQBoL51tF21iVgNOs7aebuXCFkX0swAF8HeXIKkfeaxHpLFaCSTjasIXbvbg021HX5upHucJwtfDjXGCXCUluvV89OzrheeOABJJKZU11PVLw1t5fVQkYNiuYFyxEwzUiApqpEk8cFlVhOI8788lvlQFcAcc9ZModGDt10W6hjgG1otpyEOpLm9dJseJG/mGswA24FWpAGwkV8zkvI+C3yNjK1cVxPIGVg2QG8DYgImGyBVIlVHKtJHVwXnC7EOwCYikYtC0Uw0861guZUOmnMPQe6yJHLg0XT//S4rNUViE/EpQX3srxTA1dWFmagxexCRmlutr1JMIqlZdydHcwDbbbuN0VwQcdFMHaeUXObJfqGzcjA0Pv/1AeZ8HTO+duQ+B9tG7u5nr4LdEfuTDIhSxuh2p7zGufxSMyJTcc7pUHwDhdK08Cfzo9RukNq2Wc8Kf2lbN1AdUYQyyYgx7cfUFwDihw+XOY8kozepuVSvEUDeq1JSJwod3My9X6cmMBNKquqdgxozi6kOfqWqqGmiSMQkIoUTMyuKotz7r1mpUs04lx7VzBUZr2OGmLrAhsAmFvs+Skdk3jnVmNNCATNNMXYq/SJMOZD0qQxFb6lL0Ts/NCfUszGkcCDmjmm5bUxyNiUBEE196pu2Kbx3nh1Z8DSb1trHjBSopMCegeCdmhXeQxUj2WyvjyFqRoexedtPw3lPwgBlYJ8HBWDLM4SpjCzLLGzITJm2N2xmrvW1A9rvYWfbI5S06/UHEYlcMOuzvY/GTreHNA8rWjKz6dgrACakFFU084J5BLPNzKCZt6mGQY0v9wfzRKRghYpRwZ4JkpTJTcrg0AOo66oOzjEx0xsnx8umWXISzxsBY8DCqIClpEQCOEXJ6mO7mM3MuiQtgMVsDvZNWjW9XV42GzhyAIjYDTUrACIix6WvRCQ/VM5QJqbKkXMFADMRzepWZmZsYFIRTWJV4YuyAMCsom0UVZPT5fl0AkXBhG0vl5sE4GhSbDat47BtuvWmnQR++HjBTAI7W22aXgEkKy8uV80WJjNv5cO7x+oseN93vWe6XF0CUHRt25N3PpkMpqqEPHUPb8q1QkYEJnIMkKmRGSRlRDgvtLvgXx7uXRQAu+z7PA4BYoKIidq+b9PYj4gD+V5E1Ni5EIKZ846dJ3Fu224wpC1zMu4VTdRlkzxsEgq/aiZFAcBTadskNWTiDR1PQ1HO3uHH/2jKL55t7swLAF/7+v3Jwxl8sj6aGhObkFG37mXZplUnANadrKNsk3bJtiklgNiLmZrKCAiqiUpUU+n7vo95FvLeq1pmuhG4CEWKsY8pJfWuL4tARCEUofQDq3F0mQZoEei63jvPhhhjbiPnfdP3ORYz7NNH2s0Y60RwpAbHxIwYk4oFF/o+mollv2in+Z136SLsmJVVgUHMOX+npmoEkSQpJZUoWgfvPaQDgG0b4zT0omLoorRdDqcqfKFiBRGAuq5hTH1kcn0CwDAy092aObgNlJE6Mx7Dz9jv7j07GMiEkdPmARNP8M45Ht03yzklYOIkUpWhLgpHJmoiw2sigInaXph5UgQTLNftWvvC0awMADo1je1kvqD5ArWPdQgnbIuqAFJ7mj0ur47NmYvUauyIiAKTD+5oUhahc74BkAw6FJXJSpcYPBozIh2bGCP+MbrRu9DNsJYPXtxABlCry6qVKJJ8amdlAcAXLiTRIOzKk5Pbz05PiQlsMUVSY+93k6fSnpGqRr22q2Zbunme9EwlOGt7StGSRS59YoH169On+ScPF0cfPtt882X72//ev6Xr/r2T+dNXl3/y8fN33nrvaXO5efYUwO//s+98/fH8P/ntb//3//sf2NpenK4cutK5u8d1umienq4AuCo45tdXq0S6ansVvlPz49vlH/zeXzy7/FdHsxmAhvz5NqkJOarnrgh+uVw2ibwzTQGAWlOWc+fqj568eum2/8Zbx3cnIfR9YEJE6T2AqnRVcLpaEQdPOq19VdWLaXx+udWEqyYBmFa8uVpNJrzerplwfDwLwT17fvXmm9xsJVOJDamVVFbl6bL77k9e3Z3X7969WD5plUN9/DaAv/cr31z28s+/9+n3/vC7oexOjo60a0vHn764eHX27PLyAkDgcHxUX/XxbLV58/48ihfx3rlZYSJd20QAt2b1u/fuv262lXbJWFx59vJy04CKSbtcnW8bAJt1y+RK0lWK5BgkDBKNjigOpY1E1Jarqz72zhOArCnRxJ5LzTHNqNJrEjCATsFcrtdtWq+ZuA4FXNao1WTGwUcKkTCbO0/44Kcfz2f0xv3721IAnF41Z6dL
c9XR7WPaWt+m6WLhKpXVOpoAgKaqDLPFbLtprpptJBdFnCRtut5SntAk6acvXk59kcQZ2IhUVUzE5GACwDBrjfuJPFWqZcRxcITMSIesEAgpgQnOLPNgcokwMPldQOCL/OPdSveZT/kAIhzdJ4x74AMfaf/jQYyC9uMZQyT+8Myf++/ndgY3dmM3dmN/Q4z/+kNu7MZu7MZu7MZu7MZu7MZu7MZu7MZu7MZu7MZu7Mb+/zA/ENQNApjktGYGWI3bxKt2C2Bah6su2enmqn36yacvUt+l7abnaStO4HvN3JwuqYkZUWk6JD6rmZodlAUcwtjZhmBPTr6kawl4h6EnEcvsFucCgCoEzywkk7raFtZenblQclErFy9TPD8/BzAJFM1Hg0B34a/MLJQxD1hzBhRIQIpcVXooMkE7Cj8yW4P1mh7HNaan7g/cxady1Ip3+n3XCEGG9959L/ZIwr7wWZXMSNl51cMS5AdcoM/ZQSSNAMtplDpw/XLeqgJGzJIEUMr6YgP5CFnGEft8poEzeEg9wiAyOFwna0AfNsLBs5qZgfQwvdcyQ/QzWZX7pCLaH5fJecO1aJCq27WZ7TBzA5kLXpKKahvT+bpZNR2ARN7Y95o5L+NZDzk21+ms+2RP2x988FykZsLBe25i58lpQiM9gLKwLsIbhWjstJ6ETdN1fVeE4v6D+6uLMwDJDK5Iyts2iicOwV9jvNpn/voiwuT+iEM6444pueefHj4TAOJBDP/Les7B0NsRTA5uYySrfmm/21vwhTNlosyWU1OoQGLpy9kkmLnnL58vW6pD2PRNrtvo2eWoeR7rtutoB53k89reWRBTYTKwg5XGJMdQFpo7TC6zNBKleHjErCBGJgLmXHuHAXZDuXkAIM4sZiOOIiDyxEa8S0xWFagQebWdIELmTCoAUWVXiMWcUO65qEr0MXa9MDFlupzjvktqmuu6cKZCSTIykZQToLxzSYQIQZUAUWFyYGqbNhTIDEoHW65Wy+VVCIVo2m5WBpRFURQ+9R2AZH1ZVhZjgqohOC86MH0PJlcDCDoQxAEbBEoP+sdw8J79nEspEDHLWKkmNzUxkNPohzSoAz7C/oq7NO3c2Q7ete3OP3SHoVvmvjhqXxARkPlQRllu9OBud73mYIAbgBSjyqBaSAP/cE8cplzloSglxkxIT0lgMBE4I6dEwUw221R5NkJVZT1BqEpV1I5S7LqTuphUdz548jqqOVdILj6jBlgSYQaJkLTzuf/qg2PRdt1GAGR6tWnXm26z7ded75wGtsDeOQLteF0MR2VZp5RMBaqAisIxV0UBVwMw66AJlmnDxGQ8FOFl9m5Y8FTMzJnFFEUioaiCKwrvHGWeMkC3jmZ/529N6cfPrqSH0eO7k3uLajrhi/Wt800P4AfPN8tLlMHdnU3vTsO9RXHZd57oaFJcrtvVegug7VddwvF8SsQjoSNP5oSx2pKCzXkjELGNi4ANUiBufKMjUXvsIkkzO3Lg3qpleiUAdCkNSzYxExmQ1e6gIlDvvHO8adsYewKVIfgCSIhdB4DZ2t5UYtuRpmSipYUz11oX5z4CuO3n8BX1ZT2bWFHRYuILfzwvi0nx9XX0mbA8qdIRwxz3wWm0RqxrN9vNqpWzpTw9bwGct31UdEmNPBG3jfhSxUxNdqREU4kpiiQkkySZ7cvOqeb6VWDmlFJmyKpaMoX2RhRjDFWRh0FMybJUbkpJEvugkhDIqcWUcif3oJT1B8FZvttIFGZqqkPionOFM2Y4BplGpUTEKXXOFTtCIjMxDTxEAiUVVbVMjxwWYDMlFeEAlURknlE4qqvSBzeoQoOIGAQjjim2bXKW+bUMlYIZwNF8puo2ZxcmKSXa8Y9Gf2t3C+NCb0N2g4F4nBSYvaoM/hFIFQJzZL4IRG7bNHmkeO+8dypC0KLyqgpocB5Ky20PgLmfV1XwrgikCs9cBO/63jFX3gEIBbddG2XDtx64B28X82Mt+SpgoWdNkVLaAEBC0ARSKIeywCYSmXdUFZ7ATA5AitpZJ8SZv6pZmQFmprjGjsTeVxtGHV8rrbjzPwFR6VIsi0rajjzl7HJKJp0UfU/UN+3GCciV6/Xa1/Uun2Y4zUDMslyXsq6qtmmvuKmyGgZbzvH1Rigdaz8FFreOwqSUPgGA0tuTxR9990e//Vu/NileepfONtuLZrtJ24+fvCq5B/DVxyf/8Fffu/r0Z8uPf/rzb99fzIr11fbNt+49/f7z51eb6XQK4P50ws4VHLadLZJOjriJtrhdPULRJLF6AaA4FlutnLOTme8stduexTlSVXUsAN48Lv/ur337Rz/44Xf+5Q/+wW+9a2zr9Ssvk7osAnEm1mmSuG1NwL6oJ7di6rqGZmXhQasruXvCAN57+2FdcYoNO5bkJ5O7UAv+sgge4EzxS9SvNs1fPun+9KPzHz9f3l/MXXFv7mExnr16AmCyOPrGuyc/enb+wxebKvmf8/Oj+vji/Ke01RfPPnl85wGA569eX66aN9+4Z7C7dx+1DSdG20Zf0DtvPL4IZwCi6GxxXJwcvTp/fbrczuezjlxKCvKp04vTSwAefn7/FkJrm8tZOS2DTyl2EkUS5zqiAAxt3/dRWKlPbQgMcmLapK70BfIocs5xkSR1KjCqKTiiWV1NZ3WXOgBp3VhUg1U+BiOzYJ6Kym7P5rGVn5x9BGCb2kaw3mw7sYurlcGHSW2OCCRdBACR1HcpdikBymqkxH3ftxyZHVvOgKHVek3lNOUsNiJj5HV/8IcxbGx2lGEbtwQ5s4xoXLRzuW3AQGLq4fPB+SsxVYID60Hq0J6UPyxWf4W60W6cYrjIsMXKXtAwog/2BqRDqkcuyUjjlGfXTrkb+QBAWbPrr5JYurEbu7Eb+9fVPBERsRGZahcbRAouEBkDBYVV1wFQQ2PaVryOsYnrvukKcmHarTYSxfK+plk2EeaoYGVAs2OkpmqGXe72LjNmxE9G59HocC7O+85DCJOYiXOxRWIYWRLrk24UZZgqcVQXHSYUWlUAri8iJcn6U1kKknKm0ZgLnDN5CbmOZ15HlJBFF7OMWb7PUd5puJUvAY922ODuCcda4sPTDI6hgYz4x08+7ZLEmAQ+jYuK4xC7rfd+2IXvMmd3ZzE7WPt217Vd6jERg5nYZSzAzLKEJQy7dM0d4JR/NSRU7uUW83JJRDbCxJw3jXld3otHEkaplCw3YyPiQAdNNQCNtrvCdRtX3Pycu0QwzrKig94dDcqYGTrlAFUjQhR5+foqaUZ8il446uB3DAv3kCS4X6wxPuQebKNdq+6TVXNGelSBmqqpKRGYc9nuIjrxjG2UmjUa1WWRus4ZmXNX2wjAF4W5ypg4OAVZEpJcImi8wOe6jX3uo2uf7OrSjL3g+oZk31fGP83GTcj40ndgEGgHQh90BxsS7g/RJCJAvhyVJCYIbCwTxGRJUh9JxHXN6mSxqKYFF3bZIMDPZzWA08sr8j4j5Z892zWE//prI8qApA5w7L72T+rUAFFkmExtN2wl616xyxgiOSZVOMAsHcBSuXIR7TTPzFS
NRJCV1pwnZ56Ra6zsUDfscE9VdVzEvhFF4UvvvZj2SfoutjGVEwegE0tiRVGrdhEsSVNKSq4v3jgAACAASURBVHDsZNCxBHk2k7IoidD3fVJVIsc+dj3AGc1JMQEUQsHeiYomccGvN1sRGZpOzVSyypGZkTPKEkk5BXZAU2GqasYDpEMjiEgHnWHXGoP/S8aAMbNAdrnaw7vZgQIYccTrHVMP5vYd0viZd52xptwzzQjIhXRs7BrDGgEadC0PnPI874GJ2Q/1eVQlC/UaBr1LEcmCcWMtMDA75/whQjoMmawwKLYy9URJiEyjaXAEYFIXDFRVZWn98vT0F7/yxr3b08vVcvV87RS9EgCBFVXtTYJGtn5ah/e/cue9e0dXW+pTCeCj1+urTWq28WwdI7wRm7lc/Nkw9HJmkOM+RVFhmOPcClAgmQXKfduTqOogm5GLeGcgi4li3wOIGuuSidGndDKdedbCuyTUQVvuAXz08iWUkiu2bXdnUhVMjxez9x7N79yZNF373Q9PAfz46WpRh67Xttm4yXSzjBG6acQzE+ioCsj7U6LJ7KiPKs47qI4zjWIAGFWMXEUwNTVRM7VxA/nZMNbYA0FGzuf3Izt9hEFV0pJkGcmsm3EwU5klaOkKMtt0/To1waj0nBVKcmy0KnwkkpjMc0PsgeeSVo2uj0IRFEBR6qQQS15T5WNx/vzl7FZdFMXsQX32vA3zCYDJ3RM/Cf1l65cb6fv27HJzsX560f7w1ebVefd61QBY9hHEUUw0qDkC9ympmkFHFB6iSSQlSQWCYwZlX4eJyJMD4J0HKRMVvovGROQZxFyEgpjzDBxTIucIpElEDCTMru1bX9YE65MAcIU5okbUwYPIiMygIrnOw7hXTjB2xirJTABHxMROxYaKcAxWAlKeU0YYUHMs4SCT2LIYpJkyO+990MQuqFFUAtDF1EeJannEiSiDHcBGwZgtAYixUdmm1NWTEMj13WFm/jg9DOFK3bmUO5QhLwdJoqkQDExMOTrBzASivldNPYBpVToiSYnJysApJZVUehfKwjPFlDVGrHBpNqtS1Bfnq3UbRaUsfOVdRnKnjDJYKnDy1a/SV369rh6CqICo/ITjnerBEQD3yXP56ZluWzbHri4L80ymerHqzq9WGcxlZhtVMUY1AhvqVYzACoAdcD9iHHtXIQ8gG4IxMEPfdmU1oRTnoSim4cHtBYAXT16I6jd/8Ze+9s7D//n3/9lmvS7YVVVN4FaSH9KNCTCYjnI6sJwTT9x1yVcBAAkJqCAqPfm64t6KgHnF9ayc+SmAj568fP/tx5dHd/+z//If/6Pf/o3qwezyL16u1D//9Mep7/N8/u/+7UeX61er53Qr+CbZxy8uyCy9WC4Wk/jk8snqCoCIW67iLzx+czqb/OD7P17F/o037y2v5Hy9efjVb/7Zj/8SwMXLK03y1sOT+WJ698HDn370SVz3q9XFnXkxvz0FcM+lt2/PLo/LW4+OHHDVpJ88P//GQ55PSw5lvLwAYKblhLmsY5dSs2433WS6mM2LB5eb9TrV3gEwoacvn0FjVXsFmXQw1yf59FVDfi3mAMzr6VWnH77enG36k0XZI77Y2ptvP9ws121vAJ5fxJ97+Pj9d/jl2Y+WKb6+aDH1XUxv3F5sm+3f+cpXAHTx8V/+6MMPn73u2uS9qyezy7Vdivz8gwdH4TjMOgAfPn/1/Y+fPrpz69XrzenF1bPnq3XbSY/elQiumJUAghbL5ep8eVb5alLVdek3m+RdkWIs8iTrkmrw5IW7XgSmxrHVSMw9KKYIwHPBHHqLRkTsttIVRe1C8JOKq+CjASh6TRK7GFPBLhR1VapuT47rYhKEfChrAPPVBbv08nRpcnlrcRTBl2eXRTXlsuySAIjQ4HxRVMyWeu0kkWNvRa+9aMy12oqiDCE0GnMZNyYPDO6xDXVAdzpEBB6JE0Pci/dB00zfwE5h24YCMjtPbwjAfsaBx7W5aJz8PpOabZ/9cBie+a5sv2fcnSpfO4PDNG6Q9/7Vbrwjq2Ps/m+ai+R8wT3e2I3d2I39620+63/p6DqCMlFSAzOI660HcLHumWNMsamimhrRJkrVOHDpitC0HYDeBGAw4IxkFDI3OxBizFK7Q4x1t+swDFy4cRLN+BDJbm5mR8wgl6ftNiUydJrSptuaKVgJjBSIY+zLegrgsm1KV2Rsal+tM4slZagUu4l9VP/d82syRWPwsrMv+PmY07UNcf7DDsHKQzBx2OqqQY0Ubr3lpm+rolRQFujp2s7YT0PdW7TdFYYWGs84LmCUY9QHZ85erPOe2bnKqwgRmxkDTGxQGQl6+Sn3yo804gAq+6caFr6MKdPg5FrS61gojXqSyDDs/mGHtzjE88H7Q2zfLENDjwTMXAo7b0d1gCfyznxPmCJQShGqntmATWfkPAAh36ak5AhEIDeUO9jBc1/4unaf0oHTsPcGkrrYW+Ciiy1DJnUFYL3ZFoWLHJSpR5BOunYdo2037XbzYp1C7gZ9aqI5+EBGltR5t/N7/kpPYedqjHd3/eiDncZQsGUPIO3KQhHGvvxZx2nnFdFYgHt/3QF12tPb/lqPxiQlVTIeABRHataJdV3/6O4DEM1mJFa+uHp1NC3v3bsHwIXi9cUlDQW1h/f9uRvc/TW8uFzT3vbC/JwRCIBSnrAw8HUcZ6FTECi4wmCBPREyySWmzmXJugGOzFtrNpAawAxDkggCeEA31MgzM5GYMKCmmnlAY6FqM+uaJnWRfVBNXRNzF980a4U5XwBo216STUIh0FBNY4yb7VVdMjNUqfQlABWY+rKY9rEX41CUcMpR+l5CMaC3DDaD8x5mTBxTnEynCt6ultP5EQABJxHvXSiLKGJmVVFmDdVd4CHXu854xWHDH9a1sIM97dAZiD6rWjSSLgGYjfUdr9tYLWv/LoFd+GH0oGkPPAEYCfPZd88tLGOfJDJmJt1z2PezbMYfmdhM+74Hk6REY38Yb5iYaSyK5Yhd2zUmKQO1zrFzQSOrihn10pPnIvikAtEoCuC2LwA6PV3C0tHEXW6WpZdHR/W2aV+etc5KAOxdil3tOHXtrdr/3fff/vrj+epsXQfOgaZJoHNJBMAVZAwmM02SdIgtZXCfHYgoiSSYMUxViYmcAzsZiY3DJorIMYhJQabm2VQkw5FMmIUqODFLy3XDLLUPlacqUCgYwOWme30R33q0INVgZD3iJk6DPzk+ObF4cdUBOKnXp26zOCqPyrKPaT4Jx5U/X0YjMFOukZIEoaiTWR+jGQ3lxggKyhAqAAHy4wzqXvl9OEdMef850FjGfpKXBufGzrDf4uUtojF7s4zbKAGcX+84PXYqUIGqIy+EPimZejdQK2MSR64ufXAMs5hEyxKBQ12EsgLgiyr4IsXeS9vyvWJR4VZoIRZCVc9DNQHQm0RBu2pml6t0seyXm826f3Kx+eR8c7XsOjEAUZHU2l76aAlKvo4x5icyWKb6imXZUzLknkmqhhztyY+tamTe+dl0SjYEZ5OIc64IYSPD2u2YRdXMPLOoEamZ9n
23G+4imV2KLIcoRjAyZVYxSF6jfMEqNJQzZCZyIsSoyKlmpmpufTVmJXLD61BjAvMwqTvKngOSZAFRy/WLcoWjbWIA621ztfUxpSQQBZTIDGZsVjhi9AAgDaRPBnJUuLBuh+ou+0ll8A2M2Nnojtg+kQAAHEAEl8HXgVSFpGJEnnwIAcB8OhftTKwM5JhikoKdZw5MTKgGFUUwUUzax7Ruuz5JEbgu/K1JcWtaAHin5mnNGxLmal4/UHqbxPmgCdDUh9kGgJtd0eTCNkFjgRSYmqZPMemqiZsmxkQAxISID9BGA40CzLns0f7x8hxqu2EzRoZARGKDr6hki/nxutuUjNT17zx459tffxfA778669Ger1796Y/Wq3V3Mpttuk4oMTtSUxMABiZAVXPojigHINmFIqa0jQKgdq5k77QXiffK8t6jh5vthW6u6thWXAKwtnv20bP/8D/4+//L2fPf/af/7+/8zj/8f37wL56+PN8atxJ/fsIAfvn9bz3/4V9s5rgKhuYiTKuvP/7Kn3/8Uyge3qqfX24BnK2Wry+WUdK333+8VJFeX7y4DIV/sezj5umPPngNIPX28G4BJMC6Lq4uLk6X8dGxf3B7ceveMYC0Ol8/eUYpvvnVk5NJ9fB48cGL5/Gnr9976+ThG7f7jCxXAeR024CLKFuFiG6a1rouOa8nixkAcmAL89uLUIbNatP2EoJronXiUmu+YADL7SaUgQMWx4UqpnV1vrz83if6zt17xydzANLaqqd6Uk1q3mz8p0+fy1zunfDdW8UvvPP2v/renwMA9bfvHc+rSQH72em50vLHn1z0gju3765Se9o2AM5Twx2WL06fvrgKzomactlbe7XuYJy6NYAQiuiKUMxJKYk1Td+l/mg6VcdOM5cTm5SMci0BNjMmYnIJ2sFnpzOJQZPm5djA5ArnN/1Wkzpa5NpxKcUonXMcu26d2jt35yezWxS3q2bZxda5Kq9xfdTH7zy6WK7bVXd0+6h1qblsGpNAHoAb6ud51SS9Og69dgGeyZXOw1UAyqoOROtmG7VVyusGO2KQx1jUZScjm7E/HuavMQuEGAOCuUMvWUyyXqvZEJLPS46OPvnhNuhgKO6mHTs8aEjR2A3Xg12d4fCMvD+ngcjtEtfGG9DDFCkb3fWBbWBQGFPehN/Yjd3Yjf0NM09ExJQkMYidYzCxSxaTwdSiEYC6KA287dELVI0Vjoqr3sqKXSiDMoCk0TEb27ZvA9XsnIoAMFUbiY5qpuSAAXLUsb4ZkLl1OcA76nKPxBsiKBTGOSvcNDlyYr4Vg1qHFFwIHIRAVrd9B0CdU6ZcFiWDkRmlMTLofv5XMqKcCAMzYhqLPtiQKajDInRtgr++7d6nln0GXKFrf9P4hKSAwlwoFazgfCV2PqlYarkIdO0Ue4Rzt7UeEcns8IOAvEEtfMnsgvPbtB1+rOaYVSEiNjYviEZGDdHo3Buxadpn9+xSJTMiaYIhtWFEvQi7Y+zgdj/XPm4sdZ0h3Wtfmo0+wACWsA31izOPVAGwmnPDtYiQxEigSUPpvGdjB6CNKiMkl2GHg1BnvpDsPYARajHYWP0zw6UDqZCzi6Ads2MK7ByRQ6gBxFaZfdNE5zgpEXHf6HRaJxMFi2cAXZQYzXiEVx0x8wF55zPQ5zVi4PUulN+O7X61T6EeWXo0OjS7BNthwHwBq/EQ7BuRvetfZbeLdvm9n3+ZB8awlPlYSgAcO2PXdLFp0+nlqSPX9Gm1PmeyLsn52RMAXRzyrDPndnf1z8Ku1++K2ZHtS50MTDBmIq4CYYQjM6VsqKZC5pwnoCg8M5Gpd9z35JhFLTMmVTUDWIJchzsTUob+kCtIJFWo5n2h5OK2kmA2ZouOIXcwsxeISKrqiXl3eXFeVqVKAtD3fWy2Fopo/XSGrk8xWeHQWwqhUGMATds5dn2ydRsdOLhCNPpQbtYr3xVUBADesajUVRWCU9HVZi0qomKEsigBKPPVcsXBW4b2Hc/KyZiwnnlBUDPRlCxtttsxg3voM2MBjJEGnfvJDiTcOdKDEfEwVvb07BHsPnyLe78be6d8h40TmLHD1VVhMGNyh374cB5wJsaS7VPPd3MUEYkoe1LTruvIMcw8D5xk71xmDTJ20aUM/DjBoI9BQ7EiGkBBs5g5lcyqLt9GH+PLi6vlsnn0YPrO/TmpVo7ffzj7+qOj7/zoxUfPtwA6lW1jZSmTSfjFr9z7rW89bGMbWW8fVVdtBPDo1vzTi+1m00BLVfVloTGZDSIinOvqOMt5zSoQMQMcs3ccQuncoO/BzN45ESFY8I7ZqZljh5yibsNDwVhVe1HhpgwFiCZ1Oak00+WWG121sU/x3qKqFjVBpwXPjjwV0m43b92eAvi54/n3fvhyduLnpXfQxbS+7LrphNc9F8Fi7ACoEAXqul5MNttWoGKqxEqkGCi1ScxSHMEpUGaumpkO69cY/RvlRGBE1PV97ph5+ctpwkT5y2HDrINOya7AlDmjaJ2ZFeyZCKaJ2CGQ86IJQOwSkdZVbUBUBPC2k0kh23XfZMKXm1p1LPOZP7lXL+7YPMU6SeWNzKEf+PpX0V81Ydth1VuTms4uernsUhRbi3RqADplUbftLAoixFMSHVVVMPANc8zAMWsy9o6Zd7G+nEYo0lNwuwJ2GZh2jkNROHYHI+6ARUxEZATrYgeVPKIkqSoch04U5NgYxs5ELZdOVgA+VAI4CsTOs2PmKOp9Zdrn5lVTmCYRx+xdLgcVRYSI4AcpDMtjkhgmxJSSJVUB9TF2o0yAqHUxiaookljOTzSzwuR4VgcEAIXTBCIHsNNed9zIHGkd5geSPH3lGWYAD4YwDAB4hiNmS9mzy86mKuC0CiWEADhyRnA8MKsmRREc10XIMYFJ4QCUwUXR0+XWkRnppPB15auC7xyVX7k7A/DNhwtdBHnrmBazRjgUEy9RE4rwRt+dIb4AQFYxeSGYxL5r+07WrQytikEOJyngBuY+QSmnpwwzqRtLnBOuvfcBtTcbs0nyBEvEYDXpYuMBYkfBkej3PvgQwOT4OGyuXr181iZeVBNF1ZIasRKKuoxdxC6qNND4x1xXZgKpatdHAOVkymDRWBWkaelRzdD7Wfkr33i/SQZAmRalTtYf/ef/0T/4L/7xP/ndP/wxndy7+uSF53ru/BuPZgC++4MPv/9nT+03bteTW7W3e3frP//ZR2Xwzy7XEm02qQBcNH1dFarx6evT+mjCfb+8XLmTetXi2U9/kpkPbz0sjhbTJ08vxOzV2eXlZf/guHhw/1i6/v7EAbhz952//Mkn1cRd9Hjyw9d/8uHzO9PJukr32nQvlNOjKQDEtm9b72sQvzq/WrdRrtbb3l1s090F37lzBwBTYO/++Z993Ig+vFW8/7g4Pzvv0Sd286N7V9stgA8vzoHtk7MuOAL40a2TZdv/+U9effd7T3Na2UXTRdErtSBt4JkaZkdTRZpMZ++++fYPPvgJgFDQ19568/mrq1UfvZ+senp2tr0VwmbT/Ojjy9V6DeDl6Za47aL0STw5y3RsxmxaF
7PF5ekZgFYtxQ4weN93QiIuh41MU0oAuAxlSHlNcUzkXFWVESlKb4CjAEA1qqWCCjJzmkq42Paa4tZaJyiLAECjefMhhFlFFDeb9RWnLm5XQCfOv3H/NoDl1cu+ba6uzqu6kGivXj0XhOnxreZys9UEoHBhcbKo5tP+fJnaLjGxOkCdYyLKM+em29RUMntSNoGoMhPDMXvRmPH0zGMYgj3IkZ9defq9pMxw3AjtZy6Fjv0fYKNcee36xu7QDjj+2VuyL/afM/RJGCp7H+7kdmE4BB9Gzys7LTkOuwtFjBPhwf7Ac2ByO6GhG7uxG7uxv0HmR/IgmZqoKimppSTBe1Oysgaw1a7gwvuiUzXrHKFNHcQlaeG3g2CTOEkmpJWvTYe0MdvNpwMEeKANNuBT44S6w7ZMdwjNsFs2iGquewZAkvhQBs/GHqmdlLOUUhPFTScausIFAAW8mhrpwcQ9mBl0zyob/NaM6fDIV8geLQCQqUEzv+jaib5wmbFhQ2RM49Nhv2TRbuFICnKsSX1d9akHELxjXyBaHJ7+AIYcbZdKe8COzLjtLmHWEbH3IQNUWT8lb7N1TDpw3oGgph4jGDGiV3b9EXc7ftrzEXiHHdH4zsbl0I0J9gdn+Axwu3vru7U0vwPeEyORq5+r0Li5JGhWuqOBweag6HohZ8F7ycX+ukTsCTmWO2So7qEWDKDxDgg8uPj+mF2D29CwXBZVin1wwSDaC4DCeTKUVd33vSuLvuvVhVZYmaRL4gIAIXa1H/BlVSKKkhzvxNG+FLX+PCa3u/vcmnb9yBFTPExdzTTgzyZrj5f+UmxxB/7vmuav9Wg8OwOIHTQBYHYRFJNsW9Tm33xwa+ObuNoEkZlDv2kBGDwdvPnxAY2ImPjg5g77CJy58RZ3z+hgzsCezMhy1eeMg+mgxKNRWgKclDCGKZmLfVRyXAaMuf9mkmW5khqD2rYVE2YOFgbNO7WoSqYEAcFURQUGx8wjQTUZkpJnb2TbuBUOVHhXF6nrurYBQEaiqWlTRNpsmvV2XYZgjtWkqIs+RgDrZnM0X2xjl0zAaFMfNVV1uVpdNP2m8BMAmhD73k2qPkVT2W63C0mbZpur1ua77WLvnYNY8MVkNpPYj9OYI81FUVU0pRQxpC3xwZDY17O+nvlqgBlsJ4WLwX3+/9h7l15ZsutM7Ftr7x0RmXne59xXvVksksWiSDVblgxJ7ZbRNgx1w+iJ3YBnHhjwH/A/8NwNeOpJwzMPG/DEaBsQZLG7RYktsEiJxaoi631f5553PiJi773W8mBHZOa5RfXQkICzUah7Tp7IzHjs1/rW931r2KiralE+fn3K2vRO2qDOtBFir1HOMtzLdFLSRutP4hEwHYjaW0glxu07qMA5zpEVKTqY2XlftuZsBFegTIUUPIVVrQpNJpKS5RITy6aiJkQoYuKU+oNpZY4nngDElG+urqoK04aZ7dsPd9842p1NvHOBs2v7zwCcvoj7DYfg3nvj8L/7w9dPdtz5mekkLKKYcwBW0mcTQVGMmmMHElUxobVfGxsxcczZRNWMiTz74Ctmr0ZEgkEfbQVcC3XN7JLorK5j1znQ6A5BMYlo1NRD0ysPJmK07DOZ5hQBZJWJ87FN79zffeWgqQIWq3YyFbjWaDH1AcC7B+Gj+4eP7s1e36uf36wWq07J+iw3bU5iBX6ahOlV2y2yes9iVsZMYT/a2pFAhTQRO2Me/VNgpfS549KB1wvEIGtgSB49VGgMCyEFNi7sL2bnyQOmqiX+VFUTNTbvHRf6nirDnAuumuTBf0bVHFEQiACeq2BJOiyi3UwcgGswjl6bvvuDdPwauyX8lciLST3JqfeefdmKOF/pSvtsnVCfY7TzLj9fpsu2j9Cb2APoc0VUJwV55x2SZVqHquOibIXa6SiLOue998RFR01WrkhEssG7sk8ZRsJgS2K8yTSod04tixoH7xzHLhK5wNwVeFoR2HVGakrGZoCRlrTwuEKmqDkBpcw7E3kyWKidRCcDDZNUi3kGlZU7xl5UHZfJxAEgkJCKKbiY9uRCNxI1NQTvAFQhlOcoZqKKQT+BidN7xzNKBmDVxhRTcOR9WN1Ew/qW0TqpPM4CBXbWYQ7ZmnFEEoEHQT855wqTiE01Z111EcDuLDeVa/ucRSdVdbAzgVkTmKB90uCKwy/3SXpngdBHyWx17SrvKk+DZ3U1wU69++ZDebC/0Aw1sHkvre62dEjhBAD7XUFFaiaR0GdhVQSmLueUzDkPILBTGIO05IiKYd1W2nKcr8cpepgTyyP8+ipf9orqnBdiYf/Brz4ub7l/7zC1lqtphLIym/YxwpMzBwpDAmCIDkBF0FCmUTMirutJ3xbr2AhjDzy4v7vn4+XZae2YZv7nv/5V6gzA1EuzM/nk6Yujo0f/9J/87v/yr398IxrcfrbFt3Z27588AvDF57/eO2mu++Xll0/OJv6qd5enq+Pjk/def/PTp+fFH/2wmnadXNzM33h08s7DnWdPz9z9PQrhi+cvznr36H4F4Ftvv3J2s9jfn/Wr/uomvXXUTA53J5PJzu7+Z58+A/DVlN567dEbhw/+/c9+mjRcXdxUr3rPVTOtPFPPDMAXWKuqFe6rF8tVtL093q0aOwyPDivBtMwzk2n93TdPfF1NpzRf9F88ny+zzvamc3b/9ldnAGa1f3iwc7Caz1ddF/vPP38WITsTWxpXdQ1gRpO9vclXl93Np6d1lEvpFqvJe9959d2HR1enj4sk2U+arx5/9fHTi3mX3nzzdTGv8ET5yyefmJAKA+hj7gQK1LWHOc8u1I253lfe1aFxHgCcz8n1SG1uPQWvOvMV1FLqUzQAu019fLyX+3hztYhq06aqJu66b8nBy4CDV8bOMSAeEgiWe3G2O6kY1TATAxBOklOOoRenbBaS6vHBzmqhp1erp/IZAM0wuKmR9lk8VcH1K7ns+ywSUwYQqqrr8yJdxy7Vru6kreBKijTGntUByEFaU/KeDWpmpGZM7IjISMci2GUEcPFVJSgNO4wS1wwI4BCMjGKN8puNu5xBYbftXP2bhtk2OrjZug+brPW2s4RUm+lrnesdN+FERM65YapTU12rWbZ3Pi+P9BAq51xZTO/aXbtrd+3vV/NRO5aJqXhfQSOICMzBvK9BQIoAiKgXZs91cBRhKsrkuF7llj1CNQXQy4qJa572GgXcd63CmL2nrY2UdzqoholAhVVelIRieTyOB2DMtNTCYSrzuQ3OxD4spYcSE2tZnZwRk6XWk7Mh6WRYmw/aSPcpiwvzGvg0NSbOOZkpkNXIYOZoLQgyK+z3NaVyEAZucmYwy2vrOcCYh+Aaa320jlibKxs7GpXowecc3ZjOlxQBTySbJcoAkA5BO8SYAKZRGjmE/5RTClUAQGzekUH6vmXngvfGLCXwcn4Ls+LgXc6pVF2wNZ9gXN5MlImMivIrjfE/Q+PAURyf57DyF4OwIi7bEhQTO5CT1Jej10vqaEIF55iMNKtBCqCTVc2YnbO1BhRsBiJ1UB2QCnWeRLLnWZSAAlmqgN3If8DLWOhg
a8XEPHStAmTpepuwxlkGoFWJV31PhCxgdjLYZrEarE8ALdtogHElxSLfOR3wU6hkkbyVtiT7jduHoReO2C5Gflo5GyPyniRLzgRlZjCXIcGFQAQCsyuUMFWBGRUDMFqbqRE5gzJRkXpsn8K2DY6FynKaVmHV981kV3LMOWZF0AxAfUXMjrmLMeU8qWvWvIzWVJ40l8+NkuE8s6acmmPsH4VXmnuZn19++vz44WuLLgN4/PjMOyJiSQIGeU8iO0FT7IJjUg8gkVGYtiJGwSBMMBoYLcNpG5iUjQgm5GyIUJVI3Cj4Z6DXzGS9iici0l6iiiSQS1UDByBaMsdklHIygwDLZUkRqgAAIABJREFUVdfUFZzrOyHpAYgZk/mKNQsTcbG5I5hKHoRs6NuUIKycxGKfuni5t38QwqzLOD27AOBDSCqOSLMsljd91/rdvXm3coQrXZgqgNr7nHowQUxNc+qIKYn4qr64ns96AdDUgZlq5y+vLsEU6vri7KKp6g58fX0NoK4br5piDFVFUEvRkgryaJEx4KeiWVSC9waojmwhK4mJYehQSb+gsAlVx25HxGZDeQ3HjojN1BH5oi0eOu56PA3zrGPHxKYmmlWFmAo6z+TH6fQW8TLmSONI0PEUnSs0t6FQBQzjAoJs2vcxhJDa1Xxx7Vzhi3HTzFLuAXCxdMgpp0RA8MEHXzwIDTZkuSSzcnlNslauSjmGSd2JNlxU/lguFpPKT52yWW6Vkj2q88P93Xr3pM/57OoBgI/bF3XFf/jDN3/n+w+++faxk3gzv44uWE5dnwEQhWVHIGceM1epaS+JmSsfsioHB4DYt20kUkcuSaq8B7NAGdL1fRMqAItlG1NWUZj1XQ5F4TvJVajIcRF6aY4pRXLwdfAgDrP5Ykka3U49YwdgcjA5Pk7fODk4mdruAbmdaldctV9RxVN3KG4J4N3v7Dy493btq0T02fn86XUrURY3uWvjMsay4C6XixiVITn7TAQa0CwtTpHFz4FIfNhGkMuTt7EylWMG4OBtTGKKoArVkC/lMUk5TAemlIicFuiy+KWOGSxzLnBFJKI5JnOgyntyru1aRgLgPOB8Mh8oeC/CsUvEjOMd3jUBsHdUHf7hP5o/+E/36wNY7uUzt/yZ6an3S8k90RRAWLZ2eu1uLrvL+XKxuu76z14svjydd4JljLEnAH2MGQYXxLIkUoVKWpNAh2XUMRyZMCEBAeSNSMxMlOEBcKi8L46QRuScC8RMBom5j6lMeoG5ZCNTipbV1yGlVVX7tu9XUZvgAMDZUqzLWlFQICIxI5BTBVkoHFMVIihzDzNPFaVaVHtd1GFiufg59kxmWcRRXVVmZLoKVR2qQDwU/Us5mxrBBCFJVCVlzuqikZgNVuIkxA5ap9gSIavUxLs+vvpwf782bqYAUt/74B1sdX3JftomqMGo7KRKTS1VEIqKnMDOoWClajxIHQDySmJgtWxi3tykqptZfXl1VVFfF89ZpJTEklSV8853CmeGlJlAjuEJAFeAMCPPV1nEdmtOvcxzf6FojAH0J5fNQYjyIJD5nCYkLlTITqR3siDXAci5DYyBSkr1pHJ1vbrosyhNJmGZCYBlDexzloHsKjyIEMzMlEfS9zqRup5AS6cqmnrHLoRKTbOqiNWuIkA1eyZqJkXwfX0d1cgiYBRzn6W1wI6ZmJXMBYctNIRLKTPRLOJIjJUqDpUHoIplzlNPrPa9737/4/f/ejYJk7qaimlgAAeHs5//9Kvvv/fgxeOPppPqj//gB//Hn71fM92bHbppfHZ5DuDYByJ6/eits/2L5cVcXf3Wt14/e375o7/6YJWihAqAm4bDo73nzy9yl+OhOz44PLu53Ln/Rvf+MzV795tvANiZNkp16vHsdPH6o4NH9w9TlrZrqWGdTAE8+eLZKwcPH/7wzYsf/Xiy44PzT85WD96cTIUstZkbAKo3fesvlpee8PjSmPSbrxyfX1+m3qZVg5gAVLVKOD4+4c+efr5Ik6Pj1++72d98+uXZ6c2Hf/XVF8+vAcym0/P7h2+/+uDe/cnM6c9//Qmy/t5vfyclSegBNJP7tWv0b96/fuvB1Ydf8YKfnJ+2kU4v6K3vvfbOt98F8OFnH1xeRng+OjleZX52ee0oNQc7D06mse9CdQzg4mqhZ+cPvv1uN7/uF9fT5nC+OrfShzVODqcA2osbcnUQp+Z6ZDKe7E6NbHf/6Pr8KYCJ32sc89HRbDZ79vz5vF1NKp7Uu328IUujRatMqKqca0LlmJhmi8UqmRK1EDgJACquq6aKkt2s2kF/c3OWwNXxdHp47Jepmu0A2N/ZW95cvf7mty+fP0V/HU/u61zDKi6pr0IFQEzZrALNpetFK/Z97JswFUvE1EsEUJn35Lt+FXxVXBiNDU4dO8pOS67LoBCQEnsxUZOSejGo6ogvsgBk6gGCg4HF4CiYZh0zRmLizUfrHTlHobhgFUr3MI3DDUneQWOBkpFTU9OtDPxI8DAFmdvkdcftP7MjZtU8qDV4yM3pCHLagGpq0aWQsWPPzI4H5xfctbt21+7a37fmGz9lRuMnqlr5OmuKOTrnTE0gBdEjZjZNKSMjMHHlpRdHuWKwkqQEYOqaLiUl6sk8CCOzfB08ooBnNEKLQx3VTZpoiIWxwR5GFHFQeA7AGYjYMTOT05wwCFqNSrXCUrCGtqg+a+SloEa0VcQT5MgxHIiYTE1G5HCMqDc3ajTqQKF4jmDOdhg9nDIwss1+g2S2vP1WZv+lT1hT3zbnYJsX13/ZgF1VHepQA/DBZVFV894550osj/EeDp9WSIeGdc7u9leXna1tMoLDQWqlGOco1h5Pb51gfCnPtz5b2xb9Du9Z31cbSRnjxw6Eka971aEIcsFU2CDm3Aa8Y2ZPSPrSG25hb5v+tT4HuvXn9Qlsi5VpfceGLvS3gIovv0pbPeGlA7bvxOalwr7dSn0ayCCRzdgRky8jRWFiKOCIiGZVR5mpMIeJydGIFn29banCt24CACDkJOSWMZta6lYMnoXdNreZFEAVvIrlnIPpJPgQfG/M3ciwu/WJrMBscpSEPnn67Ozyuqoo+HraOAD3DqePz+ZCfQ6VZ4JlJ+KzOiKvffF28A3ftEut6gCxnIlYBm2abW5c8RMlGkzX1MwUBobx+iI1G2DMysQwMynHmM+lqmvKyiE4dqnvVMVVofIEE0lZpS/YqJgRachOrFSeHa53A44QSZ+FsEqJyDfBt7Fbzefk/Ei8hogQk/c+Sc45mdnq5sazZVXvfXDFvEJzTovF0jc1gSSLQB0VC7lhJOScc0orz8wsBTWDgsmsVLkBUSTmqqq895KlbVdrIhEXLK9U8jJTFQIVgtMoXtzgQ1xm5pc6aUG0b5NtN0YLL09iW2RbG+d0KrkMIuICE7itNP7WZDm6/a7B/cGufdyE0/D5PFgFDrmGUnbKkWP261yIkRseAW0tCDTMjAIZgNjhi0pBD2PimOKkqbu+F7O64S4JAM6GSg+P9k52q93A9/eaeZf35Ko23kf3X/7ufQA/+Obs1Qc73/3eK1U
lOn/WXvVou/uHe794fPHl2RLA6TIxc11XpCFmI0MVQu1DFaosa2cLYuI6BCYGwbNzQ96HABryOgZmMqMS7BQoPvZtjJ0bSpiBVBIUlfdNNdudlVTZqssLwyuHEwCv7k3fujeZ7cFVsACa+GkV+HBKlLWNLB7A4b2qCdIu8vVNu4qpz3K1iouUVil3eSjgkxTJXAaXrOGY4hpZ6eOsw5tM3paon6iM/XHFGZcQKrXrZBz2m441gOel+BwRg40Bk1JPtUzba4ZtWcRK4RYmGrlkY0xqRmaeaH/aTBve26v39ioAlaN4fbH/RqV9Rj1xVHfLXvqLSR15teJ8AyB/dU3Plt3Zsp3n8znef7L86Nn8ct5lhXqXxAD0ScUEjmyQYFMuyuSCyY6F1LlYRmashdbMpJ548B0zza4sCkXGbpqyZsnZKAzaCMfOO3bsne9TFFHHoXApHQ+dvxRpZ3Y6oHhGTGZqGInJgAtOMxM7QI2ozK1k6FNfPEAV7NhVDYNICzzIymRcLnH0NR1GE4dBMWy64SQxAWjbuLdjzhWyFUwVmhW5qZ13A2S5N6vny24afO6kTRlrM+uX2yAWGXQe6/lrONCY2LOQcwYisKmlPjXBZ4270+nQ89iFSq8XybN/9+HJ04vLRRd3J3U90ATRR10WX1o1NW2jpGS58kc7VFVFGFFxqMAxyzXcosvnrFVDwVtbO1USAEjR2qgp96DK3PVyyV4fTpu/fkJJjDigZLCSjZ7mL3Gd6PaaO6xHRdo/CGKGhDHBlAAHYu+aZkcta+xpTFcCUEPKQqZFpkTMnsgxO+fIOYIAkDxU9DNocUoI7MnM2MWua+pSycq3aZmyvjjv4OS/+MPv/uSDL96+t//NB0evHj8CICl+8vHVl09W3/jGA6ROJCtMpDs8fnT5+NPuYgFgccCVtz6uMtWYdqp2dT6f7dZnq7btU+4UwOG0IXLN1H92fvUmyWLZ7u5PT89enC1tr9ZXDo8A7B7Mnv380w9+9fyHv/X24axarOZV5WMXc4R3FYBJQ1fL5en5VU7JAY5xvNu8+cpu1Jj65P0ugHmqrvqbNsuD41djf3r/wL/+aG9St1fLXE84TD2ATlaVLM36y0WHVZxOdh8/O//ibHl63X12fr3sFcD56maJ6CfuoNk508heDw53FLRa9U+vzgDsTBYX8/zs7OLk8GSP/ZxMlLNas+MW/arvOwBX85hWfTX1fb+8Or++vu6j4h++9724mi/40k0bAE8uX7jG7/gqJsRVP9mlo+OHL148k9TlBXyYAAjMYdrMV32wQMya4vnV9YN7x00Vlo4BXF5cxv58//j+/OrCE4tY2zmjuk2onBYf7sY1pinmblZNCaSiR/t7q3ZBcN6HmARAzFGNksUq5WZvurv/KvWrpqlNaH9aH0wmAOqd2fnFi9jdvPHa679+4Z589mVMM9dMiD0ZAxCVtu+dY1Y4hWmeVFMwWVbvQsnb+cAOoaFp0ujIB/JGli2lFEnHjOYwvw27mq281ta2o3ATMaTBaNi0QDfBKJX0RgkYNzZKmzhsE2etg7db89S4b7kVFW4iIloHXGWGK5oxMyv5AFXdaJ2IRr0AAYUqQMwDU+Xrk+Ndu2t37a793W/eeZekb7hJOTKcJwgJQNnEzMwHAG1sPVPjfDY1UKWogXlczsKkeD4BWGmsuV7JquyA1uQr20LyBthly6J7QCTNaF3ZYtt8jIp9DmOrGq8amJnJO+eT5BL0jGE1tByoGKpjv0S2x7DdB6DQssAQFUtmVioO71sS8nIS2MzxWyHt+PsIdd0K4Ee/8a83evmHlw+kAZ/bPgFD2a9vNuIb6ArGa7ZRzi2InAveu5TTcGcMt7ewOkoeyUzLmrf9hQNlacAi1wIpAzvAxjoqNl7wYNtE65u2Jf0m0nUh5PENmzNXVSIUcaURjXDz+jy2bglR8foXGBfaK6mRyYC+WYnbx9359vtLkCL42jq9qdvy0iPYEjuPfxv3HmrrO79BuH8zQvn1mMnWgO/f0kbX1NunyEN/zWTGhIpZNJaTUSOYBxOXcVRk7VtY9t/2RS8dIyoukKQspl3OVWjmqXVMja8BsFqbozpfNzNvElPskgS3hrPHsQ2okSBctz2u8Ytfv1hFm3q8OH9GLgDos9ZV6I3IuZmjuFpO4Pdq98Pf+/69vb2f/PmPAVxczYG6h5hlAVKS4AucYBtAWLUA494HQrH35GKuV4qBMlFTVSXMVpESn6qYiGTTynsAIqagqq50Z6ePXT2ZMDh1Mebkgp80NYAAU0lGiH0PDHWrCVYK56DAkeTmEgHUVTChhYrEqJyDD+sHWdf1pK5XqoD64J2r9mZ11/c6zoymEnwlMe/u7rRd75wr3SzlzEBTVQCCd/WkAbDTTFZ938WUUhaR6XRayokYoarr2gVi7mOfJTtyw9AmHkflkIugUhtVSyRKYwKegHWVnk1sP3TGr+2jh9PfSlLQmtYIAEZEZRdNNMysPFTqdRjZcGt/Shp/HmDKcS4ZpiEzgzIYa5wSGPbfbAqS2MHMOee8BxjsjAaFb46ZmWGD9tGGSMOyCI/15K1Ut1BxxM459mzFk1GtSzl4BtBU5IhnzbSueeboaBpee4UOdgV+/vZ7O2H3AMCLq6t7r+/7PZJVZHOLU3m+wu4Bny3i06sWwEXbM9fEwkZJU+VCcK4KIXhvlkdjPTjnCiHUm8c61BETtWXqAZBZYF87SlkUmqBZtSlJC7VCumcz7+DZBe+noZp6rjhI1pxRRsr+g4Pd937LJv5atZP+pMlTt8A0rWQ1IWVrAAhp3aX5+er6ajVfyKJNF6tulVMnGpWlzDjkjMSKBhZkMB1Zt5sZmVHI6bdXShBhTSUv1rWlUxWgTgSEdSZs08Vs8CfEIOInA8jG8vGDEx+srEEOxKRE7JxXUYxxo4oIqXk44ya4puadnWrSVAC6+fz6049f/37i+jgZ+tVq8fTZxF1blfPlVbzqAPDZAqfLsyfd8zP54MXqg+c3N20ix9FSv+I+FeW4I1c77xUmKio6uKVa+d+QsSgMZrKB28fMRoWcXPpDVjUieOfcUFxrkGeb07FUAqkZ2VBNngmmrCKm5ggF1VIFKVe+UhUMQe8wfBlSTACYS1UaN+QFWAEFM0A+BABZRA3BeyNTE4ORKysyrXdZhZMkRkVqIoPWvpzfUIvKueCZDZIlx5hUzLHVnshZzF1VqLIO3sOIVVPMZEPv2qzdI+/bvr7q2YArFAGOFfgexGZO1VKOk+m061LxaqjDLGtMmpqGo9jnpxdZY+25dgGqyzaX6UaVsmhWEBjsquD2ps3ezmR31gBYRux5F7Bqu6eoTkBTRWVhv+E+yXL+1WMA7tOn06tlt2J0FBdZiAG6nMe2zy74VQIA70NRF2EcDBuo4yVDuiF5MbxSJPPjICsiFGIiAkdJbOTImVoxHQbA7EOoUqmsVeoHFk/gYYswzM9ERTgOQxnCxqaq4h3pcPfq3cle216dt/zhL3+d7x0+mjbHtR3v4GjXA/jZB1/98T9+6+yry3qq3fXkw4//cpf9w8O9WV
JC8AQOoM2+QWumd4Z68kALNMV4aZc07iHufxxGSWol6YPfumBKAw3EERkchmRWGqKCL5tUVExB1dIGuVVZafnupSBCaAF10AjBUdbnocT+IjVphHhccWP2W1edQ25ubD6F53DydMJR8AkqdmfBnvNsApQXC8mXOyj4t9n+TL+utAZOclnNgqzwn41LIxlO0g9AQQMVhBjZAA+04i2xwBhBQgCTmPsGOGmUMoQijD7bcWw6QTjM6QSTVOARlqDoTnno3EDcwVD7iCNIdkFGhwZDAQEc0cQLeOcN2gVQIGhIC37XrdtlKkLiuAh3XtYUDc9l1ElmUB9LKsz+3V9g2A1lKEYZmwx3ACMITf3ZCYG9/HfDCLxOXYoDEq4oPZVDNIiGgyA8fY7a6zHv9k88pcNBxF+pk24XhcgGlNjz012haDVM/gH7MR94RlDL3okmzaSfYYMauNx6kcGYKRogkcHOuzh1t6QASoEsy1z/8d956biIQfDOdxmrax1+YM3iNTx0/HJh9qlBMGcRhPGGdkKheepDIjQmy075tH8V5ZN8kg7jYcEZzJdPxbtvohZ+4WBO8dFsfhOYzd+P4sYsT8UpZSw23AJJLaNQDgatuXb89/98tP5ORmJs0dM73z3R1hft2MSM77+s1PON57FhvHovG47Zzm724+gHt5+9m0GjL5EGnvzC6cf5lxzBHWHcT6f92h+Bgf42N8jP9pR3H3fd8tJC14d/v08Ol2uyHQW09yLhKETx0z4YHhQp+oNBi9kMzmuYN9LF85CXgge98k2iYNgmEG/kbmYliNw0CalEEH4iJl/L3XDadOnV+X2Xsqo5CkO9zgn5almwN40GXzfbdNqa31oKvqHSeQgn74kAFO+I7Ke+dpzM7ApJx61B44wGG2DtpoAHHkysvsEOnw7JTHmfYUkoj0qzMc6UOBZS8+MgQRPkixBQBFgEB3TYqeGH15jgm73zWhZJ+O4jGBY2UR2aJmWixMJ/iE3DkHoUR0LDMpMUzwEbZNVqxcRXkHkSXujezuuwTv/vJ7enV62uOZjghaouYGXmS8fmAmAoakJ83gSByezW+/4vdtp3ce8uD/nm+dO3Kmas+e0BHWi5ORMm8e5/n8neGTrguH6zi25ru3zz0IHBvx9x9hnMRpKt9fOCzBU0h1xkUOo+dk1743c0/mVDDAm0lpjIitl+b08Lc9AKBe3GP0qj8s4Lhbw6eLv/vlewNw3tNwDdInSfbVjGG5F9WHZanab1vz8Oi2bTe3XnR9uCwAUJdf//iXh6f15fkZpQQgUEh0w97akGCQcCd5WR5ut2sQ131zd59IQxXt3QPQoqLr27U/vz5vvasuj8tjfXwE8PDw+PL6SrKu67KsEVFEb+3ae4f7y7hUPD49IcJ6r6UQuO43d+tmpWgG3YrUZVkQcdtugXh8uFxvG0iz9rJfAayXS+/NwzMhIBwgPxKTohbZVLpI6TDJEJwbAFKR5EeA08kZH0iZI4STg/b3vj6c2+kkXeK8wwFwuHX3j0wJMRcxlzgi3EPcDR4jRgocMa5xjNq2qWbzYByayHvf4lqKLqLbftv2fVkW6721zcsIUA0Jlr+Em4WZVZWH+pi7q7U9oq/LQoqyBEy0dvNuvdaSjtDbdbs2vxSJoHWfPPTpP2MpCsAD1jqFooVU97BuAoPFFBQy5oz/L3vv2iRJblyJHndHRGZWdc9wpEvtGlem/f//avfa6molcsjp7npkBuB+9oMDiMiqbkprdj+IZgUjp6uyIiMQeDj8cfy4ICU8SUDnSZG7ggynwNPiau7u3ssLDHsWfeQkxkA5hapTSObJ1K3F6yYqpDubOZeWEHhrrsEVlKRAw80ZEfQgt6wzQfhuB2s6VjURcrvTjJ0lL6GY8/Q72EfcpYuMnt/9PpbNEOkAkCRq7IWXMMI4IiHiYhkb6/Vs0LE5THMUglFnloCDQQ1Kd+UK2Pys3ZdRTETW8+kUoQ4HUG/VA/Bw9+DVXSjFCXDRZVmy3Jy3oqtHS2gzBFK0EEpUagyIV5aiS68QBQ0qkAYYxupEELnITXp97D6Wh50mGR4ca7hvERkkdAIV1SO6dg7m3qZO0lMxZD86dmtYx9EFUdljwJwAaUmwsRhMldEWK8pcwH1VioqJ0rGYGlwhp8UeT0VlNSTdKDamY16J9KvmsaC5vnfrnTIcVfNljgB9Hv64BwjHBePI68pm/3OyD+bi2a+To2wb3Hxj1eWgI+vngpBewTC9iNFFWcxncXSIwoAngXkOTVAdVIiEMJrqXPaDPNQRMcon7jPXY86RlbXzCb3fPR0kvzHye1RovU4hkbH61CLGIsKA/sWk3pTDmiip2IU6CcYMQ4rK5XImfHt5jtcbgLZFWcVvN4FqWcwsnKJQUWZ5GVuKcGtNoRgljAIkXedR09UACQgHaLBokX4Fkd6+SuYrhS9lMTWA7r5ttRQCKFGytuIBUbB7ynIau8Mn75oE7clAT42uGEaPuzNpPSVHqw/6gAmTGkP8ZoGThN3FjBHKKAdHQRZ6iaThPXj7RmRKQ7K/yYa7r0fBXGl3behQ2JXJnYt4Xh1vvjdKxQ3gscxQWb5husPGIa1Ze03adJyx31vGEYbOq3h8SI6dQ972+djuvjGA4jOdIjdFvy+PyunhDsPJK1FgEEm89nC4B4Cg/vb87dOnx8fTkh5GAh7R7ZY3XaEI4PfxsbteSo+FyP47+gDr0ebNk2+PKR2BKznu3Yt40KLf9ueghI8BAZBl/dDT9g6d5Ftx/9E+2kf7aH8zrZgls78uttTYtnrbbpu3ALndNs2EI0kDI0yo3SkphBtExVqy5ourGAWttVEHNBvnfzmB+DsZefcmoLvO8hBO7+EeOB2K4ZDtwwGU8EzKzHMeCC+GqSlokFW5aKjAAwqrdft0+Sm70G5xDV/tVCAu3Yu3K7H9h3RqBSTj1SNZaiQ1cNTm6XqPSh4jgVFYg0w9Dx3RCJElsYcmvcBMhqwDFHb1MYtBqyi052J3uGLPB++KkSaOcrgAM7ZoAxhivcDbfujtyZH91B+KyEF9ZwRtmHZkptJItwSncZk2tChTl6KKpQIVPfjc+6w9ODhKyB2cE3I4nO/b3TF8/4c59Xff7MDM8W+uDR2LyhnD/OqGK5EurD1JWO77cfz8TXujnrzv4XTyclchBXud8e+8+3dVkpG2AxFJOF6QHs1kn9DdxHz3Ct9p8ua5vH/qDpKU77zcPiQH23BoTCKKtMrUqY5yI72YN6/ekZCUhdKGMpsmXZ8uOw7MWKJTHwWAvvLTZFfvWJrB9MDQxA33vRnemsjJo0FgpqQsy1JfUVszXQFc1vMvf/9zrS9b4x9//e2k/PTw8+a3YtfwscxI9w0S7t48gs1EQJroVh3A8+vrYicIt9rC26fzuUV7fn2GVFEsZcmhiWCtm61r87Zdb+eHh8vDg232+vzSWgNwebic1lPQ6+1WGIx4fX0W0ZtXhBVVAC+3WyC8tnq7OljKGhFWSjCy9PbD5VGlbP6aZqeZblsTIOitlydGWZb0/6a5lmjb6adLb5dqL0BLMiK
S3sHMdvBsD5Zg0KjvAAO+2y49YSulYBcxfSFpr0kl3caHMiKDLByd4cFwY4cQqoDhDjLL+IL0Vi0/rZu3aqJJwo8Mp2nRFMuZvyaICEas5bQsy3a7AnBvCqrq5m1J61QMmkVy9HWrAL68vppiKSUdKI5oWby2ix5BVloVUVEEghFORFL7W+YqE5r5sJY+WCnBCIaKKIfpygiieZi4agBwMvJL6R1OYT8cCAEEggKUNURiJMjl6ImAwq1JzzEVYZ31LiA0KrJiAdliZhIDVlaMikMQUTXtMylOG/PeAz6cZzx74BByl4fXu4J59E9rrMfbhvsAHMCeeb+5qjqqD6JDRAzbVThPN1L1JD0ElMc3FSJqNjAzCprJIrws9rDGZS1CPa2nlxa1BoDXRoYpEREiHhTiFCIeppuOSnd5whhE0EsTUQJCvTYfNdxKQRGBayXMRClCyBah0e3eAEVsHsApg9OLr2NQVS0FZowh6iOTJeX21hUgdvfvQBfv3keBdPfnPg/HDdu94v0hXaIm/mseOX1CREUcYZIrlB6S4CozmqmonYygq5TzUn5+uDyUk1CiVQBcl3R8kcK+b6QHjnffqAxHSbp6Ungdj6K+CaJ7o4dT5rDOBn5sT/4cx+D+4hxreAK/Jsj3bRyuz8mh6ND0pPQJmN6HzJMXhXMWcREFLBCeCdBh1gO6gencD3pEKJxEQMWDaOEOxPGQ75tsD1H2tdD9aImPBJUBmEkWkpFR1zl3gUI1Wj45E2Z6XjSQ/DCNyTwRBkCUVjKj6Xw5u7Xz5TOAX//tT69b+/r0+vPlfH44qUijq0wOJXi0iCbDFRZkQNPtQ3BUWUS6uMhE8TMrenU/VO6UhIeDQVovYqf98qn8C2KQViiVyrHwD9VYelSDkvgKRCrMQkmFPr2lOUi9iFgXNgKRIcmlA5wJIiLJYqaPM2WddgDymN8dJzuIJfJQUoE0J3qwaiysoaP3xdwVsoP4pI1rKSP1Wfb37CMbd3tcJrXGDNBx/3csDubLCyAU0o9YYs790A8y6Vn203d31PYnn8+dnntIcMadt25CBbgn19+hpIGjakoAcEYScmQUCoDCHHl02kvdvvz2dPr9z5pVp0QiiTzeij4c3mB++kYJHrOxf6VrNJFQmyFYuurCQW61v/ke7I37Gx+NARHsCRU4HK85qjxM9LTx3lsjH+2jfbSP9jfSSkQsy0LQTLmFu//ly6+LnYoVEVE6UmsRGsIQJt0d2bypGqSjygWhVqKfJRNfD8wDFYB0Ep17TfhetiOPhFTwRYbRgcNFxH4uM2vtTefPuFjFDCwaplEsz3yVQCxYLicAv375TVF+fvwdI67Xp5OskWbzAfqWYXAQjEhepkyYJtEpZsYJNLx1MhOQbbxjlvbsJnda8DaOnP1AgfSgpRL9tNlDujL+GQcVx5kmFNmpovKUo0Ih4p74nVQUpqY+Y8Zz3A8KzOhNVsoOMivrmVmxBQClx+/ZIUvcz8w0wvc8Wh6WQHfWchz3h0P2u+1wDt+39NDJfMBBDe3erUPYOTAQncE+AulEmVbWiDrjuLSGovfvdGW/eKoCx9fZb0tk6v1+m2NsNQ4D8uZZPWCPEBVNFJU7vbEkdm/a7FPb+4822VPz9y8OffBg641+3SuzQGYYyf2HPWCr223zsmwitXpQHd0xf73e1tPSLx/hXAwZcFBUMcAQ31kgbxAjIyrQrayxnWiqW90eT6qmgCzLUlUbcSol7YCXl6fHTz99+9Pz12s7W3kR/l5OLHouXxcrp3UF4B7Xl6ujckMxe6nNrLxet2hY0goQjWBZtBR73erptBRoWVnb+sVftqcvAH45ncq6bLdbuH95ef3pp89Pz08/ff7pdDpH8+l/um43VQn3l5fXh8RRgiKyrCu2BmBdluYt1B8eH3/78uW3py/n00UAFT2fzzka59Pp+fUp2ZGE9NaslAC32y25Ix8+fXKPcE97LZEyHRMz8CRqum/htJ8EQqVQvrPMOBffNMJxXMnD4f9+y/f1lqRp4HSegBns2eXI+Jos66KiWXhcASsJGEJz1tvtJqIi53XdvInq5/OnNCSvrSqYKdIikiSSJGutFElyidNphTdVqbU6Q0pRKKBmIqKv2wbg5fX6d58+w+juLZ12mbCqUCALdSYXgVLc6QEEjHpTcwDDHelUTWcnArSgBEUG1xcAUoJ0p6lo9CATQIGq6khvFaD7tkgkoeJqS5DRPbppwKoIRJMrQyVUVZy1c9FCiim4hIQi864JAAAgAElEQVTTKekmyLcSlwcAEXR4D4BloE187N9poyZLsXAcfARG1FBGDEx2bHqfU84/9QUxRfOQLqoi0EDsh2CusXSQJfQn0lM3oKVBaFrg/YwI6YWkOe8rKtCl4HRaLmc+nJbr1dV0YgCdyiZBD29bexLTUpaynEH12sgGwIzXly2BrelRCvrmqC6vHt5yN8lZF1FxNxExW4XhZItAsjAATiyJ+Y1jwgC426ucVN0kMcUe0q+i48e3Pv8pR3c7tXt+Z0XX4SUZO3PJ7X90beRAHhwH0lM4euxNREnx5KnOZG1bL+cTtJpfz0v56fLwy+PDp4f1KmtGj7KLmo6gmXaTLkPOtNPRp1RzUlYAB+/0QXcZb3ynU82f+poBABkBhKMo4wjI5kCEAJTIPbaXfiZI64nN6afJG7KTHXZB1kc+xyn1DxDDy9eZeIP5iNJ3qBycv8Egwr0xQsQkDu5IHZM9tKqxLQSwhI5PloBEPTKgTlq6dXoXxrgiwxu7ns3Blt5fxESgWlRLsYCqVxCvt2tIhPPxfAbw8Lufz1L//G9/+dO32+/Labmsi9mqi+hWWwNQimqEDIRoKo6g9QrRmSfTp26AELu/Jui5KjqSODGUKqKq7u5wAVTVBvFuJ6PsrskJ5kuDItdPpMkwVzQp0O4X7e7IfLgAAveRDyOpwFqn3g51dJAypXdKOs4/DgtTpJ8L+yLZDzZNP6mCGlplUihxKuldW+qyc6xlzLsftOy57uVwMO/eqrGHEwA+d9geaEgn7yG0M+waik2mjaEgQ5BLjDm2OXddpYv3ZsVuDtyd7G8umny6gqMuf3DXvQ0lSJ/ffgjOT7uSueiFsb1sr4Jf1PKWUsx4J2vHyDGPybdqzlQtk64aR4k6fukkLP2gmXKYjJD90NsFEZIklmOW7uZ1npNy/GQIc5H8752X8/1YfrSP9tE+2t9MK7XWWmvR4vTVzkEn2PwW0VS04xwZFqHiKi4IBURlNXfSa0R0NbnVG1UWNbA4nAwR9OONDsBT3e7qhoCaAdx+zu/qdgQ8SazTw6VAgdW4ARCwyNKiMuK8nlLpikgLM6QrrISZh3++nFZbgCpiYljLUrZ4efoKQIMq5O0maqudW7SCPIll9CgVSgUlimSuojOQOCwUAMFoXlVUkTlyIIIIMkpg6B+O9OGqGFREIgQSyPw9CgANYygQFO8JUR1liJl0MNrw1RAAQ3ppDwDiqT0rAQRNJpt+z1VC5wIHCNcws+GG6K+72AKgRUugp4kuUg5+uVSQCACWesMIbJORVP
/vAHovzPf/21LBddkv/HT6Pgd458ZAWXlF1MDqfOEBogRUGI6qKrs3k0dKyZmhSAzPJU6PjPIwRysFqPabvzbguA8BCBmgg06BhxuON66KA8ITrVkRw23V4AcdIpHmGFZGKc50I4ZFpNtyo5qITR886mj3Ks3oM2fn/S9wj5/eIDejDqoD2/C+znmpbBvj7lQPTMlfsH7VpaNwMOj89JDI8IzZJ92H2RB7GfERMqRaXkZGWKe4tmYgKDMBiRCHFo9Wa9hnzW1EraXzUxorFDHPsJnEQlsdc57IW+5LBViywUNbHdKOOQLWMjTJAm9njB8UXuxuTu336ZAIho6PMIz8R0koismXOEhnS8J1KFkhF5vwvVTyqAt0/GnOEpP2cG2/BIf7SP9tE+2t9aK5zq/fAC5I8QAUOZSjAjWAWeHDNBFWnq48DqxownheOQiAplFrW8U+eAg79pQr0OmnGPQaqIiZjqAinFQj16YoiqGBc0esRkdQGANFYxTLnrrbYWJ7NaUbetqFePh9MptcMW0cJFtGgJoUcrYtLNn0G013WQDAEOcnxikK90VUcQQAzXoihMwUzMQmojZB6V+YrOedLoiLLZ0DL29A8ep+TY3h+Sc3zvbN75XWJG8+Tt9eSYkKPyMn+UsTw64HL8/aCcTTWO999Ez3A55Ou+e4/37ssc1DnQh7vK0BXmH+7Mie/DZPYL3n/y14b2rs9jWMdg/BABtN9hmEbfTa+482r+sNPDM3fo749VjcPM3OFLpnL7o0m4+/IP/nKvew1k7lCpDk9OQ0FEEho5ZnGqp2980+PunICy480ylZi9JiUS0eGgCBLCpOUkKi2UDR4WcLEFgEJft/rtxU8Lmi83d4bXzcNPX19/WwMAHK9tq9fW4vpca1OVz7/7f749Pf+vP/2axEMAXPRPT7dfn/9Yr6/n80VbDdLTLLICYAu/PT2ZCbRUoN6qoNmyBmG2JAFlC/cIMyVEVdOfGhGbhwBlWQF4axCEe/O2eXX3rCeCtLfS2TeGosNieqRHwgcqT1Ukac2042BUGInTTA6mrrQWM4YTYmpmRVVruLdatw3A5XxWs6UUSxTKmAyzTNzuRuSYJnJCIoevIy/Iz3qCdrGIyOrGN9/yJiYaJCBLWZelhIdA1vV8+XT5t//9x+V0yvu8vLxERK11WZZ6q2pKqKmeTqt1q49mJhFmJqrn0/Jyu5Hx6eGTtwrg9fa6nM+vr6/e2uPl09P2BeC23Ux1WdckGzFTWZbz+Yxg3ZqaekRrLUtQb7UCCGBZltYsjy0G2KuzJIPX8Kl1P8Y0P0YBtOO6l2GcDgNEBg1CjvdMKehG/PQi7YlqY0sP4QQOIOu97Jr35JCtkeXO+vkEytstmTc7umveiI/xjP1M4HjUCEns1/T+DtHJfurNZO1xz/7W7DcRSFI1jIW2d7gvNhuHw8Qfze717u/eNgDA1VtCiVVLkC8N//qXl2/wpfBWXaz99tIqAaA2B6HI4nwzv7SfpCNVWABvEc29SDQRB2ttFJsBy/Hw6C4GZiaKKKCm3T5WERQRlAgTARzwxE4n0u1W2Wa2xPwvQTDoYz2IjAxf5dy1qcIcRCt5P5PjjhMn1Hc8Y5z4AnT60P3JAAYKTw6I9aEYxfB3hG8uGuXCxSPC3ddSos1Sh9MLcFxadyedjKBH/nJ4i/nW+2c7IuwAm5W3T+lb7r2xz8Pfx0U8fmXcfXguBF2/60JP7zbNWJ3CnjnU306QwiPIa9sYTSJkVLQBEMQgk03E475P5OiG4Js3OOqNiBgFebqHzpJv/WSllCW/21q93bZlte7U6zj3vc5JDoSZlIxa0RlsdI8aNUC27QaADAHrtgkl4jVE//Ef/+Hp9vLHv7yeP30CcONrc1+XkiOSm4EQzexZKtB1aJCe2GZoH9/3kif7FUIFwSwgwiFiE1wJjNyOgW3ASA7PepOmpbvvEyk5g8I9wpZVdoII1UKgFwDn7ionQdixVzvu87heDiuMQJJiUeyQGJbwhxEzmNJWDt/dqQTm8iQE4d1ekP306LfYf3uzEwCmnTQwEgDwJoY1xjij6RRclktnNhRksDPors3h7v12h+72Wt6JPBV0ysjRVQGgaosWs9KJO0RSS2mtYVhA7r61W/UKtkRQioiprWXt7Kc5taKeVc2BfivsEsBvjWilFFObghsza+l47X6IHObufhrfHK44BAiDMFWIklAtUCXpzJSWOwNEO+L44NgGGAeiBST5wnzku4k5/nLX2/eEWx/to320j/Y30ArZZfPQnWYTQGhd48zykYHwrLcMp8d+EKJ/d4jdrB3Sc4pCuvGTp5TMQ3vAQ4A9UjdcHUzdzAREK2rOjiKJLIshBaCasR/vqT/3I0lAW863tjlRZInQzWsQJ1vWdcnwcni83rYWntxzBiGjM/CMIUjOqCASP4pMJZi06ci+Tl9kJ4gEAVH3OAZlM98j6eaTbFPCRlpV/4hC93cRzd3i/N65dOeHO7LQ7IlFY2LfHrFDRxksNlPl3Y/cESocBs/85WAGEr4aUMQAACAASURBVEB6RoZOtltNfHt+vzncd30fh0ztewadY8/f2lI8XHP4+fCFv3o4c663+6798Dty5z9/c/18bU4fwrsXeKOiHp8mBx2Wb6/k/cdy+HRYkf0TAjPpo/fjh6vn39Nc+P0uTZDNO6NumHEEonMbdPgWjiNz/BJB6cSs84YkMs0w6JkZC0gSSgSbJEs6xHRp3P7y2zc1bVv1Af4C5OvzU3MvqllGutUqqms5tXK+bQTw9fbNRMpyquTWeFpLCyznT5tfVbXdAkBIaY2Pj+fVrHlosEWU5XSSh8wCdnogFAsEDk8GCVCcFLVU8BuzLo2lzUm4MyM0IWalZAlv94h85zk6iWzKnwCYWddug+l3Y2uipcq1o0I8rGjyQhEQ1dZaEiMuuiSRJQBTXS5n6XWuOBPG50Sn4l7MymKmqplXRTDq5OzQ7nhiMEtopA9hL7GZtxNAmda1EpReFrrP/mk919pgaqWkd1VVl2V5fn0iegZ6q00gp9MJIhF+vb6u6zpYuHbb31RrraqmZretPj09/fJ3/3B5+PT1228AIrht2+vzy8Pjo5qSfH56EpHVlsfHT0mRJiqfLpfzum5bPZ9OdYsWvp4f1tP5dqsBB2BAuJey9HhCINLr3j0MBfumP+5NiOjuKzkKnKOHQ6bl/P0d23MK9lDDEHQjXITOSXd0w3W/dL/pKMOdpajzginMelmVO+l2MJbu+oKZAn3IQB+d5z4z/V75M+8OqMML7ObVfXijE7x2FYV7x9KPGcc61HfiUgamOvvRj4aMB0iSUYa/tviXX1+eb43NcyqsLLfbLXeKR/x0LkWxFGm1gQFQxAQI6VXm0mnhZMuljQJRZxMrcv8u3dEk3QEBUlRMdLECwBQmMMHZKAhBrIt8erio2m/fngD88bfmSJIciekMFYB0r/MZIhEicOlAG8kPdxDQm+HaRfNhfsnpAD5CVcMmDaHcL5Lp+BXhYFBNXxuJAhGchIvDG5uThZpCbxtwSw6c0OhAaqPv1Jb
jwpQ3/+7v9P0z9v6qvhlnHOV7jXNR76pNrvs5JuOG0oPTQ//bQW4j91adTJxXOrFGpI2A5nIW1VmvCYII16xQQxItH2aqmgWEAen58nNOFJwloVJTRYsJvFZV004BUdSWcAew+fZyu/28PECMCIClrLUlADPyGgZ7aCsZkXrfLCgC8dhyFDx8q6+LleWMl+dvj6fHf/qnP3y9/q/wCqCE1aw0MnrsYM+sEkk/VyCyIJ7PhBHBFlUgJmsitT1aQy1qKubsDkHvvv6d4rN0luqu1mJ4uAcPA0TEe/HrjHOEZn28EUxJaFseaXWEcLoQOiC170TQobVdpGGsI5nLZ0BAptjuKNbjtuJOGzDUpt1K240nyQBlX667mjlU6aHgHhX+oWTfx5juJcTQ4hIiIRBnG5/2MyeYFQcHtEJw/0/iSQccVLLqjh7C0xIM6bw0IqRCHHCP4Y7MaJ0ushAU0eoVYDA8gm0MHlPZ0XnopKKVG3eMvPRdRsG+HvazsfdZMEEWb42B42zuevD8gwCwsm5bvW63bWtQUzOoZoXVuWbSeWygSgj46eEyxMSd+p/i+y608YP21kz4KxLto320j/bR/hM3/fcv+Wgf7aN9tI/20T7aR/toH+2jfbSP9tE+2kf7aB/to320/z/aBHRMPpIewBdAxDIm2QuIjnAQoRAiWdflQOqbZGSQxqrQGHF1zHJsSA6au9h2/79jRPkIUIRZu0IV7h4Uj5Zhe6fXaGU5DW66DqdKBI+O8FKN1lrVZXnZNvcKEWpQ/bpdL+cLADUVgdMZNDEik2qQKR0Z8vOsRRpCtCzbHaQCnLRVktxR7Nz2AuR3MyY2QQjskIi8RDUZQ3QwM6PDQXf42Aj1TwQCBnpxYHH2OTziVDqVyBvwwH287y4gOACQx8SN8chJmp0zbKY7DdcI84+LpWMqdiDBjAb73kW5xzuMj3ZYTufGmjgDZCnLgbFhJsDiXTCXM/4ob+++Xzx7frjkffzxzSc73KGP1jFayvdfnPfWSVa1h7Tvrh23kIG33IdmfEu+18Hvt3GdHt/vMNh//UW/G1O9Tw+6+8OAf9wtqx7iT1KrIUs48dBDwkxhk8HqBJ8cQbJM8AgU7q5ikAgyQYKOUFVmRpvRG7++PCFJ0UVb1vE0Wx9/dkjLQp5GaoGo6yreqA3AdtvOy6KhCl+WIkD1KGKEklrWS3bYip1P5/B2rVcPhFDVVBUth5CqplKI0DCorMtpXdcQfv36F9ABZNnoRZcQJz3xAmrK3FckgFqrmpVSAlTVsiziHmTSOMpewssTNKlmQolgMbFSWsJYBvwrWSWTmNKWQg9R7QRkrUZr6/JQlmXbbiQY3hE8IqUYgFJKYgbCneGaAB5KUQh69Y0BX+j/HzCiPEjGTgdExIRiIoJiVsxAllKyekxEi3ARAWOrXlsVUYLfvn07PVyGwJDz+QxIeFxvtwj31tZiKlJbT7xdlkVEwqwGReTby7O7X87n19fX7Mfnzz9/e/nm7qp2266t1QAeluXT55/Ol7OPaepdj2AQ4KLL+XJeTw/UrdUbRl1XHqrFy8iiFIDfKRx/tzcGTKRvpyOS4bgBxpe/k6vVr4p9hHsF2SGVE85/RFWIQkDJNH4kDIUxFt4OaR+sJBMf1AX8+zaEGY8CoMPNBnb8INUxpWfPMNzxPRMpc5TZd0z8TCLnwN3J1KGGnSCShwNsDNkhB074ZggTcQwtp4ua3SikUjTcEx/WmiFrwtq5oRDhkgAm9tGMzC4FVDy5FzSpbEz0JJbFpA7MdZBEGKeiZdq1logIcQBCqipEbcmcRVmKLqZq1okCg1IEyBJfouzEMRCxWUVKhsDuyhpULRfZziDY52Vyjc3z/+5skr7juzzPpXpAd07FoGt3W6sQETHQZOwCEYgydPUI92cRmpSynG5RAxVAOSyVsa7mYsS+r963AwxsLNq+X2T/xjx8v/u9oY/8EE504DgQDNqEo8rTv313O2bhqKTn4wDfgSIhzLKQScM5UFgUUwFNVFXVrOvJTIZkQVcmqdqRXALx6Hp1V86C1L5VSU6WD5LNO+xKRBqDUIJbRGw1v1uDLVh0XZaTR2VQrRAaRC9xDDgBj6y+pFYWExeJWANavRZTAFxMRJZip3UFoiheb80u53NZf3u+AljWcy9eQux0yAOzlh0OwAMcTCwQKqSGm1gWkwRAVQZaJImrRXKkU5G8lkPdvnHL24/1NOarT4EGAnm8ZlpShJrlah7pPuBUjbXf5KiiYOrqu8F0txb29Uumzj96kqnZu4yelAgHMTn3xPxMDvwB+6q7T47KPo21N5X/w9VDpz5sky6ip0bH+S7jDAfJ1tp8Cvd9mh0YszMkxOy2o4loQj0bW05453kMOBpw7GgQ1OgZY1NtHGwTzCRxwsVrIpIjeXwpITGQmJyrKm+7cCFaUclCYX0n9c003rJ34GDd4L7NuZ/69FGpJwA0x29fXv785bfaXKRQRaAqxfpu70OkgAkzX/vxny5DciVBb++QAL7P1v0Dj5/e9+Y/AKb8aB/to320/6StTKsPwH68dWaNzqQOYUNLeaqiJppZh0mnPZxHAATMinUqPeu5OyPSu6eQJGDZHRzp9hS0kZ8x/oipYAsQDEamC8HDa93Eyqpli8oj+n/kE4nQ6WVdFl1eb8/e6k+ni5VgxHWrxbKy9ioqlqUcVMF0LCJP1Z38KPW+iNllplXV088zxy0OuatE901i9zqNdPfoCezaVVWJwSgEhuLekzQNiL39yDwcethRwT56igcJz+jj99u4+JBXKIPQh+z1B953ZbJT8U3HORwUx/c4Zrof/N/ju6lSd9LJ+wfJVJ4O63Wa7DJVtXuH5Pjz//VRvRu3+M4tfziOoyuym999N31Pw+lGy1A0R3rXjx66f/F7/cn7yT4VfZEN6vwf3e/4OlO/AcZq/N5wQoCR+LLfQHA3LqmqcijrSaggg7NufE36y+56HUikx8rdy5JVpPonnauwp6OzWIGqBw1C6RTy5+V8ja25RwsRMRVV8+C1bfTOIKWlmK6O2FpT09q2ZTlt2ytFprtJTE/r6VavDAZ7MZnqNQLMUhUe3kLLGbJssW3brUbYspSlePgwbLgspVjZWtbVCjVTsyBaa5tXALW1U7FlOSlETKWArUFgamZqoQBqbAz38NaaWIGGu6uZFWsjZUxUAXF3kB6MaFn8Er0iOURdVM/rKXlsW6tkWFirjZHJa92NjnQI91IZIkSL0MGYocyKuoxMOjz4AKRL4P6Rgirq3syKiLRwFUkP4OvtykCxhYBHAwIitVVVXXW53q4APj3+VJbyxz//sdZmxYqt6dBczFp4gQFY15WZhFz9L3/5y7qWX37+5eX1+vX52/l8BvDzp5+er8/ny6XWbds2Dz8ty6fHz48PD2qWIxOM220TIlq8Xl/XIpfTCo/b8wui+7BarcXseruJN3VHBKU7TsjMkgdGzua0Dggod9F55Li9c8fI/bZ499Fh+/XNyS6Xu9hMf9wen5s2Yn+qzG4dwip9wrp3ZDg13j805cebI+ROrs4C7uMwfm
8jDW/O/l5Hm26/7PCVnrZ2uMVMRZ7OIaJHsN6WBeN4w3H/Qs3NEiRhgLkHUChallMLGryXoo72ujG2jXVblxPgIiCKZFyzv6M2eVGxQIRIEIutUpyhlLhzJ4tkfReBFNUWjYzmLVNZlVFUitrT8/V8KiaI1oCvpVgjANSW7shMOM0X6oa3Lst4zXHwj4kWkCrKeQTPIcXxkOgyfqg3ljXAoYBoZ0vOc8lH1n9mwO/nfkAA1XRiDn8Ee02RMFmgBjGKGoR+W4r1mbyb9r4s36gf9+2dST68KRjus/vvyf23vnOjHz9ADstzKlmHzn7nF0Gmdnfy6+mBkF2pSkYLsOeyBjtJEIFA9OJRwQhRTUViytL93J3P7NF7pxDpIz9k3yeThmadaaoAhDdGa7FoAbDYSc/2WjerRm8El7I4B6N8Tm4gnK05CTMtpagAWANWr0/dgaLSvBZYALaV9dMDA//6p3+t9RaptNPN2DOgp9gavl4UA+CR7OrprWT3ztCcdNZ0u5sUKD3qrdXVcsxyODG5FYN7NKK7K1Mx5lDaxUQi2IS9pGaEU5XYOf5CJJifMM9f7Im8mBM7PEjHvF1A0OiHqByFkOjJ5CplyNu+zKa6sUdieFBs+6TeKbBTr79bgpzyWIaBcv8F6TdWnXKzv8mutN+RSMp8/DCOMIOQI9V5d/rKu2S7DikRy0rTZOgMV3QriVNAk3S6aZk2kQiEoqMmoqgKPa3B7Fr02Pd4n4zW9Z3Qe7+1GyQWUffohBcid5vovsf/kRTp975IAGYLIRrlsj7Ysjb3FqFWVBh7FDLHjSahYCll3EXmObcHQubj3oirPWn/8AmGRfHv9v6jfbSP9tH+87Uy67ul/Nd+wEwPU0YG92CTjMh8qGN4LjHsGQgdAREPisTuaRAC8AiTwug6a0gkmIoBk6zSCIEqF1MoAhHOelkvtbXK2p0jspposLy06mxmpgKjLrKG+FZfAbi3nx5/UlWvLYifP/28LBmkE+jreloBBGWLm2tTOZEQBlA6AeVQzQGIQou0rZitRQoISlZ37Ux2xRTUPAs7oTaY7FDDupvqVz9EImsCqSNKLykQSgkgWfQwlbVjnDPc0ce7g1c7h5t76jFZrFCTgKefT+MtkmkIu2Y9FEKMbqW1022eVAX28CdFIHHAnFBSFeinX5sVBzCHzqelszfi/hy90wmGGSt8pyXs5/Ecxzsnp8jgPpPDgSxzHrO8M6Zqz3l+c5hiBO6hnbuC+UYb0L1377SZ+ZaM3SPRjRQ5XDD0S6G3NoZd9g4ilW8hyM4+MyZexLtm82ZkkWCXPoxj56p6rkpTRDCrPKvqVLyYn/TxFKT3H0Fmic/9/sdhCPpU9OfIcRgn0T/QXrBzam+aLozJIZfgIky0MUFRMS3JdbWuSwRFS9Czt2pGkSAo6u6qCtLMCMGobXGNxhBGUKimFA1StayyvvBbjnApqwuAMFMyKLLVCjEB13V9vb4A8NrO61llEW3h1Xr1CYV0r2o5X1irw+v2yoiHy/m63by2WrcIPy2nHFKv9dnbYut5uVz16tGc4a3N7WCm2+226vr551+ut2ttm6oUNQhb7bAESWaxClMgKCaX86MzxHirLwBE4nR5cI/Wqpm27UagtvbT58/Fym17yQE+LaUU2dzPp1Ms61a32+1GRimWYMPa6rqup7JItx8gICXWYq31WQiHFiuKrTmjqS3FSkS0aDmRixVVCcb6cHl+egawngsJtoig6AJg2+rptFKcLqBYWZf1tG31vCxEpNvoWq+fLj+fTmeL10gMbIRGFFgV3xgA7Latwuvt5i1q3T49fn7driW81m3bbgBMbbveROXx8ZHugF4ePsFs89auL6UvPX/99oTLxdTKauf/w96bP8txJGlin3tEZtb97hsACfA++pg+d2a0WtNKmh3JdPwX+gcl2xlb0w/SmqTp7Z3dZk+T3c0m2QBJvPuu9+pVVWaEu36IiMysB7CP2ZFp2wxhRgKovCIjIj3cP//cvTNwCvFVnltRF2DugjucZTTxqnDBUFIwENKqaSrPSkSqcN4DZNgwkYvyvG1gJAs2AWjJyIsfsiqlhMLhcFik4dNrPklF4GuqF1FRCsEHhGQZph2JtJKQJDljI6KxRIyqA7SOfgBaqT8VnurvFunB6dkcTdmGSumDBI3lDiDxnunFoEhRCAHq4CTEU6ZHAoVqqsSh9IeClJwoRDQllCNiYpAhsIKhJKKgkLdQa/Ofqa77FC3twLr3nFDCQLNng1C/DnAqRCqGQqFYluzGgVnJZgCBrULVSWS71SNEVhWqRgFRmrgZw3aZpUmorfUGgAgSCRtDMBHmA4R4Di1FM85mVZgzA8K8jGNvbMeJgALJFYiprgPdK5Suj/SaiHQEUQlkBmoC4hz4yF5EkShCcaKDfZw2FO8qYpCCY0ntqNpZztMbiYiP7hYNWwwpoIGglEr0BlamVgzyIIYKIBXUpiS2SOEj9VdRKwCtNQcgVjwBxWpmwadNLd0mkZSTMyRpNw2cA64posG/rBDLeb1+a20o3MD5ORMbNpYMUVL2IAKxlKX7S6gcFbzyAJhJPBRe43iHLZtJ1TAXgeTO6ohEwUosUoXhI1IKAUdxBNhmXFcvC2QuZQqznAkAViYh70W8GFhrrCEjXDXYvXomCKmHVuLYWFEvodgkwakHQMxkspvbi0pkkHeJ5ObuVuA9ZZbzrJ8D0Mmdcxp4aJaorDxsnnUyC1fOC+dLAIXpViLTm/GdO59Rp3fbVaWbqXOw1igAJhUnDnMA1lpReFWQEZAxmZNUykYkY+P9PLcZE4EcZ0XlZtBIqRP1zjsmLvKuCDlxzEQhQ2miPVfwXdOpZK6AE8m4EAgTebgwNqKehQ1ZsFcOSVytqihIhSSUgQoBXQwEEjMFplv4vurkqmLJRu2eanW68QakFcUa6MTEAdeLAH5tUdW6XNDba3guCRrVUPwJBtars5x7caRUicsKE+Ri5VzOwTLyRMarJwLDiAqoLjytqiC1TueGDbMpfcnEjFCvHJVUhrLKTzObGWQQEvVeRaCWYCyFaXKVKGANgSBerQmRX6wqwdHIZALeLRBRqbQiEDMbNqI+7OwWHYWjWD0JUGVwwYVHWVMsPYEoqs6sHPLPZpy7lHY2DEUAkJkYyuoleOANWaMGQMkzA+QZE4kSDBmAQiLMF6FHquXPYqtNAMOsWgeTxSmu2bKm6FaYE2UqIGI1BMCRIRXDcU7B5DxVpe92qRIyJLFcbCzdhKjit6DdRVUf0gQw1PtxkBtmwXX3qr1qr9qr9qfTkmpItWvlZTyuVrsHzSwk6H/xPEq3TvQ8Sanpk2YYb1LqPNlPRARik9nMGiJC5ZyoABRMrOSpjInAVeHEEUhJEyEDxFx6nys77/p5b2nQA3kmzGYzYyKFKqp5YFCMXksD0VhZiS4CNkwG6cwmLpsUEBtUqdrb10J4k9nSmG4JNkILNgsnUQQiFyKAgh6TECatyW8v4wDGW9ILk7TY6vm9N9F1oF376QGLaG/R2r4ibcM1/W+x3GOyD39fayfpfuHQH
9G+8WHfQDT85psvHPmm29LiG754t5qpcE/FaZMlmVqRM9RaVy2KSF27IDyOGytrocOEmmPc1JppFlyIa0qOhdopDyJBrKOa1k64f0N7W/wjvlPbRZ9IKotVCfDi2KUF0yDR98HnuIQjrlP7il+iJsYCmvW4NM9TQ1ZZa1xeVYMx2YqIWQjUI5DNssANt5xlWQaADRtmEDlXChBsy2ijp8fNpRTxWZZVVVVV1cpgFUQ3s9tgVALw6hXITSh344xhazqlmznnep1uN+uFTs+mUyFxzjGxYatwRLBsFVK6EoG1pGlqomEhquK9D70VL845ZkOEUGdMVYsit8bmWeFdCcBVbrDUtzazee49qsqJeu/JOXgfS/R2Oh1rLDGTSrCbAhnNq8bi3YEyI2IsW5sVRUFg7x1bFseGCEC312GKtb/zLGMOKBIMGy8+swUA50vvJS8KJdaqUkVVljfjMbwvelme52FiJneTAO9meT7sDcSV09uJc35pbbnG52ZleXF5meV5p9e7HF9mnc7s7rbX699ObgGcX5x1io7z/uryEkC/PyyyovKlu6vKu7tup0DyMZRlWXQ6eVYwMxOYrSq8lxDGzsTOeWYTqgClVBv11hbZu8SsEr0jHJCPFvc/RAygtl6BUD632T2SIZqMHG1d2/w/fT+UvEes3OIctViLEfNqcMFgP9e9gdZ/ix/FfWEcX7IR0vEioRe+byiS+4HqxVofa7rXvnObDJdcl61ow3RKbZ8xajb9S2hCqCmTLc7Iy7eilkShtKmGmFpIKnRBrJOEN0WQqcUG7libMClSIgFJ6G3jgYlYYXwIEj0O2uJ9JjSDDQLTS+O+u9C/Wl7Xb7WA3DWzhHqZNegeIVn+qhRiwNFc3KrOQZH2lARlUFya5ALajDZpXAKU3jSpNknsvvAGC8Pf/kNbf7nfElMvndEAOAunJGin1ZHwbd433gEAvPAoisl0wsmWs3BN8Lql7ZUt8aLaFguehDsFt2F8DlHcnxBrVfsQKawgELNRVVVWRIKeEkRjibDwDUWeurbfh4jAEj9n1uCBZhhWa5QNa0VpBEVJlAlgZSh8qodTE/oQtkWVouiq6syV1lBmDAEqEIh3AmDq1DAba6uqhPgi72SWO93+bDrp9VcmkxsATmHYnN1WzrvKT8c0F+jceadkkk+O2CRlxHDIoAIWFZ/2aEu21HklVchkMnelteyqGYMZcC6QzpkocyreeyLjCV7VsEQ1IE65qcR7VWKKCZhAseBL7aRJtN/g8wBRbovKu4p8DBEgUkIpVVmWRd4RVaRSdZR2dgOrvkqVXVqSV2Ey2yxArZmmYW4skWorl0AsoBKirmLAmcbg3ZrwTEqhqJ8wWAJt0JJVieX7mK1P1dShymSdlALR4HtgE6abhBgsYAYbGMvBr5OkP8GLs8YyGS8udJuZRD2TDTsUg3JjET0cag0Z5tKVULEmyzQQn9WLd+osN+XdAvmx3nkEoT5YyIsVSbECZ6j2JZCBAdSrQ3ClwbCyqmfAchY/aQGxCoRAhhiGRaEQwTwVNlSvzmsGkOE49SJquQX43W8vCqvUtK31L0gUqWMVgsnGqDXqZuMMW29wybVLCdX7D2px942GRUvyxc3/my2fV+1Ve9VetT+NZtu5gzSkY6yFbNv+f1Gd13t/voAWBK1MUmgyoKJCraAyRK0ACiFf0/g1uNbZEpFC5n4eQBgmAyCkkAw7liELiJfA5Kc6zxoTB5+2woNI1bOqIWMpF67q7GbMJNEIlHpLDhtBCwBSTb7ZsI0FFTy9EkFZQzaYGmBBhH4achyApO1GVVNRs0qBmIdT75mg8fza4gqXJmMtGhykLeOxZQfd38UWNPloXdXwU+v6oLVEH62SaiBsvrTF8JZ7AOrC81u200vMkWZr/4YnvKwtGpH1tbT444s79EvwrBdsmpe6Rn9XX17+S0p20HTv3lfSvG7KDLhwOKgZjS5aD2ZYdxJUc60RgrZum2zcZtmEy0MB+qgZQhp2ZO0fiMsvrNugNAUYpbbyGg0p1JBuiKhtFEDbE7IwxsnGTu/Wtqbjfe+lLop2Rq24BSO+0eNqay3azbEP0gJmATAbhjFsUhHU9sE4wrkpApUgjTLUayWVaozVZSYmY9hSMumYOevnd7NbDxl0RgKX5ZmodvPufHYXbAkRYWZDVuC9dzbLWKwaqah0zlFOAAadIUDz+cwVVXwr4sDoqUMta5KsBqac+BDv5523WQag0qqqXF6wYeOd05RUcTadWWsRgt2szfPCeW/IqgQA04f63fVLZVkWS7hqWj9ERPCqRMzWApFnJapsmLOMRCunJlS3RByZ0FVrs16PxStb40Uq56rKherG6tXBmSz38KX3ubHzsixns+HSMmfWOgIg0LvJrWXy6g10MOiry7zzRZZ3O50wfeW0nM6mXqVnsywvytKVZVnOS2IOSH/W6y71liezSVmVnU6nKArvvfdeypmqhKhwBnrdbkgr7EWZSaUmLmryckUKP4hUkmETXAcKFxI5Bu4uEHKZBRBQ1FP7g0QS7Api1ia8rJYLojFhavr6F9jVtWlbx9hGhqCIaNpUET+MiBl68a0L0/qnsPlp+75aP/bFFn9NoQyJ5rzwjSeJXnPV6iaqjYchgF2aANAk85IQCTcJuxun+6a3ToCCpg7H4W3eoyURF1xkC/K1TryCiP9SIOrGkQGJgjw58rU2gDB1SYUo1cYuNGoDKdXFllviL85UjCoO2EN6eAAzAZhQwhvJC9lyCN3zQMaNNQTipgdITbqvhWxYODEBHogMaZPsYtEchiImN0xCKkAfQAAAIABJREFUp3kF0oWTiYhVffy93qESkk0BcXlhU69Vjz+qabPZpTdvz2hYcGHhNI635nImjrtcS97f2/oJidpfvyAQfiOAUmYeIOZIQLPeqL6xh9QersbvpQx4r0IJzaVYINt4cpTkQPQHtnrThJ83XsomEarGr59AqD1kKgxCnVNVVH3MQaReWjtlmgVRqEpG2dzPRUuBsdxRVee0QpVbC8AaM3XOTcuuKwno9/ubo+0s74IN2Sww06r5ncL1umY6p6wYzJ2vqlJIDJMJX4p4hTJbBWKF7gDcq1MAqeB1OMgMEHyl6rxhm7EFVHzQeQwTKYUEvhLmVbyjwDBNtMVSSVVt8CfCeIRcHmQiwE6kBBGiIJqFFNMgrzhm1ZDExLVZXlMgiJSUmIhDis6Q4T7pZmjkGJFK4g3UGH7iLAedvfU5aSxWLUTkAagyGSZDBImkZrFkAArXeRHnnbWGDHvRsE9lXDgtxas1xqnLyTBT2Iy5jn0BlNUonPrAAAh90+QvZxgGeVQCb5AFrJYJFsxM3qeC5+QR0+kQG5rOpwCsyUQiO5IIhqxlS8EsYyMikvJMMAWZSTZQztUrJAUCVVDi+LlFZ3NOBlZFHZNh4rmbMXHHGIQkAIGxGJglUFU1YUdOzjNWI54yNgFI1qjv/rFSqLXD1Op1Q5IMcx0AyXTnqDTHnjSuwrhcklBdaLVZ9pJHt35oJFUSjfEvNdPnVXvVXrVX7U+r2eR+jSED
ScGv4ZT7AlO1DlVq69nholoQJxGp0Ah7BDHOPqldUdlOirQlYzVUKVGP0qs6cSJwUjnniE3IP4Log4Ii7tkgYjaAMmzNQCAQhLy6jIzzs/HNPDeGwUws6n1VIZSpCXyWGC/HkQZIwQQiRHVNpQ6aAIJiZyjGClFICt2YXREhTHtOVJETeSz+FtVUrRkMQBNZJsm8rE2SmvIZ76+NkQKFtnevCPYsbFd1W9S/68AALNzuxYta2Sjbc93qYsooV79i+waLv7zkAX+AXtA65f5WnXSBNqz+wmVB2Tq6vdgZrP6+R7VdlN/QvgGybNV0QhysmlryzXeherLaKKKi9Y9gvlLSNZJmWT+2NT5NrGXrNsF+oqQYJT25Mc4SyNUEaqZntHGKsFJq1y0h1JlIUxBQDEXCN6n2QtQIozbk62a60vrSRq1LFqdGw72eZCRrM35QsaJE/UoNdq/N2ALEMEyGEHXW1gAl2jLBqdOQlchGa5QNqwoRhRhNZsNsOZYdYATQjagrvVk1JQKpKcsqyzNjDTE7daFXNrIJ4qgZwwrLzFVVVVIBKGyR2Xw2nzlxzjsmym1ORJV3ojEvbWDkhU6LgETACLTuALoxs0KrqjLEbIy4sqyqLMuIyXk3n88BkKp4FdFyOnPeVVVVVpV4YcPWZoEzlaZKvHjxnilwReBFDcdFbjjU9RJXOe9FnfPiM81E1EkJYDqdMZPz3osUma2cK6x1zs2qMstz5zwAJ0oqfjabzmZsrO2ycz4rijwvnGrAWAk6vZsYa8VQv9cjIidimIfD4WQyiWkfnZ9MJr1eP8vzqqyGg+H51YWo3k7uOkUBwLC9K+/u7iZ5lnU6nXk5Uy+dXle0EuFQjYFAxtosy1XVzecA5tWcgDzLFBSKBRliEJz3AQ8WFWjMTdZOOdXYvK0Jk/aia+P4tWwN8cYtHqUk11TjtiFqfEnBHk53pICNasy6Fb+mCA8xKUKOz/QVLWYia4kUSi9wr7UlT1gecWeg9Mmm0+o/FpnyaIFGtUCILECpqaGRWJZEY/LyafuzTehMHNsoOltbaXh4vZCbbWABZE3vHMWvAhBCneWTEHPNKEUsIHHWE3QIQKWpClF/3xEpWhiuNtWTamxLk/0pHC6NaUcbn2R8R4XhejQW93HUG1bIINGCP1pncRKcBgzA19tMM0D1btDuXq0KBmClnpeoLLUN4PTmLc5RQGt+11ba6mJrw1tYnyn5MeogA60nLCHbgDTjklC69EuEKJPbNE3NiyPZ+luMNWduNl6oxsJRi3pxsz3VAEhLjYgam0CcEBCIkWCwsooy4j0RCpdJyGsMBjUDG78cxLu69JtEBzVBlJwnViWutZBQOicKqRqn1ZRrL70uM3svxIZInPNz8kxMJE4qLwrAqSPDlklUvfPmbnJ5fT0UzvOciNkYALP5rJzP8rzrIBaZKoOM1UzVBZ9O5T2ITdhW1IVcKyAOaRq63AEw8zMmI4qycnmWdfOues1YvJRECBFN6p0oG2YVqtQTsRIJ1FConBmlnzE51AOq6p2UmoSaNKVsQnUgy+SDCJ6XFRsmjhGzop6ULBmDbKYVEQIKmaDwCiF3RSPkNQUTkRJ5r/USjpZUvVx5IatjkHnB81RoSEojqoFjbVLhOEY0HMiqEfWGiJUyY0t1KdGNEpiMhikR+OAikZCdNHpGXUiSSSoIxQDIB3pu5HuSEVWn4krPmSUChJhBTGV5Z0IWCyKoB1HOmQAC18s6oiIiXl3gbeSmYKLSz3LOjbFMJCJOnUKZQ5JZeHGWjMArmIgtG2usVyeuZLXhSwlLN2Mr6qdzt7LUJ6KbmzkI3UIBeFECZ9Z41/p8iYi4oXcD5IsszwI+q6qBAqAvFUsvN51quZDEcsuea2168QilPyg8zrS4Ba1Ntt6MolFXb873O7ag06Y+pi4Fo7UtPF+1V+1Ve9X+BJutVf+Fn+u09O2mtVpWb6/AAp5UGzvJ+Rf/pIVzW1Bd7S2qIyBCDiqRYDVrJR6UgtBiaV3dvz2/17UHww2GoWSPAVJW88IwM7FCvfdAJZVzaiwRZgC8khMPiu+aYLdIR6rNAB9gGiNCIVWYhnrAtfWlKiFrUgs0UZBvlA6FAh9d3HxnZQTFz6/G31lZqge5NgUUCxyP2uZsWSUc9Z97A07NxkiJLFeba02n0gU1ZbP2EdbuWig+vT57Z2kNDQWBkoXzEl6Dgr66vXjQX1NJOn8bmY1PbJlrTVf+oK3zME309mC1flGKhy7CoZ3BattWOUqX1MhjfeZi1zU5UKl5z/oQcHR7vj1Ye2mvvkFZeeG0xigiSSniXgQTa4v+HnOE0ho4GJ/tjjaijav3oY00O3F4DeGg9XVsD9ZbY8YEhNIGGl+5Tn7vFlWgMOltRhYRcHB7vjtYDR0Lubi4PRpxPQfmVFyAC5Zu6krLmgsygjhVf2pMOFBthi1OT0v0tIy+pAbGy0OGIFHxEFUhYlFRaFBJ0wWpPwSARURVDm7OHq1sp6hGrq8N88n1mKQ+iOjn5189WX1Q+pJhZrPb3HcC/6VyZeiwpcxppaohVJnIO1+pqrEmsAnmbl5JZawpq9J7b5gN8+fnB2+sbqpIcl8gGMhEFGrIkEAURByYI8ysRJVzMDbPMpmKYaMiedFx3oVzik5ROS8qlauc95Vz3vvIFUqSJUBvQvDOizgmhKA/9SrwIXGWMSYk9yydc959cXX+1uqGeqeqIdfl3XTa63YBKsu5VJUCJstms7nzLrf5XELlU2PzzDnny6oz6oQKaf3BcHJ3J0z9ThdApyjyPHOu2p/N/vytt7z46XSqCmJM7iahKFk375jcGmtnZVWVFWf5cUyUGdtub3k2n3rRXqcjIs45Ywypeu995QfLywAya2fz+d30Ls/zPLNVVc6mM2tNgPmiYUNMoNL5GFnWcGaJaOFjQYIOiejjk8O3VxovyMLyU2W29SfS+gQjABRu8sXVWX3gzdUtXTgnpFkMayHQDxuDSuMnopoyibScEdEmSv/Q9M+mMymCIQqxdELENcKhhT4v4pjNZ0oAcHBzsjvYrNGl9qk18FpnTSEiZhPQ+3QvWryzHoxPdkeb6fu/J4c4nbjwRmipK1oXmK23QW1CqkNUrSoh4M6gw7uT3d52ujZtnK0g7HrPRdthWI9d69/BART+WDgeR4YTgKBU773A0+vTx0tbrUGuEZBmYBoRGOaI6NnlyRsru3EE4//aW8YLG3HsumodKRwM3noIKeDvTKoCH+zuw9vzncFq7Ww6vDnf7m/Uw388Od3qr6dxi/09nZxv9FfDE0/vXrZHAwA2+mvBBX7ygtYX2k5/M/TqaHL84tEHo+0aNw/L+eBm4bSHS3vhL2m7xldXz+/d5MnSHhK8ANDT68OX9mRvuEUEJToYn770hNcHS6Iw6Wv74vaqPvSoO9AARabO/Prq7N2ljVCMpYYgPrk6fmsU1ZJ6rQVuJVTJKJm0ZpR/meTGu6M1NZAYRyRESmw+v7kOR98arvaKgkjvptPSudxmGdtfXccZeX95CepU2RhYIwrM5vNydvh/PDv5n77
9VjW7A1CVs39/Ov/h5lBhReaGbdd2nHrnyuCTcxqoiF5VBfCBWUcGqoZ57iYAKu+s7QLKAUpVMKFD+pu7y9c6/SLPAZTiKuf3y2l7VDd7qwymBLITcHxz1D5ht7cScqZW3gO4LCeb3WUhytgCpCqG+Wh+u9cZBKYCQpJoNoBCvQ15cDVSahUC0f3ZTThzpztsOUOImA8ncWAf9oYIF4Y5JQLw7DYefdRfAiKJlVRUINYd3Y3T0Q3AB/VAVQwykCp7ZlbVUG+GlDqmCOvg2fjrrd5q8DwVpvh6vH9v7T0ebSpDWL+6PcPL2qPR1lfjAwAbvRVmLrJiXs3FuQzZVzcnAN5few2AYXJ+DuLMFF4xdXcd261cVZf3AkAEUc+kmYVhJaKQGz8YTgE+1UqY9YvLhZn63oPHk0kV6L1FlmV5HnYo8Y5RFJxZayR3Ir5gA+Dvj+Nrfn93lxDcxJaIVfB3X38eDv1g+1Ge2ywzcXsBDOHffPLvw9G//uBHC6OQtry/+fjf/Xcf/rj1O5JYbCj59/a7yBqI+4CGfPVEFBzJJuVYoDAQbR5lI4qTdaBKjTVILWdnrbW2lI3ah/cHGVWv2qv2qr1q/zk2W4vZ+D8CtcJPFrX5l0u7tAu1j0aE7x74oIioUQ1+JU0R6sWTA0BJDAeV17AVUS8eKQFHuNfuYN1yVvqKGFA5vL14PNxVqKDev8WyEZ33rCFlY4zabOzuyJMjASLOyGzu0wQDOzKNhyqcF1El4oyIwAwmpJCu4NiqLZ36RQn3eVjxdvjO0lK8QWv8IpaScJw42IsQUQOgUNtSaTEv6tNbeE8Lq9T7B7U5GILmPr0+e2e0nsyrtCjuJ/cDgC+uLx6PVtMd6425bR3VeHOjprRehl787d5LHN2e7wzWwg81sTG8++HNeRttrMHK0HYGa+H3ncHaYesm93FJ1RjCluiQ97DSgEh+cx+b0+PLc2OQol5RoWDmIkVyMYwx4h0aywVq7JG2wIV6NYq02Cj3u1YjhiCFGgBHt2e7/dVwvvcVKC69UMpCY90JYw00ZvhP8xjxl8Za3r853xuuxcg/DVikGkomPYEoKslOotkWcHospCDgtB4a3Sus/HsvU+t8pAkBiYMFqkO+Y6SgxjlINCtVgFzwRKSCCfFzXZhirb/6uAIObs52BmvMFOt6i5fSG5OEJJEqiVeQ/Ob02VtrrwEI9SaLrLid3YRCE3fzSZ53VNU7B4CtJSJXeWMMETtXzd0NM2d5JiLOVQCERERUtJTKGgui+bxEUnnFeQBsmIihBGb4VIlEiYgPJhdb3RGxCdCksIQXM5k1YGYzn1dZlgPodvuVOHFSutI777wTUWYWwHsfA+JUVcUQh4oVQmAN6SjU+1h32Bj+zeX5tzajlf7aYHnUH3rxlURcu6ocumSNqSo/LefdolNVbl5WxmYS1zeEQWRAPssyQ8aFAGpF5ZwaDlVoisxYm83nMwAAT6eTyd2k1+1P7u6KLPMuSnsRITBbsMinp/sPh6tlVYmKYQPgaDre6g5zIjbGO9cpijzLy/mMyXRHfZNlALxqVhQEnVdzy2ZezgAV1co7gEORH2NMWpah2raCoF6FmRtkKtRoWYCnEoRXr72EESZ0DxQpTOmjCKRGJtBvr06frGwgib4vLk4er26FB4XSIaoIVM1kIC08tzZSYiK71j5S/51aPWyQD8Q8EqnVt00CasFdEX6Llhhe1naHm+lhwfWCWgiEC0LpVUn024QCL9zt4OZod7gV4qF3R5vNgYWzKG5tDSk7DWvrtIBPJZg1UTbThEShQapAClLEwd3Rdn+r9TjNW2OOZBrqojhrYgfCNSnKPEQWpM6EC109HGmAtO7e49FW9IlAE4YcQxRShyMM+ezq+PXlzSD0Hi9vUWCxtwYqIgIIMAuaJ4V5gNYrpUaEG25OBOYoJTeUw7BRRqZiYye3v4Djydlmf/2lOz4I673V+sjp3cVGrxXE0CJYbg02oECdwCJe3dAhdwbb93qxPz56uLQbqEdM2B8fPRjtxO9CAeD5+ODh0h41M0gA3lh+XTV84gLol+PDx6NdJmLigBC9PtqWNJz150Rs6n/s9Fej/iiUg8iEtMUc3XWin45P31xac9Htrl9Nbx/1BnG0mGrpkYicUEIIRiafsKgY/UueI8bJqsaHcpT46Ozk/eVVLwqiT8cX766sUyNt6POb67dGy8ZYJv7s5uL7u48Ms/PiqkpUf35x8t21rVBQ+pdX1+8OejOt8qwgkwmU2fq7MYC7m6uQ6SISDIhYVa0akLWWHUM0yBDPLiwZy2yMFVXnfRgfwyHJBazNGdmdmxpmVrGQQacn1Q2AwkhOYTOVZ9Pp46JQYue1KHqV6snd5VZnicAMAXA0u97rr1gmiLdMzrmD6e1r/RGzqWtznUyvXuuPLMiLc1UZuJf9IgNsIxqFGGBizyZ804Ezy2Q+vT5/b3mDADY4nN58sLqehBV9cnH64domeYDw9d3Nt1c2oyqvAPDx1dkbw2VmMoa/mly/v74Vy54rAP7kdP/DtQcAVOSryembo41YgRFMqpU4VzqbFQbI2Boyuc2diLFRSjPDe6eEzBQAHo3WM84AhfrMmMLGbFcfLO/ZjJ33IHgogX91vv/mym5uzBvLu1NXnt5d7gy3RqPR5HZyWZ4dz85++Pi9ajrpd0IsgivLmRch60AEXzqCl8qyzW0nDJ7TSkQN56qVF8fEogpVDnnPVQA8vT4F8ObyumETqCh5ln109PX3Nh/OXQWgyPLhYKSq8/msdGJhpne3nSJneFXnnfzs9OTPNqJ/6z+envxga12EQYbI/ruDr7+/vhfovR9dHPxwfYeJxHtio4p//fFP/+rDHwbF9W8/+elff/ij+1tWNF0Wd59aAdVEfry32VH8wkJi6aD5ksKrIJUC15AVVFS9eI+ajoPGb4gU2NB6crNjx54kBLIl+dvG46v2qr1qr9qfWrNJsAGImEKNUr3g/mmaqoJ/h9RLdkCtp7TQKq2tkTq4klREwTEjHjFTZEwQkxW4oEIjlrKJhWJ89MxDgQejjec3p4+HOzUfrWvynAjMuTVQCXGWHSEpm8LIMTubhkw0iS9PyQqJGWRURJ13hkxmCSAKQeXRBU0RTomgJMdfhAgmhYG3TIEAmpC5Z6IgadmIoe3tvHxRrxck+hyFYDBK0az166SHpfF6YS9telGr360NNSnyScWOzKCYIOw+Ktm8FWnLIqL6IRp/kfY137Cg7t/z6Pa8DTJuD1ZrRPLw5nxnuJZmHjutQ990z8Zsrq2QF1vSPRQabnh4e9FKy/XSKxZuJTGlY/KSRggAiHUpg31MzaMispj+So21rAFE05SLKvoHgianqeC1NJ0GCDiYXAX2IhCh8Qf9tYPJxaPBCgBL3lBInySiDcOSmIhZBAL4pB9RNDpbdKEab0B0WTDUJEuXYw1WVhBxRDZDhiapbfdWGtIWf3dhfBsmWj3EMTn7gvM34RQKQihJHyy/RN9WrxIqHYsIUahJWieNaH1nrbjRkBrJe6+pDDqBVKWl6REQ0rELABfCjz
ikmCBRncu8XwxKrqbzqXNVoliKBG8KkRcvIlVVdYoOG/beB75GnjERq4KNoZi+ShDqFQROHiCBzMVMUkcaNSwzYlZRsDKziFaVYzbz6Ww0WspNUdI8LE7nXFmVhk1VVoENGmMSiQMYBICdcig8QCDmgLCIxMKuYQ5K5wBMXamixGysYTbeq/O+nqR5WWU28yKurJyxYSTyPPciofbOfDK9E/HOEZtpOa/KkokU2u/2YHg+mwKYTCaS+MBKRrwfDAbD/nA+n3eKwhkHQLwwCExFln9+dvzG2nZVVcysAuccgBVTXPr5g+FqEJaZzfIsU/Hd7jDkkQRQVlWnyEllfD2d+6kxlpmZSESLzIao8EDZsyYTlgDLeu8VIFFBYqxEniRHDjMrAGVuwe+NsYJkUmhbLKQlzoQvLk6erG4mbIKgeLKy9fTy+PHqtio0BmirVwktAKYvsO2AQJ6VIAy15sWr6gJaR81nmWgd9ecWqZEJZJJAc6lBKo57J7QR+C0EK94+mngCpZqeHrP7AYCPIAuBiImZIn24fZe44ZBoCmZIgiANaX1+Tfu8P7jpXeKRRsy3qIdR8hApYI6nR9v97aPJUQqNTNuc1JteUmrSi7d2fW2J9iZTZ41l1W4S9S5uHE3kXpD/NZpcP12p1kHST7VVWt8zSGEs7nlx8MKFxFEDa714IG8GCIbBUfY0HjVaeGAznqBWoGHseGvDpbjU2l9CWmwL/1psLYpukt4LG2+K1q91rXYv6cFo5/n48MFoh0DPx4cBedQWDv9wae/r6/2HSw8IDfYXnhjKigD6+mj72fjojaVdgOqa9YRGc06xLG0/fuTSUVz1TDEpBETkN+Pzt0brIGYSZVaRR73hV3e3jwdLYRgTtlsr5OGezWsHfVFVNSYY9MQx5zqAn58ffri6IfCiQsTvLq/++uri7aUVZgLFUuxpewIA78UY2+v0Sp7/4vz4vdGqYetUALy3svrr27sfbqzbTiZlSRLKxFgAs+ld5R0AD/7x9vCnxzffW+0DpJiLUCVejclsB4AjL74iIMuybtEBoXKV9168M0z9/iqASrwXrsZTL1Vm7KjTXVvpqmMAj7bXq3IO4P/8+uS/erSzszJiY2x3aGw+mVfvbOwdz65fW97y3j+/OXu8tPlgbQ3qXDV1VTknfX9p9OVk/L3NLTdvdNFBbgxM5bUCMxOAoTFZFpK3oqxKrx4KQwJr04yyMeanR8f//NHD9VGHmbwr//LB1icXZ3++uwmiv9s//ssHO8s9y6oA/ssHO/9wefKXO5s+VEAXATCwygxrAGCUgUJ8vdJPDva/t7GXkVbiKl++3uk/m03eHq0CyE1uiKZO5+IsqVfxvpyX1czaLO9Z0/31+VcACmunbqbwTuYA1FeB4OnECQkAaywE3juyGaknoszY4I0rQipGkRIxZZaxxhgT1od4l7HAVwDUlzl5YWWURLaf5UwolbxUVeUzkwPo54WqTstZqIYk8AAZJgpopMqnV6fvrm4ZMobp04vIVn5/deud5fVPr87eW10H4H1VVcEN6Y0BkQDCjH6vU5blTw8PfrSzM+h2AFXoDzaW/v70+rtrS0SGyQHwIes3AMC5OZWcS6/I8r/5xU//1fvfD5FCAP76wx/97cf3EUm99+f9lgjk8atcCK8OcjCVtVMQvAqr1BJdJBQB0qgh1CJRtZVlS5OAbX3yjbocz29sm99lprxqr9qr9qr9CTQbaUONIKNFGfiS9k30h6ZRDbykSyj9X4ko6I4BOIgAUUg3Hp4ffnfeM4Wgy8gSYoTypql2m/fWZohpVgjBWU8GwLOb8/dXVv7h8hLAv3y4OZuWDP63z2MEzQ9WlwCIBIPIBDn/i+t49FsbW0iMwJAq7+tpDK/4YG3XxMxKMW32Ly5OwqEPV1fC0EAJakAM8Ecp4OW7y0tI7/bz6/G3VleCMvyLy6MPV7Y+vmyih95b3UJCbIMT7PNxvMmbw9XPby7eGK7WCvlnrSC+d5Y3En5MycBqQ29UZx/6ZYoken+0obUVBXx6fRr+/85wo8F+FqY6GBT6xfUlgGfji0fDmqJ4WZ+UYpwbo+s4Hd3qr7atj3u7p77ktz+qNU9MmNfLFmrbQHoJOHkPIrvfn8Pb853h2uFNEzi2O1xLhy52BqvtWOmHS1vJZqODm5O94Wa7fwe3x7uD7ZoBuN+K8HoQGJ2iTdBh1E0UqoZeMPiB55PrR/0lIAMASMIXBMBXt5evD5e6ORmGhX50EeOMPlwZASDSUsUTPR1fvjZc+fKm4ZA+HG2Ev3w9PgXwfHz+aGk9fthaPbuJsWbvriwbauymT09P3lzb+vz8GMCD4Wbo5FETd78WeD3hEzi9PdscrB+3Bm1rUAejKYH2U9jRZn+9npBAJjpKYUc7o80Q5hlxK2hmTGJHqkIEouK8igFAuj8+qTuD9MEcTc4BnEyv9pY2IjqgIl7EC4golgsgqHx5dQjg6eX+k9UHhe0AuL67OrhpPsbt4br3zmYdAN55D19P7t7SZqfoingRMHHwSpRVaY3NiwJKog6qgdbnvQebzOSfXRy+trTx24tGUOz2V4JaGtbb0eRqo7sE740xXnxNBH6wtFH6eVmWeZaF0Tu4uVjNehfVHYCNon86n2z3V44mre+3O0p4FDPjt5fNsnyyvBZ4gr89PwHw+cXpg/5Snhe/vTp/Z21X1JXzeZZnAJ6N4w3f29iyeVa6SsV7lY+P999YXm9HH2/3lrKimM9m3ktv0DfAZ+dNLORep69MvnIAer3+ZHzV6/d73e50NpvdTYNF7ZxX0apyIfflvCwr7/18bqx14gFkqa5opyiY6LPTw/e3H352Hgfz+2+8A6CbZd5VEN/pFD8/iKP31vJKlmVZlgVug6gaYz4+iQvy7dXNELId67qIPL2NoXbvrm+JlxqbIPBvWnP35uom1f6mBHlFE4dq0aVfXJy8sbpFSLlK0LSnF0evr+wgUiP1YFx/COsNGAccjk93Rxt16OjucJugBzdx8W/212sr5mRyvtVfO24yXaxBtf7n9nC9fvTB9dHu0k6NSxKwf328N9oKYObX44Pd4VY7HnbE7JnhAAAgAElEQVR3tBUvHB/vDDeTcqFQ7I/jaQ9Hu0AI5QsAGLUFwt4wEBJ1//YYwMHNyd5wQ6GHN6c7o41mn1IcTeKr7QQOY9rFzm7PNgbrp5O2kNmI1Goko4+IuUkLqEKQaPclaCggp+2NldTT87uD+rYPlnYTKKlA84IA9kbbcRYJAPbHR+ntNupzXEhWCOynrr4WRo+IRJ+Oj54s7dQbCqDPrk/qax8vbwW07unVEYBn16evjdaI6OnVyZOVOAUByH16FXv1aLgGEknuzf3r492lrYN2n4drAjExGSICEqdRuXvJ/nh/L20MZWz1148nZxv9tbYh33wBL738xfvU+GsCEwMWUBvxQccJKLe2YUuq/4t1Q+pUmC3ooXmXmGORAuAe43JEBRCKeE28baNJAF4W3M8R5QLBObSSOfxmfPbWYEWJETMVah1J/vT2+vFgidpdoXqwNeDj3sRBkODUE
NWQTURJbajXEd32oYA1kQ2as0INMTH96uri/ZV1QywKgbyzsvrp5cX3d18rOoUxBGBSTi1bDkBVKg6ZZ3kp+nf7TXQtgYM+rELiFIBR77gjIurmpXfGdrKsC8BXtqzmSiZn2+sUZKgsUc3FQQ0EcAAA3+n0+mXvbjZW9dCyqtywtwRg2CsuU/6NpUFxNyWb5ZVUBkRS+nIGQHwZKtVklgjzTmGybic33fmsDD6yTibdFOf+V++9/cnl5V9/+C0v6lxlLQFY2d4whin4GquqLGdlVXpxOq+YyFpjrbXWAOh3DQBitpyHG64tj8KkrS6PiCDSKJ/dQbeqnPPV//31yY93VgcdS6TE9F/sbf3k8Pgv97adD14t3E6viqwobN61XZcFXC8DYKwRqQqCtXm/6Cuxc3I7v/Oit3fjL29O31rd+ezisGM7c5qrSulmAHpFt5t3VGRewrACyI1RKEjm1cx5l+V5xvY/7D/94cMn1jIArbT0s93+5uHk5O3tN2fz2UV1s9dbYfiekaBMdq1Z7vWstU5FwMZ2lITAs+n86vZ2Lg5AvyhskYu4Sr2IigoTGbbGGNUQ64aqKtVmn16e/tn2IyLMyvkvL47fXdlArLqGWTUvxxVUiywn6N38LrfWuSqzJmRx9b7iwhoigCZeAVgla81Pjk//fHs3s/AeAP753s7Pri6+t7ZFQEh1+jsMjb/95KfNP/QlZy3aFGn/pPY/o1hLLMrwR5RZaYNZMDxa8WcvwopUW8oLT9XE/4+JAl4hka/aq/aq/Wk3+/93B/6/ar+8vPzO6nI/505mAfzbo9O/3Fl3WrkZ/sPVdUAkm5Ovrz8cbQgJG/744vSDja360LPbiwedJefLXrf72/HFd9Yf1Id+fnH0rZUNEqvkP766/HBluX3Pn12ffnc5aD/y0dX1N/Xz48vjD1e2JIJP+PXl6bsrzdM/G5+/NYqxxp/fLMQaf3Z19vbKOhTKxIJPr07fW1q/f/cXh2V8+n7AmAi/HJ++N2psoXeWNj69Pm2Ctb+5vbG08sX15eujiEXu31wmciLhhayLx7eXW4OVoLkfTy62+r+vmAyABtN8SdsZfuOhf5J2dHuxPVglUCvWe6EFemY9Rgc3521EcnewmihS/Pz65OHS9u994sHtCYAHww0knunz2/MH3zwCf1R7NFxqh1x+dDH+7spAVAT88dVtQCTr9uXN5cPhioZESUTPx2cBkXw42vh6fPqotcC+vLl6PFyxDMP668urD9YWpvXz8+MnK1t1kGNYEqJqiI5uz7cGCwv15PasgSAVJ5PzrQR/7F8f7g33PLyIOwnhfqkd3ZxuDVYDE+3w5nS7ZdX/7rY/PtkbbQbuz9Ht+U561uOVB08vnz8Ybf5e1e7h0tbX18ePlrcsB/AXBzdnry3tzKt5r9NXwtHN2VIyVAB8eXn4YGnTGFs5dzA+2WsHmf7B7cvr0yer2wR13nnnDyZXu4M1ADu91cO7i73Wajmf3Wx0hgRM57MbrV5bub8CL6q77d5SzX07mlzuDFZFI+PvbHoTJvqLi2MAT5bXAVUvZPi3V+dvrW0CeLy08vT68kF/9FJZ8Wx8+WR5Xb10iu6vzo7fXVuYmi+uzl5fWQfAinlZHd1dP1qNEu/p1RmA9zcfZZ389nYMYH822e0N/vBRentz7/nNxVrWu/f7dm/JtEKPf3n09bcfPGJjvegvvn4aEMnQfn5w+N7airFZWVafXV2+39oLAPxfv/qH7+6+JiLOu99cnLyx1Cz7p7fjx4MRMWdZ9un56VvLzVr9zfnx22tbiZGIzy9O3lz9x6yB0B6v7rT/uT8+2RmsefGicjYd74wWPq6D8enuaAMAkzm4OULA7lVEcDI52+i3BPXkfCvlpgjOgyiriY5vz//w7+vg5nh3uLXwz9HWy0472RttB+fn8/FRQCQBnE4uAOwMNyxnDK60PLg5CYjk3mBr//Z4d7j50g/06O5ku78JgFgPJ6cxqjq109uAhUWo52RyGk7+Q9rh9GintwVCQFr3Rs0H9fzu4MFgLxRgJsbz64MHSeDvjwNKi/DE/fHR7qg+dLQ32g5U9/2b073Fsd2/PX84Wg8i+MvxyWsvG72nV4cAXl/arPndz65PnixvA3i8vPX06vj10cvn6+nV8eOlTREH4Kubi0f3Vsv18aOlTShERVX2by/2ft9uezy5SMEKv6dt9tdPJ+eb/0T72j+iPVp68NX18zpZZN0eLO3952DNv9Yf1RWQ/xPbdzfuv+Mf3t4YDn/H0b/Y21pbW/v6+f5HF/d12u8s935+NfuzteWXXvhP0v7H9x79r7/66q8e7977/eFwOazA10arX44v/tmjh/e7vbtxb4X+N++/+7//8tf/7fsf/rF9+Ks3H4VCdi+2/+GDt+mfaAb/wHbq528tbgcAjqbfaG7ca//x6PlfvP7Wi79v9tZO7s4fLT148dB/evvk4viD1S0An16evLOy0Su67aPvrqz85ury7eWV332T72/+fmPnpe1fvf+D33H0rz/4UfC7/O0nP/nH3f9Ve9VetVftVftHNBscNo3vRVKJwhhJ5YEF8iRSfIoo7hEgGy935gCKsU4cQsqAyEJyKhTvTtHtTJxCXyIhUVIQdAh1hJItxQUnagp3ZYUaNSD2Wn59ffJktCtwSjG268OV9V5urGER/D9nV/98b4mEuSKb84831//+4vo7qyukZGA+ujj/9vI6kTBxoJ19cnr83tomgM8vTx4M1hwZtoVTvLWy8dnl6bc3tiDy84uTby9vQNXDEehby1sfX119d2U7uLM+ujz6s9UtjdRO+tby8i+urkAx0jwEPob3fX91EzE8miSllApcmd9cn725tFa7/t8YrX4xvqCU9gmIjn4miiGBkSIaovhiGcEwiOGST27O3x9FRowq3hut/2p89t5oHdpEh4WU062go7BACICQg3L9u6QqsDuDkCapHfetYZkc3Z7vjNaJDAFE2BmsHU8C2NdmiqQEWWEclExdsTOSA+jw5mxvuEFkuI7JS1fuj892h2tEJPICKVKx0187apFiUl4WYmIQee+aIzHuIaZTpCb+C5yshIDXPBiumXRJvGH6fvYGqyEBGnMM1BapRDybLH4ksW8x/pThwq1eG6wbqIpvHioli1hTACD1qt7NZ0v9kajzKAEY4lDtQSTxMrwvQvmHkKUqvmnMLVSQ++ji+vtLnWGRGdupKvfny92Pr6Z/sZxXnX7lAeD14bJ6BbMlDjVYRfwCVQMg4Nn16Vsr60VhrWFj+Du7O5+cX3y4ue1dqBqFB0vrmuL9Dm8vNvtrAFTJE20O1k5uzzf764QoWLYGa6iJoPHLIK9ycnO22V9r009OJmdbww2ADscn24MtIAuP2BpsHN+ebg3WE0saqvDqNRZRRMjLaZmeXx09WNolsgpR9dvD9aObs+3hOgFzN48LjhDjaIjAXHqXW+vFgZQMUQyXBmAEVJZTAK+t7Ah5a00gJgAwNguJn87Lye7SBoi986K62V87mZxv9lcINCgGN9NrANYYEc8klROQWmNCt4XYGGvz
DoC3Nx6KqogwW85i1a/MZqV4AKJU5AWgz69Pl03Hi1g2SvR4be/Ly6PNzrAocgChWMfmYMWClMgJAOwM17yoeF+bUs778IG9ubbDxqoKgQOFxFUlgBB4aLNsOp15VQA3k7GI5EX+bHz52nBFRJh55mYfbO3++vz09cHIZnmQMTudASIyGNNiVvO5L6twdKPok/h+t8ETnROT5QCq2Z2qiLF3IrbI5/MZ2wyAq9y4nPeyoqBuYHmcV00pm2XKTWbWllevxlez+Wx5NALw5tpWUeSZQZ7ZP3vw6GfPv/re3kOF/mz/+Xe2tphNOa+63e6Pn7zxy9PjHzx+I7cZgJ989us/f+cDmxfOOfb61srGF9cXT0YrxpjPL8/eWF5jmwHqvAD47OrsrdXNEDv5xsp9bKhm5pVVGVIkp3jlKLA15SYOEeKI6QiCdIkJUkXl+fhkaxC49mzYbo82j8ZnO6PNemveG+4QbM3x2xvthPytijqcLIr+td5KlBxhmkYbYZrqDA1IolVVGBkW9giuj+4NtwlolXdDfV7IuUzAAqhHBCDQKkPow+5om8mowkMMLGKAc7yJQGpUsd5yjiYn26PNWPZGabO/cTI53exvUArT2BxsMWWxIEVoaRGmvomoQmpdSMH11odYKiS9baDIBbYgG4aQwCGKdBKNUR0cmXSU6HykhP3xYXx3IgI9GG3t35zuDdcBGKb98fneYBUAU4wL/nJ8/GRpxzMhFOuQqOE8Gm0HUZfK96GSipAiRchqijipR+/Z1emT5S0iUs4MmzdWHn01Pnt9aUdIwqi8vrKdmVB/1qoyAEtFeHONqYFDbtm6RiEBMMhSFtOmkWECpWrUITQ40EsbFcIyxYDGlJwnDX3NamyFIgLHdw0bNLTt3lZkFId4AA6rUiJ1KKy0pe20hhXA19dNlY+90Q7FGSJQk6fYaZVWGBH06fXhk6WdlDNHADwb369m83j5kaLUsG5aVfjq9mS0EscMIRMDQRRkBU7EBYUgJBhB3L3x6dVLSuKYqM3VageFss9EJF5gNOQ3cOoBEz6oTy5P3lleB5jZWjYAcpMDYEPipZISgIibjeelq/q2N65u4FzpSwCfz+dv9Uc5wc1myugMumJolHcAOM2yXg/A/O6OBUHPYDeDLYqicBPHWsHfAXBuaizEla501YxRTpy6b/3gx8+ff31z9JSdAcAqnd4an18aVaiOVtfs3WFGIwBa3pXnFwD6a0sA/s3Tho/8o50Vi8rYfO6cpkwhP/nq6/qE/+W//hc315dHx18y2V6R0ixK1Q1l0LqFm2mwLFRK5yQ3FkBuTOnF+LKq7qzlnIx3d51ixftSZZp3lwDMxTnB33z61f/8wZukfjavijzPs6ys3P/26y/Cg/7lk0ecZXnHUKUAciZT9Fw54yxzxAAq8h0L5+k7g/Xf3E7aE/2j7Y3CegCM2c20ynLeWlmd3lVVWW1vrd2VnS8PjgDMZrfh/H5uXZndVjMAby+vZ1nx0dEzAB+sbIgP6actGJybSVX22c7FA7gbnzkiay2Aoj8YVMV4PjO2A+Cr6+fbnXWCH3TzQYdXhysAKvU2YxZhp9c3Y5qOBVRwxlm+vjy4HY8BXN2cDEZrxtphRmeTSdcWXtHtdCDlbDYHkGfdTpYDUJVpOcuzopN3P1zbZTKB4toxORNNZ7Ner9ftdWZ3s163L7Pp3XTqqkrmZZYZAGxMWVXTyeRnF+PvrS3BgPMsTHBGlMMAEImSkwiIKVYkFHwyxvzrX/zkv//WP6NUpQogSDJxwQ0Nvp6VlqUVLTWVtDMIEXlAnAOBmTyJBxlm8o0N7UVUyBiDUMmdlELOnKCmRpGjUod3N8zJaOFwSsSV0hTHxCXU4m2/aq/aq/aq/Wk1m/S9bwyUAbBAW2/+vO8GbE7yFBTYhViZQDBnTdemUChE86uxepSVwMEyAglVhg2nVIxOHIDD2wVF7dFokxRMbLkWycaJli4qv6LMzFmuDpTS5oVkadJ+fQW+vbIprfLVBCUNGa1iXTZRDXpgTAfITRqqWlEMR1PGetKIQkabkGLxjKCYptFLmTrTWNXPf+nU0NtL67+5PntnaQMCVXpneTMkIdPfH23fPAVpW6v7vRiK0Apr+L13bM1/uwOtSIT60Qk3TLdPwUjhJgHKbIbj4OZsd7gOojoBY1IdsD8+220FEhJhe7AS7rkzWAkZBncGq1BJqkO0cJQX684kGOBwcrk9WEGr7ClUwmJEynEWSli0ByUBs2G1CFRITUhrb0mVIK4EQFJyysKmweDwc5OZJ6MVy3HlE/izmwsAhc0IEj7PguDEDfu9npXKV90iQ7SgSEW9wAsBGHbMzNcvkyZCVALImhGA0Wg47PeIzGQ6CwS0mbJ4OInjEQpIKuBTpAm1cr3Wczybz5mNYeZEOpvMypDALgxM0uNai/wlISUNovtN9JqU21Y3h+sIEMD/y96bNVl2JOlhn3tEnHPumvtSlVWFwtZAAw1098ywu4cckjPTXDSkmWTUA830IqNMT9Jf4T+QmfQgMz6KD5JxKIkaiotIzgy7OdPANNBALwBqz6xcb9689ywR7nqIOOeezCoMpTeOWQVgWZl3OSdObO7++efucV3Eyq6ryYvmflpDHGvmkKAtC0spv8Jqzm7eRhNA0+M3tJkAiRGDco12yzUBRFidn+3lCQCzoRZhUo06K9Bz2hAxGxPji4MEFXWZA0nd1KJq2AIwbBhphI1xRhTwbeIzWOuscZGCGrShYJumAZDleVWWEaSYzS8AGGOIDYCqqgAM8oGvS21CJAwSsTEwMe090HaeXt/YIwJUYqj6Z8dHcdjR5h0zxlhj2Nj4jFmeaxAATVO7LFPVIiuS+4Sp9k3KpsjsfQAwnq7F45qJXeZE9f7m3mQybZqyrhuoPi7nAJxzPngAj5483trcyNhUVcXGrG9sSNMA8I2fDEbrW9u5c3Vd72XDqmkGg8HTxQzAudYAvvv6O4NikDmTOQuAWa3hELz3TfRJzBeLeCQsynJtOp1OXN00sdAQNIHsAKwxDepU7uN6S5JUAeDN9a04w/Ty0/vat1Ii1GsRV2nHRTStn3lQV3Woe1kIY04+RW+f9Y+A1QneBo5day9shVUnbvb2JU/wkgfU1mrriZfVBbq0eJ3wuzO9hdYMuz3Z63YdOvDoWi7l/o0U3eB0SWr/XAH4tROivW93nSYcXh3uj1uSYzfeCV7E3cmdh5eP7k3uxdfvTQ/SjAH3prcfzp7cnd6OestBfMbV97UNKe53IQ0XtWN4f20PiJV70tzGz9yb7ve/G5NptLWM0h2orZnTKRsAYlJYEmlzICLm1+yk/2oRpS8kT3HPBkYqO/RiopN+BKLoNQclANCtyc6zy+e3+mzQeP1V+djVfbuB7hZNIrQq+iux/QAAPL28WVz79mSvTfepAN1bvxM7+uDiMYDHs6cA7q7dTmMLxKS9D66jjW+sH6DFduOH7k330YbyR49wN+zxl/3RDkV9ALAAsxK/ZPckPSelAbk5nm9Pt2MCWwAElqCfzZ53oaC6WjDa/SLdbtek63x
ydvz+xo5jG1S4dfsH8QBUSVQijZ9VFGqNNVnmpAhNTYqvlss3RmMCqbEwRqQul9X89FxzC6BC7bQAUNiRD1UcBXZ5ZvMAnq5vrU/XLpclgKX3jhCs16puFleDcTZd2ynnl1zPG5OFeM7nAwp1pZWoZAStr2ConD0HkA0GG9vrcYr+q2/fDWG59Mv/7WdXAP746RmAt/M8L9xowAC+s2lHuV1b3//9T78E8D/8wb8E8Dfe2Cfjyi4Prcj51clfu7fzrx48//Ywd5kHcHl2nDtXSkx3SKRiWaeT0fyqDAKBYVc4Y+pq0QQBYDL7+5/86u99660QAhMNckeE2lc++B/eWVPwP390JvUVifUSysYDGAw3hENTBV96Mg6ADaGq67LWaCP82tamYQ7B/4fTc6mrpj1H16fTwWTiva/9zPuwmOW2KL66mr8+nIyzxC70GhpfWxCAz8+PAfzGwf2mrn969vyDjW0AIhI0gDA0NtRNUqRUBrYw1gKoRVUlN/kiVGk9s1jweDAaGdWoxGrlyLLJBxvrsDh+dEowMraO2Yewtj4FUJflcn66u7tfXpYDa5i1rsqqDES6rK8ALBfngywDMM6yRVOVZVPYnAGCOgGAy6tZ7twoK0aDIlRVWV4562oNQdQo1+KnxRgp07f++OT8+9vrjo1xxhoDwGWuXC61CQCiKrUxXXPWBgkgctYxs4j8/sd/+Hc++D7QuuWuW2RJu0t62zWBelOq9UVGZ7iln6TJtdhuW0Jr3KiICEfElLSTYCQQJcPpOH/Brlrpvem15GJrkx3jVXvVXrVX7S9cS8HaL0uj01MO/1x8q39UtoYBU6LRUZ9FSYBSazS0+B216mX/wCcgFgFmYmJ2nFtjo4svfv/O5FZmci9N0MaQMWy8eEMwlJ6oAXytlUpkPM6WwZE4Di4fIOGRkQwR0oO36mCq2dt25uGNcsxRFikA/FkvA1o3cNDUw9BDPZGSLsXcVOBeMRlWAhBa3AatHbIqddNpun20AwpQDK+O774z3dX2+z178mtbfz775u7Xfp66y/evkTr953z92eXNUeoVDGBidBwVhUh/SUCfXJ4hJWckqLSFexRtpsUuShoJRbqmC6xGi4jaCg6t/Q8QBWm6znD7rWe9PJgAnl6d3R6t91ka2jN82xFYTVK0QFglPllGrAbGMYCcOQJAHWg+cCbaD7+YnXdX/ObG7qdnR5mzqmLZAmBSZ8xakZOURnU0zFI/VERC45UFADJrlDmCZO3iA4DPLo7fnWzHPHqlFyobRVhWzZUnAHMP5g7jIHBKSJdUnJ6iJirdXD9cXN6Y1iZIZ6EGCUTUobaHL1BF2q5pvOyNIDV9cQbRoiqt7fx09gzXWwe8kLYZaikSJZUQvc19XCYmZOuPkwIQCEt7pBETaayIKhAVFRE2Bh2eAmlrX662Z/xhjO0e/+ns5hZgtoYNG0PGAJDQQMHGEUvd1BISDG3IMmwEDiQkj0iH+YYQiDjyGlQRi+QAOFrcjNWyLotErQiQZjb3dSmq0QhhJoI1zL4lCxu2BKjKF2crOtJ7e7c/OXwSwcd4U2dNyLIITboih2jMzWSs8z7keaYq5bKJQ0lKMemZy7IgFYC6KtXlAIqiODk5zouciX51vAIC7q9tfnlxWi2X2aAAsCiXo2pUlxUxjdfWQvDnsxkAJdo7OGgaKctqdjlrgmei4MPBeMM4C2CQFZ8dPjyYrA8H65F84etajcuKrKzKOEnO2bg6fn5xc/Sauo40TAD/+pOPbrzbnSdxDScQKK6DlO3ta5vGE5C6KsqrGMAWfeLowYvQQ+Qbi+rj2dHt6U5EnFsmb1yC6Zjq3ba1VfoH9zX46ToFTbs/b5zrdP3njSN/dflWPK7Y5SuvAa1OS6LVz9UmJAL0cW9r31m7/ejiCa1gshfv2roEVvS8r62Ydn1kbr51rfU+92x+DeR6PDs8GKdwUSG5N73z4PJR/PO16R0QSGNlKomIZHzr9mSPqOXMU8KQenePB2zfziVtXyGA0GU9iyOsj3pJHu9Mdx7NnkdIOvZcVM2KUkqda4TJEYjhKZXdi6K9VX4kEQtXs9nBWqs51kS3pCS9Y45F0UA91FhVVKk72JPg7nunkK4rqu2bLXpP1Ov5f9S8Ximqt1ZZApLMSjzclZKa2t212ykSAvrw4sm9tYP+fNxfu4s2zCA+YO9mCRxMiYlX83YTXEaaOAKFlYrQyY7VyKzOkAeLy3vDSQf1ml61GXR6dOe2bWtSRXGdhnelndDPzo8BvL+5y0SalHJKmHryvUFVoAEAqYDgnHWDYqD60cUpgPvDcZxTMSzWiZe6Woaq2dzbAcBiIxvOsW0kui8R6rpqwnA43Fpb4yzPNADY21x//vQQljJrSZu8GE/Wt6ryUpurjc3t2dFTAJbd8+dH0pR5NhgZdkbzbFqdnwAYjAdGBgCGhYVnUXY0/G9+8/7VYlEum4vzy59X/jc2i/GgAGB0bJ0pTPZ739ipqoZNcbk4/6Nn5X/2zTdHgyKOeagr40yUdsxs8wGAwWCUWY4yNC+KUFXLq0tSY9nktggq7LLoNguq/+fPHwD4u+++YYiaxpOhzBlVbYIHqR2ve9/88K3b/+LRyd9+c8f7EuIBFLm9LEvxtZQVyAGQ2pceIHx0efmD/T3HBhK8Dx9MRh9fXv3G7haAHx2t1KcPx/l0Y+IG9g8+//zX1teYaVkmTuV4OGwqf7k4v5MPydrbO3sxduTbm3sfnx1/a33TSxAVwywihcv+6Oz417bWM+vyQRFXTD6cns8uiei0vHjv4Jv1sjpcnN7KJ1mWTSZFLHht4Uj9yfEhLs6soQFTSXZnZ9cLNIAtATirK4geHz02XOSjsbPGMlw+KkbD7DwDMBnmcUAGuWG4yjcE/5OT41/f3mNDAIZ5VrgsH+QqXmNl8MwaGrExRHCUSm/9qy8fAvgrd28Z58QHxyZi/vlwWFZlEkCRbpnnzOxFAWKm3//4DwH83re+nzb1yu/RvtDu9u6FtshCW7Csq4ZF1FYhpeQZ6J8XQMQZWfvXTT6Y7oigngHM1BUWawV6a9bEM0Mi4bMLW6B0QsdSdf8xB+ir9qq9aq/af4otmbKUSnD2bYrr7Wswpx4VT3sKFndGR3eGtheOh3kiTlB3EW2hBCJKBa6TDq0iwkGEQ4ovizVnRVRCyoGtXkVVlLgrxNkAqoFCiMm2S6+BoAYZc1LFIq7UUw+j8UjAx2dH722kxFL3xts+gChYw5kxNhKHiAB8sLELUWLb6b5RF4xyKrRKci/RsYmaIkcZ1LfK+g63ZJwm6dc3Wbp/OxvunelO/ORnl0fvxrIh13X9eD3tvZ6yuXci7maN7xdbEmHwW/MAACAASURBVLvSwjjdDW6IPlpRIVdTfmuy3VfU+2vBRLgnPaxGzpVIp1wDwO1YX7srzaOrcbg93uovyhT4j34vV1ZNvEgbbZx+mBhFHielpYu8PlkD4CWxDx4vLkmlLRNOALgrrto9LVP7FqsCIsa0yKNhgAbWAXCpuiYym3LAD6zz4j85P3t/c4OUCEREjgAg42
jdWeaRSkIAaRJkqUJgEBgtV4BSNfruFwG+J/d63/fu16aLROo+W8/Gn/kxVOlIZX+1sOD33ly8Z69VqOxm641ADKGTWbZkf359Um9FbEuCf3GdPldp+2AjJUCeP0m+dZ2iUpVRmk8WzxYznqlWEDu7+7+3ltvdCu1KrKXz44A/PKXHvzZV7pRKQSREVStVqtBoG7G2XL506+/8dF3tAGwSn7xrel/9B3vDiM9nyWBnv9Pv/LGD7zz8OHDQXc3/tUH0+999bTS2GlWAgC/9PsPf+BdJ1mq5vPVb/dvAHznaQ/A9c38rXVyt1oFSTs9n8b1B4vZcbkWyND2eyuvJEWglAJDMBtjhJBhKSTvppBkSUASzGmakpShCIQQQspkno2WV0eHBwBEJAk0ny9ny1VcrYRSKqVZaxsgG0A5rhBRplRUCstBRWtTLpUzUnqdzudJs9QCILZX34Kv4FOzDQOO4+0ClGpttI+bw4aZkakVM1sbUoaWNkTVWgBABpIEgZGkidGahFBZZgwbY5TWpUDi9nb9eXqenqfn6Rs9Ca9zAWBzggw4wNH6SPlj5q+f3BO2deYY8O5Kwu+tCr8xFW6lnNcHG77jqXk1hyry99ypH2ye5B5GucxZjieez4encYf9i85qB+ezvnsjE4CTxoHFVOzHo1rPQz/O523jNwcc1HouFInnC/l9I7krCw6z9srHi4uDuGfAEAzBh7Xuk2WfBVt9w4N8N8g56kEg5/Z+YHdinhvgqujWpsu7ruUJwMV88GQ2eDIfWLQrr2BbgacNX0bGnXr3fDa0G4T7s8s7jbbnstJpbf98NrJ8FQbOp4OTRpcBG038qNa7mPftdtRikShkazC/AjsHrXb8FMuSHH7tWhyurrVVf9+43+e0C7dz+9Jk+OpOT8EoGA06jHcv5iPrFmZv6Na2+D6dmgPX3A6U8/+hW+14pJwAvNzsalBmjDIGgDJaG6WsA6avVXY+ht5RkBnAy419q9ynjLY75E3fYGjN2vDbk8HdZo8gDKBgADyej/Ju04s7bsSRC8LkY6OS72Duew1ikGE6n14e19oGZN91EHceLy9zEvJBrcMFZGHTeTaf8u2e2xxurvoaWzbK+48fbkybv10NEwHict5vxwf+7Jbazc7lbGQvtaxGdyVZdHJo/Rp9NhzIu7fNnnO1ySgMr9vlGU4HnWY3/6Zd2x8WWGYHcdcOJNtjfbkop8EWnsYmj1qzXYH5+PJe5MbVA9jJAQJxtRZXaxZktLNQkqTGsNI6TTNllA2rfRA3mVmQCMM8NDMRxHB1c9zqGtZaK63Vye6hfXF++m2BfkAora24oVWcNNb72JgszWwUS2OMFIEUgRVtDIOSED6ODIGNMWxVW3HY7BAJIcSD8cVJqwe/kz5qdJ5MBsWJhZ3WQi45ByLqz68OmxsW2/FO797lAwbsub1t3FYYb5acTasVZks2o9XNXqWZN9BeuXG1mmit1+u1DOTValoXpUAGcVyL49qL7WNXRikXi+lyMYur1TCQ+QsqlaqgIJAyCCIZSBnIIAhkEAD4wqPzr172v3rZ/8rw4ksXD98c9e0U9Nbo8d3dk1RrZRhEh43uxWQAZhhj/FFE22GFW52/UCqytTecjbqFyDC9Wqdvp2K2knwHfuYhAP1ZYTZw50/k4Z7Ne/bqXT8tbuQmn/7jZjluVXeLepTjxTW7J7gsEWB9WglEcLFTsCViuUk5NgrBcOuaVVW2vxqrqepBNpffg3rPD2c6bPSezPqgza+92r6forhX3+/PLm0Xu5iOevW9vHt0a53+fMibCR4AetWuXxqe1oTdmhs68b6HztCJ24P50PPRNxMOmIbzYdfhjwSiXtztLwb5g47qR5x7Cns6sGfqQAjrUktW1kWQECTOJ+d3W2f5nHnSOjqfPPL9348mZma+P9lgkfZ1927O7ZVv39x/YecsH3l3W6f3Jg/yot1t3nHTJsG7z5NvUIvDWXMId1rHztzLh58fdf3FoFftuiopujgUV2DkXQbYGDi3fuXcSsl/Pm0euwr1GDR5gV1h/fdBAsJ6nTtYtWjq+BGRJ29jFFi8G+PP32K92tkYaBd3Ip9nvNzm+fT8bvOuAw9BsMxTZgujn08enza7BopZMauXmodfuXloA/+8Ob73QuuOWzKA08YhPCxbaAvyhm++qjIR3rgavLfZJbD0sjWfHQ8kwcblkkRSCCv8IgMpBAnCF0f9DxwcS0lSAMD72odvjIckNsD7K61dK5UbSCGtiyhckUulUiQjIYWxvZQANlEoo1CWSqHSyqgMwDpbr7OVUVmWrsfTGwBqvVLrlVquAfw77313rdEOw7gSVWqVKgAFJhFY3SHJWC7S1SL93Hj1wXq0WCVJpqD4m6rlz07STMvJMlOJdZRHppSNmfaDL7Vns/F6sfhwUwDY3SkD0MYIw+t50h+MfupnPvZf/tgPdHb3eu3uUi0B/Pe/8I+vJ5f94RDAX/7w6TvPzl45OylHIhISwF6jsddoABDpKoIOSGulQ1EGwCqbTa9ml2MAZRlIGGOyUkkCaDV3m3ELwL/7LWeCzdnx3bPjuy+98AqAQIpSSShmAPV6XKsEkRQAvu1wX4iwHDfj/ea3H+y8MU3v3n2HCOTgegTgu856a42MREbiE49m3/1iL0mzLMniSu3PvfbyJ54slqtkueaPv3X9kdOGDKIgjEjK777bBbDIsmS5tEPuZjRUZBQZQSwDCSBdzUPwr9/rf3O7ZpgrlXKaGQC/8daD+eJG6Syq1gCkabK6uQwCAKhXakYro7In8+vDeEdnWZImSplv2mv++oNLaFSCMoAP7Acp63Ktubu/X67GdmilyVonVhAZWpsHj/sPHvcBDB9dvvnGW7//+S+NHtzPrkaj6+tVpn/69fs/9eGXTo5fODl+QZYaAP7XT3wxy+RchT/76QcfffdhVVZODrtpln2wGf7TLz8YPPzyarUE8M6KMOmqWokzlQL40y8cLler5WplndCNMdoYA06yVFsVAkEMVsYoYxhEMgBkVKpUqnG5XAmjUqka12rNuFIvBeVSUDZsNJvUZMbqM2ltjC6VS82dVqVcSbVJtTGGgyAMgkBn2hiuxvFsMb+8HDFzFEVRFFUqFcM8n81q9Votrkspy6UyAAiUyiU3xt0URJQvSSCArK6Tu8buofw+xbDRxmjDxrDSRimjlFGGlTaGmYRYrdbrJMkytVqvGSzDQIYBwMZp5QgRSBKkjbFqQpnOlNJKPZeOfJ6ep+fpT1gKnv6KN+fTmzNaenrf/6ztivvNOak8BWHevqXw1Nxky725t8iXuf9x4TW8yYR75eYjvMQb7Nm7e40QGxPao1LFonkf8dymt5WxZboD2GKFABezi/zeTRJ5qMZiOTY5vA0Nbc7Qixt1ZpDzJ/VSfPlu1qORvrnIyqtvXterdqwH1cV8ADj+3aElquS5ct7qvo5IOGEsty9F/kgnWOhbB7CtlcMT8PuQHOGym0C7Qhe6FBXK7l31fUNQ/rWjSbogRY67u9kPA9qGBAIMQzvuAzYK88zwIZLhs0XF0Cg+o4
7e5Dm9CjDMRjlrXuuNMJmGYRBIsK8By+K0HUbnio5kHC8jZ2MgsFHybG7t21igG++RELaSu47G6PrCwDow2n27xxnyLm42dQxDno1UaB1HNnUNQnmLFes6T8Q0WA27ceepPulSp9oeLi+7VefBdxB38+u6cWewsPcCRJ1ah0QuaVlo8Dy7hf6ZP6Td7PkjEfK5BBG1KjuBEP4JPPJe28Vcbk8Lt6cYzmuhOK89nQMqCMLmvRseMRB8a/cNwsX08rDREVLm9G1jz7XZlSIQIYBASgCBDKMoyDK1XieZyqwpaUOxBzKwu1ZL5bMeQBaaVCrLC2WP+kkIAWLJAKS0EZNEljqftSxLSZAIAgCZUkIGWqudUn2SLQ6bXQCC6KDRjks1rTURBUFgjXUiRMLFhfeoxwZxBzY9yb6oW9sbzq/a1V0iMHOvvl8YeLbqbMVZ4iHSJCFBUgYAgjD0YpFb7TBa3uyUG/lDDPs+7ScPr8lobKVVSpVqJQbA2gCQQaSVMkpHURgJydqAqBLXADRrrUylxrAQgkgEQRgGkUV133f2cpKsAbCBNnq5WhFIBQwgVevC/ocA54+/nXHinE17e9FE3hM2nWaD//ixurmaevWO7T28NYBuYzH5S7b7o5tw3MfbTA1i8I5D9mm3tl+84MrJgNzK6i28iffr+6PZqF1rW9HXjiNX5hligA2MyAUrt2ZbAGA2sKCIFiQMCxfPx2xkZotrA5hzEepiiBD2aNV28tnP5/ntOmBstwb7SbPwOwAPEfFmRbcHJof1A/LRq4nwaLbxPiaiO62TB5OHd6yHL9ELrbtbzEs/erZb5fZ0m090AM6ap8XsFo0Q2r57c6biadzWECAfiJpIgHG3dboZpFQIM814GsnNqZHP7My3xd9NegAAIABJREFUVw8wzFbR8hZiwmnz2I9jAnA+fQCAILZcpVFc3FyFHdTag+Vlt9q2Fk6v6rix3eo+CsOwP99IhWxMPteRGNanwcOWxVYDcKdxR7jw2/An3LagG4YvMzvA3bt+o/BwK8GRP6S4Blt0mP36m1cdAHuSa62ub261IVmStOaHIELBdqVcNdVAkGBmJw0MGMPMxhHAtRJCGm2yLLM5L0UBDH/o+PhTjx598OCQEr1eLd+904rCgEjYIGCQslqLsyUAVCvl1RKpTmQYJCoDsJgvAFCaAkgWc62z65vrSIiVXgLIpgvBWGQGwFKJIJAZEYBKPcZiJSEVBIT11IbKjJLa9hph+PWV+bF3HTfLZSH0fL3+5MR8e4sOOi0AqcqEFqUgImQA3vziJ2tBKImvF6uPvtwAMS+TQOMvvb9TrVXrcSmStJiHP/OZawCtWsvWTFTbjaIgCMRser2y0zunjVo1nc//7EH1n1wsf/KbT5eZWq7Uv/ehk0CWpJA//r52tVa/WVwv1wzgY7/3tq0TDiG0BpBoxVKGFQHAhZKrVCpSPXx4DUCp1GRLtZoDyBaToCTK1YrNjBRRlmkonSbLbJoCKEdWGRCGUS5VZBiq1I3BWrUp2AgpAKyTpZESQGLcEFkuFzpbAxBaT2/GQRgJxkfODgGUpdCaTJoCMMqUI1EqRwDiWrxMloYkgEQpKNWsVcuhsJHKK+WItAFQiaoA2vV6YijNMgCseW7mF/3+3fruxXJ+pxJbLeM//eLB2dmdyc04TZcyKj2+GlcC02x1ATxZJK+e9gC0FD68K2uNZhzXqNQAMFku9hr1nb1uygzMANxMZgYPANSr4mI47nZLtrqms9VsnQJQ2gDQVm2XRWZYkBXel9owKwWAhBQkDSwFmTRzpjVrFZgSG2OVNwWREIFhDQgpJSvNhsMw1KDGbrRcrwEERpaiSCudZmuJWqYVkTCGiYQ9thRSmTRZTOY7u604jivlijEmCIJAULlU8ufVBQkfv3BhowbjZgUARILtWqKM0s5OYu8PmB8XaWYGoiiSUs7n8+VyFWkNoFwuG2hCIAMpIJRS1le9VC7rFT+Tw/48PU/P0/P0DZ6CnN6zQSysAZXjAnCoU/EzPFD39L6AAbs9d2YvF7dGlMMz/oscAizQbGBtOeH5C5snAwVwwX/FTp9HFPKycZ3jTbwPPChITBaz5DYB+e7DhbPM/b8Lx9s+XRSUHHu1Xn/e36BbNiR3nl+zYVe5PXb+IsAYVx/2Ksfdz3E9tttCDApqdwdx92IxKDiQbnLlQ4PAW8xWXZl7BfXM/nx42Nhwx+xzzGaz5wyjB9ONlb/5yaeHk9u/ekv+qcu39i75bquwzWEGXOBXV1q7RfLXsUOSmIAv3wze4eUatXFBBbXhQsa30q0vuJgTKvR3+O4KKGawyWOecL7HAZhZW9V//yRrKBYDeGIDpdrwmbYhhCFLYYRmJoAFoAtAI4ix0SwD0Ku3+zkiaZ9LxS33FslzMyTEdqUXdtxcrIFCDwTY7sCLfQzAcD7KKaXwiKT9+6Dg7AyPSLrLrCNqPuzEVthfFLbTxXHNW58KbQOMFhux0f14b7S42kZhttIzo9wW0I2tGzdAks2i7Wf2BNvVknHkJWM8ymitSdfmT6ZbNXZ+/fio0cnnvsykAAgRAKVUGMggCJarhWYTiCAQgd1YEkiIgNms1kubNSGFRe5UlpEQURjlRbWopccLbfSnLC+Y1jmIZ7fUZNgIEp14Z7hw8lW9+r4MpUoVCGT3t1qHQWjfSILcThV49Iwxvqmxdrx/uXTocCfec+E1gFw4slj/xhhBUlpkVkjlpijaj3dHi+v9eDfvlXkTjdfT288xBoAN1RqGoZDSHUcwAIRRqFIOAqmVUjpNlVJKHxwcAmBlFqtFhkxKuVqvhRRRYJQyAJIkS1JlS6aUUkqDhObUV6YhYdXiXLsLIVlnt4pnp1B+qu/lmStKQOY16cdjPvEg72Ae/H1GtReefOt5t7G/Z1y8WRlwVRCO3KsdXM0v3HKeOy0jnwA3Gdyvt3O913a9bXhz0oc89AtrX4gt+M0hQn65Z8BBk7TZxeVPs5CmMSAJbwVsMKDN2lrw3qACSmdAEPBk582CIrZrcruGXWb7zzASHFr1pKgWXT9+NHvkfmAiwp3m6bknLd5t3c23o2+P79162mZ22/63WFvutZsJv7is+7lg6ydbgZQXNp9FyVsV5+MNp/KkefRw8njrne4NTm6CvfFF9Izc0SY2ur3Xc8Vt8xI/nFwcNw6IBRGdTwrSlo2z+9P7YvtoaGvs8KYgvbidy0f2qh2/x6fiUuUYrOSsGZth51hjwxORJHgv8sZxTlyivE8Bud8+59xI60tOMofCvROMowAzG0vqpMKwLQbhhlftcP3cDXT7uyGwALHXXBF+9EprgDqxYqNzISM2ho1hbc9I7K9WdwL2kIZslB9jD8CiKDBKZ0Z/8Ojo014+8rW9PaO04czmJkAYEi4nUwBqnagsWa2W9bhWjcoAFqsVgFYpAPDg8UU5VOVSSckwrsYAuoe9yXxWhQQQhMJkGqwA7HaPQkmhDJarVZquAOy0m40UVzeTkov1xwCOup3VelEXwUCtAMSlslEawJ3Dk8l43GzEmV4BONzfS9Nkvlw1a
00AV+Nhqx7u7jRXSv30b93P+8B/8p13//Zv3lPe+jk5PauW4lBGN9Npf3QBYDofV4IwLAPAD71n9xe/4FauH33PznKdRtKUo9Lf/cRGOPKH33vyC597WKnWT3pHV1cjAIapXK2WwghAuWQjm0mhy2mqASyShBXCKAYgSqJOZeWDkpVk0KyVb6bzxWpZZjfjVSplAI16Kwij5Toh5kq5AiAuVTkQ89kEQFAK0kQDEIGw2ybNSDINQIahVup6sqxGMCIEEIXVNFmMry8BhIFodQ6sw0S2Xgo4hV9jTBiGKjOTZHlU6wFodTvr+RJAo74PYDK5IlGyFBUppdKqXK1oUDusXlrQGvhz73oRgpQIm3uHx0cH1/PF5dtfvR4/AjC+HD6q3APAWq/XWCWL4fBmp9ECkKySSzOh8YwF7e51ARwevcAqBbDTPrr/oH81GgWVOgASkZAGeXhDG6ELJGXoTjeFNFoZewYJ4TaVmjVgmCAkQbImZokgAsBGS1HWJtEgw9BKK52GYBuo2jLZ01SFQVSpVDRrIWSaqXK5Iptg5vlqCUCnSkgJ5tV6pZRSWkMIgKMo1FqTZaK6+cdbsD4po+2sZKcFG6jNHR8yaaUMszGcZW5oG2ZjTBgFxmhmXidJVIqicilJ0kxlAEoosWFG5jbvNs44IwiDMAu+hhXwPD1Pz9Pz9A2dnsGO/Dc4ndYONnBIEW/9I6aLeT8nSP4rmPwt+0zcQr2+Xuov+t1q++tf9zXSaaOHfJ/g4YP81+NGzwKFFkXmTWjU22k4v+zEf/xsFNOXx4NXm52vf92fqNSfXfbqbQCD+bBb63hc4l+DTWE94CyIwMDlYnT7gmqHACK6WAy68VZDdB0KieF82Gl08UdPo8lgv3n7xvHyet9HuflaaMu/xnTc6kkZEIQxyrB5dPMMIOMbJFlfb6VUfzZ6x+FLf5hbDuq25snj9bfnyna8Z0P/Dhfjtg+f0mvsA2DDSqkoirTRz/b7/XqpWaoDkIKICIa1B0n/JKZuvTBYtk4Cbqf+bNCr/3GGzx8yjZfXO/He9Xy0W3DW/iOZATaijpB0ObvcK8jL/huTDmxYEvaEaC8dezHvH9YOLBL3tRgoZ807AAh0b3L/hdZd+6X9g8G5IuEfMif3bx6c7Zz+/y6QS2+Pz89ap6YQhfwPTla68Y/3rkfTCx8nB+eTh6eNUycu+Ud/lONpMvrLoQ2hNlgOu9btA/hjGnDfYOnTV1ffsv8v0bD5QKenVGKIvnB1/Z6dna9/w7/C9LG33TnZL7w9/Uvvu1PzpMJnpv/nfvLj74wB/MzvDv/697/nYLerNF9ePX1s9nXSj7y314rry+X0578w/oGXYgB///M3/9n3fYsMo+Hwj/y0f+3pU/3Rd5wc/kt9xQuVuF6OAHx2PP7h9/yh7IfPLZLXqs+Zes/T8/Q8PU/P09dPbh9C2Lg2US5Vt3GgLpDH7DfwZ0EFShw9ZWgXLFB/q7EH1o4i4X7J2T63b6cie3GbqMGeYmGzy1QsAwq+XdZp130tjAuzCIAfzi5O6n7vAbAhK7PkjuWILIUe3vGcChnYVEXxRRuu1aYwpriPdx+ejTaxO7L3/pHOjdmejROs8iZvCCy3aQub8NSFKsQzLHb2depZOBvNRfaN7YmW7gkPp/2jei8nlxhLCvEifoPtyCfwTXW5GDkpLl9pTzW0Y/UwM1knaKajSvvx6vpOYxfwhKmNCyfBiYpCG6O+Rr9xD94wOza0kS04wNFgN/QZABkbNoZA0kpZyWDD2RESzCTERkHPEUkIcD7c8KpgsAqYrssIhtOqNjBElgxB/cVVr97ejKBnIW6bFs07AgruXDbnxjNDNrX/FKWFbz1uw8DpLwZ218c5U5TQqe3nBMnh8rJTbRcrMM/ocDHsbEOTRcIXP93g25/s36PJYK++9RDPMtlcXrhryy+4UMatCrjV69l/x1tXb2aWZ90CtjI9gK1yJks1oifTwXGrJ0hYKrW98qjVfTwddqo7VhbNZjI1KQCjzWK5JoZhLgUVZRJltFWRl4HMskwZ5wMopAhl6Pz6jZFeeczNCYaVUpYmAIIxJkuSUtk5OjnCjhWILJVVmrAxE043IZthAISlUJJI1NoYJaW0nobW6c/Gvy7U2KbCBrPLntcT6M9HnXjfzrv2G8PGHfh7l3PDxrAZzq+7tT0hJJg3L/K1nk+NV4vxTqXpZgJsai93Ob3JljWK8jbSxiitlskSXkJeGAgiwyyEMIZXy2UQhpVSBGC8nBitGEizTGVKQi6TdYkNgPlyaUmXJIiZRRAIksZ9Y8NeCgPWhgFoIiE2yJ0d9V6Ec2t12NTaswjbg9mw64IU5z7pBEJ/NujaMC+bjvqMvl14FPlVwn3MqY1UuMQPRxovr3eqe8/ChOyw3tDELc1xa5UDrudX+7X9YsbyRt+0F1jD+EnKT7XkcyXIE+6YtXDdy5htDZPCucNWlPtCPeTrpzcQNqsteU9tuNW00Im3Sr413j1dBW4uRd79+8th4eiRnSBgoQfb4CTnk4dnjTuuCShfW30uLfEagpjvT8/PWqeFwmymJCKRF+X+zYM7rZOt7G7+2W5CX/MGgvK8O5KfWwRdH6BbxXbfFDupBVov5sODuMOw1Dy30hTWVVhYthd3+4thLz9u5Ft0zlwnxFqCuVP2dtW7lXRjR1kT52I+OKh1ueCbs7Egnl7XnEQxNte590GQKJKVuHj9ZtRuLSkM7YxAhAAz5fLg8ALodrJ19eUlU71s8uaDLRb5Csl7syGwIFLA6zejD+3tEgkr4ep9XshXtfGMU++z7ZMoFtiNdMPauCB7hknQZx5efMvxcWY0kYhKJQDz5TwSshpGAMiYdbLeacYA5vOpNszMymj7FiEJwMpoAL1mI8Gq3tyJwsp6ndgqMtC1eh2ACIL1cmmZf1dX48PDDhHBqIAMgFqz3t7tpW98dTYef2in/rmV+dF33/n5L57bbP/IWW2ZmH86WP3w3drdu6/ster3VdLda07nEoAh8447p/cfDxaL6U+/3v+J97buHr9Yr9QBNEtxvVIPw3Izrg7G1wDiSmyfGVIUyUq9WmvEDSIJIAzLDENS/KM35z/2zQcwQqkslAJAvV5jvQQQhXFUqTarcwBXV0MA5bCUVnZ2WwygGTdkUHJ+JIM+gDhJS9CWjTa+vlxNp/VmDQC0DisxPImeABmFca0kUQoCy7slKxVSi2OSgaA0EFSKqgDCINCBCIMSgLgaB0gApMxhWAEQBKGABhBV40AEhiehwO/0R+9rtySJRKVhIAG06jXNbNYrABJasM4yDSAIglAE6/U8YGeYyqhSKSkA5bgG4N7982aj1d3fAwApJIVRVP69+1/plWsZGxmVbYmCUDYbtSzRN+P5ZDqtd44iq5Wkze9/4QsAAPnZlX6tUtnpHYYyAlCud5dJNp1MlDZXV1cAHj563CqHAFRQK1Xraba08ZsyQ0whcup8EMIYmxtmBTvHSGGJhGy0ZTgqnTHIsGEDYYxWikFMVmVbrI1eqxRCChms0vk6SZrlKBTBar2qRGUAk2S2
XK9rlbjV2M+0DstVrbXRJk1TO96yJBGBrNXj2XQ2n87qqW7uttIsLYUVbCxVQm6GF2ZCScScb6phjGFvaSepWq/WxrAMpDbGaj7aXVW2yoQQWmsGytVytVqN0nSxWABIkyQIAqMNCSGEAJEMQ6XUarlKssTOY38QqP88PU/P0/P0jZc2wIoDoAqJwHlIDfeFt0mftTfaGLrecYdy68xptptbO1x3H5PVzXCiGZzv0HJj0e+yuPAuLjynkHVn7zGz8W7dluN/Wjt6MHvMXtXMYpHu0yYv5ACHQiFvuYsXhPrcJf3FwF/n88iGyK9M5IrSrXYHq4HNHYgHy3437m4CjRATbddO0T5mwBCMANPFsg+/g92ujjwRiA5qB4Pl8FZT9efDXu5OCzzKHdMYj+YXx7ZCgJN67+H0It/2PZz2jxs9wy6Iw2Gj92Q6YNesGCyHndhF/W7H+8PFaBt9Kmb0qY5TrPsNsEsn9b3z6bXHEJmZvzIbvtTcYx/e6IXG3vn8Ko93M1hc+TjafvvkjPW8lbY6jLtue2PmJNsMa2NsvCEAxAFxAEgAD2ZjkGCie9Pr+7Px5rG2nQW5jQ8713s2fotP1o2Re7W9J7NLw+Y2xn4LiGTuO6fIgu5jYXTAu58d1rsXXkuUmS9mg4NaF4VNH9negELf2n7VNqZddJvbaqzcU7u/vE0fGC5sXAgMC87mDOzV21ezYT6sr2bD/UbHSmPtNbpXsyGosH/bypJtOWfA5b37auO4Tc/oS/7KdqNzmTsaM18trvZdDCV++pbt0tLG887dzbAaoNjMEx4Jtu0tAAEmG/XG3mKvi0QpEiWJAEAUlRK1XqQLbTSDbdwCZRQArZUMpBTCKkVKIQMZCBK9eOc6W+S18/hmkOfSAo6RjGQQSOm00gEopYIoLFeqrip8TvqzURiGYRheTEcASqVSuVIGSClFICGFMcZG1lZK2emxV9vvz0Z5Pfdno17dxoN2DTZcjKxM3nB+bauWmXcrjeH82rjwTjyYXR22uswchEHeXkorlzsQAOuvnTefxYIaUW2aznNs4Wo9bYUuamqz0ejV967WU8VaqUyp7OG4DyDLVqvVQhvTaO7IqKSMqZQrq8VitVisVgsZBlEQCUlhKQqCUCmVKQXAMDtxNhKAECKwYcoPW92L2aUBa+ZMm8vZZbOykyqdwjDf7kXCBi/a7pJ+6HG7tj8oBN4dzIZdz3/s1rqDxcA1lFsEnLNs/ow8ue7p+l7upvxUF/Y1ycBOvDv2dfvMy2yy2pFc6DLF3l5Mo7llTNNg4uOc+NE5mF7aT/350Mr5MZxiyZOpXbDAbJ5MBoe1QzYCZEhoAP35CCzAgg0N8rA/5CJlu+IQhgvPst9GorYXZ7DTWCbe/tWJPBBAGM4v2zXPBLfl8aOsG/f6875rDmYXbNo/fQPqER7OHvq8uEa5Pz23I+Le5B7cZI27zRfevnnbtRpTjkVuhTsgAuFu6/TezfkzeYt3Wif3bx7kgN39m/O7rTuFSmCvNrapDKs8kU/uuQ9x7r2+0Y4kAqhX7fVXfobJB14uuYPiYrmVw16t019cFr98Mu8f1Q+o2Ci8ufXe5L57BjOA+5MHtmofTi4eTi5cefxLLuYDCyz6uDrII+H4EUB2Pdpeyp1xZGch4S1Rd2NeYT6HjEJfYYBhWDHjhdbZ2zf3iSRRQBTcmzy6u3MqSAB0p3F4Pn3iygG2f7t8EwG4P3F+/fduBvdmWyvmN7W7n52MAUPEXMCP3f28kU8XIhAiEBDv2++9PrgQJCQkEb50c/munTYJIVw8dSuQJwhYJev5cgEgU5lt7N999CiQgVHq9SdPAOcLmposNZlmUy6Xw1Jkv47CoFwpa3AUlQDUylGtHNXjKoBVltRrlSSbQs8qYgmgUpJxXC7HZQCNnYaWqETBa63qbz14PHl8fvHVN0Je/6MvPfzIYWMyGrSq5Vfe9UqmDevgvZH8+S+e/52/9pf/7n/4Y3/rR7/n+molhPqBo+gX7s3/h199PQyC3UalWau8eHT8f//nf/F//mdv6ixrx6JWDgAcdXpHZy+dvfRuAKtkVa0EL54cvHB08jf/4T8HcPfw6O7hEYC/+XMfP3/05u989tf/6t/+2f/mH34cQKlaWUsOiAH83GcuZvPZ+Gb8s69fAChV9tq7ZwAWi9n1zSCMZBjJf/zmBIDOlrM0Y80ASjIgOF3F37te3kzWer1areefXZiP3DksBZVEc6pWACIZQkTaQBt8/zvu/r9ffjtlI0rV1m7v4/euvveFdme/XSlXfuQDr/7cp77EjLhaEYL+wWe+CGC0mE1vZmFUAvBr90ZBpRJUKp8ezn/1q28BaDZbzd2973/t5d+4d5EyokpVyDIAo0ySLYOSaOztAtjf34dKrABop7MfhKGMouPG7uVqkjFEVBbVxm8/ePitd44XC7W8mQBIdZbqLC6HJFCqVgAs1qvM6PH1GEB/PWfNWuP1wQWAJ4PHWq2MSbN0DWRZtl6m6Z956ew3R1nc2otbe2EpAMBBmqz0cpm8VCr96lfPLycLUSpVaq0vzJJ3NJua5MPrBYDR1fXNbLJQuFoZANfT6XSxmC4Wy/UKgGEybHkhJGQAwBgWkNbVWill2OTnCgbEIG1MmmWJSm2YmkWyUoaVYQMIEmFgg9oF6+Xysn+ZpWmWprVq1RheJuu4WV8mKRMbY5aL5XQ6C8MoDKOoFKVJKoUMwqBea0TlyDBHYaS1ESRc9Jji/q8wH0oZSBkIIYQQzNCGldJZptIkW85Xs+liOV9lmQJISCmkDMIgKkVhGJKgeTJbJov1ar1ar7TWySpJVsl0OlmvVuvVOlknWZZZNW2tVJIkSrtwOHienqfn6Xn6E5UCQQJEbKwcsD/9dxsSOCSPrBFoLVwXbcZTypzSIntAhwhwdCJ3PRj20JeB3OOYmYUN6GGtXuGvcwfW3oAlx/wCuy23xQqtircUBBvtBGR4433NIKIAbE7i3sNF/6jWsYE7D+v7T5bOBfWo1rV2qyG39TJk7HOs6ovxcZEFMRsJb7YycFDrXhQE1LtxjxnD1bBb7UGgW+sO1sN23PNbRyJ/+N6t9gYezelUt5zyHAvSyqG5Jc0pOnbiznBV0I6sHgJ8sbw4iA/src54dyL77OgETL1atz/fknI7rHfsxsTiZYe17hNfkKOCkyADR/XeYw9WHtQ6gBU9cW7ZB3Hnwus3dSuWHMHWKujE7dzPtxO3YYrERL/t3lAUDNgRjAhgaBIkmIn5rLF7f7rZS7/U6BACxZltb2PEcbXzZOX2qweNfWKz2UDBRSbZ7NiFC4vgt2UOeYTvk8zw7EUWQghPrFGkJBEBZ409Ah7MrgHcbe4T6O3JpQwi+PYSJoCNxO2FJ6UM/L5CEAdCGGbTq+3259e+YveFlDYCSrfSGaw2gm7WHf5ifnnU6DHzYb37ZDY8qHc2fQOufL242/dYofdi85y1Apyfb9g8bcRhHE9mdhMoAEPCDn6CHwXD+ahXbfeqbQasjJcDBTwtrlPrALicXwJo1zt2rNq3Emi/3rnyYnN79Y4
dvczEzLu13rXvezk10stgOg7IXrx3VdSOrO0x89Xiei/eY+Z2vXM5u2y7AMSbuwBq17uXHgPaj/c87Y7zjmEniXzcACCKCDyYDdq1PU9CMXawaHegzWRICjlYWTEpQyw0Z2wjFrG0T75c3XTjfQApZ/ABbVbJWpAUgbRyY6t0CQEbPhUiEEKuVstKpQJgvV4FcaCNISl3wnicLWwp7uwenF9fZCol4Tl6RACLQOZYgAxDpY0g3ot3blaTmEoE1CiqVmOrHfmOw5fiWjyZ3OhMaZ2yVmm6ljKA1w8lgtE6DEJtTLe6M1g6zP2k6fAFWzGWJjmcjwC0471KuRSGoTbaGN6pNMerib12p7orWRORMRCCkvUagJTSwqlaKRkGlubQqe1ZWcwgCGygm91Kc5LO3XOimtba3UuB5qwT74wWN77PtwbLG6KoUg7nq/mXH7991Ngzxmg2VsUwjhs30zEJIWXIzEIIEkGapQAYwnYbpbSfKRhCsDad+v5w5uexZrcURQQBnVl6LAAmgSIfwjNWAezV9q/moz0HgtN+rSi52LH8Lzv+uvVeLoTnkC8/awXGMu2IHf3aLdDuDz9+Hc4iQoBYJ/k3rurivRsv8blba9vFeqe2P55vpBhacY8I4/lwt97FBnGTRdVjItqt7QuPSO5Wd5ksgRS2vACu5iMAnUaXgcvpQBvHaTutnTyZ50vJgT/4cWBgUXm2W+1YxMzyEDvx3tAPf8++Z8+vtEu/hiswGcZuvH+9GO3Ee8y3YyXtVnevfT3s1fZtNGzyoJjl8tpKLebHtYgmEB/Evf5io8Zw3DghFo/mT44bx0Q4bZ0QyGpHnjXu5pASs7nTuHM+Pbd3nTXOCBJkrSOTx0ixU9PdndN7Xt7xZOeA4LzCielO6/T8xikwnu2c5ssYAMu0skmzhp2aiNgYBs6ap/cnD05bx6et4wc3j/IrT5rHxugns36v2gVAgg7rBw6RtC7STDn+bct+UDuAtVB8JHQQYKhX6/YLYo7deluSEGQDQcuz1tmD2Xn+61nzDIT7N/fPmnfPmmcAP5g+AnDaPAT4weTCcxn5sNFh8MX80ubHurkQqFftFo/EenEH4P7isht3/KGbHxMI4aSl3Um44tSFnmPSjrJqB5HrUSRIgAyD2Sijzxqn96deD7R5qo15e252AAAgAElEQVQWQtiGO24cPJo6zPGkcfBweiEEAwZMZ80DIty7uQBw1uoa1g8mI2tdKG2Ysve16p+dzOy9H2i2iEDERPyh/e6nroYfbB9IEgAEkzIqMdoY/f797r8YuVK/Z7fLWoONpMBIADBKU1hiQZKlXbm1gWH+jjvHSpvPDC4AvFyvSpAksICNeVIqhxyE9bj6vlbj7ST79hfORqOhlLJWaQDQOgVgjAIQVqTIdIlKcbwnkQAolWXZkI2sEnJWLcEYUyb9g6+c/pOHbqD9xfeeicwAeDIapZkQAhp6renHXz37B1++b6/5G74df/yl+J3v/EBnN55Iqsa77zw7qkThx/76v/9f/F+/Yy/4Wz/0gVqpzPNrLoW/9F//p//tL30u7wP/3V/49jtHp3/tf/vY3/mpn/xf/sqPrjn5H3/lcwD+yp96WSfzv/epi1q5VqrUx+mjj74aN5vtX/rqFMBPfug4ruxUKhWE1f/gI+/56d/4Qv7An/jQ3fFk/EtfufkLH3hnWq4CYEJcb+rxNYDvvLN7zyt4vr8h777wzpR1eTwoB1UAlVqbA5JhCGBydfHR97zy62+6CvmJ7/w2GYnpYqG0qdTbP/rh9/78J10pPvqB1z72+ufFeromyNLud551tM7++cMxgG9r78pK9bcePBJhKQpkJSj94Gvv+q233WzwTTvlAFhzUJJls5p9zytnv/KV+99+uJfNpwBCEtAKCG1XHC6vfGfebQSZvrmoNKsAImIA+7uns9loeT0AEAsRlstSBq/2zsbT60G2AvBqY+f09HB393B8ccHAL9/r/5mzk/Uy1SoNBH/Pae/XL9wy9/5msyo7N4tlmiaSxDftd9+aOKvgqBwDiILAhBUAq4wzhMzSrBQAU6pwmgGwY1lrDSGEFIKCR9NBt9q0Bzt26rtMZgf1/VycUbKAYBBnpJhhF4GwXGYtI1klpslsJhgyipbzRSmsVOu1yWQKIK7XqpVy/8kgrlRIG2GwnM5lKWIfOUqWSxhP56ukFFekFEardL0Kg6AUli8vLyvVKoCoLAG3tSC7oSMCkKmEIO0W2AYzVIqNMUplQspqrUyConLZxgkHkGaZDYlIUtarTa15sVilqWa/j44brWSdGGM4VVJpKcRqtcp0Vo6qkqPCodTz9Dw9T8/Tn5hEvfIx/M4g3+0AQOFcPHe5gT8NRuH8Jz/epcKZkHdPcnupHM6U8GFAiPNzajDfhiOpmANmQxsnZdq8TTi5cZD3x/Y7NHh2gmE4Rh2727wHk18t2J+iO6pWgVLHxmUPEM4Et0tNLuNuyLiM5Y5czkY3PiMiRzHBLpiiQ8q2WAbuw8Zbq9AAtmZYOFgp97myOfHt45zqXfYsNmxDlxZfwPm/T+aDw1q30GSbivNf5FwZACAh2AccKtIw3D3uNVyg0xZfyD5c7BYcaaFoy2UhW3GgkIUQbLe6wsKFLOyeU1uGLZE9fHZ9TxhQHo0nL89WPjwaSoUrkDvw5mVxzccsnWMZC5DY8oovRn53ELntI9bZSrE2bAAiIQlkXUhICmZj4OJgugzZ7iYEPMcTgH2gIOf2t8Xc3fSMvFfk0Zacar87D8gbe8tj0P/rSTN5sfwgNBvvSDeaPWq8NRsUnkfFGQO+H3DxmqJxRELkTB7/MN5upvzgwyXhQ1q5NiyWiKQXLnBQWiETmwjryF1lAIL0A8tBOv7/cGGkmNlybmFjp7ipAW4KISGEkNKGCxAk3akNO/qkjWFFIBkE1hc7lCEs3uGRFACrZGnYGOOEI+JqrLUqReVUpavVqlQqM+PxdNiJd8hXYCDEk9lVt9qCEFLIUlQRQiRZul7PhYU1QdoYJgqkBMEYkyZrrXQUhmEYRmEJQL3WqMbxaDRcr5da61KpZIwxxhCRMVytVAEopQDSRlnkDk4ug3z23X+5By6DQynSNDNsPEMUlguglAqFIEGCBINZGwYTkcVhLYnSnu0TkdJaGxNIKYXIh4mUgpmN1mCQoEajVQpLKlWJWmdZavGyMAwBisKyJDFd3GRpGpWiKCzVqrHFy1SmZ8spCSFFYNgIksxsI+TmQ2wze/hlznUd6wNqsVoIEJTK3Iy/AfWZiPJw4bliBrueXKA3uv7psXxy44f8+lt8e17LbubMAyy5ZiBPdcs1Hog52558C8ln4Ok5gSE3wIxHVX3/z+/2zEz3Ilcw/5xNvnOlAgEQU38+OK4fAhAQCso+yg0a4GI26NU6bPJc20lnc3iAfBKi/EVuFfRerGx5tmazbm5mSnvJ9fJ6t7oHgEROF7Smg12xCSD2oiqFrOTGjX+pEGLDgQeYDCsS7qnWfmDednMvtoD7v/CBVoxfTd3qJEgiN1qcQeLgSJM3IdGWJEvx3+JUm3t90F
apHIzrCMzGeEZ33j2ItOtqnn3riivIYafG2RjkFyASwi0fcDqbHo4Urjn9TJz3egDWNQdkAAYbdmfTrFy2/dLjTTDGxnM8t8BysidBGBgiWHaz95727s9unDEc7ifsWM5rLvez9mo5bNiAYUthX2PYGFZEInfzyb0hnH80K/LUTPKBZQDAKNY6FAQgCgJBKjA6YAkizcaZm0Qg4Y7rhABRkplUpdpoQRRIWQqjKAwAF8xEKaWM0mCttdE6CGQUlkQoQxkmyQJAvRITOFXrWiVeJktWSoiSUVmiklA6BLtaqwWVejUUgycPPzNafvc7XrwZX0GLNEsMOE0yAC/dPSlFpaurPullt9OLyiVBbFIdlUuz1ZwpAsApzxeL/mAshIqEODw9fPXsrCTL1+Or3//qlwCUa63lgufj6yQIlsvVay8efseHPtBsVD71qU9/5c3zeqMEoLdf3+2cHO/Hg8vha6+89o7To+Vs3Z/M9xpxe7f66Tc+B+DN/pNqtZJk5Xe/8Oqv/Novf/KLD0+Oo0q1fOf4hfe98lKmGACb0nh6vZjPyrX6o9HFk/4DKaJmrdnpdB73HwHQkIeds+vpzeXN1Xtffvfi5ubzX/iUiMr1Wrnc2C1X9wFIUkm2XKyUpGixWg0GT3Z39yrl6Ppq9Cv3rv6tl44BjB5caEk7e5Xdvd58tZxeXbY7h0IIbUw5LAtKAFyMLk6OX9XrjHhd3+8kKzW9eay1CoSs1PcAsE7Xy1mj1dWGF4uxSpOlUjvNtkqy2WKWJGsANzdrE8pGoy4D4iwBU7lcDaSYz69v5qoaBQDm66xaKVUCE1UbBwen0+vR6PJipbG/s/OoPzSyDCBTRpHQTKyVDCjSydFOKW5Wy1ElUwRgr1m79/CtsNyYral/PTUkq6XGTmvnanyp00W71gRwenaAbLm4viJQuk4jGRnBKjPr5WKlTbu7D0CTnNwsWAVzPRegzGgiqtYaijlNEhurOknWhmGIQiEzlQYyyDJVrTWTLMnnTiLSWtswTRBiuBj3qi2/7ri5kP3o9SY0Y7NgOLOQFSIKj496i9lEMDfqNa3UTmv3+ubGL/vUajZXq9XDxw+1zuJKMwqCeKdBnhIShMGkP2Lm3fbOo8HjvcaeECKuxkrryc10f38P+cJ3a2NHxKTYCBsofL1eLxcro2HAWishZaVaYUAbYwxH5TKALFOr1SqKQiGFyjKtTZYqw6w4lSIAUItjY9iw0Vrbw900SZdqUYsauYUW/R8fx/P0PD1Pz9OfnBQUZ3APtGxZtdjeJhUsfRtbkZ6+B/le5VngU365Myf5VrDtYnL0EPdi41cXD1n6bR/lT8jRCfbn4fnq5KzaYvjt7XflBb4FiBR/Z287cv4l+8iLzj1qUxdexpkL9ZbvO2/X2cbOpvztOb7i8293ch4c8Ab2rYzyrQezR10Aj8LkS2bh3k2YZ771FJ+hpxz4GFsKkw6yctuJZxZuaxtRqGQuXoViJVLusE/whn4ehN3z3Zi2K+LWy/ONmUVWipfmOmOAF4VyXdejcrRBWF1/Epp9V7biaVRQfdxUkkMyLKC2lbGtGuANJlcoAFmy2KYI+UhEofD+djyzI/xBqehA7xBABlDQgiNy+09XwML2vFiY7Z4KryZLKOACmwHu+8atIca3OgAVO5O59do/sKAuO0Uc3c4aRSd3dyX54wU3XbE/lcm95Zyxa61AG9KawcY4eUVD7M8Z7KjNMRSwl0qElTRyZ+VkYNJ0DSAMQiAUbo9aykjYJ8tAhkGkWfdqe8PFuF1pyUAAeDK76lRbVirIsNGsiYWNVc2e1Wus4S4lkRBSRGFkpGk2mzBYLpcA5os5mKWQRAiCYL1eRaVyEARK67I9nwdKYSXVKViQnelIEKB0Riyeagi3LCilLXuRfKdhsDFaax1KH7/SanBuFG8hhEjTlJlJCOskLoUgoiAIlFL2gfljhRRBEJSikiCRmnS5WkohwyAEYBhZljBBkgyjUqYVQDutHSnk8GoIQMogUzqMhFWcFILJ6r0+5e1s50DPzyv8arsTsXAYypaGgu1vhU5ZxDko70dPDV8QO7ap7WabGdjiehtIZuOYTdtt4NYdNxHf2gkVWqhwF3m4cJOV/GjOjzo/RT09o/oMig2HsfBYiy9Ju9QaF8naAYUMQ25J9FqGlu9hpM4V8fIVj30VuYYC5cupA06Joa24SuHyp2r5VirgblvvA4QQ27c4Lj0D7FnmLlDzxneeLYX8/2PvTZpsWZI0oU/VzNz9jDHd8Y1ZlZmVnUU3UtKwYAONCPyG2gO/kkULsGtpRKCqaEmgxqw33SFujGfywUyVhQ3u58R9Vc0GIUXCJOTeiHPcbVTT4TNVtXImp1nnKb2dNpYZdXTDfNpBEimHNWlhJ9lqqGgYn2X1STMZF4oMmcKdYjVCqiIKTYnZRkR7PCkZD27yP0XTmM7vSNsgAueE4/k8OB3hkRQRMhVf4w4o+ywLs8+uW/TYj/04VkHiZjyejUzD06XOfcv7e0zxOSHgcRqPqpvuytgLYk4LHTuc0kyHdJlyJE6NRzOqwincYGQFimmMUZnkLO9VARJVhZTD59K3BCAThICghoitA8H7wRny8JEaD4c9SI1B0ACR9WLJtuq7vem8qyrjDID5fObmTWjb5XL+3/721//z//13/+bXf2TU3Fx/GMTHE9neHyybRdVsttuHzT3t6D//7b9sfbjfbudNXdcLADLwcrm8v72Hgki2nz78X6L/4te/efX6xXfv5gDmtRmY6dEYBYirur4/tP/hb//mpx9+mlU2BuEaW61n7u9++GlVmZnljzc3L1dny4V+enxQotdXXwO4fmiN0fO6uv30vavCl19Wv/nj3379+mtXmeDtMPQAfvfD7+5u3zuhP/r61xeLi+VX1eDlw/3Dwrnf/Oo/AfDD9z/VVU0iy7r5+ttf/u93f7laLYcwEPNyvhhigkJoU68e9o8qfS9UkX//7juuGt91AOJ1Nwels4V1VdW3B0NS19z7vbHN2XLN1t2+vwGwmp0NYRDtoNx14fHm3d3DnTV8dXnVOAvgdvOw3W4vLl6BZLG46F2rvSeu2DKsh1QAtNK+77rWz5p6sx8aYxcLezjsxWtTzUABwOsXF47M4/ahAu32LZhfXr04eL85HFSUHAFga0IQY2rYqusOzlaXL76ylVa2OnMVgOvbD66uv/36qx+u99+9v7VNtev29W7WD15VY3qTzeNG+z1EnbMK7Dbb5euXylJ5aXf7uN1D6KH+od2SMYe+recLKGrXOAGJIWMA9P1QVVXbd0HRVIug0knLXUeskR40SPTbkKh4TY9VqDCPpJXGQ2uBRDaSeLRG6RMcVa20XdcBFCQEURC3bdse2uVqCaD34dB1i/liXi/8MGgIi4uz5WoVBn/Y7wGwoGrqel5bZ19fvTbGbB63dd1A8eLqqphrR2YJRp6tmrZt8EEVxOxMTGlLKZWkQFX9MAAIQYip6/vZrGFjgwzMzEQIiIc9MQ+DqhrDxBRCIIIl2/kemR1WeC7P5bk8lz+kYp9aN0dlVOimm
Fr8INng8RlWaI6ySehgUUBP0Yj8XYQDJqpmaXV8slQy2kYloHzihpB7NgJVKoiY3QQ0iXZCyfef1Ps0zoTzHYNVEaFQGjGRrJlPrTBKdk7+PT43Qd6K4h1FFp9gOOPcJz+M4gAxMbPiODQfuxfY6PM6vOpEPELLSiqO7bGnXTg1wqN6TcWCPXJ9yz2gSaNjk0D+Ki/j543EMtQUQDtxxDk15yjTRjHEAUBFNBtCQDG5n84JcTIHEoY4meWJPV80nwheaYEsM4EU5CkvbrzV47TRgm7o2NXpoLPniKah/BM3nh7h3JNJS1OR7cafeTv1fbIVj82uQmmaNzRhajBnY1ynr5xiwIU0aFL/qW132sOfvWd2hHiPHki+QSfWfDZVy59TItWJ8Uxje2PPxt2J5OWYPi/gAhtVJQZEkh9Q8lbSfBUJjthcPO6OX6l6GVSFyRAoqAcTKYMo45UxGFyh7IOPjjnx36vZ+ubwGJ+5qlfGGAW8H0TE+2CtVUAU2UU3UbOIMJIDj7XWkFGDqo4Kqu4OO0OWiEMIzlUqKgRnHRE3dQ1AhDTk8yQiw5bAQXJOi8x2EplkazlmN4gew3lJ4hPxqg8qu0PzZoqx7Uwc/bSqqnZV9dkbtInYmJhSk+q67vt+B9R1HXtzOOwBtF1niJvZzFWVsxUb2/f9drsFMF+s2BiG1eS9RQAbIo+ALDKmnBJ5sxyRY6KlyalNZMaaglp/lo4LiRUjhaiYDTIRbEegZgR4aMQ8nmzZI0GVf+E899MvIomO3S5/6vhYIuPpucjk27Lvxk80+YOMDDfdOxT9vZL8BUbPU1gTvzo6FARNEdJ8QkBJdGc5Xvwv45ZMKfPKIh2hq6dJsPOn06lRlZQjmnLcxak7fQ420HTqoJRPR8vlBRr5RtF3KE2lTsDTVNkoJIv+QaJHU0p+km4iL/vR2iFLpQkr1um3ZTXy3qQ42cUNMzIvVpMfHvWV6ZIkStDJvOhRE+N/BExZQz4HUoiMgSDplrejs9ZjFq5axnvcXHwxzSppWZjpnFA8UopQRBHVKPsmzwlRugCGi0ZyRCq5ZS1/jhE5I28gSjkb81XpVE53qewsTbxOs3PzKA8p3mlESjAjDUIJMSIkdsAYFjUikoemRGyYoR4As3FE8cIPMezF+763ihfnF4fDFsBut+37rnImprUVhqgP6l3FrrZV5QDUlVHxh757efXm/rAFMG+am9ubfdctK64qAvC4vTl7sb68OO/Do2VTW27cfB/256vV7V0bHTYPXoYQXr968fH63aIyQu7y6urjTz8E8VdvvwCwedxr1w7EPdOsnv308f7P/uzsbHn/t4dwvnA1ewC3j/fn56vK0i+/+vqLt1/c3t3bqvmT1Zt/91e/e6Bu13sAq+VqvVys5tWn29vf/OLXv/nVn97t9t99+J5UNfSmUgAN09Xi/Pt3P318fHh99aq26+ViuVqufv/x7uIKANiH69v33/3w96GTm5t3zumbt7/88f3fEs9IqT18ArCwdr588+rs/Hd/+1dNs5yv1/v+tn049AEAlsslgFl9WJ+vdrvN5mFzfnlR144gkMGoJ1PFe1Su1i82XbfZXjdm4Wxzv721trp72PTh0xe2BrCcNy/OvwnQwfe1me/aIQztQ7u3rjbWMiyAZn7WDzfbricybdvbGQ+9v3vcMvDm7cXN3Q2AWTVT0Qq2cZUi9H54uL4+v7qaz2Y4D6jPAbz7eF1bR6CBlaxhQ7tO3qwXc1cvl3MAj5uPu53sd23wvrKWmIlNs2y8Lrbb7tDtAXz80DcklePAXDWz1kvbDRq0apoLawYvAPpuqMg4wrxZ7Co3n61FVIIOfng8POaMPWrJsDGG7XJx7gc/SHC2UgqR4XS+FwnGWOsssXm3uXmzuNQYXTeqsZlDsSUJgJa07OPhFdiQFdm3bdfUToj6fnDO3tzceQkRGzXAdrslxfpsdX/3MK/rtu1c11Wuihm9H2/umPnq9YvdbmOtCaLzxbyu6xAEKIrcke6fJAFATG3f9e0AIPpIKmCsaZY1kQlBJAQFDd4PGY5kJldVIsLGzOyspa7reiCFA1rnVFQkVFUtEg7dIaZ7CuIZRGTwXJ7Lc3kuf2jFHv31RIufWg1HRz7ZO+TIqBgxRXry9uTNo19HrTr9V1z5iguFpxQoPdXwJ3bbZ+3AZMblwO9oLGWNMpswIzJ3ZDTq+EFSE8fXit6dWpVoGU3swgxz5S6OLi7Tpp52mEbTQ3PXMswRteBsoWWfvfLQaGmN/aApOFj8VClXK5C3413Gk4XBxGAqEzEFWqaF8r3nlKdzMncTPKdM5+fH/mQu4rWRpRNUhgEUl8Aj0hsJYrKqExN6goOPpuTUD2T8LX04MTmLE0Oqb3QWKZhAzjSJ0aZP0WeUjeqj8Re6oxyMVuLQJuNAaWisejpJ5YVMOFTI/uSFVNGJwX0CfuYxlApO3JZHMzg7jx5Vp5nWtVRzSi9TGj2qe4QPi12XuqeSxzrZY0fLnRZ14uADmvw+0vxJf/T4/+wiFI1JyvOhABXESrOXZUpAqRDNjmKYro8G771Jl6X4MIiINUpEQbxlqxDLrnZ1rEc0DMOgRmezhWVbVdUw+H4YVMNlvYrBTYvFylonIl3fRrXVh0BEBc8iYpM8PNV7CcFDtVoszMTZ0LDph455CCH44C05Z2zlamstUbohJwQREVHheEECGSI2xor44x2sSEgSGWOQud70dnhljjQlk7c041PeD9HA9sEzc1M3tas73/V9VxySmTn7s5IC+8PeuappmsWw1BQaD2KuXN31LVvb933bdvW62e/bx+1DHdNx9h0RsRFRCT4QK7FEb7gnDt8JhEU+mjr9djyQyISST2rGjZZ+STtEy+/lG2Rq1qMb5CehBpnBPfUHO+kPcsw4iIhy1HNmCcd8swyw7MwTXOczU1He14yknD6ZZFRafCCF30IVqoSXy5d5A5LGVAmZuRDhy+UXoHT9MDJKq9B4vjmBXAt3TKxYqMip0vkjDYUATW7OejG/nAwkt5VleWabR5xzMriRDDL2lCZAEK8dmQqCz0zkiTdgZqFaPstTpzLJ4jLlTkkIjhJRczjwOGyKmTq0xAwiaMiwJMYbYMCslCTa5LynAMvxEr5xEpGPZ54eleVxJ7xswvBjHmeoxA5MZ7YwZByTduGhTAVzRIEgM5qrWS7mC6g1wskTGamjiEeesIgYxpvHKB9MUaIxYNxnWiTXz5VMPYWkj5Z+XLXxFCGd02S9ggmgePs8xT/jSfbRuZYCjixUfDnZIjCRsSYJoEQVqskTEwC6ttvv9vECYucMke0G3/fdq6urLngikgBLXFXOsgEQhqEXT4R913aHh3/9olmvzh0HJjgWrgwAM3dk+Xa3c3Vztjpj8jfb7WHolo3rus5jC4DEeO+Xi/lmW+0eOo+w3e5fVNVh8BVZADRov2sPw+ChS2e6Tfs//k//i+0fLYf53MEaAIvl2bqpKzebN6vddk/c/sVf/4d5XXcUdpvNV6/i3VP7drf/8NMPh3731df/8ssX63//u//jp+sfPn28
W87qy6szAK9evJw1TVW5i3UjBGsX5+evBPRxu/uH7/8OAD0+rr/46u2XX9+++/hXf/nvHx/uZm59f79782rZd4f99hHAQ9teXbnXr15X1mz32/VyJfGycWv+m199/fbtGwAfP/ye4SxbOH9+fqnD2la1Svhw89Ory7eLiwsA3tq1NaGdf/jxvbzcffnHv23vb374uHn5ahkJ4na7eXH5cvBeJAzwXmUYht73DgTTRHTvMPTGGlEaJIBo27ZNXZ8vlwovKpWrANw9PHjvtQ/7m9u3dcOGLeNw2MEa0aHbPwIIfWuImbxKkDAEdu8+XVd26Jv6fnsLQLXj4P/+H37fwzVN3YbQ9z4EDwYj3S4vovFm8BAExiwXi3azZeMGwmw1fzgMAISN+H5ez6qK960aS4bsw+YBqt4PbB2A2WxRmUo0KBFbJmXrKkAvz15EXaXv+37ojbVMltm+XlyWPTQ9FwWiH/LkADp5NCQTwhEPvq9NQ8wxn0zvvaju2v3V1VVTNwCCCBF3Q19XNQSurm43j+EWs3kjQQBIENfUu8M++GCrOsjQNA0RGWPKfZVjd2In84cxRU3kSiIK5qqq6qY2lr0PxhgQ2kPnc7IXtkZFwCwilo0qQkiJI1p/AGA8I8boiIhoEIly15IZE4k8l+fyXJ7LH1SxyMFO2TJ5arbHv6eafvYbiM9nLRsESHErodMK4n+p+qIzlppyKfbbCGkc+7qlyktEXenxaCTSqNnFOimbHUc+VNCsKxbNMj+dm8rGEeXEkcmimZpjmlRJndrix3DZkUFyUo4MWD3+UJMhoeMDE5ecMuufkz/HplX+S7X4gkyazubQ5GlMsahiIOlkHWk6D0+phiYrNFma/5iSnR1yNfk3HU2f1BHONgrKQxPX3OlsTOKixmenCEIyc2j85KTd1DWAyGRTeUosx92P9hnFoDg+IZpI9NkmPNVmTvp77AtD038y7er4wejv+ZnZ/nyY39hAQkCzK+h0IxTnDjoGAfIDejKEEch4oqkd7edTNqFHHqKFgEoTk2+nj50OTFKa1smHRw7Hk56Wzo97dPT2jTZ/ipo/dv6MhZlLvD+hYBWqql4CAEMaRESCQplZRZU05jF0lYuad9t5H4ICjbWuclVdgzhbxynTojE2ekcCIKYQRCUwT2ayEIGIiIQgxhrvvQS1xvZ9l0anYDaDH6y1UJ3P5jGDJIi8Tzd9Z1YUt5oQyJAReCLWHIsdcQUGiKg2FRGFEEQCFfCCYNlIXjvJPp9EIJb4oYk3ZAo7y9ZYIrJsO2mhmt3fiJhyojvZdjuAa1czmUN/iPiptdYHT0QheFUQcT8Mw/B46NqYDTOIGmuZjAJswcaoKiGNZcq8JpGblIWUFpocpdYR+cUtpxM+NaH+z5M5Coc7OTAp6MhIqHlDHnvB5bYnjHz8R0dCzEM6ciihTC464aLWGYcAACAASURBVPLHlRMKi8vzcdR50XiUMtmOKU+DphEUgC8LKErQXX4CiSHl+P3IZNJ8aQF8jqRHfgkZLI4ioezfp1oHHYu28YvxaS3+mqN/alFUAACc7kIpI0iiSBUhecdq3v2EjPh+ts2J1FXVItJOe0iFm4/vjng1jWJjDPdNMuNUcMZliRhWPGhQsZFrRV9wAEQpUyVACBPSTgpDBuhL/0fP9c/6BWvJMJkdwJO/JRUiSKJUI7Cc9kIKlykSiY7jZigmDM3KVmmrKDco565TIit7OmGRI7BbIqEJ8TbFtOt5XJMjikotaUyEnQJCFRqPG4+DLbK6VhTLfGpEYEj00dYYuU+loyP7IcssYoIISCPoEDOZ58S7IYikqw8DEVBZN5vN266D9gAMo7EWGjbb3axyi7NVZc1wuJPgTaFA0cpUrnJEflYv/H6/7w6z+VlVbarZfLdvAVydz4bBb7fb8/Nm8EM3tIQPrqquD6LQrusADK0XJi/kqmaLzrFuPl2vv/1lszjTwzbODDcLY7vKVZvN45uzsz/90//0L/7dv7W1E1t/8/YrAF9/+dVwuHvYbe937b79Tuj24e7x4bD99vWrtnPt4QzAn/7Rl7//8Z3oQrf49P4v/9f/7ebHj7cLWy+ZL2dVBQJw2N6t15ck/uHu48W5Wa3md7cf7x6ujfYvlmcAvv/0brYfLi/fdotNu7kL+35vH8+XdfA7wux8eQHgb66/6/FBLZ+tXyy47g8PxtDeo6rp0B4uL84AHDpZLO35xRtiu1y//OmH7xsyX758FaD7bnv18hcA2t32+uMP89n5clltdptLcDcMizmtVqsYB7Cczci6vu1qy9v+YGYzI2DYzd4PfhuPl4auJYDYhICu903tZoulZTN07W6zv7q6BPD+wzvraOeVvN/ttlfr83D2QiBD6Lu2N3UF4Ksv33683QQ2jkgVIYR98L//4f0v3r5q5hbAYX8whtp+OAw+kCO2gNze3oiGuqoMCEDdVEM4VMb0bfe4eZg3TQTV275zoVkuFwAg4e76MLfO1e7g+zD0bKq2a8kYZ52tawCrxUqGYDy3fbs7bAcfVKXzfTNrYs7ietcMw2DYMRsmppxMQ/WI6UQeKiFEdD6e0jAMgyLzYaIOvqbaOTefVUysIkRQL1VdpxAW5qauoWiqyjkHwuXlxWF3eLh7uLw8BzCra1NVw9DPmpkPASDnHAAJwVoXOxNCydVQBBMAdH0fAuKhsnEWBMOWmEUDsSEiAYJICCGmumYiURURa521tj20xLxYLYl5uPcAtu0mXpA4eE8gCwuiePh0qvQ+l+fyXJ7LH0ix2c7IjgIo9k2GSo5KScI/GlTTEjkxJRugaP3jcyOTLv5VpwjBkzqLy0hySIrGlGr2XX9agxLivSKjRTRGHGUtVjWbeJmHj5Vkhh7VY8oxcDm3HhVzJSv92T7I83YEqmR9l0AU48vpqBVMmuZkFo2Ian5akpPWuDwjlHo6ZanCjGmNjmtJBybK3geTpovan3T6XHuU/UQKKc2NNvfoYTGxweMvVOZ5VB4+09up1Zw7XuyEkyIQzWFdxVWFiADOTgPZck8LwwBEA2UDDyWMdJzKbJ6XtgnZfSHbwsUjkkDEo9036ejUSxWqihBplmNM9smAaDr5FNPLHBn9k6kqDrf5qwlSMTFH/6kp/qd1FKWEp1BaaYCOINHiIfSE3iaQdGn55/bzaM1h3NdPXlEdg0CBI9e6yVgo3hFULNmjaO3ye0Y1sjU86X0GMTIkQMXVd9Ld1AMJ2XwuxiylERsuzC6jywoos4nvC0XkmUQVoswkUFK0vg27MIQeQAghzkjXd0Pwh7ZVJWLMZvPe913fArCuUoWXwUsgZkssEkCkIXkGBNWUfIyImI2FNebQtiq3y/nSWpsGKxKDDeuqPltfMHM/9H7wPvhZMwOgKo6MkoiIgrwOBM/pXqYjeCSCEgwyxhoVoaCS8moSMWUQXiQe2EgMPIwXk8VuqmpMx+6sG8Lgg7dsR/ezuK5SKpHaNW3bkqS0qmzibZVDe9jbug5DiLf29H1PzFVdD2FAjGcHFBrnmYAQglfPxuJkZ00oZPrpSLGnEnEi2iTLtSP
qpQj8TD7RKQ2OLGCKohRZRVrqm8zK2NXyUW7hiTFS9i3lm3byTTL4bCkfP/WGKywdUdTlk6EoBuMtYBFgzCd3rISMc0EpcEijL9IndblwzvxLxhqNxmSdE//02HuVqYpCWTE4nuIikuh4lvD0AEMzSz6+rC2KTpNWipJwTDtOJCAkTD66HBIfaQQ/M7WTT5Ks0DJfmhUSJKguPlbCoTVFgtNk4KU6zVpZRP44jWkCR/rggwaD2eikSfECNxmXnXKoeJHd5byljCOpb6QpQygVPWccHMXnOOobiW8Sn3oka0l1ku//O2YzkeI0QXsSZXDZL8kG/4w2mlZCUhJnyuPLatUk11AOIx9P06cHb/lMVbPaJZLuUSQiVg0hOblLVjySfhT99AWTtLtJzDIUEPXwBOKilOTYbSiUgiK6qpJI8BIG74ko4hrwYCJhDRJUgmVT1RWBVotF1wkAw1Q5Xi5nlavvHu70IawWs6E9AGHwPlZSOTebLQLI2PphGAKZq6vXwrZ6YO/9oR0ASOtnSzt7+eKwf5CAWTW/vbutG1dXs9pZgQFwv79l5xozA9uKqVq6QftPNzdvXr06e3UFoHnxYn53+O7HDwak0AC2dcPMbe+D6s53AOpZ9eOHe6P+yzev1hX/48ftYjafLc7Wq7NXV/a7658AfPf+96/OZw5DbfsfPz1ut70jYyu+Wly+fXW+GzyANxev5ouz33/33fZx7+yntr3bDSF4vVhd+MAAKld/fPdTw9XLy/WH0N7d9ctZfXZ5vt8/dL1fLc8AvHxxSaxt38/mFxer5Xd31Vezs/3vfteLn5+92PYKYOi9ccy2apqFKu5uPyz87GJ9Plucd6HfP94B2D7c39w/rM5evPnmlx9+/PvHT++Y6GzuNo+b/e4BwHp1Vs3PrbGH/Waz21bNquv67tDtut6gQlAAhlXAbAwpGePqZg52g6gPfGgPg18B6IOcNdWh9+vV/HHzWBtr63nVVEPf3t0/rldnAJSdqftD541QbZp9uwNr2wdRevXyNYA76KfHR9fYQxgE7Adxrlbo0HdN03BlAdi6bnd7KFXGirF108A4IpD37aFb1XPEMz/WIG3Dy7lxD4dD4G42W3R+AHRuZwCC17Y9DDL44Hf7HVm7qpYH3d7fP8RMAkFCVLY5Jo4GojQH4ZSBAEECkRLFJB7MMAXbV8BR5cPQD8NsVvXD4PuBmRTqhyFe8ReCv7m5q2tXXV4tz5Ze/HK+6LeHOp4NA33XOWMWdr7dbruun82aePRmrBnCYPgkPjpy9dhVEHHftTEqfL6YEdGh7VTFOENEIhIkgMhaG/mMq6uaqOuHYRiGvg8hsHX9MEgIRZKKCEBeegJXXItKEBEGj7e1PZfn8lyeyx9SsUA5+Z4q8J8tExlAo4da+ST9S0AophOdvAUdPT8064sZXfuZZkveoViTAEQwcgRYfUb5Lzo1OGEr0KPnch/+iRGXyUnIWlbKiUoQabEsx05olkXHVUeM68SfriBLpzbwxJ2vpF37Z+TMz6BQqmUyUkD9uACl78dzeWrmJNT2qeme1HQ6fePo72JHnaz0Zy3CJ+PRk0eyDTwFDmiSxW+sZboAOWLrCKWapsdOoW7l6egOSzn8NdpuRKpgQ6X6Kf2WepANGBGNV0nTSPNHXUpm4wSEoEndT516yiSMT+rpx5995fT9yVPZOKMpXWQ0XI+qTKAedGzsicmtoz8sjhevUNkx6prPun+G/WS3naOaIjxKuXefs/OfVvUZLqNZcxyxzBOHMQViLE/W84iZpjYtR0s75hGL4X8KkCFT1jI68YmEiBMOfVe5WqE+eB9iIjAmkIjMmlkMkVaCYWut8z4llNvvd8baEIbBe2edIVZRMDRf/ghVCcKEeNmiYTMM/Ww+C0PYH/bB9wDIsAZ1zi4Xq8V8UVUOoMPhsNk/Gjbl4MeQNRwkBFXxIiDUaJ7McZoAUOq/qsZDfgDMCoqXmPMUcREVzjnzLJsQfbWMM2wO3UFCmM+XxERC0UUi3QYBjcbG4D1Uh+CDeBBSIJUE65yqusqp6DB4Va2tdaYaZABgIIZNBIWZyBDnS5nxFJibgonF2biQi07/QJKDRdqdkhsmOP5xic0qprHtNP6r41P/McZF8krU45111CnChL+dHCU+EVMn/mf5iyyGcIT9xA8TBKNK4DBKwnQzdOLVUvz9kq2WHuMcektU2HvqCcVkYePEl7MoLRzymLfHZv9flDwphacRIsFm3sBglPtnSKBcTjKEhBQERtSgRg0hz+vouVmmc+x27oAWqaTTK3smS6YiUfoU6C2LsCNeXALHAXgZiJiUSLPgVhCIIxaGUYIdLXX6YCTL6dno5yYuJXk4xgNzc1yiCEYH4sw8FUjej4pR2uVlHcXbSA3j2efTvhx5kmq+W2z6zMi381yclokTqKKoCRMpn4SVqkrGPqDQeJF3QTNHz1QlAKI0ndBEwQpAgwpRhJFBxGBBmgrOB0KJAYYQfLrdC4g+7FA1HK/oBdTAGVHkzIa15bbdi/jKLuaz2cPtndNgoXXdUL7CG2S8IrSbYOu+39ZVI8quosrZdn9YLaO8o156KH58d/3q1csv337d9ofb6zs19/N5c74+B1A742p7aHu2FVXc+lD54bb/fr+7PT97BeDs1dv3H68piHIgJlNX22EIQeaLmkSulgsA+7Y/n5neh6Dy3e3247Z/uVzf3N2+uzk4QxYOgLWzrjfXm4fW79iY11fzh0dztlpevVr9+he/qHgNoBvYre3N/dfGzr54+eL7T+95oa/Pr+43vP30ewDz9czQ/seP/9AsL87XF9f3j747mPVv/sWXX/7w/vrjzS2A/eHBB72o1uSa+8Pw6vK83e/evP1i27aq8vDwCKBpnAa93901/X67ter7N2evLWuAXi5f39/+LYB93/7y21+p4fvtFobFt7d3D6t53XVt0iJFIFAfPrz7sFhf7PfDbnvwqlU9P19d3t7cAjgculltCRDSxWLRdocPN9dXF5fkDCAPmwcAzNT1QRSd94Mfbu5vHNtmPt+27byu4uUwxHw+n/vuLpA59IdmPrcEtI8/fbpZX60BtH3oxbG1XrwYJsOGjGCoq2oY+tpVABgYuv6+7w1TY63OajMMbdur+IftAcYCmM9n9XwmvmdwH3zXtmqdm9VBhlk9630HoHG1F09Kho0PwRrrrOt6/vRwfbm4BJBEvAbfe+tkCJ54ZI0TDTOC90LJ8ZgRrweQROODdo6b6N3e9X27PzCxc5aIu7YP2AAg8G6/c/ZsGLwCxrq+77v9Yf3yIrpYxv16OOy9D01Tt4euqRtAexGidFfblHMnT20FgKaZEWy8CKgfBhFRJVBMFC6qKkFi+p3INayIMse/+mEIQZbNrGs7P3hDNo55kD66MDBxNI0tV/F8+jhr8XN5Ls/lufxhlGfW9Vyey3N5Ls/luTyX5/JcnstzeS7P5bk8l+fyXJ7Lc/n/qJiFnSuFMfsccHQ2Hk9tc1ab8svEPyp7ouXQHJIYS5ScCON7KY9Q8ncAAZyjd9KP5JYRPTnSn9H1IJ
6jx9Q/zDlFRvRayNFbk0NnTU41sZ4cg5kyF+UDcyYuqc2RHQWOXcTGJ4EYgZhjcZK3AFGOqCJSGIESgiHlePxM6UR9HCUBjOhYmTP+penLlcdM53liSKBxGMXPhPL8pDCrafKocc0UGm+sje9QXsQ4qnzrL7KPjipESRSiECVV5CT9pCnjJpWz/+JNkv1BkstCdO6jcieLCkSJTGp/GuY6+RmdkUoMbMV18vpQyl4qKUsAcSQbzp3LbidEKsJMlkxlnTUWgEiQ4DUGfWTnCEaKKoz1sDIrsRLAnLovSpLmDIX8x58xcC7TDaUwfEW83yBNUcx5QyAwmdg4MTHHG0KIUpoYKY4gFHcGiMHxKHjcejRuF05puqPzQgxkjK5nMZs1FdLSTBeKmLQvVjBxZkJMjSeCoGNYdIrgyxc1ZF+OkusS2bcldazwBeRljq+pTmad2BjmQuCacqepiJiU4D9vkshJCmfJpJAdUIjAzJyb5NGFLIduT2h+5FEAVCWOeeRwSFkmE5EeEyem9Y2Tg0IbEqAatxMREFkUJcfA5NgiyWHGgFgUhpmZFSIqlXXGGGJA1Rqjqj4Mg+9634mErutDTMJGTIZFFUTGGBAXThfngijeZEPEDGKFBgkxr7kKEtWxURE2HMJQ1w0Td31/OByIyBpHZCpbGzZVXQWVwQ+imq+yIa89iBQSJKjGq24k+EE0xL32sL0VESbDxMyGjBEfRARUwqEjCZOkVVAVGMPOWQM2xkAhqs7ZbhhUYZ1lY+Loq6qCKhHPqplgiFtGxEenb2Nc8iNSKIitcc4RG4ESM8W5UohKZPjRjZCJ8yXQWe4lb7a4Izj1N0qJQuLxEgoaaXWkNKZSThyvSCffMedwKiKQKMXtDuT9GImYKSU/oOSNp4Amvhe/T/ublCBEwszI0b156wPEFPlDnIrY3eLufSTqKPOg0tHC2qYeozEXAcpI47OpT8QkOWo916wCEYjEt3A0QfFl5aCULrCOPSmcN2bpU1LJzKqkd4y7VoG0xEksphEk0Zj7n8SrSd6ooqoiooLMlDXXphNNQtJovWiIZBNVDUkOm3rENka2oqyJoeU5BKABIbm8xcZVx4pIQYhuM5GS4naTModiIjuWqA6wCqvGK2pJUuiI5kBtJRIGwcKpUhBJrFxVQRU3SlpIlVOAH0MnN71EZjtGRKf/EutPoesxyhsCn0QwEDSktWY2xpU9RJT4dBJjxaV6qgSRYWJw1iui9pQ3Zdmj03DEVJeSqjITc/Hej06WqqqWXE5hqSkHKKLTdVCIavlB4iAKkZD8myCCOCLR1HSaIVHx4r36IjdjxHv0mRWIxGdUHQFgCT4m7jTMgAQNpDGjhWrOMpOjti3YBQQQ9z4QULFxxvRhGEQDhZQigEhUJAQfgooyiIbD0O+Hft+1e/GdM9baWoldVc2Xi4ftdS96/uKNca6pnLMGikp1v90Nvn14vK2lWl5ddLuHwzDgoT2/uGyqpjsMw9A78t53s/VFI7Q5HMiaxawKbSd9P7Stm9W9kDHV1fnF4LXd7VxlLFsfHHw3dAfbzLSq3r1/L8YI9PXl1Vy7T/efVuu6tvxv/qv/+tXr1794++V85nbtwQQ/9Fsncr54fXa2DkLzWdO4RV01X716/bh7/PLl1YvZV//q17/94upN3ZCx+7mp231/uV4xpB/2X169/sXbb7pg3rx+M5+trVa1Wqvh9cs3Ly5ebPt2Plv1h93Dp4cQDjIwof/jb//kX//2P2Pl3/3N/9m2rWFdn79ar68etw/X//i779/99OGH9+JYQvt490m7IXRdvXrRHR61362qWsHnL18slufBuJmtTe3agdktm8ZeXL5koO07NkNjVw+393vvreVvvvmTs/MXzfqyatxmu33/8bGqXfB+NwzCbn1+sT5ft93BOHPYbToNi3o2b2b7vu+6XkUMUdVYsiAfSPX89RezZrV5uGdiBm323WEIm8dNt+9qW6k1Ifj9MEhQr2oEZI0Ave8H76uqJtX9vq2a6ubT3eC1V+4DlJUghnkYhqqqFs3cGDv4Ybvbns9XccO0h7334dB1CKqifej7vgvSWWOh4WF/8KH3wTNbZSVEqa0KAajt9jb7I8+bZdXUu3ZniA/9oRvaumqYTe/7umq6odUQouhURQiDimjK1aMSNXxmNjaaSQrkPSgAhTAAvuv3la1fvLraPjyS0MvXl6Ly/vr9br9dLVYquljOjTH7w362rHf3D0R0dnEZN+3QD+Tlxx9+mC+WCrRtu1ouicgYhsAYk1NSUkqIISFeP0VMooEYIkKEIMLGELMyGeO6fui6vu+GEITAzIbAwxCIePASgvg+hOAJxMRD3zOYFHvdVFxDESSAYR1bYwY/CHlFUFVRaf78v8dzeS7P5bn84RSztMuMhAF4ghUVKyR/OfkjG/sJcsxWRLbVC2IzRq9kC2qsasQRJvjOiS039YEvb0Slc9qBCbZ1ZO0gmSjjp/lhTF+OFtlx02OzJXXVpCVKEJdOYFdKii9N9WzC8a857dJRB5LJksDHbCempqMpOj5WXi21nYxFk0kxUdwxeedkXTOWeAwInawRUbaasrFYouHHcKqjGSQiGhM1/vMld9DCFtuHxkjxSKWaTAadxGfFLqlktM+ANEEeBQlBMSQyURBIeRoxF3E6YsoIxGnPx2Ema2ncOZSRq6O8hznX3MQcGmGAEdubkEruWzRhi10/naEpOEClCRRKn9w1U/o2oml5Nksr8ctobB898WQfpi8nxDchqdNS4L4RfcgI6pNBTa8xP6I+mvwx2YGJJYxBnVNw5TP0+5kefm5sPzuIE75XWBRNjjfKfH2GZsoXEdBXIaaIRwCarniJgXgioqIiJTtAAcXysp9uXZ3kqc1QWqRQEZWYxCBiwLE+Zgao61qBdn00Hvzg+8H3APngGTyEIV6tE9FNgYQghm28/oWIjDGzegYCM6/ma1URCcymRBc669iwqHCEwNJUlCR4AGCMIYKEULmaiPowsOHe95Wr4qU0GntOFG/gqOvamaoEckZgNAI7PvTG2OlcnV4ifzJ1E56fBMa4F04p/yQjQaG6Qssjf/w5QioTAJRIaGSgPTIaPX6eCMQKijbhhK0rErqTu57bJWIoCae8giODUBBkGnM2YndlMvJYpoIlIp4nBKdULrKjEiSdJ7Mw6KPZmw6LC/cgKDHACh7hUM27lQuEmKDfzC9VEQ8yCYlt5IOBcYGPFmLk4gRw3oOTeS7wHyZnIACNFWNkLGTIEI0JGjVdHp1bzVKKyWSyiPOuBeWMDaU2+GiCqBxUUQpRLwBjpvoxV+LIidJ/inElKYKJKdRdx3UiBZNJs03IeO8oxalAjvk27rQ9eGR8E7GXFjWLuthQPnbjyUxPFYtReqGoEJM1jDUBWTwWHScvHU32cukQYuK03MIYJJ8iHPO0jfv2mFCn0nkivsbqS9PH6RfG30t9RQBFODyfJsSUjmzJEEAQBhMKVMqFMpQoEDGMMVYiI9cAaFBVSucQkY0TEYMs2drWi2Ym2jPDGmFSZjXW1HXlnLWWa64s0
e7QBR/ms1kf4EVhnKpUs2Yxm20O+8dtZ5u6294HEfjh/M1b2zSd7zbbzb7vreXKuW27dYu67bqXr1/XzaIdemUGBmvU2Hp99mK+mP/404fhIM6aL7/95eX5erVeb/rh8fZ2sz0EhbUmtP2n2+v13K7PZ8tq9unm/ocf351X9dmsftwfhF1d16Y287lrzOzN1csvrl6eLc8u1ueVW3zz6s3lanGxas7WZxer1bwyP336dLG8eHl+aV0DUNPU7TAMQbxS08z80H+4/XS3vev9NgTT9f2cSQY/ny8uz172vl0tanYL0VaM7cOw2XyYNbZtu6q+qJq1H4br64/ry8v3H+/quqrc4sPN9vJ8ZZ3b7R63+37wAzHefvElE3WH9mK91uCF+bDfAWql+3Dzoff0y1/+9n5zEPHb/WaxbJartTEcxD9sNt4P97d33W6wtel8aNvBubodhutPn+5vb3e77dXqbDaft11bOacEw6RhaBj7/Y6N633wCufqy7Pz+8fb9WrVe0+qQ+8vry6vri7vHh/arm/b1gdxVVNX80O7a/ue2IgIRELv+95v9916debYvbu+8cS2qoa+r5xjoq5rDWntKlUxhLmraueC92ypYl4sl0RRwGhlyeZ7t9X7xWJpDAJR7eaVa7quFWBWN9aY+WK13TwAap0V0RDC/rA3ZJytiGDYiErMEO2ci7lWgsSUCCk/NTL/zr9H1lmyg0w2qWpjqnk19zLM5/Oh67v+4Kqqbfuub4nofH0xm8+tNcZw0DAM/WG3r6pqsVqJgogOu3273xO4ntVV5darlWHa7XfWOsNmCD7dEnbMUJBP+4fBH9rWez/0IRo2PoSYhyd4CUFDEKiGICFIP/TByzAMUb8ybJxzEmTXbQffD6Gf14vLy0sIadDlYsVMXd9aqhgcLVAC1X/+3+G5PJfn8lz+cIpZumXW+LOWl7G2Y3vuNB8Tjdrw6bvZgkv5Go9fGqGSoxaSmZCtFRo1dlKe6oYTU1ELBENFNy8/GLXmCTqJ3N9pp9KrU+Bs6gOYVNBJVzEZdQp4VwBTLy0eXzk22DCxbU6gKERIDYjaNJeOT1ucTB/KbI+jmujoxb47eiD5vyTr5XjURysz+XRclVOD4uhOgty9k9ktgz811ifo3nQOYEbwLnc4t1XyrenRkqRfmU00F6PnyegcSpywSEXMDp9mRKPVF/OCSdTymQyzPRo6JZ+d5Cs6jiuDFwXGoOn8Y8wRRpzN0TF52miEFRswg1qxzs9knUu0Pt4soDriIHny6eQVpNokD7vs3UJVEwAzDSuv3HTDFAM0IdEnTIImA59UN5rPx8RQ5uoYLZ2MNFZYfJ5GsCCb5EfQh8bbtI9uqSgb/zMp/J40GZ880WVHxGrSdl6mYxqewlgEHHWtEAygKgmWk3Qfq4SQfaZCQvQyPSQggEcSLHWmy4YmcKSolG5EIzZdLlk840DGGIDiBTKxFTYm7hFnKiYO4n3wIoFAJjnfioRAhgkUxEdHzsbNQMrEq+UyegSIhnIAEHPGZUf21CNNgGkCW61x1hhVMcZ0XctsXr54WdfNdr8JwVtjiGCs4wzdkmFjTJBQnGqzyCBAOTmH5qXI854ZKWVQY7rcOvmA00QTM7F+5gKlvKwTALp8OoquI5Zdln8qPjQjqtn77AT+RCb+khv1hFTzMPR4A1LyOoFOr6UiivdmEGc380nuuti5yB1x1Bc9Sx8fkwAAIABJREFU7tcozPJPSp2smZNNc8rSpBSJnIhgxJQi8HKUzzkJpiyAiKDJH3R0PMzCCFy0jVFpOBZSR9LwVNpoipAgTiBpjgNA5qzjqiVxyclZmHNzmo+TsjDIU8Q4udwgPiop+WBh74VaE5cmQqLhEb5M6z269GdmCuT0hXFeEr5NyEsjmXrKqhEhkneWQunhcaycJEV2U83CDZPFzANOc0PMhbSiDhRdqnXsaKGhKbmOx5mpv5Plmkz+lG8fbwCa/pbCc3CivGGEX0tXMkE8Ea+5J2MeS4zcddwOERgv0jwzvZFc8kgzpWuMVgGRYXaGY7wPEylM8eEtElmJlNiytcaqIIj3fgCSD76zDZNhMRpFCYlCneWq4cqxtdTU1jlrK1dVlXWVlxAkQNLRzvXN49yZqlkwsZBxrN4YFYBdxXaxmn/68GEQ3bRiG26HzlW0dM7YWRj6edUQM7NbLc/A1ocQfE/MRFpbFtEggB+2D3caNBD/0S/+eLM/DEFfvnx9fft493DvqjlUfPBnc65rms9nv/36q+8+/P2+fdjtH3bdoZk1X799C25U7dlytpgtvnj1xlpLKtaYfsBysQL0/GLhKr6+e7i5vWPu/9WvftEH7PqhD54dVMxivlgvl81sZskE3w++VR2qmWMrjvhstfAMH7A77H58/+nVxXrTbh8ebx+290EDGfOw3fuhbxo3sHm8eTdbr2e180HJy2bfkqHeh0PvGUZElOxyfb7Zbu/urnvfPW5uNtu998GH4PtuGHZ///vr129eWTb3j9cM/eaLb2ZN84/ff/+4eSQGsxEFkXzatY2r2DhXVQB572UYiMg5M6/rYeg7PyxW6/lqdXd71/UDGx58aNu2H4bt46aqbO/3ED303hDV1vzil3+8Wq9/+OGni+W6tq6ezciY1XK93W7YGlGSINbayjZefVB9//6TqWo1rg9BFI2rAY0gYG1cZRwU4lvDfOgPofdNPauN6aFnZysNol6aprLWelESaoybrecBqKv54PVh/2ArG0IwbFQVbGpXB/Vd16uqrZwl0w+HRbNmMpadDwMZJnBlq6aaQan3PRNpvNcubu2kumT2G1PllkiuuFlFLdmK3MXVeQjBVnYYBg7kqopAtWuaakYUb8pWYzmoiPckaq1TQ4/bTdu1pHp+fh77b62p63q72+22+6pyzCbFA5gSbkdR4SMmw0aJ+mHY7zsfpB8GECloGHzf90xGcmiNMYaigzfIsHFVVVWVMSZyXu+DF2/YMDMMmIwfhiASU34zjGEb1CtENChk9uf/w+eY23N5Ls/lufz/tNgC9aQP6EhVe3LeOz76xISaqv1aII5i5jxt+0TNHKGG8lHRKifm3BTVmlpRJ+hndotDis5LH+LEDSUZHpgomsc9jdNARTs9BRriMyngiuJRuBCEU0g20mz8TM2npUCUn5lepK5k+wNTmGNSaxwSldpO1O6MkaGskB53kSZLUeyW8vkx0lQepPGpY6eD8eaczwx2Ypwc91ARb0YHTWI9i/KfF0GPa4tBzMnsLmBkria+QRmCTdZc9kMrxlsKnhTEqwPKxJSmR9KmQh6gDAvmaUjdpOihpvnio3HZ4pNKuWuUv0pLRhMvjOOJOqFgaLqsncCKUDrw2ZLG8/R2Aj1ZWRyv5Lgni6FWVny6BMhU9fNdSHRG2W8Uyeo9LlT+QUwKPu4fouSJM9nWqfWJCauYVDpVT6ct0Tic4+4l/CwBoHnWxqUtANukTSrwN07AjHHnKOKdLKfLnSk2OVulvRWvYyh9peloU69pxIOKVZw9h2PodKQlHhFBQBFvyk7AMoCYkiIOVuIXEcNi
IoIYWAQflFOSBxEZQh9EDHNU0A3zMAgxGQWAvuuIuKoqEeURu0qRtrFJa9kPAxNqV8V7fgANwbeHgzEmDir4QRGDtCh4LzbEcWtQIs4MAYg+a5OtISOXABNH1FYniKRmqjjyvidK0V4a8KSc8LFMHomE9ejBsmITksxvKShHZx4/NBFsEWfGyGZHGE7zEdD0mC1iWxokUtd47zEAEjAlgDVjwjQV8MXLeHqGdszIxwMQKIATPxDJAiIhrVn4TkvZwAW6GZs43f6aa0nbb+LDXmZ2DFjIUrD4BZfmE2ufbNzJlOUuaaKWHHJ7JL/G7Tmu9bjR5ESsx3BupZgZYNQtsp8jIaZuKN0aW9HMxCd4oo4zzpMrs1GQT5Cm6OPU8zj/SSCoAsGMOl5ShygxypyI40Rep+/y/qB8kFf6Xf7PaCArBRVRwaShsoyaGeqJXND0/PixFg454anjS+NsTPyF8xoq0mXsmXXi+JlUWRY5CiBoOELzMV0PnbLojKSXHpVtE6f8eGJoqi2W47GJ17AmiUcKTwMgICixyX0mMoCJzxvDFDj6XZExzGbwQ+ytqKhKvMRDlKFqbBW8RlZvjDHGgTiyVrWk6tbnFz++u9s+3r989QbAbpDDfmfn8155dX7Z2p1jeFUrYVZZ8QAA24gOi/mqV7VUoTKH/dYsuO+2w2EXyfDs/Kpy5u7uTtvt8vLN6uLq9vrOWfP+w/t28AC+/eZbax1F9JydDAfnnLG6aOZkq//yz/4LAEr+V1++7cWKyJuLVXeooJ4g/XBou8PD/S2AumqubzeLurKX58PQr5vl/Isv2t/vD63/4sXL/SAANvutsXz/cCtE5+fnAlnUzb5eUt0oC4C+qmu3/uXq6sfquqlnjTvfPLzTqpa+nddO6iUA53btdrff7eZXb5Xw3Y8/ffvFm6676wdZL1zUnnyAqh96eXVek8LV9eXV5f3mrts8MujVl38E4OBt2w9Lo3//13+xaM7a3YMoHnZ78fhw0wL4tmqCC8aQGr+o3dly2fZBiAPIO691BQCM3RDqZu6DZ0BDODs7a/dbVS2pMbQffvrpXTOv9v1wvlx9eP/RVvb27p5ATVMtlgsAt9tdHw6rxblARDQEZWsADVBnGgCYm82+q5qZC9J7n65IMgxrhr7dswIwQxcqN/TDTMlaK97v+4NhFR+GENgbAK6qSGh32OthD6LLF1erAbd/fRs9guOhqUow1rC3VUVEJCLW2F5hjTHGABhCL5B+6ILIbIY+dM45Jg4prATISguBQpTXRVJMeHJtZwih9QdrXtSzOni/mM8Ofh+GYJydz+cAHh83ILBh64w1phuC7/12t2u8jyxfRH3wxtqhH0KQ3WHvvV+tlyD44NkwgKAxtRaFIP3gVdRaay0rcZAkMUUweDEagqgEJZcT+yiJRJYIESFQ6EMIRlWCD0QUJBATR0dv0c32UTQo6KG9r23VuFnfDZzSUOG5PJfn8lz+4IpZVatRMZ+w8RPVc4I9UValji2SUkZPiFGFL34SyJrYyc8JfnEEGSjnZ3L9QM5qlxVFOn5P83dPHE8yeDf5uFScI4un+j1N7ZyJVnpUFAmOLDOlxX/gKQpSMh5GK7N8WeYqVqATp4b83rSmqSI9/TXNPDReeRlre9prmixt+pk4P5zWmCzV41qmpmv6+/OikP55IXnUJBejLLoOEEApPda0h8VczxYas2E2ACRnx8wjY1KQpoxUxdmDiVNUWl5YJuYUQyg5KBxPBp5stNHgwJQ84r8JqXpi1R/9rrFvoAldUmlg+jhl3C5PVjHzaDIHwJhhNDeXjGIULCYTyHRAxXNtXNNCSGl3ZsN4AqZ9vpx+k1168jCPeMkRpzga7BEriqs1+gYh5Z484VrTOoDPfElTOqfR7wdPJhynr0033Gmny8L9E6UsStyVkV7LJMTbaUuaNcp4meakA2bEKQuZEQBI4HG9Faop9FTVxMSUIGPSNZ4qMetiKPlLBYoY8AxVQFQsV/FZio6CyWHQhOCJENN/Ajr4nhnGWB980zQEEklYKhFLCERk2CDuBUVsqMw1EbEAGi/wpMpWq9XaGL6++TT4IdJLYb+arGtj2DCZ6BopqiGIqgYVKNiaEaWZ0MA48wnayM4U6Ylx8yYqTdfgyrSWI3KMvk6U9+nx95O9Mn17gsJgXKspCeWXFIjh2NAJBDmhr3hSUzAlil5XGSWUwkxKPcQAybhfdOJ4l2cEZW+WyXhy7HFactra3K30wyVPZtpUUwEzmfgUnUoAnzrO5QYoA3lT1lTEOo24VQbuxr+nozuqNCN/IyvIYyj/Hf9dehNjqckQKCbpjInJRmaQDGGKM6+Z/aaNPEH+CsY2XpwNAvLOnfY4LQtRmVgQpThdopywGJksJbUDSTsuZe0to4n5G0VDylOYOP90VeMwct8LJTydjrK9AIFE+DzOBpeU3OWhcV3yj+YtOSHFsgQnq1DWPldFqsdvnGQ7GP+fpJqc7vtyYDl5YdxjEzlI4ygme7A8kwg09XhaDWGiBkJjIKVldjEDXryEPsLWTASmdMM8ISGUAUQxIYAPnomZrTWuaL0xmlU0iAQitYZn88YPveogEhQKY4hYoc5wVRtRCSLSb4fBn1+eiwY2/P7jjyyYLZfVfPFwf3fYbVSDKirm87NV7Uy9PGPjlOnh8bZZndeL+fb2w2G3YyOqwVUNG/PmzTdXZ+d+CItq/u2f/hmT/fEff4J6of7LL94sF82srn78x+8PnfdJHQoVD5dn9a+++fZyff6Lr/4f9t6rSbLkyBo7HuKqVKW6q7tHAgOsxq7tLj8zPtCMZnzgj+Yr3z5ySex+2F2Iwcz0TE+L0pXyiohwPoS4kVnVAJ5oC7OKsamuyrwipIf7iePuXy7mx2+v31/eXU+r6vZ2KZydlDVzfb9dOWPrsiiLoqoqh+E33/8WNDw/Pn/95kOh1GxazSZytVsXqjyazupCV0pIxcZ29+v79Xo1DIO11rE1drC9Y8uFasCy740Bynp+tpjdLm9vbm5V3RRF/eHyarvdzsr6xfknPSsASujdbmVdx0A9WWxbU2iSgurCTqeTvu1mBZXNBFJOmoYZbru5vjVHp41jU5RNP7Rni6PODW8/3AwdV9OiniwGY5frtZB0tmi0Vh+ubvuuP50dgXmz20qhJtOpkNI6K5UaesNCKIGh31k7lFIWSqzu74tSF0VJQiqlBLHUcjKfCAhd6M16Mzma3N3eL29uF9N5MZlASoBLqXd9f3t3a5xTumRmLRWTs85YtufnL5mkY3aOd9utMVYI4Zypy2rWNGwHAShnpFal0qVQZaGHrjPWrnbbWmopROuMca6uJ0VV3q9X613nCCfHp7tuuLq+1FpqVVRFo6VqppOua+uqPjk+MYMVJJRSUqjBOUGCmR07Qcq4QQhRF40kJUkpqf2mQFEjRAydHo5Xs+XvNyAHZ+0ggNPjExJkra2r6vLmfaGastQ+7nDXtUVZdG1Hgoy1m7vVdthqVc6P52VZFkVh+75tW2vd3fpmu92wZaX1fD6zzllngzYaVAIJImPsYJy
XsMa5fjBDbwGy8QwBUYVz1hrv7M3OsvWHOiQEMweBH5xjMhuOWIAsjGMWXjFkwMUcCewAPLEjn8pTeSp/XkVRUOyCWhvpQIg6c8AC9m4a7aYHuERmlAdjYrzhD5bxQcGsSNr9IxdmRlDOewqwYKpI0vrT997WjJcEgCaxFDJEIjNn8ufnhLv0zmDdhDtcBIsewzYeYhbRRkZkLMSbOFovB0jYWC2PnB462gKBX0AxPBjvbWX+vr0Q8o/Vci8EWCIdjDZhFtU+exJn/Y84CJ608Eds29yGd3BitFTGRjJYeFMc8GkpEubo7VERUlVYRiAkBsQnRd8ihGwOvreFn/HjNWB2cAR2CY8I7Rz1HB5n1jgzCOQS0y3rJQAQBwSUfEiJYtB9P/c4PW4f1fDaTqCuuLg0KPhlJmQj2Wgp2lZ6lxgf51uRQxI4gCj8neOCoDgW+7YrI5t/meGfN/dgoiU+oJ9jaZGP9cwRGqJ4tDBWO9jd8XH5xY+8l/lgwf6h4hfOyNkKGSKi+RerGYWOVxn9takXxgW9X5fxSF8I5ziiCQTA03pdWNDwo5HWYPhJaRpFpCDWecSy/b8R9on6ukgzw09OJpZSeWhDkvRMLhLBu1KAmASkTKtegEBCKclgRRqAhfcflMQ0DJ01dVEU1trtbuMrVRQlBDkbGHmOOY1Y8rG1Q1cUGnCmb0VR2aHve8dm8BX1+OyknjjmXbtz1goiawwJxSHxxEgUl1IlWckjRJQEImezOBwyHcgjDvw48gTjPeF3WPYWQBDCMYpoDpLkW2GK8kBxrdE4hDzuf5Q6DCKkuQgTzLnUuDT3x90gTvM4GeJzAnATrRRwgoTSrQgrO2L7mU136LEev+KYny0iI6O8eaS/8vtjCylub0QA5LjxprrHivLDB/oXu5FuxgBcpCrvURb3Qjfs74JpelAOjSVm5/gRxeDXB6cWzJzSaaXtyosr9n7oSWYC7NOnRPkQ3nvYOwLIqKfx4DKFQxuFbJrfccv1sR7C5hBgVraAJCnYI6cRamcbTuCImZLil3ldjApecnukTIAdnM5w2oVT3/iJR+OewwwKLgJJGaGQrCmbOVF2Iq/DwZwKLt6pLr5eQuyNXX4DZ9dlqmv2zaFiNm7FuRhBmBJx8BEgSB6J+ml6h3b4Bvr8RKPABlHILESKC5CLJ6LMbABwyPvExg4gpaRSWsN5JZGU0P7pAtait86H+nDGmd3tTd93ZugBCElFuZRKEUDW2hIM0rqeTJur6+XlzS2A8/NXs9nZD6/ffqr0UVFq4vvd5vjk2f3tld31u80tgFLKpm4kMc+bV6/OV+v1zW07b8S0OVFlWRYVgKpUm3YNTS9efrJZLdthsDxIMPe7vmsBWOeev3j+7cWVLCrLrhBUFWq7689OTv76yy+c0wB++snLf/nPXx1PaynkD9f3y361mJwczebOMVjX1RTAu8vb1x9ud0N7fvLj0Hc7CWvdv/7mm3e3l//7PzftGgCUsFKqalq5vn17s502Rdv1w9Cvu92kXgDQ1szn8mLDR4tT1/f/53/8ct7MTo6ps2z7gRwD2HYd9NZCwRoh9POjl+VsujHWDYa7285YAI3majY5O9qt19vZ0E8m89V6DaAoS6WHu7tbAFrvnp2+Mk795Pir82d3v/vdr0tZNWVpBvPybAIAwm7bzSfnz5b3m+vlUpLorXVCil4DKMsSQKk1Q+w2933bHxWlUloq9ZOvvvpwdQHBQhKATT80Wjm4ly+ef7h+P1k089msMJv73TCYftvuALC1Zhgu7teFKpxSjgnsHCN5cu26rbM8rSbCip3aFkVFRM4NTVFPJ+V9uwGghKx1oZS2XbdZrwbrhKRK6LYbmrJqpAIwOFvppi5rZjebTTeb9Xq5e3X+ojWDsXy/vAXAxMMwCCmLoth1WylkoYuWOzMYHzLSwTHDOZaSHBwJUixAxOSEIGtzWR7VLWaXJYHz61CyhFDO9m3bWmeW26VkmpVHJKCUXq9XAKyzAFtrnXW7XbvqV6fzs9liVjT1arkCYIZhOp1AyskwNcZOZ9Oi0C46NxljATjyMXAUCWkZxvJgDXprnRv6wV9jLDtYydITKtu2JUEWhoSQUnpJr7XWWnulaegHF7RkEqT8KWln2kIVACmSpap9MlVZqE2/lpCCkrfHU3kqT+Wp/NkUhdESQm7kRi0tmt8PCG77ih2S0Z59kP/2JyMC6Z5oM/rHPoLMIcdoeI/NMVJQwq0UtH8aldvQIm/EfCSs2Fj7rAlu72uKRAIPlgQVXLosyenYWX/IVBttWyDaKtkYpLeFvkgNdAykU/Ws/Q4+fhUHlp/X1B+DZnj/l+jcTtEayLX33Ij23Q4Gx9Sqh2V82Z8wBUY7yDsTcki+ECZA+DVEfPdGn4gAGYXbhVfZAxMoUTMy5kKG3jDBpcFnlgxmYmbLSEZYMJ5H2AzEZA8mW+yOZGkm0x6jwb7X0/BmCsUepYPZET3+fB0ythFAZKwFfJLxgDaAg8tiBKweDAaBWHqDJhp+maVPApk1mtXCxZtjB0bMI+pI+w3iPzLICT0OdyRcOK8mxp7zvUoUYOT4CPbhPqN9nFynk7dmtmr8snxkaecCbJydEZbZH6n4SI5vScaqDw4Qb/Qf8t4T95/PgGO21kopABGbTyQE2Ib1lOBFeKwmYSvjgQBFN+30FooiLRH3mB0HV2mZPiQhhIOzjol9HEkv3WWgfcGyBbFPYclgF0I9EhERhwoLkJKKmS0bgDbbdV011lnP4oEP6gRhnA3QQIzynqYygALq+Ghe1fr52Rkgvv7968vbGwBNUWpdSKUBFLo0xhJ1XsAMZpASji2zIyFFOEggqbUxBnuwwrj2EhKXw2kjNDYu4Ad0qceEVjjpyUJ20ohQ7V+ZnkUCuctnGGEmwKcOT1XNkeUM/hHRu1pwDki5fKKDASHGHXuvyhHASUba/gVxeWcLfb9Jea96QYt4kBl3v7iSAggThTRGWcoxAkd61oNdN21vWaMirTNbBSBEpzYA0Q89uymbB2HDHz3cD9oU3HxHN/X4iLz/Y3fQXl+MUyhrZ4Ce3N5X5Ndq1vNxD8p7IHhEeJzZb6+xh+J1o0sIBdxz/+AweVkDghnkmNmKEKbWQ46OffhkhmBipHAl+zLyQAaOS4uTQhW/d46FSNl7fBsdQQmIGMx0XF1jbTm6guevRXK/jn9muwWArFZx/MK2wKHjEq8zA5gj8oncCztr0/j7uIPs1Sp/fz5kHPcEHu9F2qfiy4j3l5uXzhTCTSTVJD0cDFhB2jiGhJZKS23QM7Nly84CkCRBTEJIKFJCCilR0ATTRrVb3+dO12VRTwopd5uVM31dl3UzU6I0xi1XKwBnp2bWzBrtutVtV5ZwPZiNYTh07CasAdx/uByOF1VRtJvV/ep2uVzOp/r82emknq26tRIAUAr07BxYFsWH77+5vL6XGiRoGMxpMwNwsli8v9+1zk2IC6lLJa3rh4GasrYE4XoA5/MXf/HpWir+5OzV2w9X224zn/ZwfDSrhbAnZw0A4JV12/v13b99/e2r02cz4r
Ks59VMHLd/89WX91f3AHTBxqrldnlSS3U2OVqc/nC1mdP0s+qFkw2A77/79tevv6smZ/PZsSDUhZ6U5Vdf/vzr7797/eZHggTwyctz1vJqtSMIuO7+9u6To9OXJ/Pri3clWVmXAI5OnjmnxWQxDLcObthuLt5838yawXSffPFZoQnAbrvTRX11fTUQLbS2PQtj16t7ZpRVDWCzW1rmZrI4f/HyuF9d39y7DVvTt+2OSThnAGghp8fPNInr4XI6m5fNpG1bwWgKtdt1/jBAlUXf9ffX/Nnxi5pBSl5fXpEjktTu+qpxADbtRgj5ky+/uLpe7fq+H3oppRuGbugm0ykAJdRqfQNje9cVhTZsrbGVlqvNndJHZugAqFI1pSapnKKr1YqVOK7nDPv++tY4e3y0ANAaO7SddaYsmrqs3l5dvnj2anq0+ObNGzE4hxWAzWY1nx85QV3fNXWzXN1XRaNlwRh628E7kTAVRTGr596To911IFj2qeQd4Dc8Olhb2Qm012eM9EEPiKqqatt2tVovmhlLdF273W0BNE2z27UkSAjFTMbZxfERE/q+N30PYNevFzRTReGaSinV1JV1bK0FQWttrQPQ77rVcm0dC6md50RAmMEKRcMwsAvCkcmxI0HE7MixFIKEVFIixv5mT4lgdtYaYwAS0TPEn+/anXHOCiE0FdZZhhNCGNMKQYVMqbGeylN5Kk/lz6koINhpSBDACKo8UmhPO8yT+KYnZASoPfMc++rfgwcHbS89h/O3PShBb8t4GbkZQ2I8xyaw2/eJji08AAwee2HQFjPeEjLzBqmeYyU8z2m/yw4q/qAT0nEeBZZCchLHyAH5aN/RoYqcWU3RBOD0nYih8g8qBUSN+0GX53ZAiueVWhwu2TuS9P3JgQWUqnLQAjr4N1ZmpBscVDI8jr0xxnv3cwRNQoO9/R//DPYcBffB4PywV79gmjmvD0TzxCeYDcBHsA7tWLvY2HzRZO1MEz6a8Qcm1aExlsz6j4x1GKDYz5FRR2kC82O3+0aPLnvRggorhmPihdwszVdCxCoe1HJ/rmQW4B8plNUkPXIfL9g3zqJll2hoNA4x9rCh8MBRJf2jywd7F+wTgxPrLa92hlMgGKHjNdla2yMRjS2z1jrniCAleSKAEDIOHftp7jmXPhhQGhU/ZhQuCoHfEOE9CkDIiL06Z4WXgjKYykIoKaQj68zg6yNAEJKIJCl/jXG9ICEgSMByyLPDzq8EYQNCzVIpa0zv+qqs1ts1M7dda4yRUgFwjkkID/X4lZYEhIgVPprPjo/mXbftu1YI1XWtkrIuq2Y2lUINxgEYTN92PYCyrKQQ290mYUDMCYNw7FgEL9NREiOO5OMD/wCy51EmgKRKw7Y/PyIQszeuj+9T+6uccrEcazg+4uG2EIRTWGy+/zjujAcLkP2pmqfRjeIz4JAcPs6u3n9bqljcAlkwI21jBJEWoW8us/NhRUMvurjcgszPZMeoUdDemSZF/YAoZibA3vTdq2oiUkdKXWCTZT2Zcd/EYVdmvZ6hTmmvICCkwM42ylwUha0lYulxQ0vsyFFSxW08Ekt9m/MRS1zlWIO0a/qeDaGIweCUamavMlE9iG3OpgLHqcmA5z0xRXdixGXIziefCXNpfEJcQPwQjPRBAdi3be8L9odD0qsV3qLOI7vkWznnkP8o8Smtu2xZPbagolTPVxZ5Hj+cP8OMvRVnaryRgsYwank5/pj9np1oPSj73ZJt9+M700VZ7NPDtvgZIAhk0BKBSRApCixh3xySpAxbYgghpRDGwloDwSEcDTMF6N93u3VspJpUsuy7HYB2GMhxLYu6Lod+KMqjaVP2jpj0yfHJd29+ALDb3O7aXhZys9w4+3bdtZUu1ssNOydLKaoJgGG9kUoKUrf3w3PGpy9e8OmZsd3Qt5vbD1SVAHazs+Pp6aSamK0ZbFsoMS3ltrXVtJBVAUAQd6v7mVIsILWsC+lM9/LlSVPghwTZAAAgAElEQVQ36+3u9uY9gLo8e/ns/Ddvvn15qn/++V9st+/+5df/+T//4h9B/b/8+j/mP5QAXh29Oj9+9fnzF22/+f7D2y8+ebaYNH//lz+5+NB3u3VTEwCljTGF1pLEsNku71Y8qY4sFRY0lQ6AVP3tevVcVZvt+9ZWL148+/03X193O1VUp8+O7i8uAVxcvX3++c9Pzxbamh9bUYrh9W9/NT9/bhyXdX32+ZcAZFV1t+vrbXd0flY35f2Hy93GLBZyY6wstNYVgKEnKaTgNXXy29fvDTC4blYebVrz/OwcwNffroG+KZXpe8O2Mz2zY5LWGkAYOwBgmO3tNXHfFLptN1tjZs20260Bbjc7v+7qqW5XXDB++P710G6Pn81LKq9XvSikHdyiqQEIdr21Ukm27BxLqQRRWU5aom6zA6COz07nxx+u3zFQNBPBYOeGtgObqip1UQNQtdZaOaBpJq26UbPy6Pj49v52XjTGmbvNCkChq77rh2HY9itnu+VqPWs2N5vN3Wo97Lr5ZAZgvVkt5lMn1OXV9Wwy3WzWne0KVUlRrLsV4HOyWCmVUmqwlp0zbgDBOOOsDScuIUFfJn69/MxYE1KKvm9LqZVWdV0Vhb58f9V1/dHZ0eXlpT89PVrMLy6viMQwDMyYlHMQOWJmJ5UEsKjnYHd7fasLtVgsjDXWOl1oa5yI262oy265XPZLzWVBVVGUUkgi7p21zkaDScAzPC1bayVBkfT51Hw+QC+sLBvbWRAc20KXutSO2Vnuhx6AVqVxrd9AzWAsjFbaGCMLKYQS4gmOfCpP5an8+RXlyJJmsiqD87xy71WqkCCSolIVFH9AOhG0zsiBYITDHX8XITISRiuMpVWgeIQfTZzwT1Dmgp4VKBEOzJaIQ6ijUMFAFYE03u0FLtqowSZ1NpITg5VOSdFT0bBxFLX+wEpI2QXIs/BGm9Zl3JbIIImabkQQEOgPDkww8GGzQ5PGK32FBMC5rxbFJrC0iFZntFEj8TDiD0jfeb1WZsZWBuCkC6P/62hzxFu9QRVHKjhdgUhGrT6at4GU6b2YRxWfAZFZyHBwqV+IWTDAwsnA8WMCYA8xmmgt+yYxAEghAfbRqS2YGIKkgpIk2SPLDAefD5vTMBHx4LrBeYxGCAkQBbfmlKYDIDC5YF1m7/Q/BRFJYoJ0DOexbiIGxVwNAAiWkLKfkuNgDwkas6kyEt4XBsq3FIEz6+0Rb5A7F9fCyNgaW5WsvxzMVX6dIaTnyNZtdErnfA54G1AIG+dDoOEkvCyavUG1Y458jhDMyntm8pjZhV0y9TwGEiAIkQgpCXkYh5jBFHm80QiUAV6wHNLVhumZTjRAJEPTkn+2xyxIxGkdu4DjDA5zVxCRENITiJxjButMVwuSJJaROTeu7YBwew2PE16VRBdAPCYPSlZptETjoMTfAhhOICUECWahVAmA4OBYSemcBQMkpFSCJBgO1jgDBKKRDc0PhjeBhCDrQkJtISUDzloAQkrn2FpXaF3JWpICYDAMrmPnyHs3s3TkCEQsBtf7atZlDUGDGRRpdkPft
VVRQbjeGWs7pRQAYmncwHCCaBj6oijXm3VTN2VR+kwLw2CVVo4MWQbR0BsS5L3UFSnT9QAWX7yo6+rm9nboDTM5YwQTKU1SW+bNbuN7T5eFYiUg2653jgfXOxKOJBFEAA09ETQyeQH2yzOtJmd8/yQoZAxcQOn/sMBCHNlHwJAoQAUJyraAGFUkLgeOy2EEL1wGlHBC1pgRHXPJ7z1E6TCKGUxp8z2AQaKIFWMbRJQr/j1p8+M4WZhkDppHicQA5IjeBWjGb6rCBfJpYrTGxClMSGELR0zH093cGHoE2Cej7SFxabASlzH0HY1A6LicXVI9GJ7/TmEcmD1pdIS0fBNipNy4PRGDZKJjB1UFRCG7duQlcpKaaaAEBdI9PHc4fiFioAOk2RUrwTbODkGOYpV99a3wQ5o6IyGwTIaZfPzPAN4G5j5BhO0u+INwkOTsxlQ2ggQEeWYNgR2gRekYhjtNBEBCAay0dGQBOBumVtCPiAD4WCV+r+eI0yVX6zR9vDSHICZBTI7AzoLjyiJiYsNWpOOxsWND8YFl4Rl/Aab0o+uYo6t7oICnGyNsnyBABuBJhl6xdCnYBqWzSUAKSSAX8u34ExGRA465VEcAVcmDGtlBlwBICcVw/lGc3Adk2CgR5tDePBcMdiZy5wUzE4QCkxjgQEyCIeAEEangrqQAsJVuEFaQcSRAQmqpHTvJCgCTs+T8RiyUkoTBOe4N6qqoJgA23a20PZu262hwblYrI2RTqn6w1eLZSbsDcHtzWTbzydHx/fV9t96hUOXRs1Kou/uuX/f2WAFo5o3r2+LkpKnZQe4GV1hXNJWzLQP1ZALP9ycamIfuWprhk/Nn13fXZrsk0Gr5DoAkXq/WkmnXuaMpHK+mjf6nf/xf/vlv/3Z786GdNABWrE7qouz/4/UPv/35V39f1LOz+fNvP9z+7NX8ZHb07ZvXABSLv/7yL9u+nUyO//Fnz3fdHU8Xq+2HqZ5f9+L5lz8HYIVUUmHYFBc3z7vv31y9bubN/XbHVW22BOD5/Ojb8uJ4ejrsoCv1iy8+f/P9m275YUWCxOzVl18B+HDz9s3v/k04hVmtlZienanWHJ88X2427eXts7MzAD++ey+EY7TPnv90db0cJCbPp8ub285h120+/PAdgKrWk1mjijNj+8G4zrCezL7/4e1nz58LpQHMlTOqYNXcXH3DTg7GQEq768nagdmLeCGomlZXV2sidF2vervpdvPFCQsWYjWpSwB6Oj2fy+vbW8Gb6ZEuZf3ypz+R7z+8/e61Zb5f3QMQStdGScuuW4GEVKWxZje0bG0znQCwgyknlb10RVnaYSChrLFwdj6bNWUzzCoA98u7aV3U1awmWU6bo2dHi9nRzc31dFqqYuazH/Fgtpu1HdiK/uKuK6pJOV3c3lwtJpMba+7WKwBVURCkAHVde3p2osoCzpHlzg6TagZgN+xM3/e2a/uWQZLEtt9WupaktGJvoEmSjq1xBoLZspQKRM454ZhIhIBLLJTUILbDICcNUaHLYuh6I2i1unl++gmA3lKhK11oR7hbXVhnVqtZM50Y64qyAmCGoW6qN++/l1q7wcwXi6oud31LQhgHKTUA6odSlgU3joTVqpcEOCthLbHU0dUJgHD+iEgQBBnHQpC1jtmJcCTmzMCdaQmkVWGdGzZbr1N4tuxgOhZoxEQp7UJEWECQhBZSisOzuKfyVJ7KU/kzKAoA+7DsvqRjpeCQtqdBRpU9/Jl/t/87Z79T/v3oG0TjVcTenEq2RKoKAJDI9ODxBgCWIg7C4ZcEEYbfgqEZjUMgHeaDkko/Ak15tQ9bkiyj3KuV9pkqWe0P7C7fHnq065AgEI7Q4Fj7vQc8/hfn32S/ZQDWIxV8pJZJ706x1YK96Q0fCojK+MBspBK3gR/We2zmYx9mp5sB9Rw/yIpDsAh5NMVyC8fTo5IRzDnStFddjnOGvbmRZgFn10bTl6M5nZFhIy8ncCkipPd42+KX+19nzduv6F4RGGF00GgwRds1mm97MEPmJh6N5NCt0cl0HOexngn989hOJGYdNGac2zhocH7tw7FLHMaDifMRDuwD/GX85pHX+RdkyIW/ICGKB285mPKPvCc9bIQUA9yZ0Ibs7nRKv/daohFjCG0ao6cRhdVJbu+i+CNHhxJSvGcuZ/SlPZggoArknCN2SirnnDHDlrdaaQDEJEhKIXeuFyQcXCEUEQ12cM76Rxk7sGEQtcPOWSuV7G0HsBRaKdW1LYCq0Ir0pl0VRenTKcymMwBtu/NsAhArJUzrPAQppURYNjwMg69r1w/EePnixTAMr7//gZWQJPqhH+6XWivfvKau63oy9Kbt+lKUBn0uKVKz9+Z/2MN8D6f5tjfMUdA+nGOHgNmfXj4mY/M3+t8CdBWorMFlNU7fKGsZ+AjLgR8usOzFj23KaZM8rB89JmpDlz3onT065+MvD2/KXnHAiP9YzQ8+j4siaCO8dyoT6p37DcSPkrREYo0j4pGRD+mxqqzdfq48ENEAIqodSaFxU9yDng+bQAefjBIpwq0uqiX5keaogWRqwihNOCkx436V6IWP9GPA1j2PP7YeMWRkfOwjOhyJyMbd/y7b1w+2+Ecq4Hn4UaXJ3d73t6s/fa3FvSnqXbEEwPGPWOBJv6K0ux1ecDgDE8qMjH6OLIQChWTjnOiyNCK4+XQiQWIMo0DMIeWUg7O+GoJD6p98+frd0pGFMwARhEXAHz9Wbm5u14We1gLAopJNU4NE3w/TuiFQqZVlDIOp3PD5y5cA/uPXdydV7YTerlZ1VXBRz6p6s1kVhR5EL5kBVM3RanU36zdFWWqtFfdSwXbD/d3qi/NPmqoBYE23uvvxt9/8/mhSnz7/4meff761/H+9+3+Wm/7Ht9cATp9/ZaFbZ+rZwg7u5GhRi+78TP/2m1/vllviDkChi/++7erjZ7//zb+Xwn766d/949/84ser1++ubs7mR7OflQC0KPpBWKsmZVWi0LVb6PKDkZ9/9aX4+S+af/hfARg1VazL/npy9f3Nf/8/Llc/6EKqQbBSom4A/Pj2u+Np/ekXL7YDr1etg/7Hv/+ffvfdd207/OpXv14uPgA4//yn6tnPNQ+b7fXF/Z0xvZSKrPvk+avVzc2v/v3fABSCCyXPTz8zbXd9d3E0P1pIebEdPjs9VqpaDg4AVWLX9U3TnM5O3W637W+dk59/+cXq5kN72QPYdoO0cnd7oYFmPj86e2EHd/H23f22Ozl5JksN4O768vLdj3CQUpSVloI32y3dWihJMoRJagpdSHVUiW2Pz569BNcXP3xTCK1ZCEYhFQCtZLfZ2t3aWTuwEUILCCnJwXZdC+DOmsZMJpN5bzwCKEkIqYqh668u3lVkALw4Oy3rutKy3+6KQitdvv/h+9PFTJQlpN6ttgAcWjMYK0hTuZMGwt1vNgUJqTV646Mors323cWHxeJoNpttt1sllXO277uyaqRWAJbbpRBEkFJIBoy1xhgrjRSSRBDf1hnDxjmnhITIbae0hOBgBdiYgYSwznVDXzfVehjaducEul0L
oGgqIaU1drVaNeVcgK2zPi9cit6w3mwq1Zw9fwaQcbZg1GXlwMzCO2tb63yWQgFh7GCcrau674bgHxDtCgqhQfwJCDFSapvkAQAwa1X4sy4hhBBSKiGE8M40N/dbMAwbTbosKu0Khs/8Q33fa63wUd3hqTyVp/JU/osWxck5OpToXBWYDoHZhqDTe4BkD77ZL4cfHjoSCxOULqbR5ci7FEaNOeIpBICJs6wDGQ4yRi5PyAlzxHZCIL0YMGhviyKXGSKR7xW07/AjIRgc38Z5IMhsz+OoyEZ7cg8Ce6SDkh2Yw7HAGG4rViWBSvva96Mld/A9sA1cqO/YmmhmJLJnFqwv3MX7z8kwsNxGIcoBz/jUvTYiUBUoqeLpV85uAkWkloCQ2nMP4Iy97U0ZQsiLMXahJ9iIaMgxceCMZGaGn8HJmBtRwuwpnPVaBkWNnBqAQSKid7w3bjl2jTj9sm4kOpgS/ABW2e/MiKsAyRwMtjTt1y+7Zf8BmZFM4e6RKEsUDyE4xt8ZoQQecTEGOBEpg79bis2VT00aX3BYjVSF8RsOdurexDrAkAlZdfK1E2eLr0XmtX8gmuLQxEWaPhxpxok3PRr9vHczkEuM8XEAGM6faROEkIGG5p8twut8fttInx6jAIQLAwCRRpl9pvU4smP/uT04cuwzF1EZnwAbgPPhBiwLQUoIzxfw8UalEMzOOQdmKYQAlUXJLjgBWWsBDKZlS7P5YhgGKEiS96tbIaAkCqH1RIUrHdd1zcw+npEQZIwBcwxsSn3XwrFQwjgrhWSCc9Y5tux8pKSr67uffvHFYM3Vzd121+qqUloLKReL4+12q1UBQEnd7breDP3QMyPwyg9XDHOkV1G2GPeBj1FiRzDocNntHYP8AZF7KC/TtvS4vE8iLkFpGEPcUfAoCPjHOI//AOT4hyAczjltiBOaUnyttM6yKqaW0Pgn729nCRQ/bPRj7c23oXHB+UkfXn64PkX8OO00QZAwja7DlFc2rMPwEEJCG+EVFI5cZMSQMbRf51jT9LQoASPZ1bPHKX9+0Cg4e+/olZwaiRg1IxPc5PeKB+c842xleBZt3KLSEWusbL5n7s24tLvH78fwDn57T5066japakDKpXYwpTN+N+eHXvvO9KEe/lIx0jz93sHOU9PzTHAj9p4fUOVT0780qyKSFzsSxPxwacfh47GP4lzn6PoxSm433v+wpKlzMM/93w7McVYm0BJ+l6Q4eKFlBIYQwrEAbOouB4AdyAlQSjBPGPMJgSAkaUjDDjCaCpDoaUiJ6/akDfvzC660Ukp4NKfdumGzqSa6KsuqKtlx33ekdFnqdreWkwmAxfHz66vrcjY5OXnWd9ttu727+TCfHxG5q2HDwwoATU+Lsnr//vtZNf/pq0+WV2/vt0tyu/PjRd1MWyf8cHPfnh3PtCgn08aZ4fMX5z8eNVvu2DAACymUcsylVk2lCoV5Ldaba6lrLYt///pXAP7qJ38n2f70xee82/2/v/vt/ab/7PNP37z/pu1Ikvrq05cAyrLqtgZ2cFZue7Nqb0/q6uXpmSgm1csvzewTAOtBlUq6sploMTmanE7Let4U2r5bbzdOA/j85ec/vH/d9/3Xb9+/OjqtmukM+MXf/e3FxfXbi++rqgRwPDkrmmK53v7887+dt8vLd++//e3XkNMequ/M5n4DYP6i6U3f1Efv3n//+U9/pkT15nf/Lgv9/POvuO8u3r0FAJJs7fPFzLTtuw83bHFzcfPpi087ul7e3gKgoiSJ1fpqOn8OKJCwpqsmdcvYtRvbSQB10dyu7pSgQpfWut4aBguFl+fnp4uT1d0HAC9OTozD3fsfoKjrnSJz8ebd7PRkWhZXm02/3QBAoRjOOVcW1Hdw3tXIe1iE1eF27c6xY4ZUysBpVUq4siC3bcv5BMBkdiyk6rrWdoMUqlQNZr0g6s0gSMlCASBRTiW1d/f1tFGWu13XzCfvvr+xW7C1z4/PAAhJV3c35+fnEFiu18Zaawd2gNMyBL13VdkYa6WQ1jlmp5US3rM5KFRwYJ9J3LcLIYenP0CI0psE2Amp/CbrjNVKT2YTKeSn519cXL4D0PSTzW6nidpu/eqTz+DcruucY60Lf0bbTCZsjVBSaU0kjHX90CutHbMQ8JmCPlxcGsPGhgg4QophGIQk63IxzRkhwBuSFIQk70lFInLO9whLqfwe6aVWVdTWWmMHY4pmUoPIDMZZ69itu1VvFYCjPyDdnspTeSpP5b9eeTpEeSpP5ak8lafyVJ7KU3kqT+WpPJWn8lSeylN5Kk/lqfz/VFRIdkmjx98eDyGeV+/RBPYoDpzfmHOLUqH8Mjmmgh69YUCWs2vGU/7gKsPgLFF19NyiSC4aD/IJGR+Qed/peGxWOj/PD9nJd0JekXjSfeg9lZyrUjfs0z0ID3shdN/4FfncKf730W8qI1kcJAbZ43Jk5eM81fExTJEVFF/4+E2UXZ8+40M6Qvom0RWSY1b+Bko9Hgg6nIIG+k6jjKUUcxN4N8XcdTpQy7yXFYWohLGOifniHWbZUw/CtDmoMudd7D/JWUL7V0UeS8brQCSO+vlGqb3MwAF/KM5tT+ujg6HMCS6UHMAOeYUxb3gKpRr8syWNQSpTN0VSiYsDfUAFytztIismtTke344UqrDKs9XBnr4Rl/lDh2/sN4FjzT9S/ODF2Jf7l3H2S/Su3F8KiTeacVcPLkjJLmKFR1mWVSKbrhgJklH4ZRImMmsi0TJ2meMw2yjy28buje30z+KQDTzyvseBTwMzPikwelwIapav8n1ekHOOk1CjEPrTk4GctVJLIgHECHcgz0mVUiqhAZZSWljnjBAh6pAxRkJKIURRgIidU0LUdWXs4DBURQlg6Hm3WTliKSWB5tOZY9d1XTOZeF/sfujZRZHNjolCBgZyzPDpbuq62XX92zdvNtut1kXTzM6enZVV1a7b3Xbnw4wO3bDpN4KE49j/I7dqbxJmcyZ4bO+7sx5MslFuxIWDMB8jARZ/WqH9X/iRL0dRFj/OkoEG6RmlTB4QYdx0DoXHxwozxXQd8U1xQo7ibb8r9l0H/LynRwJI+M6neNnHd4/whAest1E2PLJeXbpiFNKeZByIk4JiUIK4q5DP1h6dapFlFCXARTokpy5O0SRDP+wPQaI8itgJBCIOf4W1HYNfHGz6SNpHeqRIwjS5A0SK4ciaH3e54JHCioiDaOOxp5JQ2Rfg+124J68JPtKhjDTAOMHYRyCRgTNJkc2d2LvMD8cn9tsDxWN/KiXZhsSTZ+eCnB4dntOmGdcdp/+yZsUFidR+8rv8noZGqXsf8TbJasp7PcOctJrHCwUFlWhkriNoKzFac+psilpNpMADFAPBOADkwxZzIMAyR363j4ge2JHM0Skccdtygoh5cGw1CgmBGLk19E/o0HF3KOqqbkoSFoBraXCY6LKeTAY7wBgWUFQoKXbdZrmRAF6cv/rlj+9K1/3ir/5pu9t99/1/3q+6+ezIGWiJzaYFgKJ/fnr+m5vrgjtr+t4MZlgNfffyi5etlWQsgPOjRbs
rj2aLbtPOCvf+w5vjZ+f1rPlwuRYwAH77+6/fX3yYV5UkqsvCDEs9azb3u+lRXRZ8uVwCOLu/a8rp5nbZzE4XJ8fvbt4t2wvj1F98/sV8Mu0GB2BwOzfY48WkKTXc8K//8t3u7vK//f1fQmJ5e32EAcBCayHghBxWa9e7ppoNA66ub37z4/vjo+cAzp+9ZKfMtqe27YzpgaHf1pOT42P75c/+0lPzTNd985//OvBQiOaTT79Ug/z619/utneDaZdt21kL4KSa7W6v3737oZlqSfL9h4t3l+uTRW1Jda49evYMALvh22+/v7n80JSNg6hr0Q3dh+sr6zCpJwConPTtRiuhVSOVvLp6t7pfN0fHpydH3f3dbd8D2LKqpBJKMLC9Wyktj06nSuq7+2W7XRdaAjiZzXuHayXOPvuMUNxevlOlpKL46mefyu9eE/cANtte6EpBaCHqSrUuLCvnQkxtZmvMUBRl17aW4VgopRlOU8V2523E9XrdzGZFUdwMd1qpm6uro7Oj1c0NWzORalopAKttTyTWbdfMptNJNfSdJlJCrjYrKUhoCeBoPl9tN8YMfT+s12trXamLsqyW26WWCkBZllVR7fodgGHonLNlUXqHZWtDKGHHTildirKzbTRRkeIpRR0NIOGssdYRaOgHWYumaS7fXbz69JPl3R2AzXq9OD1Z3dwBrqxqtmawrm07EtIHVi6ryg591TTGuUIprYWx1oSMOuSjda+HlUZDJHrXF7IB0Ju2KhrnBgQzOxoNmabHiJHpk7kGBkiSsrDJ8cNZZ43p+h6Aj9/dDrvtsFadANEwDEIIXWjRi862H5FqT+WpPJWn8l+3pMza6ZPH3NiCZp/05NGKQ7j/Ty+cGR0BVGAK7i/+XXRwOSOgaZx/PMJEyZLM8CeOeidHBysE61TktkCmVgYAYr/1oZIPbN39CiIz6pKSm/xu8lZFhfvQbRe5bjla0Ml+GxX1R5TofBwe068fRgHbv/3QSmc+7Os/YJ3nkECyN0JTHuTnTPjrXhafaL0kkHevk30yGe/fzyGOf2jTYTplPGbpUvZzbFAcGwpGxt5tlP+71zt5LL+AplJaE486eu5VkQ4Glh+bTuHChFaP10T9KiIm+V2ZbZQ/iLPl9lEv6r0GZ0bjHiAyGu9ELjMTU6SAccRyjG3/vbmr4KOm4J5F+pHa5k/b86+l5MRNWbfnFR9HegyEOaIk5N3txvrH9O2+vQGLiM624ekU5EyMBhgfFQfZhTpwsIKj3U+HMzMIw1T54MnILoohyqqZ5fVBtjqDqeu/ZuYQ/sjj+ACkUEpIUrQ1W0HSwXivH+usCJmBfNJW13Y7JTWDu3YrJE0mTd+1SsnFfA5gNp1fXV//8OYHQWQHU1eTYRh6PWhRTOZTALd3144du+C+GLtHCCGEIKUVgPV6e3Nz23WdlFKXdVU3zmG93t5c3/R9r6QCYJxVshAke9PFiA5MPpLsAQydDoai52RyVP1jJZ+fGG//eHkgaP+kHTAO6/4r06GcH3w/XZiYIeSjj/kDKMrDioyuugfw28PjihGLHPf6cZJFsOoj63avDhH5QuYnHT4fn7EnLD/Sf8HJmfDYyUYCuCj5ZT8STPHggcmLOOoL+7I6h0wDykTZSvVDlbztkqQIO+YIuHl1ZBxuv1tEUCwCjnlD031/uOT3fUwoZs2JGkVcGyFwYsLuxlmeezEf7I+xhiOQm/ax3MGQEIchXDnuUmnE81mXqpBmZopcst+y/Bwza1ys2EcVUR47NSETo9x22XxPLRwfHYaSKF9u2XEFxp/ZmDwYEgIYjh2zi8npw7bpnyqICCTyQ6jUgQQQRIgN7IQg4QSTC1oBIR4zhL52zFIIKaVjC4CkaMqiLKvBmL5vKyGVUtu2o7Koy3I9OADbrjud1R3atuuLqprNFlU9dG27Xm0c03rLAKDvTmYndSV2u+712zcKTsHV09OmWbTr9fr2EsCCnNTl4JjQXlz1FvKMnzXHM3114SQA3N/fut6Kgpwzm+2mVC27yd9+9tNdO6y2N2fTIwClcH/z1U//xze/Xq4v/rf/9otf/ubb199++w9/+VcvFi9mk/l8MQPQ7jZGuKIo7+6W52d1Se6Xv/n6bKp/+rMv7fZaCAegt+yGodJi++EdtVbr2abFxd2qLqsXxwsAO9MVhSj05NNn55vBLNdtUy6Us7XWs6p+9+MbAPPp5NMvfz4p1ddf/3rYLY9efv5P//wPv//u6+XNhSy13AkAFx+un2m67sU+iysAACAASURBVPujl6/+43/8+3LVHZ/Op82sJHN9d3N/ew0AbCrFVSE3u7VQfHw6vb8bVndvjeNPnr0E0EyO79A0lZ5Mm7vb6+VypQBn+m7orbGlEABuluu5Lo9P5z27dr1rhJ7OTyXs6+/fVHaYvToH0LVDa9uyVKdHJ0NP3Nws5vMW0glBJGAFAAlMJ7O6md1fXKhawFohVCGqjp2PkGS7Xunyxemz77tu1/VVM2PHgpidWSxmTD2A64tL2LO6mVaT0tlBUM/GDmx1oS0xdT0As1xDqLPFkXBOWWvZff/dt7qZyq1o5vPBOQAX11e60BfvLxanJ5N6cnt/Z4ShviXC4AYAUkhnnbWGndu2GxJEQrBja23I+AcAcNY6uLjdeFvFn9xT3A4cgbb9zlo3DGa9XheVJkHd0N4v7yfTCQBjrFKyG3ZKFu3QC2Yh5Wq5Hgbrnfetttv1uq5r67gd+qosSUovmqy1RVEAeDY7J9LLzdZw72AH1ylS/kR5jOkRmDUUI5/BSwdkpxF+1ymrQvSCCFJJn2nNWOMfMthekpJKMdyu3znnHGxTTMuqWNBivVkfSqGn8lSeylP5L18UgTiq5ZldDsBrTtEkGg1ePlAZD2MPJkbR/sfjc7y6Kxj7NnW8Pr0JwTrgRGwaHxHsnPgKHsNF+Urmbxzrd0hlHIGMYDKPH+Y/QX63zk7x9/EyZAbF4euTDpyZoSEC0Ih9BA7dvn3t73loe37cJNlr9giIPobvPLQr+cGfeUzKw7LP5xzRlzyY9J79+7FnJLoZxjHKIz5mhAELphST78GjOdJYs3GkRPLbNzqCLi+yUaF9Y2jMRJLjB9GAoth+vxpC/KdsgtL+bwH1dPGpiWJzMLUzlOrg8xxY2Z/eGMlgCdPKr/eNz5lg/j3ZFEiLjvZTM6U0KaN4oMgq4WReJoMbD8uBEXlgZ/q7/SNyUbAXQdJPD8ruCsk/9tbzPriQDHEEHIyC/ZenHTysjH9NQCT3LkkwRQAoRwobczj2hnMuMpoCR9LXZlwjROSRzcwqTr1EI4EYAeZm5zyZBgA8o9AvDv+5Y4QctAEpcs46hzG9uG88SykIUkLF+kcmFLEg6k1nhh4AYhc5hpCiHzpfmb5rpRRw9nixIMK0aQAUWouQ2ZyVVuyskuJkcXK/up/O/AXFcnlXloXWBQO7tiPhSEgSQiufDhtd37FzuijgHDseenO1vW671lqDwHCAc47BQsABxholFUOkCZL6llOekEhc9htCZC0B+x2eNo1cRLlsX/uIxBoLjyL/sX
k/XvbYsk3TCgiCJ6zFFMuYPdf7j9XiD9ZvbPjBfoT99mUgnG9TlCS5fPZ7Hj0UWHubdxqWTJ0Yd4MU+G6PPZk9IsnpUaowh3QykT+Y35mYkRnulP+DuKX4Z/Mo3AK9PgKI8RVB39jvSE5EugOGsqeij5EkOZsQSYaMAs1j8m4kyqTJlo6AhYxCNdb84bTNWkrZZw/nChOxA3thgqjAUOBEBVCUswqOY8BMh+O3t98jO+NkjLB1GkD/YTgFCVteYjJme1FGxTyYp3vTzK/mhzoLkh54sP0cXBpAWK88eFCWkutM/sJwsRQiE/KZuE4HiGkJ8/jXnlwfj9d5hPjzN3ECvAnk87vnGkTYqjSEdSzIsFAF1GCcB2tybYPjhjP0piis5QFA19p6Kmopl5s18WCULmSpHffdwFI0VQlACtZNfflh0+1W7662i+nxydHi+7c/btqdGfhoIQHMmtqwI1X0rrWm10ptWjMTdHNzt+1WjTIAwD2zFgIEJ1U1rStm8ZOvfvL+3Q/rwQBopJSlJEZTKme6gXtATLSSkBd3nXIDgMXpiSsn23b93ZuLn71aPT86O/nr48/OT8B6vdveLO8BVJJPj2fL7fbHyw879/z4+Pjy6vLq5m765ruTTz7bvf8OwHDy1wxX8Y5Wb7XddO326urmw9XFZ599qtQJANPe7rrt2lHRTC/ffHM+P1Wq/vH9t13X1yRqLQC03fp08er56YvNcvPd73+9BgrdTOdnN2vTbTpVKgBmsFc9mhkN1tq2O27w0598Zrft9fX17dW73a4FAEnVRDWTybq9G3rbLu/NwLXD0em5VhMAw9A1TWGId/1mc39Zl9ViNt+sb7edHVh7QuLZpNp2Q2c7NZlAC1UXvWMym0oaNnRydAzg29ffLeZKl/r9h8v55Igthm4l69lqvXYDtx0D2BnTDTeab4+qEpPG7frVtmNdCyjQAKCuy6qasLOF1g5QJB0ZSSA4PZsXrgNwe3u3urrdqs1kPrNk5vMp22HoukLqpmq6tgMgpRiGXaOUqprV3VIJYmutsXVRKKWtNQBW6/Vk0niPpuOjo8EaMxhnXVkWw9ABcMw7u/Owu9dLGEwCgghCeg8hInLMhg04aFyBTAjHUUQpyM51Qkpj3Xazsc4YYwZlhJCr5VppAaCeNMaardkeVSck1d31jVZKCLndbXe7HYCmrm7X1yc4cYaruqyqSglhLQPkokVcN/V63RKEgjawggSRJ05myQYpmb1ChD8c2Pm9MaZOAxGbwVhnpBDMwlkHZmOsZQvAcF8KoYViQIAsO7+Ht11LoKcAbE/lqTyVP8ei2PvFZvHnaQQaRvUqU2fjn4/ov/6fHDl48DeLUbenqCVi36YI1wZ9UIQA3y7pnundwSMogiIJkSAilzz1GJmiR5mB73VBvzVAEExuLo6t8yZYyjwO8PjPvuG11w05n4OiTptOwD9iaOYMq8Pn5Or6A/sy4CC8V++gwe+z/5J5QFn1UpXSc2KX5sZCAObSlNirxJhyIDdw+GD483ciHu9njQugTOrk+Ow45Xgc/kM6ovMh5olGI2u0fLLr8p8hQ0ve4ZG9FkbiwAaKUGnwxgrJfDgoEmM/x4qk4fHGOLlkvYTHeHeu/fEIYxAuHrsh2YzZDB55keGD6H+X19iBEE4W4rDmXmg5Ipx++iWXOiZrHoWn0B6ekxeOYG3e+WPT8guxt54QucaHN3m4Pmv1+LBEh/Nf+eb4rEbMLi6fIGScyyi1oamjoOMw6tkiigfWoRIx2m56hBBgF/xp4ECChBCegx1PW0IbPelGgBASakfuVUgBQUBAD8ZuicfmAARJTxPzmWrCSQZDUAi04ZjJuVATQBBJKb3JLUXwHR3cIIgUKSJSQpJQxvUAyqKUQjA5AI6ELgrrrLVGS6m1Zjts16vZpJFK+IgWbbtjtmVZbDYbKdV6uzo6Oi6KYtPKwfQAnDPG2dLZqqykUrtd69hJCICUUn3XAej6DoxC68X8RAi53m6sM0PXV3VjjOmHAYAQwjjrzKC1Fkh0QcIew9RnAOE0Xkn0jFDRgahmv8IOJmbuifvR8ggoEj/HOJGBvX1qnKFJxoY/nAQlERRO3vzGtJcy/U8vuZT1fRSmyiH+grjcwgrhsY8erFtPYUvy6LFOij0fYMdUlT1snxKry1+wf0CYSaDww6NmgXwIACG3HgNEQlJ+e7Zsx1dyEvEMsGCRThaygWD49McPCmPc09I+lZYYg2MaviSoow6TNtQkfsIKT2em40MxOmuPr6JMerowiqkfIxyf0bTHjTE+PPgaExzSCTH7pnqCXdxnx8EJv2XnonFScIozvjf5/SLD4YRJ7acwpQP6uTfOWZPD+k2bxriEfK8HRSbmzsvCpPyhpE9jTT1pXALJWOfIMI+bQASBA8It4lcPdyLm6DUNP1gudp1w4+AwJbBcQDIgyCBUO84ix6QFAAjK8N9QHFuEAwA26IgckXRkxzdny9wDzu0wlLYsCg2g0oId28Fst6t2u53OJ6R0UdRlVYGktT0Aib6oq1LR6zc/vnh5XjfTVWcXx8fv3182pZxP5wDOTl9JXVa6xMwQejusHcTZ8fz69rrvN5+8OANAurHsSInT5y/nTW2GAbr8dHFU1eXtugcwlLYoin7XCmGLUmnUzxaz//s/f/ni+Xmp1e22B3C1HE43y5eLk6O/nl9cb7/69Lw4P14vL58tplKY+6VPxab7flhtN9DuZrtVStaT8uWnn0q7s5cfmpvXAOoXn8Fcu//4ZXH1u251v+7txWrV2e793VKIGwCqEtfLpZ4sb++vv/32W13Pru7vL+83P//kk6o5IqUAfPPd7yre2X7bsvziy6+aaf3jxXWpi+fPny9/+9poAJAkYNkaefHjW+mod+Li3et2s9PFrG07khLA/GgmmK+ubrpeKEbbmaIoemM+O33erlcAttt7JdVydb9Zd0pjOnnGql5tLuqymB8/v353AUAUfDadrVbdi/nJhqRk8Ha7Xd6hd0VV+tNEaXbt7TA9OXZtV8ysPDp+/bvfzaQuGq3hVm4AUAg5nzTdZrOYz+V0qipebd/tzJoJoh0AvDh/LrW8ubkBOynEYHdwXJcFwc6aZl4tANhdf3F1O3Vmdb88OTuRKIkEBjest72uhBIAdFPwzrBz8+lkeXe36wcttRm6UhYKdHJ6AqDSxWazrur67u72+Oi0KRqjDRG1fe/YAbDO9X1fVqU1Rivt2Hn1ygHETpEGIIU0bIwziAYCGM5ZFuHsAXF1F7rqu951vRa6aztSsmomq/XdtFoAKCeT5c1t17bVs0nX9b3pAb8HozM7AFopguwGo6USSg7GaKUBKKkYPHgtXohtt3VO+N1EkjLcZZsaR+nBzCyC+uwYNhwokPRWhBBExNZadmzZMRufUBsEgf+PvTfrsSxJzsQ+Mz/r3eLGnmtldXV3saubS5MSqdEyAwygASQIehL4JD1pfpD+g6BnPWgeNJIGGkDUcDhkc2uym9W1Z1ZmZETGcuNuZ3V304MfP8uNyG5yRsAMgXDUcuPec9zNzd3NzT43M
2cARkwtFQkxkyGxbESkrqutrghU6QrA7H1C8aE8lIfyUP6DLIHzIWn1WxoYTp1h0h7gS+Nv1EMH3Jt9rZB6AMGu9cZMjbuWew3Wp19r3/WGpQ/1EiL3hFf6fcgR2gaa8+EebtDk+fHuaI1/WuOi0PWuOZwGeHCIhe6RthXfWE/xH0B66D/Yr2L3w3vKgFO99vomzj01NTpv77WOYvGmm4d4etZboxb3Hdx6dmPvo7M57ulfa6g088ObY37wxcE7LaIj6GxBQZeycPCvDBtpvQVddc6cox1+NN20IGpvqh2UvoXTfW6HdkiCEMgZA81kkh23h5Z71MwZ5z7TuOze56dBTcYpP8f7HrYdVDJcKn7F0SDl2XA2Nk+0+bPA3Ug1lTe3eApUgwW0hHVtDaaoeJpaY41aSID8eHYEDLBBD1D27eRej0jEDr/vu0wNmbbDwT5fhp+pi4K8v4i0EGRvxjaD7X0tAfdhOHq9ueLo93O445hbWlYaPz64wLt+NHUjzdDKMv8FUeMo1XrmeldXaV+l5kcwKwKcc5MVSwJ/2tAmfRNrLeDuyxQQB2Foda2rWkP7dL2cxHEYRgZaKRVGIUqL0IYcg8RaA0CFzKzyLN9s16MkCcOgqsuqLIxUAYXbbAMgUGEQMGCVojAM8nz79Onjoqr29/e22w2ALFvHIYu1YRSSdnmpGhOcmd3t2yxwgejWWsUBEZFQnCTGmiAKyywDEHCURKNtvqnKKk4Ta4xHHgnDOToccc+13hD0C/Ue3PllsHz+LuV94l2wkyxjKL25w6TuHM69h4ZfQVrP7c4HiTWzq0Gxqb9ofTsDCd9K8V/Vqux+9qu5t63487Bftv8NN8luP2933FZ0dgKw+Vq6adCJgb5McovjDj/bhePkMUPdmSm+/Q5DlUaJ8LZl27d2o3Zc6Dah/jbVwLkDSNL3+k4Ad1/wSe/oFG1f74XOuwMVAliRMMi2q8G74sL0TqB8U+1wDfYA75fY6WwNCdRypEdFdyrgF6F0Aq3RBNqAkl2xLT3Qtc8F0GBd9rfjTjzfMz+789Phwd6g14M++dKbZd2v0tTZaX/iJ6lTA9g5aIHaPjJAUBQ0PwDsz1YF4jSgNjtpf3wF4jw5lQQQo1EyKaUiZY02/S28lyRTpKh1WJUqTgCMp3u61rU1irmoYZbb0eSARUZxgnB0dfUGwPnibDo7PNwf3Vxke+PpuixUnDICAicxiAyAXAvq9WSUHhwfaFOGUj06eR6ko/L8zNQ6He0DkCCN6y2MTcPZOt+gLA7Hoc7yaRJf0wpAbeo4jENOWUkSJY9m+x+eHvz1q0+vs/WHj1785//gHwIYgx7NiM2jSKk4CVhNfv71p0kQ/Oj7RyTl3ngPwHQy2+SF1ovSbJVZffP6bRwG6fRwX5WmyC8+/TMAJyczvn1z9kd/MK3yMlfp4fNkqz6I5fN39Z//wf8B4Hd/53c2WT6i7C+//SYJosXt8q9/8efjeE8/exyMxgeRAnB+flYzFVLMwvD08a+JvT07f3V1tZofncaRqGkIQDIxZE9On5f5u+vLNRt7udhOElUVa10LkwVQbvMqLyvNs/29LdaFrT6Y7y0uL1frdybbAjg4OLy5Xq0WhRVYImgjxMwRJByNRpfWAMhrHBzt1dc3e/H0ChebPE9FIpBR8Xg+vrl5B2B/HN9eFbOn86urV7w/4dH4+Pg4N5U1xSpb14YAPDs9PHl68ubLr64XN/MgHI/moVJZXZNS7nxlf7avURdJpbdZDQ0RIpnGo6paU11yGgN49PTp8jaLI0ymyep2UeU6SdM0TnVdnp2fPTrcB6CMTtJUolgYT54+xrvL89vVJJ0tN7dcBPO9KYA0STbZWiAGst6sojAtiqIqyyCOozACEFiIFWLO6wwga60QWyu1qbXWFDrpn4jAWKuIW520OTDytl9R56NwkultXuURU6AUakJZaK2Z4FQRy7RaL+eTQ0MUq3CUjkVkk61FJFYpACFKo/FkOknjtKrLm9vFdDqNglgxKyYRA2A0iiej6WK9AgJFgZHaioXUTEFf4nihJ4A0QSAkRMRMLk8lMYhR1bURTQIx0LYGkVLKaRIRRyC4cyAr2oolwJJJ1UjbWoJ/u2PMh/JQHspD+fdZ1DiYolGHu8iXDjZpna/8Tx4UccpWXxelJim3c+Txz/RDULxq533OGkWTiUgRdzG2QiQKAktGSAcSoM1m6LQ9AYMdvNL87RzahBwRJKyEuT1ah1iSxrWTCcRoXNvcoT2LBSxEhk513pmRWjShZ1G16X58sI3nYfsBRK3bE6GlwV3lA2q2Iurq8ezzMXLoPAscW6yHJ9r6Xdd95k0Rlj4NTMT+yd4/xETc+W80W6TP+NWYCwM13fPEdVTaOtlXsGNdwUMpDZVN0qrdSKfW84s8LAy4PJFCnUbf8ILIU+48FKk/94RgiXs3RLQBUi31btdnaXOaiUDI2sYVUpoc895dx4fgNvSKh7yauOx23pEQN/kFhQzgjX32+WKc5WjRNx6F2/XSGIvt2MO7rAiEuVtqA94yeTeqbkGCndeLDJxeehOs0X88s6l7BMI+xqWpxkPufhX4FSHwA9nwv7GGqE+nCzzrHzG0M61ZEc1ZgxA3JwyNVtZ1zvmndN2jgW+O+H+aB7xB52PewO2yJWImdgflJK2LT1dv94cInOehX03tP73Avcawb0WFAEQBEwPG6FqsBZEKAutcRImsWKNrgIIgBrGxNaxhYsWBv3zDOcIRiQKx4hAgI0ZgFQfEZI0WEaWoqsokiuMwrk1tjSFA1yV6Xl3WzTNWpBQ5RwViZsVKBUGglEqiNI7SIAi2+SoMolIXxtpABcSUl9k4HQVKjQ+OQg42ywWRDRXZssyqIhylURgkHGWbW10WREbXdRSltba2KiB1JbxdrZMozbYbow10dXTyaJxEURqvFzeGA11rERtC6kprsQIJgoBZhUEokKou3UzQxoLEWqtU4O7JMdawUioIIWTFCsCsmF1CJbhwLStaKeZOKBM5pZ7I6fFuEYo1BCEwK8XEwl6AUxvG20wt6TnRw4tkV/wCubMs7+5zfjfpbQ+DhdLbTgmgZmtqqG58Jtrtoz29IGrpbv/pliqhJwXZNk7lHrRvhEMjFkDsMnK5wDcvUNuVbtijKO0JoueP2D7O3lsa/cwvvuOiACZhvzt3rHOUcgt29YS2a1b5q2Xaja0Z9b5jtucfE7s9wm19/e0Vzb4WNCuO2l3Fb7pMJCwkJIxmqxOGYidV0BxvDgmnIODmZyJmP7giXcPstgy/W3WyuPPr68SZH1UWt3c7pzkCfGYxavxZvW89FGJqxSN53xs/US2MwFA3Hy3ICiQNE2Z2L1iIOG2Em5nWsAhEUARFYAaTKBI3dv7MhgHs7NReAfMIspUmqWQrmwXWwghsEzWORiPpaUGeD143aMbYeoWz5QO109XlgmyY1rzke+RIYTT9BZwrLBE3sQWCZrsRuNQQzFBMrJgVK0c0gYxYY400ksT25imRci1b52NLxCSdPz+TMtBEFoQgUIqVWAuxDutxVYaKA6Vc
QyKWmVRAIOXc79lAQYHICBtdO+1OiIUITc4MBiyxGgWsQGKsKBtFgQqjGlWqqIKdjWe11uF4Jro8P39ZFJt4skcqmk2Pz26vklhRIKYq8vW6XK7T2VhQWzFBECkr683VfjTONutkcpBwsKoyym9OX/zmOLRMkHptQIGKhZHly+vVtdGLN1c3Eu9fv7sgolhFudZxpAgmCczjo6Nnjz54cvjoH//e737v0ZPZZP9gOs90lheYjvbG44iAOIySIPzo0VGiKAQnaRxH4SrfUki6rk72DwITvl5+M52cfHD4jOJRrOKXX326vXynv/g8XlxFi9dvLrNXWXGy/2w8mv7kb17K+vKbs+uirJbbxeOjJ/PjZ4rUh4+f7588XmfVtt6UWb26uZqOplabq9Vild2SSrebm3ScBCrOsmL/5BlBLNlQFwnZdW4Pnzw5evzo6uXLSvDJx98/ev54HMbr1VKDZqMgDjmcTL55t03imHW11fbRfK6tPtzfF+Jqs7XavvjOr22vL7KqfPGdD0uJqsXi9mpdiRmFyXj/sFpvoiCYBGkwHU2n8+zqXTgKl1lpYY3G4ZNHN6/Pv3P6dBqNMrZllifjdL1cREEaKp7Mj0ezyWa1LLbZOFRpQPNHJyHh/Ozt3nxSK769vEEZluEYBDFGQKhyW1bjg9l6W+SlVhQwmSRCmBcBUxSMTS3jyXSZL+osn+ztZ3leZeuaw9BqnW2qukxmE0NS1/U0ScTobZYdHZ3uzQ/qos6yFQecTKbldpNn21pLopL1ejndn2+2a1PmtZWQOU2nWbbVWocqKIr86OC0NLWxhgBmZqKqLvbSfRWwUkrIElNVl1asU/REQExKBSAyYiwsqcDCAnIwP0pH48v11eH+SRwly9UtcWCsNcawoVW+qawus7Iu6lLb281KUTQeTQpdWpJ1vgoRRVFc1VWdV4vltS0QpYkoKKVurm6KvMzzKghCQlhZDSgRBEoFYCPubk4BxLk0EjEJE0kcJcSUm23IihWCIHAOknWtrbFe5qPZY5yw9eqnFTFibWMrkYhpUk5DMVTy+/8jHspDeSgP5e9PUZNw2lgULRa58xlA65aIoU9VB7AAvTd6d1wOjbYdC87rrA6wos7Wb4jwZkWD7xBbIiGSBmDgNkyorc5bM4R+CFVrBgqc8d6AWQ3M4ionabz7aWBitSjona4MLMt7S4eI9f/bg2noXu4Mv2zHprXwuuc7c2aXsubf4S87bXVd8IxwZQBBdup/a4L3LEdfDTfm0H0t7Xa2j1sNHugp8HeclppR8aYcPKzVjnpHVFM8vNoHmP2RpPfSuM8pomOfeE816X1JLW7m+UEOu/NYAIHQQmkdzeQ71rSz44Xc2tke833PxGoqu/+ntutdX3rd7M9X8lVRa9R5cIE94Cp+FAaYXUvE/YPdGNUDcu4+1OOdx0r6hA2GcfDaoNqdxd+TOd00Grzbf6w3dr1F5qHarpcDFnSfvF4IEBiN36ITOA4pMz4q3AEErBSzAmyjWDvoqbfiibi1y91XipViFogxFs6rXMSIrXWtdW2tZeY4if31jnDHPC3RBAqD0AFrRO6IiACpTJkXW2O0EVMWZW1rpRQBZZHHUSwiJIbFjNN4Oh4V2VagJ3EyGSWTUayrqqgLI9YYvVosrbGHh0frbGUMtLFZnhVZVmVZXZbjJD492Q8DIUGW5bfbPFRstbZWjIiIaQ5XiAKlWCmxTVi9iCWHrvUmpR8ZsWI9xuN67DAr24eMh3KSADiMQMS6UWNmYmZwL5egWyU9sSPdYNNwKtP7Z3b7yEDu9pN2dP/bWa2D9dQM4TCfBnlZfZ9wuCNbPSHddO7lPEWL0DUS3yFeHTG9wAOh5uyjF3Xcyan+Auykpt8WvHxpyeM+B/rUD5d/K6sBptaVvMX/B4/2KtyVgHcLkfJD2wx9jwtdAAh6v3lNolOBulfgkjUMGO9PcYYS0sGnPpXMfTxoOutsz3acvdCWjtdDj0Jm5ZQZf02NHx2PWWMoUD2jHIhvO2dGv1n1Tnl6m6Z7yS23Pvm7a6ETYOih20TE4J2HW69/atgFr/0NcOZOySG/sDy8eocOGox8TwS0mHOvpz3hT80RgECIW1C72SHh0d8Gze92hf5gyI4y0We4ACLuklwEzMqpnCSKKGQ21ggsSKwYK0bEEolSrLV2d8e7F1kp4lCEKt2k1hV005UggE3COFASxxEzh6EyWmttiNXeeCrGjEczErPabBJF2/UtQ5J0FBAno3Q8js7OzusiqzbbzXo7nwSII10YayA2q4ussLas6/kofnx0Mt/br7bXm+X2+fOPLhbXm6IS0glDBIppb7wXELZZuT+ZTqfTr1+9JEZeaw7BhNEoSUJ9PJ/87ic/OJzNx+N0tbz8N5/+/NXFm71x9OWb19NUAbi4vlptV3EY7c8mdVUncZSOkjAMvj17t93mx4d7ZVZUhlZ6uV2vxilHMLqyb8/fVXn9v//Bn+wfHlRFQOros/PLf/UXfzOfzypLKgxm+7PT06PFpkynh2kQ/uRnPz1bG6FbWwAAIABJREFUXgM0G0/W2+rZ4fHF5UUUBkWRgfnVqy8DsYdHh/PZ3tW6mExmRGJMvX/yAhyNpvumzOqyrLL88nrz/KNnz0/n8+m+1aou1rPxxIYhh9E2LxPB3t50s81jtsfH+7fLxQcffC+NApgyjkOOku16tT8bT+aHDL66XiIJ5xHX2kpWaF0QbBxRcbmYHB+uFlfG2rrW8zieHxykQWTMpsizbbY5fPQ8NPrm6l0yHu2fPgsCtT/ff/3t1xREEULFiILgox98nKjo/Ozb0Wg0nu0FHF2uFppjVkKiicnWVRKG18tFWZURj2uqmGg+nUQRZ8vl9PhAWK5ef6vXt3v70+1yvXd4kGf1ZJwSKS1a16WtyyrP4mS83W50vqyr7PbymplKplLrstajvcP9k9N0uhcoVW2yZDrZLldG66yqkzjmMEziUW0qZmZwWZVhlGhjXAqcSTqzJNbYJErdxqaNNmIAYWIXmOIECDv8TwRAFCTa1oqDOIhVoLJ8a2pjtCUipQItRoCyqApdKBUQc24ybQwgkYqCQFkjTGytKXW5zpdZnunKhBQJIYrjMIwgKPICoKrWHIR5UVZWA7AwFoaJ7U4EEIk3BwQgY7RiNU4ncZQ45aeuK21rvwP7fCp3bKKuPvLabJMMXwBJfv+f4qE8lIfyUP7+lMCp2oM0Wju21PCz0+Duhr8B6DTW95dd/OceRKhJ2tdT67wsHmrARPeGV7f13G3Bp7RrJT31UxEK2Lu8dUpwa4X4gDBfWQ8JFOxsFO6HVmt1O88QvB0aUy2B8Ghb92yn5+4gMeI7AX+9APle3SXoDjDjtq6O015rHrLO1deaofePLQFN6nvpbAYa7MC+/h1jmRsLkHzLTXd6kGiP727Ptd7AFW6nW4MNNddtN/zpJUNrws16+fobY6lxMvGvNJF/1BDibBC0EGjLyKHR3g3zrnns3/FWQ2eydv8lYT/Je2/da/JQv673FGkY1UbSib+WgHqrs5+iFH7dDqiXAbX9yds
7vXAD06em3+97YBsfjzpkRPPJzz0/YrTLlJYYdA/7cEtHWC8Rw4CDbTXDyprwVT/IzYsi4kPq/Lzq9WDQHzf1WQjEHCix1sEcVpp0PwAAZiZAYL2XLDVe0t0EYkeNS0VkYQniwncEYDYArNggCESstZaYbV0bY9wy7mdKd57FELj8Zi6XJBE3qd+ZrTHa6FCFdVUHKgTEGssqJKDIMwDaVqLrjz76QCkFMuvbGwWJFQukMDqdTAEw8XqRnRwfx7N5MInPv369zbdibGmyvUkKgFmuz9+MZ2FAaagsYOM41cxVrbXYEACgAqWUCsKQWVlrxdoGdhLu89zxyPY2lv4AwQtOGSCS7WgJAGuNtdaKBYRBVqzLa7AzpgP5fHfu/uryHnPhzh7npfUuatk+dM8a/xX0vEcitGvIrQYPpgzrvUuI3xJ3TqY8n0TQ3QjVbKbSF0zN9tQs5C7Kldyipl6QfbOgB/t/03gjFCycP5qHhNqF2ueJNN3rVXlvIdlRXfyzJFb6QtDvCU2OyN4+0xMsPRDMs8dB6h2j+yJoyM17Sm+XauqTbl+8Z4iJCPCxu9abrv5Aq91ch3HgBMCIdvLIV0sd8NeT9S2+6YkbdEAaAppYxeGW4qvp4YyeDQ0N79vE+mLcOwX77QvosMjhYLcaUC/so09r/xC0d+CMNsMcNZjpbmhIFzfgez2IkW/Wq9/RhVoCG/9IEQgb6+ettWLIKoCgmKnSWiBEZEUg1kX5K2Zy4LKLGXKRP01u4G5ofN5MaZeQCojQ5IVMON3kRRCGs9mBkOxN94i4qussW9l8FQQKgC5yI0ins4ODQ7b8zTdviqL+/vc+uHr3WiMfRVMA2mzyZXl4PLtdrw4mx7kWtvXN9WUFpYtNrQvX7wlxGKewVCF5dPo0XC7TOL2taTyfAMivt8ycJKM0SUy5uV3fGilPDx5dLK7+zz/8w9/90Q8BzMZ7cXDz2Ztvnx2cLrPbvNyc7j8excE4icH86uwSwNev3kZx9MGTY05jbbeHs8OXN+ufvfrm5yX9+JPf+MFHvw7gf/nnf/zzL2/n4+i3P/n4Pz2YFebnf/zZyx9+//uVPbn67BcA1otPt/NDxqmqNuu8/LPLy6PZ7PjZxzKZPv2gFigAYuKTw2Ot7d788cXi9vz1V6dPvyOm3hbFd0+TUXAKwBpz/vqbl9+u9qfps6cn6+3qw/lczacXV1HI0e1qDeBmUcwTFUacRBwyblbXp8enV1eLQKo8rwHw5dtAqVGcTKJ4kV3CaqkoI94YU+uN1gaAsI3iiIyJ0rRYrMYUYjqazyZBEBb18frdGwCs9cH+8bYsF1erDz9U6exks7otF8vk8DjbVlFgASRBkNkqBR3N5wePT68W2ddv31loGB26SBxrOVCzcLTdXOuwsBBmmkymgYzfLa7Pz98AMOtiFib7x4+tvUxH48MDjtJUrEym6dW1TdMJAAriiCyqeu/R8zzTs4P9UZyOrhfXl1cISFcFgOl0slrckrXT8ejy3eb05Mk222bbLThwy82IduERYRDVujS1KevKiGbmXGcuv2TIoTvIZKWoyalE7IOPfFoKzcQipiyrMAyUCrTodb7Z35sbY62uAGyrtQAKxByQ6NloVld1ZYp8u6XmcI0DVgBIRAWculwGtjI2sEIqDACYohpFoYglIXKnncRMAaR+355kjDFSxyq11jKxrjWA2moRkHe2GZyS9vfMViR7lU9D323ioTyUh/JQ/l6UwEfbtGkX3fcDoGXwQegO3tXKSNwrc/tl8HMP/BKfxR/oWyTE3CqhzZWUjRMNnK7bpO8R91YHuLQa4wCwJOk07BZtcjqjOIf6hq62g3dxE/8nDXeGux31p9dAE2rVhYK/11jqk36f/dFrrx9Y3kv4PiTY7cv9uMMB6kStHcUuLpn6QwlvSezytEduYxd0Rk3LOuq8wzqWDP72X/VubOlPwV5HBP5CbW86NPZTz92ns73QA9u61nqeL32DsmUJNYHxvnnb0tHrN1FbpeucT8dIvSkxDCNseLGb0bL19mlAWVfzwBLycePe3u1pIPdNoBZX7mPu1vv1tn6ld012cmNFfVBu0OZ9hrR0Xw9FQffXrg3cq2nw4mA5eOiCWlPcz4lhFY79djCCnf/XkNM9+3qwovvtSss+eCSkMSbfbzj3qSEiVioATCORxDZXkXhwwFoj4obbtSKdp1sjimxAgZbaWkMgDp0TIIszHXVtiQgUhoGxxgaBd9+xDlhrbngUz48mpwF8ZC8AMEEpFSNyNvBkNDGidV1DRCCr1S2Ao5PjZJpY2Olk71hO8s0y2+RxVFdbG6hwNh4BmIzGxWqzyba5RZRESRRtN0sCBQypKgBhGk1je3x4enW5MHXFMEVeCMGKCVUwjiLARUUqlxJet9eQSW9Vw/OyJ7PgA4/RY5y3/5t1RN1n95CbyuylBzdfDidrv/wtXCB/2YzYBfdbMpovqP8n7jw5qKsnqd5H1F1n8vvLnePCvsDa2c/6E7+fmcCl6O14KNL/gA6q3E3xiTv3bgE98K3X45Y4Apzrq5W+dyT1fu8oHLLqfRywnQ3XiOzW11taVaSttZNy3Z7hXRebPb75tZ+Lg1weYffy320iNYCk7eDHhr29fXhQWg/HFoYj8hqBAAB3Y9exxopFD1fzh3DuETdu5H9u5WcrW/vbyHtUg16Dnr3kI+TpDkubB3t/7xx4i99g36cUyXDC7vDcb4y+jz7TZwfCDtwmuxXbTrve3LPiOeC43XXGbUeDbtmmL4TmMjFjbZPKhMgKl6YKFAequWys09QEzA2kbK1YEYIYY0yLd7odxrctEGYY0SpKtMkApMHepRbiOg6TvMqjILjdbFh0GoWX18soUgBgNnt7U6slmkw+/vhURP3sF1+rdMLR+Nnxvqtc62B1+84UuRiJRtN0FN9cv93W5tc++Ojs7FU6jQAkyURICUdhoCbT1JiCw1BDVVX95PQpgMvzX9hK89TWpkxCPt6bBMDrs4skDf/Jf/zbcbwHoMjrj5+9OLteGIsXj4//6vPNl9++naTTJI5I0SgKAPzgO8/COIyYifnJwTwMR2MdQfGXr88k1JPTJwCm8ShfS2UXX599dTw9+i9+80dfvHnzi68/1+CgJgCR4irfYHT4D//hf/vTX/zVfG8/nR9+/Yu/3lZFgCpkC+Dm5ro2eRhN15vV16++KbbrmQTffvsNlMotnT7+AMCXL79Zrm2gFAOrxeZgNLm8vP7w9Oj1/NHNxWtbagDfeXZS1sXqZgFtlsyRVKDt4t3rF48Pf+M3/yMAn3/2p/PZwWx+HKQzFfMH33t29fWZpKNfO30CYy/P3gCoy2r+9OjqZhluC1Lq+PFJvr7dbNcvPngaq4NsNgFQrRbT40ff/d6PvvnZn15cnD2NZ9bW+4cHG5Gronxxug9AKaUiFiV749EkSS4kUwhCUqIgZQEALMnedBxHi1W2yMsojdlKscnSKNx7dHTz9hbA8yePgyAqKKIwefvt2zQdcxRnm1tAnzx6OhlPAZRFXpZmXaHKNqTC9fa2XG1YiElMVV
6+ewcg+vCjaJyulwtT6zCOYPXe4fE0L9+8u0jSCEBdVxZQrIhVraswikpdEEgpVRvt1lnIMZM2Ytw9N60qr0gBLsAZ2lQhx0qFtamKQgUcBRRkdpuXla51oSsAIGaQhjGmYKhKlxrawDBUyKFb7BZGQVnUo8kYZJlJRcqK1VVzIKStCYJACCJGYF3rRqzcEXOd24QIEyulqrq0xhhr20cEFp1bzuBsvi/42v8T/dudoT6Uh/JQHsp/ECVoleyBHXB/8U7hjZ4tQwF5n0p6p/SfGErPTrF16murFTYOEX2nSKe0O/ShvfRa+pqk7btXDlwbrHelaxVYaTxVugi1DnN5n8XY9brVSnd7gh4Ju/rwry7yns8d2fcQNXQZ6kh5zz41VN47XGTXQrxnaOl9v9HdZ4cPDge9BxaJD+7DXaO196HZ3Rtz0ONQ3SO9MdslRTxS4Vzg2lSCbc/9H4TGcbSb7kN0oe2leN2hN81geyztTFs0zw9p9D/tIH8DZrXz6ZevrxY66+PyDc0+3UKjA/Vra3go3oRuubE7ae4d2r6u1Rr2tv/zPTXdQ3nPbHfOIHde6zOGhg33CGie6vW+/0ZvarXukx638sZ3N788DQR/yfHdjngcHHCJJdiKFbEOR2mUS3JX2zgrn1STxdOhhz0HK6fyWjEEKMUBKwdMOJpHydiIJmLFQV1sASRJWuua2rtWG3HY92hCGITMCmiuqdFVDQhIqrIKwiAKQ6HABgFBFFEYBACkrk+fPrldriaj2ShJicjUGsaMJyM25vzVSwCVFiJSukCRr2q92WwscLQ/N6YKUAFgFN99/t3f/2/+6//5n/3zN+efM9rrIRE1N72DQNbaWtcAtNbW3Ye+45jefGzBeIsuq+LgnKU/2P6thr1KqWYgBV0sqUD/Mu/6f4cinedjD+bpL/F26b1XNvdq83AK3Tvnu0J3nB+9i3pzpOd2TBmsrT7uT73vO05KD0HyAsxKO2W7ZdUK8T5FA2x5IO7F+/Dd0+EeW7xX5N9Vu7j/gZbOlgm7Itbv0Q2xTR97hIrPFIJmo2g+W3etshdA/mYz+P/IwO9vpwzZ7jeePig38A7ud9l6ydaORYuWvpcf1jYxD82kGugl7rzXmctuT+4UpX7DXkj2Yjp6nqtuNjQJXr1XKdDfmGgw1t0cI9yji/XOOneowGA7a5pvfm1Wffuok+3SG6A7M7CDp8VPFf+he8Izuw2C6aqS3iC61eWyVPrrzI2BcWepEBJhIABYRGDFIfkkBGHAkI+pdxNTQMRNzLunwGdyJhJBXdXRdA/WANjmRcCIo5iBsjZlXVIQTZO9y7cvC40oDQFYlIWpThTXtdmUVTwasQIUj6d7VVnUZQmgNmQU1GTy8QfHYRRdX52tFldPH39gVPju3fWPH/8QQE5BGMWTcfTm/OzVN3+VFdVosvebP/jRbO9EKADwRz/5bBapqkISKEKYhuOj+Wy9XlOIpyfPBSGA29XVo9PTcTr98s3rkCa/98lvWIi1WK3WVZYd7u8BePrsJE0TU5SGqbJ4cnQ0m8SzeKZ59s/+5R/+d//lCYCiRk3q5PhQq2hbchxRkoyeHsy/fHP+wePnADbm18u8ur05L2vzxVdff/BknWzX+/PxbL53cHD67eU7APn5q9VynY6C7PNfsFKksFhcbVbrZJJcX74bRWMAxwezm73o5U3xbD49OdrPlstvXn51ef02SvfOLpZuck2ndahYrIGKZqPo2ePjmMJwtSxubxfLLYDVIr+8+OYHHyeH4yNblLWVwkhQ1vPJ5OLmOk5iAHtHk+JmkcbpbV1TxGESr95sN8v1h8+fnOzthZMZgHevvzo//2Y0OdRW8s3q8u1ra2rFaX19lbBdrQsA1xdXglIMBFabOk5iA2NJk1CoGMDj+fz549NXr17pukrSUaAia1Y3y9vf+vj70eiIN18AqOrKWjsJ9tI03l5Vo4PD9eWb0TTRujZ1YU0MYJJEjKgowrCu1tm2qvW2DqPR9HZxmyQTazSAzXo9GiWBnV0tbiC4vb397tPnuawODg6VZQAbLIuyLOqCSDl5ba0VK2EUNhldAQNtYSFijGElTa58ImZFYHbXOBqxoqfRflHlWZ2FCDOTG9hltoSQEQ0gDhJSHCDMdWbEllXhBJeQ1awBKOJQwlCFhEgFQZImZVnAuswvdLW4AhCEIfuc1pZEkRKBhcFOGSrnBFKBggTM7FQkF9ICbpd1X7oQAAt7dzsUgJtrsX6VRvFQHspDeSj/4ZUms3tPuA1lGe1+FrQKnNDwEPtvJQWpb6D1lMgWUOqu72hiTgXGtUmtBt2aJ854cOfV1Bl30mrIfXW3M5d87Wilutt8LO7Gobe9HpR7dN9OBW4zrLds2tX5PeTRcd2bSfdx0dfvrVl/V8Duoz2rsvfqe2ySNsC79dIj6RtLA4+kXpdbk7pzrWy/pW52DFrtWW8DW0K8c+pOvGDPvbGz04ScRk8e72O08CWYWuCm2ZA7uLqzEb1xKB7u4j51TQc85E5NvvzG4upoQ+cKSQ2nqMPQBP6B3lySOyz1dynAOU62sIHtvcA7Zpq3vX4ZgjEYq0Gv2i+b8OS+oUvcs4Kbd1oPL+nVKm0N92EjMrDQOmp+GUzg8dxhFYCHQXq97WofjCcGg7Pz5/tZ1aHuDZzQ96S9S+XQvO/4RESQRkHsv+ehB3eZuECstYo5CEIY0xjQ5K9V9bZlpSsRq1iFKgxYGTGijZsU1hpjTBiqMAjiKFas4jAGodTaow+2dRdFwzdSSoUqgkiNCoDV2loDBkSiILRixFomFEWu68rd3V3VFYBxOiqq8mA6SdO0iDIEymizNx67yx8t8e16baxhqcttZq09OtxPk0gb0lkBIFX4B7/9W4+fPP9PfuvXP/3i9XKZWYAhITCdzipjARijjTGoKwHEilKqE/kDeMRb93BzVbA7ZaSN1HYfunH3QwC4SMwOXNsZ4x1B2cvU98smb78Mn5ThCukjJgPPyx0pviM2u/fu2hnDyU3Dydc9RG3oQxdT0Ke5TVawg9R3xAw8jr0oEtNWcS9N0mROphYB7NjgVg11f/bZ1DqOtIdr1MPHGoq4MTsxZHv7mQl3dtJWqLTEdNkwqduzO2hN/H+6UO1GRvb2Ey9apd2W4PUM8QPQBN62deyWTph0eg+hp4sQfpUM9SxvXB5t59k+xKilByQ3YoNp1/Oy2R9an3OA6K5i1LXtod3eQKDZQV13qPG19Chwq8L5zz1vUzRrp5XOfj743bbXxLC55sG7J8P+AKK/2Bosr/VCHagAfXY2vrltb3t7n5tMXZoCcVGankzqahMiRcIALDSsZWImsgCTErA2IjAkJgyal60xrBQzuyuPXZNWrBAxMwAjlmRnTKjIi9roSToGsFwtgiCoa311fR2kKRDESZLnxWpr5rPJ06fPAFy9+zZI0sXqNqlqXeeis5EiZXUah8vbSysBgNVtYYAonglFdZ6V2c3zR09Pjo6/fHMepyNO9gAkYsG4WJx//ear7z9+PtvjQkMbTEOZz8YAYmWtqQiWFBQoYBVym
CRJbYOqiNIxAyjK4u35u3E8fvFovsmyo/3DPN8ojsIoSINwNEoAKKVWq40pqqqsc9gwpsV2sReNo2D205+9/AcfvwaQ6+pf/PEf/HjznR9/55NSiV1lFU/Hs+PlZ6/22QJYFdWL42frPH/19U+NrSfHT7/+8vPZNDp7e/1f/aNnB/MDABfj6fp6NYnH14sbFcbM9PqrL8ZpLELvzr66urwCsCm3s6PT0eJVnOxdLbPidvFP/rN/9LMvvz7/5uUoCaI4BJBtNnmttVXzaTyJkzBKuK4OnjwqVsv9aQzg6Yff+8Wnf3N7exXMH4/39i+/fPXkw6frt4vtzVV+fiZWAzh++iw9Ofr5X3+R2fpIjWMVxXv7UW3CZLKtrD57CSA3dm8UX1y8KYzObuoXL2bLfFNoe7NezWajcDQDMBuP3q3XHNBqs1KT8ZOTxypkdwUKrAWwP5+gKsiYgFVlRLOJSBVl/u7q8qPnL5588gmAN599WqxuD5+epHvTcnWgoris8hSj2fxQ2VrnawDjo5NJPC+rbP/oWf72bZYt945Orrc5KSaSDz76HoDlehOKmDiMwihJY0qT1eXVYrmazQ/KbQGAWcVxoo1RikfJuKoKa8s4SkKKaqqZFIBIRcRsrKl1TeJ3Mi9SfKpeMtZqqwMKLalSF5UpAxUSWMgabQHUVAcIk3FKBeV1po2OKFYIalS1rgBwECsVgMmYWgBtzcXVxZOTp1VZFnntJGUQRbXWIqKcpzUIDZpouuP2LtBAAFHMVnRdVawYBC0VAIOaHRjZd6TpnzI7mduaHK0G60JzHspDeSgP5e9h4V/9yEN5KA/loTyUh/JQHspDeSgP5aE8lIfyUB7KQ3koD+Wh/P9RgiZs5L7SPxG/75YA8k4AO05Bv8wl6V7XyKFflrjMVOR978QK2nsO0YSMyu6RfOPn0DoyDKJo/B8kaI+vyccgSXtc7z0zh+45vVxJ9/ar9UvZ+ZWG/++5QPgKd/y8BGgyDLWeIHc9XryDDQDvJrlbuiqpq2r31ExghznZfWwa0Ljl7Y5j75qYQVPO36H1rGn/T/cyre8U07bQDZXtOT90MVytS04T8qR8k9SrtHPlwG5MnLRNuNkqPlSKOgo7z4l+EoI7/PW5GBte9RwPScj0+u79Lu4byn61bS7UIdHuo+p/0b8vYVhJfxlQm0gLAHm3xzaQvPXuaKtxx7kiLnJy6FnWrvpeoHbnlNTnW7M0ewz167NjLMH2anT+JH7m9KWIdyvanYO7C8w7WkuvhXbY/BhSO/gD55p+1Y1LnfND2b2/41cUN9nEZWNzQdrNimArtWuouUdFDDhSKnD3Ngqck2IzdUDO9dcF34mIGGuMrSHirh2I4jhCDBFiYrAxptRlVZbMymWpZHYuDoLWO4xgrChuvMsBsPIXp4dgxbWu8iIjQsQ8n++tVksA2+3qzZuzg/19EIpIjUej6BEba8ZpuspzywGATz7+5NXrV++uzpJQLRdmNIp/48c//vIXP51PxzWVAGyVHz16lutaG06SFMs8gDUQ4lAFUcQCgKMoL/Jaa5crU4hAbMUMnfP68kLaGUvUm/R+FNvJ1QtrJgBaazThotSINmp8R3ujfL//2S8JsL07FYbk7L7VhZD2hPhdubxToQzo6yTTsFXpJvpOjV2MMBHtugA3P/U3InSc7C8F6aSdDBweB10baAkCNMnz2oVm+/S2t9y3qQ57wvVOTzrBwezSoe4w+G8zTDv98cT7LnRpANCEbTfsaXQP8avdU2l3K3WcIerkY0eX0HvmWP/9XkIcPxa7CtKg9N0tqXEelqYp7gjo53ERgDqNitwG222zLmJAqPFD7pKKv4/2Vti2G7YnTLxkc021k6+ba7udop2GWi3m/QTs5B7B3SutOrdIakNX3BC18SAE26TZ9Hzylco97RJ36Tv8Zug/W4AbPvt77Y2YbphcLg9WLrDUitXWEFkrWpG4MFWINdbESUwgZpdAnQTUuKwSoxFnrQLXRMMbbfMib9P7Glut1vlms/j+d/esDda3t7e3ayY+Pjpxl6dZg5PTZ2I1W/Xq9asiW1sx1zeX09HUh7rCQvYmgZTmZ99++vGLx7PJIY/nt6t1tr0+Oj01YAD56l0usi5u99K9aHL00eMjbWytZbFah8kcwGwab0ojULoWCUSxCQKMkRZWLPIsYwDLdVVV2w+fBoEKGbhZ3sRhsD8bK2sno2Sz3gDY5FVRlGkU7s8m51e3Hz49Src1jZJss16us//pf/3fAICM1fInf/Hz//df/81iXf34k+N/+t//D5+fXSqp8iID8PKzn+fL8rd/80eff7YNA96bHnz04Yu3r19eXW//xR/+61//jV8HcHj84uxie/rkWTQ//vaLLz7+8e9GLz+rq2q93d4sty+ePwJwPEkpGtPn3745O6v1dH8US5jsHexfvPnqgycvrrYrALYoYyFT1brIs6p89vxJkW3250e3ot6+fQ3g6ckHq6N32WL9xv5iNpsdHxze3NxYaxdX15Uuf/SDjwEgMhzzj3/vt/76T/60LisVhVEYHnzwfDaarM3i+vYKwOHxo/PN4qMPP1oU+esvvw7S9PHe9M2bMwGYaBIpAKMoiDmMArXO1mE2S2cES4EKbI2irgAs17cvnp7mB/vnVxvRZHTNpg4gN+/eRcSPv/MhgL3xZC1VmRX7e4eK2UBnBvtBFARRtdkuljcAkMRHB/tJmBixaRpeX+YjSFEUYaCM1dE4AbDPfPXurUridDyejSaGOd/kVVmY2hQmd2slDCJWoRYNhIUuwiCaxBNillIa78g4CqPDamj0AAAgAElEQVSoMlVVly5QW0Tc8hFqsncLJFDBOl+Ngkk6So0YJZKGY6N1prduNRlriKguSxErsGBoW4ccKVIGGi50hiAkURgaMTAM5u02gwirgAMGQEzL5SrXWxCTqMqWxIp+qccPM0O4tFlMqYFxkd0CkeYKNXjHdSeGO7HmBbbXjqTxRf93yXn9UB7KQ3ko/x5LMPxzgGs0X/SRml9R+uro+57YDfrpR422H4i5iW0WK0IkBMstDNTY6wL2wbb+HoSd1ntAoTTCug367uu5jZratDhA6X5lj3YzLbZRxsMoqb9NVe8t733rfZbp3Rf6fWpsv9aOcDZAa7D04IBBbNlOmJkMe+1elEEju4zsvTp4s7Utvak3TGZF/sZrn9jLqebgHpJObuBcq9J+0zOOuo8QF6/tb+P1EJ9DNd327pLae6wIw+xRQwjOf9k3sjo2NOZKa+P26G144YfCs6DjDN/hVg806LcwsK17WEFHHYSEW+Ol4Y74nveq595y97BJb5wHoXCDGdDY4E1vyHOkHTQ0sXHdn3d4Mai6oa2NvR0+Qvf9QX+XBdbW6peNtP/rBex5nrxPzXMCTZqbtMUTQcQNWGFFLKyP+COC6p939PFTAinFIsZarQ2MZYgoVlEYAYjjiIiyvKiqqjZ1WZYCIcWwzUQRb6pKO8wEbWpT1+13DYpnBRBjdKgCEYG1j54+OTnYf3t+DuDNq683i8Xi8vLFi+cvry+rIt+b
jN9d397I4uj44JNPfgjgow++u92sXr38ooB98ig5Pfno0eMnX372p0WRixYAh7PJv/mbL/VPf/p//d9/9HaVGfBemmitDYdCwWQcwanYUFmRQdcAmBVAYqEUSW/p+qHYhcB6AyS9i2v6NzK1ynoLqdgG8Om/AKAJtO0jj60w3aXib6/wtw92K72/grojggE6Nny1ef3+PIB+8t5/wUe/umZxuunaRfQ3a0BsFxnda/1e+hvw0jqMu03F2Qtv71Zr/z1f5U4bjWgeLjgvP5quUf9xL3W73et+vPi92N9dOUL9tNO+WlCvcfI8bPvjReNOXfc0OZCSnW5x3yzykcP+hrQufUoLeN3VKKhNSyM+cthtp72UJ7t0MXVwpPigvzYu2IVWk5+VTQB4G/9N/cYBeGjul6+Ldk/+pU+53g91lRYQlLttSLclw+dq8dt6V4XXMRqavaBwG1JHnbTyxEvR9pt+i+hwwG5mDikUT5HLGVkbYdWoAr37zYhQ2VqRIrEihhW7xES1NVVtOFAgtgIiYmaGMpYIynqUEL471ncsCYNQBdoIgCRJKkNP0+mr8tvb9UYZropsPwkvFlVV64urMwCsDSi+Wiy+8/jZ8dHRm7fZOFVglZVVWWnJKwARYzI7ZsXjBOl4Ok3HdW2ubi6NFKNJnJdrAF999XNwdHAw//7zX9NRdLst05BWq3Wu69njOQAVhFUGpSiJgqouL5c3i+VymhwSgWidpmMAJ/O9UhdhgDIn5rjS5aODk3QcvX15Nh0lLotikMRlPUoCNU6Tl3/5F7/29PA4PSzr8rsvjo72078+2wL44ScfPjr5+Oef/vHkOLxYXy63+vx6s9Fllq3evLkAwJqur16z/Xj/6PuX5z/783/1L3/4u78jxs5PJjfL12ffhADGe8enB7M//aufxOEkSMbrrFpeX+0dPd5P0tvFOt8uAERqbqo6HIVZlq3O8ic//PVlUZd1kZe8H6W82QAotB6Np+VmUdV6Np/W2yyKk3g8/v7x6c8+/UsA9u3bUTJ6c359HNxU09nhk0fXf/lOjWds8fT06fOPPgKwvX6XjFJt1OHTp3Vt35ydb6+uQqXeJeHN2c9nBy8AxOnoaDwdzU6ffXevuN1cLS6/94PfkG++GY1Hq21eFWcA0nESJtGm0vNIhYHa5IXRJtd5qOI0jAAkcagCvt1uK6sDnmpUSRCqWkaMqnj36q+WAFab7XieLi6uVJiuri+OD56MQ9qfpuv1anF584PvPQNwdnUm1abIzcX2araXCmh1dZtwWAgVZfXtV98AmM/m18vbaBMcPHqqib756ov5/jET5WVeunt1IMx2FCamNloZiERhREzaGGONFgOgzupUj/Iyc3dwu3XgUEgr1lgDIFEJC2vUud7OwumYpnp7m6aj7XZjjJklMwBBEACU1VsSgoiioDZ1EgWjJNV1DcAYQ0xEWOWrsczYYBTOlpvVPJ1PZrNN9g4As9oUi8pUcZAoCkqplChFgUHViY+hti5kQg6tNkxkhSIVA6hRlboIEQKEO/fG3i3tjhoHSSNcHspDeSgP5e9bCZq0TG2OvUbhJOn+1zfP27wc1J3bN/qYFdu6HO7eZdH9QXbwfav5t0qnEMhaEfIubAZMItaYJuWaiCIRghW21jiFnZkb34NGa2dL/s4a3yWBCFkm+OsIGUJoUhY3zgRGrFPpXaIfgViyBiZkJc1BuRA7jIQaaqU9yGpy/TU3SgxPxXzyJADECAQtSOFeb9zKIg6NGHc4xsxEbEXcBYrC/k7ebjAc423nZ+oVcSYikLZ10ziRV8o9EeLO2Nv8iao1Gzsfo+6/AGCbO9W9gt7YtxCBFsPuJL9V+wUiFoIoUB4waGonblhjjGlr6lvDzaB40gdsbLIzEREJWnQSArZobhGRNukjvLslk7TGHpF3XwBp6XqN1q+CiLiimokJLJYsGW/mMYuysASxYN9Tdjm7SFQzI1qrtW/QkzgyXFu2uc8DilTfhhwY8rYZKt9zX53p6nSz17lKkLgE2goC55dkqQYMQApKvNOGd7PyXCYIWxKQsHhlDiTEnebknMlExF3nKe7AoPFS9SNj/Wrzq19aU9YLEDeD/ASy7bRw13r7jjsOdGpYX4NzU2V47Sv5i7ht91QLuLTgRmM8MvzJM8R5EVp/pNDZqz2sxF1qZIkDEWutMFigXTp2IoJSzfwjB0m6ROTGChMFAFTgMBjlpkRerOB5R+KvoXDrUCmrYQWK2RgbhSpOYkAcXdaK0VrXVZZtRSRJYmJljBWrjdGuR0qFipUxxlot1oiVKAhJkYi1YgCIFQJUoMpsK5B1XR3M5pvlJbGOJ8l8kgA4jwK2chBPluv1JsvGgYrHe8/HY6mlKku9zgBcX7x5e3FOtQXso8cfRnHw7ed/cXu5Hifh/skJgL0nz/7oJz/5/Ou3AqRhYLU5evrB7burTZntz8bFZgNgq8uyrLQxzjI3RgNEpGQHenTYEwEAi3ET2JjW/ge7UWjBPs9R8dBDwIFLAefvLHZpnSA+h2e/oaHAuQ9/kW6Coje1FDegcG8Bt6LAOqd/eBEt/buyXL334DRGiCDkZ7gT5C2etQsLkTQ7b+Nq6n+SdnuBOFcrAD5PixVw62Tv6m53zSZBZHNjKfmVISCS1unDv9u6vimlBNJz8+28VFgFjk4B+p79HgztMd9vNNZaf3zYLUppbjdusCrxjO45IvbuKHMSo0EtnVgbVCfWinOOIxB7/UQE5OSvW/I9vMnvoYBAgnZLtWIBIWEWtoE/grJETrI6Bxohf7kBtSSI53yrMjT+jQKXKLBFBlvSuvXRTl4QkSh/OuK6xWAid5NKq2mJiLVOFNp+NdZvi+SuTnHzqdl7BFC9VMji9TUBAEUNf6y/sMWd/jk3VhFY62Wsm0ltYIEffg+VA+QzoPmzYcAn2KVGXvZhfbeUPMfQ+jsSGA4/ACAUUEAgIwaAu27M2JpA3HPQde8QmKS5b0ysGDHtImVml9OtaYVowAWAWMGSWBdoYBsHTEuKlcBY0/TIwDIxM9empoBtLUQUh7GI1jUBCFmJ5GGYWgh0pZSyIBGK4qTWVtUMwChoXYq1URBZYSO1WKthw3jkbi3bFnVEUgRyfHxyeXVtJGBIME9r5LZehaIB2DBki/Lm3YUKqqrOFvrwcBZGIwRBtI22BgBUzAHTeruIlVKMkuz64mJd3U6S9OrqbV4VAEwUHUyPHx8/CRMOQuRVvlqZ29vXpSAzEYAgCoQKbfW2UuMgzLZZYelozFzYrQ5tVgMYjdVM7RvNWblIIg6DcZLw7WrzerOd5+XpwQzAze06DALFOL9Z7MXp5xdXP3gsgZ2f3bwrwDMmAIubCwlUudoub2obYTQb/Zs/+X+S01PLo/XqGkCe1ftj9Tcvv4zS8dF+/OZs/cWXn17erCMxs6P9NUcAbi9fLde3KKxVZn31+mrxtsi0pouj0w/nB8frzRWAoyQajcdjASYjrejrb7+olFkurlUImHJ1dQkgr1Rob2en+3L9rt6Wl2evR8ePlT6/LpIf/+C3Afzkp3+2zpf
z6cH+8bFSwZtXX5kwGIcsVSGUjEwJgKZTBlE4qvKlUXt7k1EcBq/Pzq63b4Pa7qcbAKv11Wj/UKGap/Fkup/d3uzFcV6ZIBmZTTZ/9hTAXhLe5pkVmFKfv32l1Y1lE4UzQKgSAIHg9vZ2dXEWxJOsKriu90aT8cH0xYvTMU9ef/0ZgKs161VpdLFdbvfn+6naPznVo1H07MVvnr9+vff8ewBs+ub1Fz+T+bhYLi8vdBJFFRWTcVzVPBqNFDGAWpcnjx5vri5AkkKJrnRdWsWmzl3GzNrYOAnzcksErQsRW+k65NiKbdeyElVXdcChwIgFWQKBWEAQC0Lo1qWBCTnSUi+WN4oCgV1vlkmcBDoI3VlvEutKKx0UOg8oiMKoNlVRZWmSGidPFFOgjNVQgQVMVbPi6XSel5Vk68oWAG6XBsKRSo2QiI6DBAJIzaR6hxrOhrRQAiCkAN4oE/JbGQkrdvukIsXEWjSDmFj/f+y9WbMkS3Ie9nlEbrWeqrP26b177jozmBkMB6sAkBJpoPig/6d3mslMJpNJBopGkaBpMREDgMBglnvn7r13n+6z116VmRHueoiIzKw63Xcgox4AsxM2c/tUVWbs4eH+xeceUgLQiASlIq3dAYewtVZrXZoyUrFW38bHvE7X6Tpdp3+YKQJQmzi1LRACyVOlxfpf1oPevyWtnf78/VMwgyrTs1kh/0Mdqbx2Ine6vJPia9lx5Tca0BxfjA4t8SZhhYNYeINFIJ42L+6uW7aQgPkJrOscVymyNkB+60mryoZp9gwRoBSLOP/N2n3JdXhJ7BIApTQpVTkUKaVR20MNkIdU6AAJJjgRiIS01uKbXpm9rg/FeAjjbS5QsvEXrX1uPh+0cq20t1YDs1HIEw8Ly0373JtFvjLVADXdAhsI0cZckjpatUNpmrSMymipTepqLolFcMUL1hwBotf4lVRVw/nsedu/xnKhCGDn5+YpNeRwXYePcq0HyBq/07m+1dBGhRoHnLYG4dZarUBN0zNUfJMi47vQ5QYBU2VJSxXaW9afX89C1ocgWMi/KdVvVSKk9it0032jmutT6dty9ujsb6hFA71Y42d9a/b1rPYiItCeaL0Tqn6t+98duQCOmK2VqiCiAMfUDoD1OchmncWftqzVUwQgBefxr7WOokgpXdG+82JZFmWe54CQ9nTwKI5Mwd7rVULxNSYmLKxIN2Qps0CsBVFhTZq1hzvbcWSTKI4oWpUGQEZUslmwOehvaViTl9OL04P33msJP3v27Ndffg7gzuqeKeal5WGfttq4e+Pm//q//7kiSjvxgwcPAHzz/OSLJ681IdKajRn0uh8+uPVK6a8ffVMW5aJ0DlDMIhAhpcg5X4GIlMi6KA9CKlCZNuZUPaAOLKMAjqzhdWgiMv6djRsu/54pEC1rcmUQr82xpDB9+Opc9MOxKVw3k9RymwKOg+ogcL3AzcyoFnJ1jIcAejqR6L6guiUI32w0NZRA1ITFmnvy2xtRawJUd0/jyVrG1ACby81vSk7Kv7uDXBVCrItGhQUVsdxT5quKvDO36jCi0a3VVGlOv0a60nUs/hqWupu8mLmqC2x+oPUaNneCjYI3NLDazF3LsznPN9tanSlW8rOW1lXBYQ01rjSrGrpGIw60xEZrHAgaNvi6Jwn1d9UgAw3nBd9bQZo1Ky1O5ZGrTRLfJILXXdf2M5KGnhPwQ6K1HhNfB6/GeAY7rV0X0yyXwnVwtQbp90MKU0XCrqpUuCURoQ0AAEWURK0V55ZLIVVNVCIkkbbWkFYOby0tM7OOYiKCtq4HlSIbbnF0e1iWtceTST+LAXR7cblYplGctdqTdHo5WrYjHVk7aCtjyuFwC8AyX5yfv+7v7UmiZqfnVBoRYbNqp5283c5nOYDDnZ04a0FkNJo/PXpeFEVHYW93O450oqM0zgBEW3rY3e5k2TIvbb4sy0VEyXBri+IUaRdA1m4Xb6YAGWNYQSk1mS/1obJkR6NxrBMAUVz22pSlrUW+EIr7re5kPLdSdrRWzG9enwM4H0+3+t1bN/dkvhp2e8cnx+/t7VBULparf/Vf/e43r94AeDm6bGUad7YffX388KDzhz/6oy+//PV7/Ru3h7em03MAT548bfdu9NtDHevF9k7UH753572Ynk0uX7CZZWQBpP2tQsQKkrR/8uI8bem97a3R+ej+bSl6XU05gN7uvmXNCVbTRWvQLXKenV4eH593xF7MFju9NoBZztt7h5aLeab3b77/7NEnxerpmNHa77VbewAOdvamX5+pRC4ml7/1w9/LV0W3a0fnx7EqDg+2Hh8dAbizM5BWR5azfDJVKpftnVu3754dvbm8tAdddA9vAJhfns+nM7MylzudzqB9enr09VdPaKuf2fbizeVwcANA98ZuvJi++uYx2n1eTIrZLBK9koh5sTIFgJv7N/b2Br/8lVgyEWmttVLUaWXz6VyipSmnAO7d2Hn++k1bY2VlVUjcoRvdO2Y+4UWpiMQsAezsDC7OBiS65GkitrO70yrzFRctpfWwv5qNAWRa3fngo7+bjtMkjrvtrN3vd7ZI5eeXJw4ihBKCMBtjDOK0FbULsSJs2Ra2cNZQFmdRHCOXZbHYXKCVSBFYsYAQFAuzFMaWophztsbkZQ7AWuNWkFZaI4pUHKm4KPMiz3XkEEOy1ha2iFUSxXG+ypVSrXaLLY9nY/ZxfprnNZW3XbVpCta0QQJkaeYK2t1gw+F2OGcbakAoHOiJAMJho1SkSEjAHE4flVKO1mDFCjfE2nW6TtfpOv0jSdFbQwtW+MVb3GQ2PGE202/AMSqosQIDpPp/CCAFwFFCnOLodC2qFOTK+gOqYFMS4J9gI4ojoEhlRIiq3TDF22MkqBsjAJS/rJnZutPjcCeaDX5QIcAcvDFEZC37bqkAIAgA1huO8HVSZAQiXHNI4d3eiNkyWwdtKGUCRiYAYJyq7JmPvhIErSPfgyyVlelCxFW3+q71WIXoNTqsORp+oOq3mhElwxa/bj+y1HScGowSEhKLtxbkM6yzbMQYFDR6pk5CALML8ydCFF6v6sBuvkiIHCrwJB6HJbvhAVEdcsURjzbrVSHeLBsuXcKggCFL1UXeS1lFngHXzMxP2kaUJ1SroGEqb9i61Mh7zTDzhm3oeQoZVPMenkwbTOjavH63OezbVnnjST0cDYTFvyZ1BrUdBhapKEB1/rRWakMnW6uMIMzmyhoDwsrcrGiDl/1OQbNm3IZeDNlLhaTAN6EpAUVRHUBOAjrg1pKIkHBYqQpApGIr7CFfEfgLKl0+TTxhLVVsLq7xEw+gGFsaY6Moci6HwmD28sSwLUzJbJVSQspYJrZRrJRSzNUN8VQVACIWscwUOEqA123Z2mWZ97NWp9OGpts3bw67ndHleWENgLvfeX+ZLx59+Wi7v9Vq914/f3YxGrdOTrO9gSnmWqcAvv7qq8lkfvuwfffWja5a/ekf/HCC+D/8u397fl68ODoDcHFyFEGlwq2t7UzxYj4bXZyU+VRscX55UrAG0IpdpFwn/NxpAiml2TbIVGsgryil3g
6v1PMzoAoNCKyxnALOQaqevutT5TcmWp961ewKVV3Lg6pq1HX05xLV7HpnqrY0qiQt1iZ+Q6RSXbl1vDL8UJ2HhO2geiMAtbSJnNewcGPFuIptcgw3Dx4CCZqA6mimQmhqILeqppcNnjrqO60Zabaqrf9IQDD5KIhBqeRquFPV40WNl9Z6rpKezUZfKfBb0EhIc98CUJ1SBU6npxx6xK5xJPWWJPWpTpVXVVIduIMAqRDB5mZTvdzcVtTbWne1EkEmrT+0lipBGX6valdh334k3JAo5e8UlzB/1qFh14xKEld1diZ201VA/Mvru5FfTaj0m7C8xJ1NSt070sQm66LW8vLZ1CsjiCaRRiWdeieVh3vYjDY2NM/JDWMkisiRwhRipSLPsxQA0EprRZHSxtqSLQCto1acsogiIq0ZYLGWWUl9c7cmpbS2xjBEKUCU2LIUImvKVQHAtlpFubJzskk7TpNWaiJjV8viYP/Gxfw8n00BRFLqOO1lg+PReW7yrJPEnS7ny+XczmZzx0DfGg61intpSmSWxXK5NPu3hmkrYssrUzp9OEW0KBflwiyKRS9utzNSSjOnWdKJ2xmAfq+tiGxpOmk3TciyGFNmaTIbzQb9difrAXj68vFisbQsrRSmyHWnnyQJr8xut7u7vZVGMQCKorOzUZpmDLS2BlJMv3x1ev+ufnhr7/7+jU+f/RsAEUuyyq21N3bjn/yTH2/vdH//j//4g/3e0aQ8aSUAvved+5R1/vh3/9CUyz87+ubO/mGStP7Zb3/Y2/1XL0/f/OyXfw3A5LPbdz4wBXJTJC3aGg62Dm6w1kuDW4e3vvj1GYB8uSC0dnaG56M3y2m5PezPpjmYtm7e/e6HD7784gsAk+XJ6Oxcivn+nVvdYb/fTm4c3Hvx/DFyGc9mAGarvJO1z6cTLOznX3yWkRzcuNXttJ49+aywZrfbAfD46dcHd9+frZaltRolTy/N1m4KE2m5/93ffvX8FYDtTnd4e//s1ZOjJ89Uti1x/Fc/+6tb9+60+4OY1Gx0DuDz0atuu2MFw8FwLHQxHrOQc7twHI3JcvFe/24njY/Hs7gzgIiQ3Rv0l8vZoJ3p/VsAdLubdfT5xWR1MTubX3y/+3GCtNQoWKajyzKfAbAq7nUHksTnk8vtWzfidie37bgoL6cXxWLWymIAsRKl0RtuvX7yiO7db3daKo20mCiOoygGINYQEEeJ1wOJOnFHBEI20YkmDYBZyqJwaKPTsz3F3pMIvKBhtkTKvSIQrTUEpc211m4CL02R6jQYIrKyy0y34igGqCwLAEYMESU6JcEqXxq2McXz+XyRL3KTxyoGnL1Qnw34vbgWZJWIasgiYSGOKBERBVVy6QRChAhha7YIHnvgQEJnDtEagjkuBGKwZcPfrlRcp+t0na7TP8gUNQ6rUVsQ7u/qr2BU1EfFsqGF1wryukZ9JV1V3HyqCwu4iHfWduZJE1MIdCbytlpQI0HS0Du9lxM8vhCcNqs2CAUHL79xKYnckT4p7/dZwZXO08+ZBS72VkU5UyoEIhOIMIV4Txz8CTcb6vy31ssmRd7BunqB2TBqmxlhw/XcLOVfAkpThi1PvHXU4Cb4zr1CqNNaNzp9rZa0/rF+R9VjW3vFgQTCbNkhCg1TxfG0lIdlax2/zr9xkNeA2N5hrglJCBuqiALxz71LAFikGeB5vZ6uE4DK51ncK8FvN1jKVYcr37uOWFG5gAXqmVcAKr6iQMhKozXrKdI6kFdRl9ZoeLAl1xCBQBJqPO0+U7Pa3vgkr72IrD8fAsWtYyjr/BHP7Kvc8KtGvGUkgnNc5XPq6xhAXk8Papr1a71R2ffrhtxmSV4HlQ0mkP9NpBkLYZMJVIcTWLN+fbUrO9O/s3YisSYN6z98YFWGMIlbXAqAEs3+TfGApA8ZUZPGqqyb/epyZWnGOWeASGkdQWuttC9ChN1oFkXJlklpUsqFfhNSABEpz5v2gVD9/HQxQJlZFBOUBNtZmK21WZwMh0MdqW4rHl2cXZ68VnHkKrazfSfb6o7PzybLuSBpp+mCINYcHR+PxqtOVwFYLvPdgf7hxx/f3B8+e/SLz1+8/Je//8Nf//yn37wYf/X1IwARUQSJkvj73/1ofnb86en5dDwCiVaAMa1OFwDZUikt4iLZkb8GSCqFPkwPciEXvP1fAVQbPeoPYKoffTZ19wfgohoX1Qz32pxdblrg3WnNzbpRYCOPEBEkOAo3x12q/eOdyJR/0C/qZqvfWq9myVgTQ36XqpB9vzrDA74DrlgwHmKvMbB6OfvjPb22VpqrvLrtJZx7bfJSa1Kh/75y9HYofKMyUm0pa00MP4ZcZb1ZqHicijwO+BYpUg+dy2P9gaqloFBjCq9tjELg5nl1hODCMGhZq6w7yXrngF+pnlQBLK6OqvvOu59vvBqGoupiCffJNEqRijVfPd0Mnb02uSm8UX+7wVoMel/jO7f2FIv1Glg41asaVVWlOkSpFLOq7AanEeDGtRDVoDU2G6oIrmFLCr4gHoJsHngqUgy7PuiuhYEa6f8g1RTSgaCJumJU7eaNk801DALCDJA/ZBVydC0mQEo2pCjWiVIilqwwAMMcU0pe23FRe4SViIg1pd96lNJal6EoUiSMsiy2+30qFwC00q1OTww00G1lN3b3P//1I5rlrb5N24NuuwdgfHG0v729gkp0yq3u6GJGF5ciaG31V0vjNCAWgTFc5MvVIs9XvTQZ9HcIZZZGiU5chzIMUWQsWEQ0WeiSVayjSEfFagUgIonFQsTx4POiMMxQANksS3rdBICOVJbF0+mSFEURKdhuJ81hH967yaWZjqYA+p02GxsDW4OtYrbstu+UpZRFvj/cGs3lT//odwD8h//nL/7rP/hvf/X1z8/OLiK7zHS03Yuzrb2t4uLxs1MAu9s7Y1v88tOf3dvd2R/s3tw7+Muf/VR/8AOJLrTNf+v97wIYjc7H04s3r1785A/+RbEqVNSZzZcff+fB65Nz4vbB4SGAbLA7SDqXF2/acVyyMVYuL0etOPrw/e9MLk9LH5HA5otxbrAj+uzipTK0c+/BuCyK0ZkyJQhW8lgAACAASURBVICVscOD2zt70RePPnv+9NHN/eG9/sfnk4vFii8uZzf39gGcTOajzz5hMsLy/g++P1uV08mYNDRLO9vaudUCcP78ydn4OMs61kT9KG4n7XF5iZcnt+4m7Ug9f/YMwMG9G3fuP3j868dPv/4m6g56B/snr05Qlm1pKcoBKG2Vjrq9fjQt2JYmX2X9bqLR3+rOltM4EQCH25olu3Pz/U8/+/TN2cnjTz77+Ld+a85lDyoh+frFCwBMNOz0Bwe7DHAxn04u9977fn4xUuXUlCi0AmCy7qvHX6cRDVrZi6dPdJKkaaZIyiL3RA1SpCLLLIw4jSIVOfafAkVR4tZZYfPSlry+iiXI9krAESlFCgQhSVSiSBdcFGYV6biyOZwTiYiIknA0rwybyuJViEREWObFPKY4TpLJbGzYZFGrtDngvaYqGVaZl1U4E6BmOrgUqVgRaRUbWwAEF
F5wKcXc0FfX5YkV6+BLAjmiJBuxZBKVWR8J5Dpdp+t0nf6RJc/gq02JgChs2mjwON4muhDeJkhD3fuWtKYX18gUbbIMml60QT8Nyo9/k6g6+/eH8FcO8IO6FuCBINX9Y/U1GeJ2LX8QByKH1rkbDnykvwBNQRpH9eumHFVKu1yxf+okFcHLd4cCkYt15K5cZGtBwqZCTxplCUAiDCIWUkRlWdb51g/SFSuTGpnAmBKbg7w5shsfFQWQaO33sOW7bbTxo2NGxDVGWRtIoSbr4FfIUYeAWdiEiCpnav8Oe0qG08jDZUfhjWos2K5t0sEwoTU9pqaLErxrPAesqrJ0Kr80TwclwDulE1zk7LWCGiZQgDX9p6rvJFiDdSEhcdPEbnC+KhC3sZT8+YA/M6BGHwfNqDnWvjHrS9HblPXqWBvpq0u/QiTRfIM2zeONDllbF2tZuqFce7xRoY1HQ9XDigiGc4AMmksh1M0ZjjUSUj3rJMQGA6tRGwr/8yu2JvU0OozcAAZ2ji+jruVarkQbVCgCAFYqIlJaayKybAExpnTPKKWjLIKIMYaFlVJCwlICuq45OSd9IZBSWqvIsmE3chyAU4CZd7a321lqbb7fb4+OV+fHZzs7nfZgAGCVT8zJTEOWk/P9w/vb/QdPH3/DYosi3x22j88XADqduNfCrf2D79y9efzii+nkcrXYL/Mc1lprAeg43r2xt7ezM7p8c378ur/Vvn3z5myev3rxhi1nUQKgFCZllShSBCJhZmEBKxVhbXJelaR09WN9T3P4rhbFa0/K2lTZzISAb5m/356q0poAmM+wKoxq3hatz9Qr2Tn2fhU7FiAO5LuNZl2tiKx/Fj/P/byrtxVqvLPWJeHUxS/ptb3EN2FTTQh7mtT4K63N/uYhTAMaagx12IsbkscvNAklo9Fr79I3CFzJ1mr9Y+2V+tXq3OntaW2LetuGWn8OJ6kesmzkWNmo7yjkSgNocwQbLWjwE6l2ifhNKofLZr1O1OgNdWVKrH2o8g4Ct3ljTHODr5Q1f5xcPUCVp4rPkX20xooJHWpVv1JtGPXw+7+azF2pSMFuqbiDiAaCLJ4u5RajAKRA4U6nKuu12R9aWZ8kuMIViKvNp8puTcBLY5sTAkiRWC5tCYCYI63AFjqK3GGMAoOttQzn6QILya1NIm0Ni7FuT1DC1ppVnmdZ2/WvDwvqqiZE0ERGrPWxuUmrKFaK0kifjU7z0qSxzEpZLZfD/q7zhyXSx6fnFmq4vZ/E7dOzCUhFSavT3jozZ6wVgPFk0kliFLNilZPhrX68nI53d2+kUZTFmQsrVJS5IqUjRRpas7URE4FYCFvtHoDD/YMsesxi82Jlo4iVjObLl69OuwlN56vLixGA0Xh8/+b7B4Pt84uLs9F42DEixfHlZF4aDRwe7ADo9LqDQQ9FWZSmN+wB7elyenI+2+/0Tb48uHELgBZ85/2H93Za//o//vsnz58Od25/+Wj08P1ZlmydHr0E8Oj4qLu9/8nRN1KUpOJ+V+12ur0sTjQuV7MWAcDgxt43JzHJi09+8Vergm4/GLQ0Tab5yZvj8fHR9o1DAPb8YhXNium4140vp6uYbK8br+ar0fRsb7h1OZsBKNIk7iSzyZxKs9fvvNLazFb7Oztfv355cX4OYP/mTcuSJsn2Re9iOiNFMItiNs4Udvtbs6UBcHDjFpFeLS7m43Jn74ZaLCejT3VsM6GXX/7q9u1dALv33sPpyfnLLznOosFebhPSaVvr+cWpJeqmQwDMraOjU1I26/R6g96cY1IkZCHeG2x7q5PEEQGJTksiTSjy+XQ2znotzE8ujAXQabdvdbd7Hf2L6XlCmIwvvvrlL7fvHOrtvnR6reUEABs+v5iIFK1Om4qiWC120mS+v315orf2di5HIwCtdu/k1eu9OweHd+/lL18tFnNrTKzjLGv5q2yI4iR2URFjlZRc5GURqShWSSmrQA4k7TR1ZoZIvbGtyVqtIohYMSKAIqUUrHeCDhdnWxFhYSKlEQHCJCJirXUrO1JRpKKSi1SnmW6Pl5d6Fpe2FBFFjZNjL5c8XxHSjGMvXmZUagFgxViARFmxlWBvyCVq2gyVgesPe+pt1f/LYtndJHidrtN1uk7/2FLkWICVKlqhPyKNq5Zr6nsN+TWEbEMl/Y2cD3nHx/r8v6kXBs3e1agBAbj9hkLsv/oQKQS7FLCsGX7O7qnVeq/kNuxPhhUrTs0LvCUNkAgzXEAPFk+Ua7hRUei8+njfV3O9GxpmDQsg3pGqhixEmElFpEhBsbBs9mPNOvFexC4k0TuuFkVNS1mrgOfthHs/woCtsS/eYjW+q1kOBGGvdnPQ7wVwN3s0fNI3TRutN9HwkHSd+WYXICBHtVGmgne2a0ulkdRvcXNyUuPstP5+7UCVyPG2nOdYFfBexIKrO15UNYlcyf7u5DrDqgnE1XVD66Zj05Jxo6ua760tCWpYj6FdCKAA+aydksI1uF9PsM2AfM1PnkXiGiAbOtP6Cg/fSAjv2DRpQ30bXb82ixuqlbMf36U7BYi3WWpVC2k8VH+/kUGj0E3QpVGZWtbQ258KvGfXtZ5wFyLTUkX5CfVR7ioLWVsm1TzzH5XjKjYgU0DAThIRPD27KHMiYsuOyJwmXaWIrWVeWlM6SVQaq2Nd9YaHuogUKR1FURRxwSIiJJZ9eCM3e4t8xZ2WmPLN61erfLWz15vPZ0k7A6DTVJYlWSmF2p0MKtk5OFxNR/mqyBe5VgRgu9cajcYKKEqbtAa9bnZpJOcyjiNbGACa5Pad21gt53k+Gs1//NvfP9jeny6OVBzPVjktJgDiOF1jLEm1TmUD0vY8uzUBsrbEKtipCWw00GS3MfitYX0WNIapsbN9i07viJzVu4KAPNUStaLKVRWqET0JdaDw+ruTgw8D+9rt0u7irOYzVxpDqDHX6iECCArEgevnZeQVf961ljZaFJY6VUDvJtXPlVv5THtkfg2Xk/qftfgedVetcVobp1/N3msKkaolTdlSHS1VX3r4DPUQN0pea2yjomGsqv+Lb34tNEJn+Bb5+UsIEtgFDZRK03i3xGv8SY1/qy6vynTnS40+8BWuvAK+Rf36tkTv+LexScrGCwHmU/6JNQRWvEMN+V7z2leouvtAwWlZvAK1UX+Ho9fBTq62LrDyGyKWgsUOdyGhv0nJzS3nwkIg7dWMeiNuGPh1YZWM8j0cYtMKICBuBJYRIJxvu5kR5iGzVjpSLr6HipUWMcHHAkopRSiMVJdlW0EkShBZsZZtxKS00g7iEB8TXQBi7wIjYgWsCLBijO31WnATL47iKMrSCBfWFEsom8WYTmfb2912twvg9Wp5cT49HA4H2zvzl0cgdfvB/em0PHtzppm6WymA0fnZXHGmOY5osLU1yHrn4/HhDWWZl/nKu9YTp3HUyhIoms9H1iKOlQEzy/72NoBcVKz/ssxztFKCKqxlEbZSFkYrbU0JIIn0aplnaWd/b3cym0dar1Z5t5udjcatJDkAA3j28vXNnS0VJXY2LzUiSifj+Wg8eyGvozhL
OAYwW+S//upvb+zc3RpkF2cY9rJ++/DZ4y9WefvJ0QsAWdrazVrpMJM47ih9MVn+9o9+0t/dmZyd97JkagXAfJlPxpOtbmuclzydf/rJxf5wePfud+L2QNtlLgqALsp8vlDA9z7+8MWr07Oz8+1ht1isFHj34Mazb54DYMvDu/du3aXXzx7JYHvn5s3jV18uOU+7SbuTARiPpqW17TQebu/3dwaX5xc//8Un7Ujdf/iw3d+6vLwAoMwqaQ9KQrfbOj47I+ZyuUh6g16rc/r6SKkSwIcPsl5misNDtvHrZ48eH7+J2bLS83kxLWynHwM4f/7i/Hne3Urvf/SBnU2ff/ZU2QhRbMzCnddbKBY7Hk9Ky2naUakSs4LNl0urGd+79xGAvOCz01fWzm/tdu35OOv3ytni7Pi0uzW0aXvQGgJIdPxi8WK5XN3c32137pTPn13Mp6W1O3cefPTdj3/5y78DcOf+R9PxmRYTtVsHuztHr1fQaPe7/ZW9tGcASmtEWMRqHWutl6WNlBZhw4UVdvc1EalIR2mUrVAosLCz0LyGGpYlKVIGJTMLUNrCss3NKnKXE4oAMNYACxAFh25EOlKklsXCnSIwOFIxCWWtLGu1lma+LBaKlACRipMoATBdTSvt3/skOVuk9jHwgoWCAGF3RKGJwQSKKA4yRxnYYAay8pdG+uppigp2S9wRJqCU/+mqjXCdrtN1uk7/KFIU8JTKBnN6tfs6qO/UtA/+C8XduslXq75cM4oqZ+21t0JdhGp10+mcDH8cXWuOQqK9sYWwAzgkwYZSKdhoqLcK5xAMkFaOHanciVN4pYEihZo17vOtcnZ51p7IG10mYKc9c/B1FSIlTETsQypLUM0FIBXubHUV5cbGBn8Btzd1am9EpVwXBWOJGz3v9kxnHDTBpLpJ1XCv/8RNK64qCYC/pJbrG11FxPWMuzMnNHwjXekYAEDZ6NIrs20Nt157mZlC3YNz1nqDqLI5fA/4OROMV+cC75IVw2KFLZHiQMUQYWardeSg4OC2jBAGkOry/Pmof49DixqGedVGRUDNtWz2RlghAQmokEX4+ledEKxyEsXuMhQCACUKRAIWxQ0LNnixhQr7BeLN2XqQpf7jbau+gbp5eM1Xo2mj16n6YvMPadIHg0K3iahczaZaAkSViAqHJo1VSe963U1SIu0bXgO7zcMXOPCRCA6X9lc/u/yV9e95f00ipdzor4uFhpTxTj2KQ6muvq77iaCUjlRkpbThTht3XQAI1jKzO4kgZlFKx0lkea0gCAiKlNIqSnRiVAnAnaO41imlFKnBcBhnqZVlvlzePLx5786to9cvzy9GAHTB58vF4MatzqA3vRyZVfHBj3748vGTk88+L0ubtmIALOX2dvv569dHZxfbWwf/7i9+9flnf5aXbC33hl0AO8OdcnL+4MPv55PLJ189N6JeH59OJ9MoiWmVu+u/lHaXgFt3D5bvdqlCKNSzxy3JIHUDSLPZvVemSj0/3GIMewFENWjsqJ+p3vu2FFimaxvZuzmOIXs0If6/5za6dsyCGtvbLIya8AkaC7uCyN7iJFwvwcaJyFue8bUWh+xIhf9gfR9Es1pecNUHJC65yL9NObYhiGgNzqniewa6JQB/KZjb7ywooDrNqL5A45LqhrR5y0Fao6OaR1aoDq38Jwmk+LW+r3jntZew97ioqYBM4kOZ+e15Y09t9lsQO65TGgQfqjYaeHS6sQ16UVNtPq4O9Sa+UVylPXjNo3HjXuOpt03PcEmd74jqHNQd13nI1VdSwlRx/cji0bR6LIX82RsRQcidx1iI8tubrBVcYfANrmTVIjcG1XbiMqwrU22lFSYMISgNLcG/mqpNETWwHeQ6wg5ZDQeFaSUERQFideEivPrmtVMhghghglYRgERFsValYSuliHL8UKUpltgqKm0BwFrOSAu0gARshcSKEo4inUWx0/8ELMKaSABrjUCUVlGSsVCaZQAWs8vOYId0ZKC0TnZ29gBSROPL+YsXrxa5i40uO70k6feKMi+WM1uYy7PLXre/6qTnYzvIWgDKy/nK2P5Op9dr7Q12isIYNm4QlKJIeUhXERnLpjQWarlaDeO4nbUSHR2dnAA4mS6NG1Hm0piyzKMounFjb3p+0oqyCFsAtnrZ6flIZLo37OwOesJYzYs4i+/cOYC1Xz9+AeB8uohJhu3W0pp4ZlopttLeqs+/ePr8xx9+VBgL4P337rbT9LCXDFu3/vPRLzlXH33n8M69O//xp38dxxGAP/zJn/z8q89OX78QlNvbvaJQSz3LRmxRqkRNjy4AvHj9urN949b9D9OzUfte+s3zZ6eXl0X+axi5zMv7O7cA9Hqt6WJsiy4l2e6Ng+cv3/RWcnN///njl72sd3F2DGCxMiSye+Pw0eefrpaL23dvv3j29OLF8dZWLzcGgI7iRNuvvvzm/sN7drVczFeLfHm4t3v//oevzs/G43MAvYg+eHj7518c3/vgw9FkcvbsObP54LvvX8wX7SSKFQCsoixVMVYFVFLO5iTox0m2s5vF6cU3j47PzwD0IztoZd957/7DW4ejN0JsLUsOFauoTQQgS9uTyWSyKklFi3yh7XKvGxuTF7rcHuz/1sOHAP7u13+73SnbvcH3vrf1YJlPF/m4XDz56tnnf/M37338g3HSBSARDm4drKbT8dl5u7/dSqPesHX85OXB4Y07u3s/K0sA03z+ox9+9+uvvp7Hk9VspIHxaERpNp6PTGncqrdlWZYlgVSr3+8OiGVVLIytYyww24KtVlFEWgBD3mhiJwKCJFaimTlSiSZVSmmkZHEi2YvYEqXhMo1amc4KWyZRmqaZMWXJpXZRv5hMWRacz+cLpVSsUharlM5NXtoStiGfg0jz6gY1ldt6p3YvaNLuRhpNGkSJTgAUnDcCRLit1AkTaIoARKRZIhc+0mv78EqkIsVvj7x/na7TdbpO/6BTBEDgAb31n2oJq4CKGBGsk2/1yfyWJBv/htT0tKMaoKh17Fqoi4dvKr818pSMqsYCkA7swaax6gL+ViBGMCUcr55IOSVaKaW0AqCIrFWwpFBAk4ilOqKPM+/9VTYVGlKVZYypTZ26PCBgEK5CFbLCnmEjRARFzLYqQqnAPnCoiQMmGxSidSc4r3E3zLPmf2sMRJoVCL9tPL32Z7DYGo0hCtSDBl5QP/TtXA1me8VYcuW8FY50neYnStO0rK3TMEOlfp4A2Kof3K4eQElrDcKgkVIhxAwRESlmNsyWoJTSLh9mw8w6cmHu6nh8blIpr5r7XqTmDK70E/dbAKEA0aRcLgFerMzFJqpQd4GiZic3bVsKVlOz18gHv6f65r7GxN/o5Grkqf7nSgUafxBtQhibs6aZyaZ8Catg4/smTi78djxy46Onm62xc6j5DwULMgAca3VbrwNdrWvDCdAFKBT2U9SqBgW5IidendHNfMhdpiT1CIf/SGmKKIq0VtayMUaRVnGsXaD0SKyxLAbEAJelUTqKkzSIoxptIccbJ620dhdVV/ipQLnZPdgZxFE0ySfj6ayzvVe
qWFTabm0BOD1+tZhd7ty+b1b5xclx3GmR0oXoVqrywqwWBYDth3sf3b+9WJmdQe/5xfTXv3q0MiJAlsa7O7sA3n94fzFfHL85Xi0nUOrpy5ew2NreFSgVxU40kjKWrRNoShEpArvws6FJoTUNKSHvmJjVelnfYKo+acD/DRz/7bLpN+5ujXOa5vSvWBBSeWoTKUCFYFhVZehdrVgrRerdKbwdJk5V1eq87e2JyLVcqljKFYrkN8rm20RrRPsKsWo+5/lYzeOtZsegoSWsr2UEaXmVv9HoGLd4qniZVOFIVZZBNoKFVXVHEzY6QofPIW7hb+jsUOdm/RqSixq7txc5zZY39hv3t1Lh1NCfqtS2ajPw6GYKzZaKJt/c5jYDYLqtXK6+3wQYrwwPAHInLHDA2TpUXbn/+t4AvD62JjibdQhQIOqCq0WoSME5dXun5xoo9W8SqkNTrhwcN3q2/idoPiGDesyrQash0UbD3dyrJqa454hqInZzP6xFc7P/pCHqq/VLsjmclWChav4rsmzLogBgiOJYMxsCYoqjKGYxzBRFOtXxKhcA87IsrQViJhIoI8zGanCmVKx1iRKAQAE2xEVnESGtSMXTfLZVZgDG43lvMEzThIE4aQlliNuWeH87Prm8dBFAtgbDcrFcGTM6eXM5Ge3uDQzz+eXZvMjZslURgFaWlMWq1U21gtJssdjfVtoFTIctSwPA2NxENkszFUGnvb62vW4rZ72Yr2b5GMDJqlQCI5KXK5tECjKZL3QUdTrtZaHaWRtAydjfUctlEWmkcToc7iqKnjx5dPH6dH/Yy7IEQMva8WyuCbsHO/mqXBmbdAY7veh9bY3BeHQBYDgY3BnudCPz3/3JH+Rl/tXjX/Vu9DuqX8wubt75IYDnl8fno5Nytdpu9149e17Y5b1bH60sHR8dffH061asAExnq/3Dh+dn45dPvtDd7UGrb+PVxWiRRKrI7dMvvwCQpGILu3+4a+Zzy2gl8floOnj/bjka/c1f/LQ3bAO4u3Ob56NW9nA4zMazNz8c/GjFVCyX0/HF1I4B3H+4/erVSQxYofnZeLvbWRXjk/OTo/NLYiRxCkB1uieXF2a1FEoXF89aGv2DO520Y1ktI8XSBjDcu90xq0lh4jh98/LYCt3+3keT6cTO590kGe4NAOwOO+M3x2VeHrbSgzu3f9p/Zk8n3bhDvEyiFMDp2fkqX2ZpPF0JW9GCna2t29vtNEMxnT0bvQHw/Y8/vt3bejWef/XVp3NLw97O5Zvze3dvvXj28s3J8d7ePoDZar6aj4fbw4s3p7xctVrpXn+Q3NUvnj+/fC//7R//DoDjozdI0pTS3Z3t0zw/O59oHV0evbbMTgQJC1sbRXEcx1Gs4iQtl0WiU03WrIxbmEmUFFwslrNuZ8ttB4wA11fOYSQsFoRMpTqOuZyV1sZRnJd5opJIxQAiHRFUO+5krYxnM611miZsjbEmjlpuvRe2MGJW5cqyscKAtCVjJRbGX2daac61Vt5QVxq7SXUYFKmIhcXd0UdwVqewcLi1xgkjFeJQ+YMfARFpaAViKMehtsqkcZug+ErMqOt0na7TdfqHn9RvfuQ6XafrdJ2u03W6TtfpOl2n63SdrtN1uk7X6Tpdp+t0nf7/SJE7DmfF8AwEQeNuRR9eR/mwPkQ+tm7jjuXqjxBKD4D1DCFpULHcwxJYPNUt3f4knKNQpTUmkTvULqSiFUApYndWL2wEECIogJzPa7i8kJTzWKWK6uLZlEr02s2iYhielhELGbZEmgBjGABDQKLiKLODkpcFFsxWawIptgygNFZrXZMUxN2HRgAU1DpXYNMTrmKShIoqUqRAVkSsreNEElnXUUw1paXBH+JmSCyimscKqa5GbRy1eb4DqI7P2KhSY0zX40ABJNS8Yca1U6oDQOezzLKZp9Kxiw3k2GWuAi4cnrv7pcGf2aDprf9Vfww0lpBo46dGosBHW8/e9S4pqictMxOJKFaIBFDMWdoqTGFLY03h+E1KKapvAaqnkcveGLP+Rf2Hjy7/tlTagjZJLr4dWqtAtCPjOVkCSBxFjQ5oeC8CotbuNWIy1Sf2YblJAFKqJoAQsTQow+6Gd09FW/MZ8afOSjlZEfiwgU1DnkilAkvHRVWtF7nAskHFQvHfUSUj6pb77FwQxbewDV2KdNwg4kCqWF2gigxWrU03TVWkTVkAYOYoipTSLjAsESp/0Ca9JZCuyIgNgeKY4D3vQMIiIe4B/AxnFtjGrfG+kwOpigwzsL68Ag1JGGKFpSAi0pHzOMpNAaCbdRZlaUqjtCKlrc1ZJI5jCotaaS3CRZErRVEUMcq8qCaSZqUBsLVbg+GYGTrutluXxWqwvfvwzoNvHn9lrem0EgC2u99J07g1yMdno9F8v7/zza9+9fL5i2VRpq3s1l4M4L17H3R623E8u1zZv/v5f16UIgQotbI2vzwDMD7qLJW8PnpmWK+YMSsLy5PyQkgXxiSJBhBHmY4ia9kyG2OB+oKwJpnN+24HueGpnsI15cBtVb7DFbgiP3nuFHO9MMN8cO6U9aqU+l4O1JPKv1PTokR8XAaIJHHs2J3GWK20Ehd2ikVJ2N6IxZP4mhmu+aG/jYfpvlMSQQnQDApBjQ233nWAxgJC5cHsn1ckUBC3xn1cACHv8yCAJlKkyFoWFqVUoMzp2ju1lvaelU+NlVZ3YeVkDEUEFhYRpavZTXEUi7A4f3y3J7vl4719awFdLUdVX1ccRBz5zlD1TgeqXThcVeoIIQjt9F1RV7f2KiAI1NoYuUHwnegFdFM0+WoyWYIi0SQgYvcjsxBXkl/gyaQEAMq3uym3g+xggURKi4hlC6WJlATPO3I7UOWUwEwACXScuAXC7CI5CIEUgTSJtRDf5uCH7GWbC0Ym3itkfSCpWoKB7R1iePoVJkTKhy4DREhsFbaspusHFYJ0oAr7qesJkIAiLT6MiYs4TfUgknjXXnLh3chfpFfJ5jAcVfc11QGnLVVXwWmQVlqRciJBiQJExIrzoPG0TYZ3Q1eA0p5aJSEetO82IlgJAcQrdTmQnXSYSk6AsYvzSGBRcaS1MgBMvoqFNOnSFMjEoFSijIEljgxiagPoJkleFow8S9JSEcCRhkaUJDqJU7YGgBFZ6mQ0X4CtcZqHVpGSTqc3m+QASBIztSY1UBynUWknWpliutj94LvUHTz++hGAGwdbxtpO3FFxmi+K1nYnTjuL2ThfFFGsExEAucLO7l6r1Y50sVhOiKXdO4TIbr8/WkxzFgA6TkmJVpLnZRrHlhExk0Wr258bABi0Wt1h7/hsudVPVtZmKp7PThnR87OLfDI+3B0C6HaGWa9ddKaX44sCPna8eQAAFnBJREFUNH11tFwt03bnbm/LKv3zp78E8NH23c9fHWuiWxczOznvbW/POY7UZG7o/Pzl15++BFBstQ93Xv+nv3mBducn3//+//i/ffnm9cnMXD568vTBR9sA1MmpLGZ7N29sbR9cjCdPPz95//32je3b529Ozsa8v5cBODw4eH38+tXXj1rdXrKlGJFdai
lhtIrJttsawA9+9IPj18fHL1+NTy+7uzfLkuM4vjw6Uu10rtLD4T0A3e3syc9/cXh3fLj33vHlq0+efX6jf/fg3vvy6ulKCgBnZ6PR6eWN++8dHOxC2BTmzlY5mS8++fnfHNw62Mn6bpLPZmc7e7dSniepBfStO/fy1ayVdfZu3n3z+hWAp08e37u9q4jz1RJlvhVDRVEr1kXcWpjyVrsFoNPdezR6st87W5bFcrrgcinMCtQiMrYA8Pjxq+3D3eky12nXWJ2Uq3t3D//wo/fOFxdDSMErACqfv8pXkDLKok7JXcXfPbj1nz7/RWsQTc9eWWMAmNmFSdLzi5O9g/6zx9+sItl61Okf3L44fvln//O/7mwPAWhKjOXp6rx8WvT37uweWlXgxXxmZwu3mKNIQ2mAtNJlUZLW55fHaZolKovjOC9XAISglbbg2WKUpi1FCsJaQxiWSyeuMp1YcGmK1vCg02vhnEfTFYnqZ1ulLQwbAIlOIhWz2PlyamG0UBRHRCpGrCQCwMQWhdYJC4sgkjiJU4DFWGMrB4YkokhpVdgCEBATUWk5IlWYPFYJAA1lpIR3FJecuZN0O63ObD5la6blGECqsqWZO75qL+4vyyUby4q1UkVpAWgVC5hFIh2LNbHSzr+RDZOqA0Ndp+t0na7TP6IUQRo+LGuscqdVBXvHa2XkQcmGQ9ZbEtWae/hc4TPBNGngCG8HG+rk7Rkfi4kb9Q21o6qOAYKovw3AQrNVjbern0kUMxsR0Zwq7TVsDxMIE6IIiY3EWgNhp6FGkXbAFnnP6NpjVAKKIuJwRB+nqOq1Rp19dQhk6yhvDSuh0dq/p498ZV1vApHNfmjUZPNPqTuv8WJd/woB9FjUFeO0bibBeW0rRbVRtzbihMYgfnva9Fe70pa3/vSO+SWNdgRYzQXfB5G/R5yICCogbA7eZll/vfJpe0s531qB8ExlWDXgYnhwN4TqCpOdgKIImGMdXC7Y6t4tdL0ytPYdEREz6qBuVEdEDcgaeaNwrZZBGnj7LViPwWav7XTtQA8JV5BW0FwFyzb8akMlqn/X3GjraKR1Z4UQa834jF4SVCulfmvTN9SFPnAgCJFSWoXetv7dIGBkrQup8U9d60oE+SbVXaaoGrUNNBwBRqmfDr3hXdRFwAJhKB1AEbZWqUhH1piSmeMkUUqBSGslSrl2WRfBlYXZioiFtdYSEUG7no+VtkZAajGb7/a60+lyuL2/zPPFYh4rHE/OAdy8/fDw/r1ffPKZlWL39l6v37k4PymN3dnq7d04fH30NYBcVqWZ/u3P/+qzp1OQtFOdtYhLNiVc4Kfnrx61tzruqq5Ot0M6ZsVJnKzKZbvVylcFAM7cBU813OSOu966nt1JRrD81/xLa0nl5F4APqqoBdJ47FvSOiL57WlNkvu3qfnl1UgI/x9S7TQdIpOu5fLW/nELI6AkzYcCAinVg1XkiDUn6HelKx7C76jCev2UkLgwVs2YulIHnqxkkR+vxt7zlrZS/cWVC6rfVvXmaFO9QTWe9WImOK5fkd5BJF75dq0Gb9mNhEIcj8YD7toXIq6xNqx1RcgYoTNoLfaAIq0oYJMCcte3rUWsceEOXBVE+C3hQqsa1tuJj3xT76sbJ2f1wAjVg+/jV0poA4fqhqcpBJqs9k14CFwQJNp6jLMqAg75KKWhLL8lsQvNGKr191EW6jZUG2x1KQ5V+58fARW2P3Lx1wSNmR/+cBitVCMFf7LRODCQoDOFJSchuAf7rlBKu2DMbDmJY+XuTmRhiIgFwBAhFGWhtVZELGDmNI6SKDbG5KsVAB3HaZxqUiUXcRxDxBqTQykVt+MEwIoX8+VZOV51OkNCNh6fjkbL3U7LMihOlg5GTNpplu7vDGaT8Sovl8uc1bLIF2Vetlvp6eUZgGFfZ1kSaSRaKYZhm0U6VspYNkY0xQC0AtuiLAyznZWjOInPp9xO2l2SNIkAZFG2O9w5OX1ureFIWcF4tppOZwSVJGlZum2rmM8XhVmcXJwoRPs7O1vd7VuHQ87x+nwalwmA7d7gYll+8+L57d3haIWtRPeEPvvqYlGuXh+/ePb6AkAySf8Wcm93+OmTR48++/r45fH/tfz5v/yj3293ekIlgP7WTnpxmS+Xz55/ZVR5cOfW0YujB7c/PC9tPssnEAAPf+cHN3vD+eXFZHKZtQ4vXr6+XC6+896D/qB/8vJrWuQAdnb2bvcGPxPz+uXxs+cvwLqtEuq0ojgju3CocazjhZViMkYc93rb0xdP8o8PlsSz5cIUMwDtnfaD7/0gS7JXX36ut3Za7ch0brXGF3q2GL95Ex0CwOLk7Nb+jR//6AfFgnuDB0+ef1rMz4oiTnuKrX343vsAXjx/brTMLy61TieFaQ/3qFwev355OSkEdp5PAeyYLU08XuSffvKf273dwWBA5ye55AUXLRUByE0BGylEIBUJEqWG3ez27vCm6qrVyh1crIqiKAoh+09/cDBfla/PZy/OTwfdTKhDPD86eQNgXMoP7vZ3+tuH33n48N5qli+evHmzGj3fefBg2Nv+6tO/BWC12jq4Jeese9HlZFJOF8lwB5MxkUrTGEBRFIUphoNhFmcEaWWZEObzWdSL++3eLFcAOq2uUmq5Wq2KBQuXZSkiWsVusWulAOR2pVUSx4mIbbfas3jKwtC0MstIxSALwLCNdUKAYVvawkjZXnYARFFScg7AioWIWCblbyFgYRErIlpFGjEAS6I16ViLJCxWYEl0QjHIUVssAAWKVUJAySsjpqV7VkxZlmmcLYqZCwFkySY6ZbEEMtZo0oaMRmTZuH0jolipSAsiHVlbMliruBVnq7IQErmGI6/TdbpO/whTJFcspzqyVcW+CEqguzJWUMcld9AFAK9J1oGuQkA/UCMMedBS3TNS/dNQeteqJ+L5BLWFJYC/MtvrpNR8uH4z4C+y3jpphiaqfhICUZImWkdFaSwXLO6oO4JSbJnAihQhg5ClBVvjlPZIR9bYAChthPWqMCzlSJPVb9U95pumjgizdUgQ+fiGDR5kNU5vQeQaDQpopogg3AG7Sb4L3zRDGlLjt6rDNuI0oTFM63/UD6DxGNwN3gJA3OUbzggJJNK3W0zv+r5q4jvK+i9K3minyizyZIwwqrUJfaUy9Lap+/crE5AG3ta0YQmwbKkBNYYS/CJzK6/JI3OD7jgtFajjDUGA4em1YWbV8CHX9lplVDYBglCx2iqrlJ4KPaxouSRq086sKqhDW0NLgmFaTTmiinTZxIbqqRiIN3BXt0tdRBM7cEZ1MBnrkbHWUFVPYRFSShGIrQ2xLKmmRa1hIWurRJqrzNevgTb6QfCAx9raanZNEwXwrFWCZw2LiGgNFs/QKUpjbGmMMda6CyRZhNgqFTWxliqAKZGKdExUklJsjFPNITxZTFaLxWwye1oWKorH8/mzoyMlcnF+ytYCmM5naScdj04PDw+//5Pf/+Kzz16+OGlnSW+na4tJN4kA/PPf/f1pYf+H/+XP00STYH+n/eOf/JO//OlPKVM3H34A4PDwxpNHj06On
tooXS5W3WHXFqtVkedFURrTarcBWCcVq34nEnL0cLveSf4cp8Kpr1Kww0BQvSzehiW507SGDKlloP/cOMV5F+ARxJ005rKbAy63wL1sojPvTtWsaixX36Kwu4Wl79dsE2artjMJk8lNXmnMYAqwSNU/a3sgHMhS7a0+g8bcXmu6bHyu8qi7gxigilJXyRm29UlOLamgrpwXXO0xaiCAVC3/dz298UsFOtHm4+Kr8q2vy/p/HX5ZazTN/UhqcJDBjdMe94DrD3JEPKnGpVGcJ4qKaAXH5mvcmOTxx/AhSJwrU8dLZkHAFevB3mi865gKzdzsgOrgGICPQBoKVNXCCREoAUCUZ6yCvOdBY6564LmWrs07uJqHcE2d0xN1faZSB2V9x2bbOMQVNA8XnDfN+jGGleYlewIhpfyJSLXh+8UXdhkfRZNpbeSDTusp29Us8b+zIk1wUDRUHKdRQiICK15ikyKyIob9HsQQViLWOyS5iHiOM2uMWZUlAG1tu5MpwBqTphFpzUxJHK/yfDFbAuhlsSg7nYyn0+Xtvdud7vZyfHx8uRzeRRwngzQBwGyTNIbC8+fPW4kabu+MxqPFPC+Zl8tVlioAaZoppdgUVgyg21n6+vjN9s6O5VKJdcENjTWTxTRRWitWpC2sisjY8nR0UUIDYMOLfKGJC1NGccQKl5P5ZDqP4xQip5NLAK9Hr0tTQKiXZLd2t/vdbpa2aTFXUffOTr/3vR8C2NoaHI/nL16/+eDe7f6g+9mTo+/duf0nv/f7RT797/+n5xcrA+Ag0bt72z/+8Hf+9Pf+6evx5IsXL//6Z7/88//7/6Ao2RsOAMTQvV6vLIvR6HJRrv6bP/oXf/XTn375zedpe7udRVGmAUxGZ4Nu77vf/9Ev/vovd7NOcv/+2eST4f7+7PQ4mlvnnpIU5Xg2O7x5kPW3Zr96NOxunVxc3rt9I47jIzl68+IxgFTdjhOazk90a+/23e9+Mpl98lf/50qlq5XtZwJgdHH8vbsPj46PPzse317a3d3h/Y8+fP7llwtTdDtbpy+P3HSO7nYgImkyO3q+vbWz0p2vf/1TSzZW0Q9u/i6A5eSznrkFit68OuZIPXj/w/Hli163s5jmaRL1O1sAnj55lmp8/OEHbVX0twaMy4ILRQDR1JQAWja7nEygNYswL1NVzpbj0ey0m0attNVO+gBKwywrIlhGFksSpzvdDlvs3bj1l58+uhxP3ZJ4c3L6hz/5J+/f3mFKH3zng3//d3/9b/7tn7W6W3/0e/98WTCAN08++eCD93/56o2I7rayyeo4LxcwudKRk2BJmmghY8yKF6SUmk8VKM4ykJxdHLc7PQBxpH34bMbKLJZloZTW8D4rThcythRroigmRRAZzS6V0t24PylGy2LuJnCqEyNGqyiLW6tyWebFdDKJ4zTSUe44oaA0blk2DFakHMPaSGnFKvGR5Q0bgBRpZhZiJqtBCsqK6P+3vWvpkSMpwl9GVlZXdffM9LwH2d61rR3Z2LLEihNCYpHgDAgh8ce4I/EzuLCL0LKHPYAQa+3auzZmWHve3dNdXY+sjOCQldXV80DiuNKE7DlUdmVl5DPii4iM4NDAcBo+c58yFDupB2YgLNZW7DiiCIB1VaS1YxYWUNmPB05Y+0AhUgAKl6/G6yaKmFkqf2RzbudOJEG6HBdwS7d0S7f03aCo4x0ACQFQaKSt4BIUBC/uIC8BG1iWCxt8snVMCRGQQQ5sLdIdXbFjpF6uK6hqS/f3B0lRdfUlQeu6tfyd0FCEwCNWSyKpV4UFUCL9QUKki3mZZZkPkFQKRLH3a1REpEg7RIpdVPoU3QvFLuhwi/ivNswsqIothx5O5SUAo9skISJ4m7ln4AYwsVW/r+u7JaX1knLlhWoEt7guKNn5zdKzS6XdSiXUsIzhLNgirb1SIdz4Li1Gp0MhYDjc4n8NXULOW26WYYX/g7wCL2gBpWbGcEuOWXwYnG9jZyTDF2WxVK7UH1p6M4TsO/rqEANXGGp0LfJXfS/QLiy6ug3vbt8JtThXS9tcBKwDndYFTd/DHJ21vWjbYsU1ySxajx4//cnDZLL81pXXu4iPQhMTF+CWzh+EuP5uHZ3wzUWA5aUOVE1QYPehALDWRlHku0VYmARawyc1Ui0ni90tfHJ5gTZh2/Bhe6rBB5ppINLOh4bZrmKLgA9dGttQqkT8dGsSZDEzEwOo6qosC2bWJoJSdVWBmRQ5tgFgFRbRWntBXOvIiRVhr15rHQGItM7mM5A+fHe4vb0ZJ2nt4Jih4Gzd72kAO1s7u7t7r9LeYLg+nmT/fv0qifRqanLrinz861/9FsCjJz/83e//kNWypt3Txw+fPd7/6Oc/O3pzeHZ4sjvaADAeX/STJB6mFxmbXpqmA9MbiJMiz0mRD4xl1xppVGODuWERd3eX5XGWtrztXlHsHayuXZGXbEZtja1vrzRJea8FPAIwco1F6JJdLKzQpfzG/5OufFOWiq48DGUdLKezHy9z18IqEtDBDgPecX9pxdyc+eUqsOX7vMumdP4r4WaZeoe69gdLVrerHtBXvtjuReHUvMb7b8F1+ySs5sY+0Io3C7wY145Ot7iD9wX3zi5+dvk4BgRE7R6FgFxJe9tGW+0V0adjRWq2imbCgdFBQQPkBWgVOeX8m/6w8t1KpLrtXnxAJCDFSrXrbtkj3f9ssXn5bZqwcHJvDKW+LCSn6Vx90Jzyob8WpiqgHWzXchykxRA5waBFR6AjZbWTuSON3LCyFsYLDzWCwdQw1AghtVRoo3Xgs3srISGlfB759kTshJj4fyLir65p9g3PEfuGBgmhc1YKwEp8oCUpRUoJKVXXdSESR4ZIO3Z1Y4qGUkoTMREp5dgBHClyXFdWRVoPkx6AWV64uo4jYyMjAAFx1KvKIjGxShSA0erQaBWX5duT8cvs5d6du3GSZvNpNpv208RD1YMknpyfzPN5WdZF6Yoyn2XT2CjDNFoZpD0AYEe1o5gwmU6TOI00ldVc05Y470CqAIBJgwwRKanYDeKEwE7EAKN+DOC89ExJXYurHWKd5XlRFhurAytyNr8AkOWTvonWeuv77z3cWBvOs9yWVnFhFXqGRivrAJAkO5ujd0cXr789ubc32t3cWV0Z6Sg9e3f4i59+9I9vTgB88unHA5ia3Zf/OVRxfP/ug3pe/PEvf568PX7yy98A+NNnn87L4tGD/Vf/Osjn+XhWlPn09OQQ8drKxtb793YBHJ8cff3Jy+2797d271+cjTk2u7tbqcLp+MwMNubjIwAFREGOD45kuEKO87wQwdGbb8VEJkomtgSQ5dWHT59+/eKr/q5+qN+ri7KEjhXlbE2vB8CWBWqJbfG9lDTKo+OD/vqKs8Ukn47WhyuDBMBJVkzPj2b5+2sDTC4Oj8az9dFOYUsAWzsjKxGAaW0nz1/odDDJ8u07O0ZzMZ+t9Fdm0Vhg7mzfAXB+cGgZZVmOtjfPp/Oz46PU9AtRrsoTAMCzZz+Qujw8nVgriYpi4MdPH2nI27fH+w/vCUoAxhiGUiBbWgbnVU5c
ffjB/W9Oxz969oRNCuDNVy9OK/fXz/+2tvqTpOdev/zi+/cffLn/+Pnf//nF8897MQCY1BwcHJSQ7HR8Z7C6ubd9XjtAKxF/cwsEmzt71lpry6rI8yJXjntJr+baOZemCQAnbj4Zu1qMiWJJqsjW7GxtmYV0gxKaKHaORSQinWWz6exie3WPiCBCurnixZHPxS1KKRPFCqhcFce9WJt5CQCkdWx61qKsCyhws49Ck1ZEPkuksNRcaUeOK5ASEQerhFhgEHntxsHVUhGgFA3MYLA6TNP+9OLiPDuFkp7uAVBaGW2U1BW8EkpGGW4vCwKINBGcY+dqyxWUKKhaOIn7w+HQZ5e6pVu6pVv6btF/AZhitZgGakNIAAAAAElFTkSuQmCC
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
width="100%" height="100%" viewBox="0 0 52.866 51.739" enable-background="new 0 0 52.866 51.739" xml:space="preserve">
<polygon fill-rule="evenodd" clip-rule="evenodd" fill="#B2B1B1" points="44.415,3.704 14.633,3.704 14.633,51.739 52.866,51.739
52.866,11.997 44.415,3.704 44.415,3.704 "/>
<path fill-rule="evenodd" clip-rule="evenodd" fill="#E20917" d="M42.481,1.85c0,2.811,0,5.655,0,6.226c0.576,0,3.471,0,6.308,0
L42.481,1.85L42.481,1.85L42.481,1.85z"/>
<path fill-rule="evenodd" clip-rule="evenodd" fill="#F5F5F5" d="M41.175,1.307c-10.689,0-27.428,0-28.284,0
c0,1.255,0,46.237,0,47.492c1.24,0,35.794,0,37.034,0c0-0.935,0-26.096,0-39.417h-8.75V1.307L41.175,1.307L41.175,1.307z"/>
<path fill-rule="evenodd" clip-rule="evenodd" fill="#1D1D1B" d="M42.481,1.85l6.308,6.226c-2.837,0-5.731,0-6.308,0
C42.481,7.505,42.481,4.66,42.481,1.85L42.481,1.85L42.481,1.85z M49.925,48.799c-1.24,0-35.794,0-37.034,0
c0-1.255,0-46.236,0-47.492c0.856,0,17.595,0,28.284,0v8.075h8.75C49.925,22.703,49.925,47.864,49.925,48.799L49.925,48.799
L49.925,48.799L49.925,48.799z M11.583,0v50.105h39.649V8.65L42.467,0H11.583L11.583,0L11.583,0L11.583,0z"/>
<path fill-rule="evenodd" clip-rule="evenodd" fill="#B2B1B1" d="M39.015,19.902V5.337H12.891c0,3.47,0,8.805,0,14.565H39.015
L39.015,19.902L39.015,19.902z"/>
<path fill-rule="evenodd" clip-rule="evenodd" fill="#E20917" d="M1.307,16.936c1.238,0,33.62,0,34.857,0c0-1.12,0-10.861,0-11.981
c-1.237,0-33.619,0-34.857,0C1.307,6.075,1.307,15.816,1.307,16.936L1.307,16.936L1.307,16.936z"/>
<path fill-rule="evenodd" clip-rule="evenodd" fill="#1D1D1B" d="M36.164,16.936c-1.237,0-33.619,0-34.857,0
c0-1.12,0-10.861,0-11.981c1.238,0,33.62,0,34.857,0C36.164,6.075,36.164,15.816,36.164,16.936L36.164,16.936L36.164,16.936z
M0,3.647v14.596h37.471V3.647h-0.653H0L0,3.647L0,3.647L0,3.647z"/>
<path fill-rule="evenodd" clip-rule="evenodd" fill="#FFFFFF" d="M9.905,8.311v2.267h0.881c0.635,0,1.059-0.042,1.272-0.125
c0.214-0.083,0.382-0.214,0.503-0.392c0.122-0.178,0.183-0.385,0.183-0.621c0-0.291-0.086-0.53-0.256-0.72
c-0.17-0.188-0.386-0.307-0.647-0.354c-0.191-0.037-0.578-0.055-1.158-0.055H9.905L9.905,8.311L9.905,8.311z M8.292,14.928V6.963
h2.583c0.979,0,1.616,0.04,1.914,0.12c0.456,0.12,0.839,0.38,1.146,0.78c0.309,0.401,0.463,0.918,0.463,1.552
c0,0.49-0.089,0.901-0.267,1.234c-0.177,0.333-0.402,0.595-0.676,0.786c-0.273,0.19-0.552,0.316-0.834,0.377
c-0.385,0.077-0.94,0.114-1.668,0.114H9.905v3.002H8.292L8.292,14.928L8.292,14.928L8.292,14.928z"/>
<path fill-rule="evenodd" clip-rule="evenodd" fill="#FFFFFF" d="M17.315,8.311v5.27h1.195c0.447,0,0.77-0.025,0.968-0.076
c0.26-0.065,0.475-0.175,0.646-0.331c0.171-0.156,0.311-0.412,0.419-0.769c0.107-0.356,0.162-0.842,0.162-1.457
s-0.055-1.087-0.162-1.416c-0.108-0.33-0.26-0.586-0.454-0.771c-0.195-0.185-0.441-0.31-0.741-0.375
c-0.224-0.05-0.661-0.076-1.313-0.076H17.315L17.315,8.311L17.315,8.311z M15.702,6.963h2.931c0.661,0,1.165,0.05,1.512,0.152
c0.467,0.138,0.865,0.382,1.197,0.733c0.332,0.352,0.585,0.782,0.759,1.29c0.173,0.509,0.26,1.137,0.26,1.883
c0,0.656-0.081,1.221-0.244,1.695c-0.198,0.58-0.481,1.049-0.851,1.408c-0.277,0.271-0.653,0.483-1.126,0.635
c-0.354,0.113-0.827,0.169-1.42,0.169h-3.018V6.963L15.702,6.963L15.702,6.963L15.702,6.963z"/>
<polygon fill-rule="evenodd" clip-rule="evenodd" fill="#FFFFFF" points="23.727,14.928 23.727,6.963 29.18,6.963 29.18,8.311
25.34,8.311 25.34,10.19 28.648,10.19 28.648,11.538 25.34,11.538 25.34,14.928 23.727,14.928 23.727,14.928 "/>
<path fill-rule="evenodd" clip-rule="evenodd" fill="#E30921" d="M25.983,35.519c-2.812,2.1-4.745,5.082-3.982,5.547l-0.666-0.335
C20.948,40.259,21.825,37.729,25.983,35.519L25.983,35.519L25.983,35.519L25.983,35.519L25.983,35.519z"/>
<path fill="none" stroke="#E30921" stroke-width="0.5197" stroke-miterlimit="2.6131" d="M25.983,35.519
c-2.812,2.1-4.745,5.082-3.982,5.547l-0.666-0.335C20.948,40.259,21.825,37.729,25.983,35.519L25.983,35.519L25.983,35.519
L25.983,35.519L25.983,35.519z"/>
<path fill-rule="evenodd" clip-rule="evenodd" fill="#E30921" d="M47.193,34.952l-0.023-0.368c-0.004-0.047,0-0.105-0.002-0.166
h-0.005c-0.015,0.057-0.033,0.122-0.052,0.175l-0.127,0.351h-0.071l-0.124-0.36c-0.015-0.049-0.029-0.108-0.044-0.166H46.74
c-0.001,0.058,0,0.11-0.003,0.166l-0.022,0.368h-0.089l0.047-0.61h0.121l0.119,0.331c0.016,0.046,0.028,0.097,0.043,0.153h0.003
c0.014-0.056,0.028-0.11,0.043-0.155l0.12-0.329h0.119l0.046,0.61H47.193L47.193,34.952L47.193,34.952L47.193,34.952L47.193,34.952z
M46.604,34.342v0.078h-0.187v0.532h-0.091V34.42h-0.186v-0.078H46.604L46.604,34.342L46.604,34.342L46.604,34.342L46.604,34.342
L46.604,34.342L46.604,34.342z"/>
<path fill-rule="evenodd" clip-rule="evenodd" fill="#E30921" d="M28.566,34.342c0.568-0.985,1.172-2.088,1.814-3.325
c1.3-2.505,2.067-4.538,2.526-6.316c0.834,2.165,2.059,4.225,3.702,5.639c0.511,0.44,1.075,0.845,1.667,1.215
C35.172,32.035,31.688,32.963,28.566,34.342L28.566,34.342L28.566,34.342z M47.102,33.165c0.821-1.749-2.684-2.349-7.452-1.796
c-0.838-0.472-1.652-1.007-2.389-1.593c-1.836-1.507-3.187-4.034-4.027-6.566c0.383-2.121,0.359-3.924,0.401-5.872
c-0.182,0.888-0.312,2.372-0.811,4.482c-0.643-2.466-0.783-4.757-0.394-5.904c0.086-0.251,0.293-0.545,0.385-0.61
c0.358,0.179,0.792,0.619,0.889,1.541c0.323-1.702-0.509-1.642-0.742-1.642l-0.523-0.004c-0.29,0-0.551,0.232-0.677,0.705
c-0.431,1.605-0.225,4.505,0.669,7.419c-0.556,1.942-1.416,4.301-2.806,7.101c-3.741,7.533-6.472,11.047-8.29,10.306l0.649,0.333
c1.21,0.617,3.286-1.02,6.551-6.667c3.069-1.107,7.154-1.921,10.714-2.278c3.505,1.878,7.53,2.523,7.734,1.313
c-0.907,0.436-3.514-0.17-6.149-1.445C44.442,31.758,47.17,32.083,47.102,33.165L47.102,33.165L47.102,33.165L47.102,33.165z"/>
<path fill="none" stroke="#E30921" stroke-width="0.5197" stroke-miterlimit="2.6131" d="M28.566,34.342
c0.568-0.985,1.172-2.088,1.814-3.325c1.3-2.505,2.067-4.538,2.526-6.316c0.834,2.165,2.059,4.225,3.702,5.639
c0.511,0.44,1.075,0.845,1.667,1.215C35.172,32.035,31.688,32.963,28.566,34.342L28.566,34.342L28.566,34.342z M47.102,33.165
c0.821-1.749-2.684-2.349-7.452-1.796c-0.838-0.472-1.652-1.007-2.389-1.593c-1.836-1.507-3.187-4.034-4.027-6.566
c0.383-2.121,0.359-3.924,0.401-5.872c-0.182,0.888-0.312,2.372-0.811,4.482c-0.643-2.466-0.783-4.757-0.394-5.904
c0.086-0.251,0.293-0.545,0.385-0.61c0.358,0.179,0.792,0.619,0.889,1.541c0.323-1.702-0.509-1.642-0.742-1.642l-0.523-0.004
c-0.29,0-0.551,0.232-0.677,0.705c-0.431,1.605-0.225,4.505,0.669,7.419c-0.556,1.942-1.416,4.301-2.806,7.101
c-3.741,7.533-6.472,11.047-8.29,10.306l0.649,0.333c1.21,0.617,3.286-1.02,6.551-6.667c3.069-1.107,7.154-1.921,10.714-2.278
c3.505,1.878,7.53,2.523,7.734,1.313c-0.907,0.436-3.514-0.17-6.149-1.445C44.442,31.758,47.17,32.083,47.102,33.165"/>
<path fill-rule="evenodd" clip-rule="evenodd" fill="#1D1D1B" d="M19.435,42.372l-0.528-2.746c-0.148-0.775-0.302-1.79-0.431-2.613
h-0.053c-0.129,0.834-0.298,1.882-0.446,2.623l-0.542,2.736H19.435L19.435,42.372L19.435,42.372L19.435,42.372L19.435,42.372z
M17.233,43.649l-0.675,3.17h-1.566l2.582-11.478h1.856l2.442,11.478h-1.585l-0.667-3.17H17.233L17.233,43.649L17.233,43.649
L17.233,43.649L17.233,43.649L17.233,43.649L17.233,43.649z"/>
<path fill-rule="evenodd" clip-rule="evenodd" fill="#1D1D1B" d="M26.4,41.704c0-0.164,0-0.352-0.025-0.524
c-0.076-0.741-0.504-1.392-1.079-1.392c-0.985,0-1.331,1.391-1.331,2.936c0,1.689,0.442,2.89,1.275,2.89
c0.367,0,0.846-0.192,1.103-1.175c0.041-0.146,0.058-0.334,0.058-0.539V41.704L26.4,41.704L26.4,41.704L26.4,41.704L26.4,41.704z
M28.008,35.036v9.649c0,0.631,0.043,1.56,0.067,2.135h-1.387l-0.1-1.004h-0.053c-0.277,0.586-0.894,1.14-1.728,1.14
c-1.521,0-2.463-1.661-2.463-4.243c0-2.914,1.239-4.297,2.549-4.297c0.653,0,1.183,0.307,1.472,0.93H26.4v-4.309H28.008
L28.008,35.036L28.008,35.036L28.008,35.036L28.008,35.036L28.008,35.036L28.008,35.036z"/>
<path fill-rule="evenodd" clip-rule="evenodd" fill="#1D1D1B" d="M31.874,45.692c0.992,0,1.207-1.75,1.207-3.016
c0-1.225-0.215-3-1.242-3c-1.047,0-1.255,1.775-1.255,3c0,1.383,0.239,3.016,1.272,3.016H31.874L31.874,45.692L31.874,45.692
L31.874,45.692L31.874,45.692z M31.831,46.955c-1.647,0-2.849-1.423-2.849-4.255c0-2.998,1.422-4.285,2.92-4.285
c1.632,0,2.814,1.469,2.814,4.254c0,3.282-1.626,4.286-2.869,4.286H31.831L31.831,46.955L31.831,46.955L31.831,46.955L31.831,46.955
L31.831,46.955L31.831,46.955z"/>
<path fill-rule="evenodd" clip-rule="evenodd" fill="#1D1D1B" d="M37.293,43.887c0,0.194,0.024,0.38,0.066,0.519
c0.264,1.01,0.743,1.208,1.073,1.208c0.951,0,1.305-1.263,1.305-2.96c0-1.582-0.371-2.865-1.323-2.865
c-0.521,0-0.955,0.625-1.064,1.235c-0.032,0.165-0.057,0.376-0.057,0.548V43.887L37.293,43.887L37.293,43.887L37.293,43.887
L37.293,43.887z M35.686,35.036h1.607v4.444h0.034c0.419-0.75,1.005-1.064,1.737-1.064c1.397,0,2.291,1.59,2.291,4.135
c0,2.959-1.206,4.405-2.571,4.405c-0.815,0-1.27-0.433-1.635-1.183h-0.053l-0.101,1.047h-1.379c0.025-0.56,0.068-1.504,0.068-2.135
V35.036L35.686,35.036L35.686,35.036L35.686,35.036L35.686,35.036L35.686,35.036L35.686,35.036z"/>
<path fill-rule="evenodd" clip-rule="evenodd" fill="#1D1D1B" d="M45.958,41.853c0.019-1.456-0.493-2.223-1.129-2.223
c-0.819,0-1.203,1.188-1.249,2.223H45.958L45.958,41.853L45.958,41.853L45.958,41.853L45.958,41.853z M43.571,43.017
c0.016,2.119,0.928,2.635,1.887,2.635c0.591,0,1.088-0.138,1.439-0.301l0.24,1.17c-0.494,0.248-1.256,0.393-1.973,0.393
c-2.073,0-3.172-1.575-3.172-4.123c0-2.715,1.246-4.384,2.963-4.384c1.721,0,2.52,1.653,2.52,3.731c0,0.414-0.016,0.67-0.04,0.887
L43.571,43.017L43.571,43.017L43.571,43.017L43.571,43.017L43.571,43.017L43.571,43.017L43.571,43.017z"/>
<path fill-rule="evenodd" clip-rule="evenodd" fill="#B2B1B1" d="M49.925,10.912c0-0.524,0-1.036,0-1.529h-7.589v1.529H49.925
L49.925,10.912L49.925,10.912z"/>
</svg>
\rules except wikilink
<$button class="cpfadeable">{{$:/core/images/preview-open}} MultiMedia</$button>
<$button tooltip="View the next paper">
Next {{$:/core/images/right-arrow}}
<$action-navigate $to={{!!next_paper}}/>
</$button>
<$button tooltip="View the next session">
Next {{$:/core/images/right-arrow}}
<$action-navigate $to={{!!next_session_title}}/>
</$button>
@@.cppinktext ''NO PDF''@@
<span class="cpicon cpfadeable cpw25px">{{$:/causal/images/pdficon02}}</span>
<span class="cpicon cpfadeable cpw25px">{{$:/causal/images/pdficon02}}</span>
<span class="cpicon cpfadeable cpw25px">{{$:/causal/images/pdficon02}}</span>
<span class="cpicon cpfadeable cpw25px">{{$:/causal/images/pdficon02}}</span>
<$button tooltip="View the preceding paper">
{{$:/core/images/left-arrow}} Prev
<$action-navigate $to={{!!prev_paper}}/>
</$button>
<$button tooltip="View the preceding session">
{{$:/core/images/left-arrow}} Prev
<$action-navigate $to={{!!prev_session_title}}/>
</$button>
!!!Count of Tiddlers with subscript/superscript in title: <$count filter="[regexp[,,]] [regexp[\^\^]]"/>
List of Tiddlers with subscript/superscript in title:
<$list filter="[regexp[,,]] [regexp[\^\^]]"/>
!!!Count of Missing Tiddlers: <$count filter="[all[missing]sort[title]]"/>
List of Missing Tiddlers:
<$list filter="[all[missing]sort[title]]"/>
\rules except wikilink
Proceedings of the 22nd Annual Conference of the International Speech Communication Association (INTERSPEECH 2021). ISSN 2308-457X. © 2021 International Speech Communication Association. All rights reserved. For technical support please contact Causal Productions (info@causalproductions.com).
<$button tooltip="View the Session List">
{{$:/core/images/up-arrow}} Sessions
<$action-navigate $to="Session List"/>
</$button>
/*
* CONFERENCE Paper abstract card
*/
.cpabstractcardauthorheading { font-size:1em; }
/* the following style is for the <div> that contains the author names (possibly multi-line) and the
affiliation names (possibly multi-line). The 0.75em bottom margin spaces it a bit from the button row that follows */
.cpabstractcardauthorarea { font-size:1em; line-height:1.15; margin-top:0.5em; margin-bottom:0.75em; }
/* the following style is for the <p> that contains the author names only */
p.cpabstractcardauthornames { font-style:normal; margin-top:0em; margin-bottom:0em; }
/* the following style is for the <p> that contains the affiliations only; the 0.25em top margin separates it from the author names */
p.cpabstractcardaffiliationlist { font-style:italic; margin-top:0.25em; margin-bottom:0em; }
/* the abstract paragraph is the last thing on the tiddler so make the p bottom margin zero */
.cpabstractcardabstract { font-size:1em; line-height:1.15; }
.cpabstractcardabstract > p { margin-top:0.75em; margin-bottom:0em; }
/* the following style is for the <p> that contains the buttons in a single row. The 0.5em margins keep adjacent button rows close together. */
.cpbuttonrow > p { margin-top:0.5em; margin-bottom:0.5em; }
/* the following styles are for the VIEW PDF button, which might have a MULTIMEDIA button next to it.
A separate <p> style "lineheightforbuttons" is needed to avoid extra vertical space due to line-height, and the <span>
is needed to keep the hover area confined to the buttons rather than the full width. The hover
tooltip is vertically sized by the line-height of the span. */
.lineheightforbuttons { line-height:1em; }
.cpabscardpdfandmediabutton { display:inline-flex;align-items:flex-start;line-height:1.5em; }
.cpaidxlinkrowstyle { width:30px;text-align:left;padding-left:0;margin-left:0; }
/* the following style is based on the normal table top margin of 1em, with margin-top
reduced to 0.5em because the link row table is borderless so it needs to be moved a bit
closer to surrounding elements. The bottom margin is zero because that is the end of the tiddler. */
.cpaidxlinkrowtable { margin-top:0.5em; margin-bottom:0em; }
.cpaidxlinkrowtable td { padding-left:0em; padding-right:1em; }
/*
* CONFERENCE Author Index List tiddler styles
*/
/* the author list is a borderless table so reduce margin-top to 0.5em to make the vertical whitespace appear
consistent with bordered tables. Bottom margin is set to zero because that is the end of the tiddler. */
.cpauthorindexlisttable { margin-top:0.5em; margin-bottom:0em; }
/* the next line ensures all td elements within a .cpauthorindexlisttable have zero left-right padding;
the font and line-height definitions are included here to avoid adding more structural elements */
.cpauthorindexlisttable td { padding-left:0em; padding-right:0em; font-size:1em; line-height:1.5; }
/*
* CONFERENCE Author Index Person Card
*/
/* the following style is for the author paper table. The 1em top margin separates it from the
preceding content, and the 0em bottom margin is used because the table is the end of the tiddler */
.cpaidxauthortable { margin-top:1em; margin-bottom:0em; }
/* the following styles are used within the table */
.cpauthorindexpersoncardauthorname { font-size:1em; font-weight:bold; }
.cpauthorindexpersoncardconferencename { font-size:1em; font-weight:bold; }
.cpauthorindexpersoncardpapercode { font-size:1em; line-height:1.15; white-space:nowrap; }
.cpauthorindexpersoncardpapertitle { font-size:1em; line-height:1.15; }
/*
* Global change to TIDDLYWIKI built-in styles
*/
/* make the titlebar smaller. This affects the tiddler title, and the 3 control buttons on top right
*/
.tc-titlebar { font-size:1.2em; }
/* the margin-bottom spec in the next class allows vertical space between tiddler title and body to close
*/
.tc-titlebar h2 { font-weight: bold; margin-bottom:0.5em; }
/* the tiddler body begins with a <p> so the top margin contributes to the space between title and body.
The following selector selects the first child <p> of the tiddler-body and sets the top/bottom margin to
a minimum value, which can be extended in cases such as the abstract card author list.
*/
.tc-tiddler-body > p { margin-top:0.5em; margin-bottom:0.5em; }
/* the following makes the tags wrapper disappear, allowing the vertical space between tiddler title and
tiddler body to close.
*/
.tc-tags-wrapper { display: none; }
\rules except wikilink
.cpwelcomepagespaceaboveiconwithconferencename { padding-top:0.75em; }
.cpwelcomepagespaceaboveiconwithoutconferencename { padding-top:0.0em; }
/* the following styles force the conference logos to lose their descender padding due
to the line-height of the parent */
.cpwelcomepagespaceaboveiconwithconferencename > img { display:block; }
.cpwelcomepagespaceaboveiconwithoutconferencename > img { display:block; }
.icon_size_on_welcome_page { width:250px; }
/* the confinfo page table is borderless, so the top margin is chosen to keep it visually consistent
with other tiddlers. Bottom margin is set to zero because that is the end of the tiddler. */
.cpconfinfotable { margin-top:1em; margin-bottom:0em; }
.cpconfinfotable td { padding-left:0em; padding-bottom:0.5em; }
.cpconfinfotable tr:last-child td { padding-bottom:0em; }
/* the following style is used for <a> elements surrounding buttons, to ensure that
the text inside the button does not cause a stray underline to appear between
buttons on the same line, and to force the text colour to the normal body text colour instead of link blue.
Note that the TW text colour is not pure black but rgb(51,51,51). */
a.externallinkbutton { color: rgb(51,51,51); text-decoration: none; }
/* the following reveal widgets and styles allow buttons and table cells with class
"cpfadeable" to be faded when the feature is turned off. Specifically, PDF and MEDIA link
buttons can be switched off, making the links unclickable (they can still be reached by
tabbing and activated with Enter, but we ignore this) and giving them a faded appearance */
<$reveal type="match" state="$:/causal/config/hidePDFandMEDIA" text="hide">
a.externallinkbutton {
pointer-events: none;
cursor: default;
}
.cpfadeable {
opacity: 0.33;
}
.cpabscardpdfandmediabutton:hover::after, .cpaidxauthortable td:first-child:hover::after, .cpconfinfotable td:first-child:hover::after, .cpsessionviewtable td:first-child:hover::after {
display: inline;
position: absolute;
border: 1px solid #ccc;
border-radius: 4px;
box-shadow: 1px 1px 4px #000;
background-color: #fff;
margin-left: 5px;
margin-top: -25px;
padding: 3px;
opacity: 1;
}
.cpabscardpdfandmediabutton::after, .cpaidxauthortable td:first-child::after, .cpconfinfotable td:first-child::after, .cpsessionviewtable td:first-child::after {
content: "PDF+MEDIA files are only available in the final proceedings";
opacity: 1;
display: none;
}
</$reveal>
<$reveal type="match" state="$:/causal/config/hidePDFandMEDIA" text="show">
.cpfadeable {
opacity: 1;
}
</$reveal>
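/* note on the toggle above (a descriptive summary of the rules as written, not new behaviour):
   the two reveal blocks emit their CSS only when the config tiddler $:/causal/config/hidePDFandMEDIA
   contains the matching text, "hide" or "show", so that single tiddler drives the whole faded/unclickable state.
   Within the "hide" branch the ::after rules cooperate: the non-hover rule defines the tooltip text and keeps it
   hidden with display:none, and the :hover rule switches it to display:inline with the bordered, shadowed box styling,
   so the explanation appears when a faded button or table cell is moused over. */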
.cpconferencedisambiguator { font-size:1.12em; font-weight:bold; }
.cpprevnextanchortext { font-size:1.12em; font-weight:bold; }
.cpredtext { color:red; }
.cppinktext { color:#FFB0B0; }
.cpcenter { text-align:center; }
.cpmailingaddress { padding-left:2em; }
.cptightlineheight { line-height:1.15; }
.cpemabovezerobelow { margin-top:1em; margin-bottom:0em; }
.cpcopyrightpage { line-height:1.15; margin-top:0.75em; margin-bottom:0em; }
.cpsupportpage { line-height:1.15; margin-top:0.75em; margin-bottom:0em; }
.cpsupportpagetable { margin-top:1em; margin-bottom:0em; }
/* the following causes cpicon to have no line-height, otherwise the icons
get a descender margin below the icon caused by the font style of the parent */
.cpicon > img { display: block; }
.cpw25px > img { width:25px; }
/* the following is used in the session view to force a minimum width for the pdf icon column, using @@ ... @@ syntax */
.pdficonintable { display:block;width:30px; }
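/* illustrative usage sketch (the exact markup is an assumption, not copied from the generated
   session view tiddlers): the minimum width can be forced by wrapping the icon in the class with inline @@ syntax, e.g.
     @@.pdficonintable {{$:/causal/images/pdficon02}}@@
   where $:/causal/images/pdficon02 is the PDF icon image used elsewhere in this wiki. */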
/*
* CONFERENCE Session List tiddler styles
*/
/* the session list is a borderless table so reduce the margin-top to 0.5em to make it consistent
with bordered tables. Bottom margin is set to zero because that is the end of the tiddler. */
.cpsessionlisttable { margin-top:0.5em; margin-bottom:0em; }
/* the next line ensures all td elements within a .cpsessionlisttable have zero left padding and only 0.5em right padding */
.cpsessionlisttable td { padding-left:0em; padding-right:0.5em; }
/* note that in the session list table, the vertical alignment of table cells must be done
with TW5 table cell markup and not CSS; properties such as display:flex and align-content:flex-start do not seem to work here (an illustrative example follows the class definitions below). */
.cpsessionlistsessioncode { font-size:1em; line-height:1.15; white-space:nowrap; }
.cpsessionlistsessionname { font-size:1em; line-height:1.15; }
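/* illustrative example for the note above (assumes standard TW5 table markup; the cell contents are placeholders):
   a leading ^ in a cell aligns its content to the top and a leading , aligns it to the bottom, e.g.
     |^SessionCode |^Session name that may wrap onto several lines |
*/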
/*
* CONFERENCE Session View tiddler styles
*/
/* the following style adds a bit of space above the session view table to separate it from the preceding content; the bottom margin is zero because the table ends the tiddler */
table.cpsessionviewtable { margin-top:0.75em; margin-bottom:0em; }
/* the following styles are for entries within the session view table */
.cpsessionviewpapercode { font-size:1em; line-height:1.15; white-space:nowrap; }
.cpsessionviewpapertitle { font-size:1em; line-height:1.15; }
.cpsessionviewpaperauthor { font-size:1em;font-style:italic;line-height:1.15; }
.cpsessionviewmetadata { font-size:1em; line-height:1.15; }
.cpsessionviewmetadata table { margin-top:0.6em; margin-bottom:0.75em; }
.cpsessionviewmetadata tr:first-child td:first-child { padding-bottom:0.2em; } /* make the padding 0.2em on the bottom of top left cell, to space this row a bit more from subsequent rows */
.cpsessionviewmetadata td { padding-left:0px; padding-right:0px; }
.cpsessionviewmetadata td:first-child { width:1px; white-space: nowrap; } /* ensure that 'chairs:' column is just wide enough for the word */
/* the following class is used to make borderless tables */
.cpborderless,
.cpborderless table,
.cpborderless td,
.cpborderless tr,
.cpborderless th,
.cpborderless tbody { border:0 !important; }
/* the following class essentially defines the visual appearance of H2 headers, for use
in tables where tiddler !! syntax does not work. For all header style definitions see w3schools
or t287/00_gv.txt */
.cph2 { display: block; font-size: 1.5em; margin-top: 0.83em; margin-bottom: 0.83em; margin-left: 0; margin-right: 0; font-weight: bold; }
.cph3 { display: block; font-size: 1.0em; margin-top: 0.83em; margin-bottom: 0.83em; margin-left: 0; margin-right: 0; font-weight: bold; }
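/* illustrative usage (hypothetical markup, not taken from the generated tiddlers): inside a table cell,
   where the tiddler !! heading syntax is unavailable, these classes can be applied via a plain element, e.g.
     <span class="cph2">Preface</span>
*/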
/* the following allows tables to have extra space between content and row divider rules */
.cptablecelltopbottomspace1 td { padding-top:0.1em; padding-bottom:0.1em; }
.cptablecelltopbottomspace2 td { padding-top:0.2em; padding-bottom:0.2em; }
.cptablecelltopbottomspace3 td { padding-top:0.3em; padding-bottom:0.3em; }
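/* illustrative usage (assumes standard TW5 table markup; the row contents are placeholders): the spacing
   classes, optionally combined with .cpborderless above, can be attached to a wikitext table with a trailing
   class row, e.g.
     |cpborderless cptablecelltopbottomspace2|k
     |Chairs: |... |
*/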
/*
* Welcome Page tiddler styles
*/
/* width of svg logo for the whole publication */
.cppublicationsvg { width:TODO_publication_welcomeartwork_displaywidth; }
.cppublicationname { font-weight:bold;font-size:1.3em; }
.cppublicationdatevenue {
font-size:1.1em;
display:flex;
justify-content:space-between;
}
/* each individual conference in the publication is named in the following style */
.cpwelcomepageconferencename { font-weight:bold;line-height:1.2; }
/* the following style is for the publication header which is a table with icon in left cell
and conference name and date/venue in right cell. We need to have a small top margin to separate
from the tiddler title.
*/
.cpwelcomepagepublicationtable,
.cpwelcomepagepublicationtable td { margin-top:1em; margin-bottom:0px; padding-top:0px; padding-bottom:0px; }
/* the following style is for a table which contains a per-conference row with icon in left cell, and major
headings in right cell such as preface, session list, author index. We want all margins to be zero so it
can butt up to its vertical neighbours efficiently.
*/
.cpwelcomepageconferencetable,
.cpwelcomepageconferencetable td { margin-top:0px; margin-bottom:0px; padding-top:0px; padding-bottom:0px; }
/* the copyright message is displayed in tiny font on the welcome page. To make it readable the user can click on the COPYRIGHT STATEMENT heading to see the text in a readable tiddler */
.cpwelcomepagecopyright { display: block; font-size: 0.5em; margin-top: 0.1em; margin-bottom: 0.1em; margin-left: 0; margin-right: 0; font-weight: bold; line-height:1.5em; }
/* the following style is applied to the conference information, session list, and author index links.
TW mandates that links be blue and not bold, so specifying colour or weight in the following style would have
no effect. Font size, italics, and other properties can be controlled here and will work correctly. */
.cpwelcomepageconferencelinks {}
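/* for example (illustrative values only): adding { font-size:1.05em; font-style:italic; } to the empty
   ruleset above would take effect, whereas colour or font-weight settings would still be overridden by the
   core link styling described above. */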
\rules except wikilink
<$button>{{$:/core/images/preview-open}} View Folder</$button>
\rules except wikilink
<$checkbox tiddler="$:/state/causal" field="view multimedia list" checked="yes" unchecked="no" default="no"> View MultiMedia list</$checkbox>
<a href={{!!pdf_file_full_name}} class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in abscard view}}</a>
<$button tooltip="View the top level Welcome Page">
{{$:/core/images/up-arrow}} Welcome
<$action-navigate $to="Welcome Page"/>
</$button>
\rules except wikilink
<$button class="cpfadeable">{{$:/core/images/preview-open}} Accompanying ZIP</$button>
{
"tiddlers": {
"$:/Acknowledgements": {
"title": "$:/Acknowledgements",
"type": "text/vnd.tiddlywiki",
"text": "TiddlyWiki incorporates code from these fine OpenSource projects:\n\n* [[The Stanford Javascript Crypto Library|http://bitwiseshiftleft.github.io/sjcl/]]\n* [[The Jasmine JavaScript Test Framework|http://pivotal.github.io/jasmine/]]\n* [[Normalize.css by Nicolas Gallagher|http://necolas.github.io/normalize.css/]]\n\nAnd media from these projects:\n\n* World flag icons from [[Wikipedia|http://commons.wikimedia.org/wiki/Category:SVG_flags_by_country]]\n"
},
"$:/core/copyright.txt": {
"title": "$:/core/copyright.txt",
"type": "text/plain",
"text": "TiddlyWiki created by Jeremy Ruston, (jeremy [at] jermolene [dot] com)\n\nCopyright © Jeremy Ruston 2004-2007\nCopyright © UnaMesa Association 2007-2016\n\nRedistribution and use in source and binary forms, with or without modification,\nare permitted provided that the following conditions are met:\n\nRedistributions of source code must retain the above copyright notice, this\nlist of conditions and the following disclaimer.\n\nRedistributions in binary form must reproduce the above copyright notice, this\nlist of conditions and the following disclaimer in the documentation and/or other\nmaterials provided with the distribution.\n\nNeither the name of the UnaMesa Association nor the names of its contributors may be\nused to endorse or promote products derived from this software without specific\nprior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 'AS IS' AND ANY\nEXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES\nOF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT\nSHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,\nINCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED\nTO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR\nBUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN\nCONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN\nANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH\nDAMAGE.\n"
},
"$:/core/icon": {
"title": "$:/core/icon",
"tags": "$:/tags/Image",
"text": "<svg width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\"><path d=\"M64 0l54.56 32v64L64 128 9.44 96V32L64 0zm21.127 95.408c-3.578-.103-5.15-.094-6.974-3.152l-1.42.042c-1.653-.075-.964-.04-2.067-.097-1.844-.07-1.548-1.86-1.873-2.8-.52-3.202.687-6.43.65-9.632-.014-1.14-1.593-5.17-2.157-6.61-1.768.34-3.546.406-5.34.497-4.134-.01-8.24-.527-12.317-1.183-.8 3.35-3.16 8.036-1.21 11.44 2.37 3.52 4.03 4.495 6.61 4.707 2.572.212 3.16 3.18 2.53 4.242-.55.73-1.52.864-2.346 1.04l-1.65.08c-1.296-.046-2.455-.404-3.61-.955-1.93-1.097-3.925-3.383-5.406-5.024.345.658.55 1.938.24 2.53-.878 1.27-4.665 1.26-6.4.47-1.97-.89-6.73-7.162-7.468-11.86 1.96-3.78 4.812-7.07 6.255-11.186-3.146-2.05-4.83-5.384-4.61-9.16l.08-.44c-3.097.59-1.49.37-4.82.628-10.608-.032-19.935-7.37-14.68-18.774.34-.673.664-1.287 1.243-.994.466.237.4 1.18.166 2.227-3.005 13.627 11.67 13.732 20.69 11.21.89-.25 2.67-1.936 3.905-2.495 2.016-.91 4.205-1.282 6.376-1.55 5.4-.63 11.893 2.276 15.19 2.37 3.3.096 7.99-.805 10.87-.615 2.09.098 4.143.483 6.16 1.03 1.306-6.49 1.4-11.27 4.492-12.38 1.814.293 3.213 2.818 4.25 4.167 2.112-.086 4.12.46 6.115 1.066 3.61-.522 6.642-2.593 9.833-4.203-3.234 2.69-3.673 7.075-3.303 11.127.138 2.103-.444 4.386-1.164 6.54-1.348 3.507-3.95 7.204-6.97 7.014-1.14-.036-1.805-.695-2.653-1.4-.164 1.427-.81 2.7-1.434 3.96-1.44 2.797-5.203 4.03-8.687 7.016-3.484 2.985 1.114 13.65 2.23 15.594 1.114 1.94 4.226 2.652 3.02 4.406-.37.58-.936.785-1.54 1.01l-.82.11zm-40.097-8.85l.553.14c.694-.27 2.09.15 2.83.353-1.363-1.31-3.417-3.24-4.897-4.46-.485-1.47-.278-2.96-.174-4.46l.02-.123c-.582 1.205-1.322 2.376-1.72 3.645-.465 1.71 2.07 3.557 3.052 4.615l.336.3z\" fill-rule=\"evenodd\"/></svg>"
},
"$:/core/images/advanced-search-button": {
"title": "$:/core/images/advanced-search-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-advanced-search-button tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M74.5651535,87.9848361 C66.9581537,93.0488876 57.8237115,96 48,96 C21.490332,96 0,74.509668 0,48 C0,21.490332 21.490332,0 48,0 C74.509668,0 96,21.490332 96,48 C96,57.8541369 93.0305793,67.0147285 87.9377231,74.6357895 L122.284919,108.982985 C125.978897,112.676963 125.973757,118.65366 122.284271,122.343146 C118.593975,126.033442 112.613238,126.032921 108.92411,122.343793 L74.5651535,87.9848361 Z M48,80 C65.673112,80 80,65.673112 80,48 C80,30.326888 65.673112,16 48,16 C30.326888,16 16,30.326888 16,48 C16,65.673112 30.326888,80 48,80 Z\"></path>\n <circle cx=\"48\" cy=\"48\" r=\"8\"></circle>\n <circle cx=\"28\" cy=\"48\" r=\"8\"></circle>\n <circle cx=\"68\" cy=\"48\" r=\"8\"></circle>\n </g>\n</svg>"
},
"$:/core/images/auto-height": {
"title": "$:/core/images/auto-height",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-auto-height tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <path d=\"M67.9867828,114.356363 L67.9579626,99.8785426 C67.9550688,98.4248183 67.1636987,97.087107 65.8909901,96.3845863 L49.9251455,87.5716209 L47.992126,95.0735397 L79.8995411,95.0735397 C84.1215894,95.0735397 85.4638131,89.3810359 81.686497,87.4948823 L49.7971476,71.5713518 L48.0101917,79.1500092 L79.992126,79.1500092 C84.2093753,79.1500092 85.5558421,73.4676733 81.7869993,71.5753162 L49.805065,55.517008 L48.0101916,63.0917009 L79.9921259,63.0917015 C84.2035118,63.0917016 85.5551434,57.4217887 81.7966702,55.5218807 L65.7625147,47.4166161 L67.9579705,50.9864368 L67.9579705,35.6148245 L77.1715737,44.8284272 C78.7336709,46.3905243 81.2663308,46.3905243 82.8284279,44.8284271 C84.390525,43.2663299 84.390525,40.7336699 82.8284278,39.1715728 L66.8284271,23.1715728 C65.2663299,21.6094757 62.73367,21.6094757 61.1715729,23.1715729 L45.1715729,39.1715729 C43.6094757,40.73367 43.6094757,43.26633 45.1715729,44.8284271 C46.73367,46.3905243 49.26633,46.3905243 50.8284271,44.8284271 L59.9579705,35.6988837 L59.9579705,50.9864368 C59.9579705,52.495201 60.806922,53.8755997 62.1534263,54.5562576 L78.1875818,62.6615223 L79.9921261,55.0917015 L48.0101917,55.0917009 C43.7929424,55.0917008 42.4464755,60.7740368 46.2153183,62.6663939 L78.1972526,78.7247021 L79.992126,71.1500092 L48.0101917,71.1500092 C43.7881433,71.1500092 42.4459197,76.842513 46.2232358,78.7286665 L78.1125852,94.6521971 L79.8995411,87.0735397 L47.992126,87.0735397 C43.8588276,87.0735397 42.4404876,92.5780219 46.0591064,94.5754586 L62.024951,103.388424 L59.9579785,99.8944677 L59.9867142,114.32986 L50.8284271,105.171573 C49.26633,103.609476 46.73367,103.609476 45.1715729,105.171573 C43.6094757,106.73367 43.6094757,109.26633 45.1715729,110.828427 L61.1715729,126.828427 C62.73367,128.390524 65.2663299,128.390524 66.8284271,126.828427 L82.8284278,110.828427 C84.390525,109.26633 84.390525,106.73367 82.8284279,105.171573 C81.2663308,103.609476 78.7336709,103.609476 77.1715737,105.171573 L67.9867828,114.356363 L67.9867828,114.356363 Z M16,20 L112,20 C114.209139,20 116,18.209139 116,16 C116,13.790861 114.209139,12 112,12 L16,12 C13.790861,12 12,13.790861 12,16 C12,18.209139 13.790861,20 16,20 L16,20 Z\"></path>\n</svg>"
},
"$:/core/images/blank": {
"title": "$:/core/images/blank",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-blank tc-image-button\" viewBox=\"0 0 128 128\" width=\"22pt\" height=\"22pt\"></svg>"
},
"$:/core/images/bold": {
"title": "$:/core/images/bold",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-bold tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M41.1456583,51.8095238 L41.1456583,21.8711485 L67.4985994,21.8711485 C70.0084159,21.8711485 72.4285598,22.0802967 74.7591036,22.4985994 C77.0896475,22.9169022 79.1512515,23.6638602 80.9439776,24.7394958 C82.7367036,25.8151314 84.170863,27.3090474 85.2464986,29.2212885 C86.3221342,31.1335296 86.859944,33.5835518 86.859944,36.5714286 C86.859944,41.9496067 85.2465147,45.8337882 82.0196078,48.2240896 C78.792701,50.614391 74.6694929,51.8095238 69.6498599,51.8095238 L41.1456583,51.8095238 Z M13,0 L13,128 L75.0280112,128 C80.7647346,128 86.3519803,127.28292 91.789916,125.848739 C97.2278517,124.414559 102.068139,122.203563 106.310924,119.215686 C110.553709,116.22781 113.929959,112.373506 116.439776,107.652661 C118.949592,102.931816 120.204482,97.3445701 120.204482,90.8907563 C120.204482,82.8832466 118.262391,76.0411115 114.378151,70.3641457 C110.493911,64.6871798 104.607883,60.7133634 96.719888,58.442577 C102.456611,55.6937304 106.788968,52.1680887 109.717087,47.8655462 C112.645206,43.5630037 114.109244,38.1849062 114.109244,31.7310924 C114.109244,25.7553389 113.123259,20.7357813 111.151261,16.6722689 C109.179262,12.6087565 106.400578,9.35201972 102.815126,6.90196078 C99.2296739,4.45190185 94.927196,2.68908101 89.907563,1.61344538 C84.8879301,0.537809748 79.3305627,0 73.2352941,0 L13,0 Z M41.1456583,106.128852 L41.1456583,70.9915966 L71.8011204,70.9915966 C77.896389,70.9915966 82.7964334,72.3958776 86.5014006,75.2044818 C90.2063677,78.0130859 92.0588235,82.7039821 92.0588235,89.2773109 C92.0588235,92.6237329 91.4911355,95.3725383 90.3557423,97.5238095 C89.2203491,99.6750808 87.6965548,101.378145 85.7843137,102.633053 C83.8720726,103.887961 81.661077,104.784311 79.1512605,105.322129 C76.641444,105.859947 74.0121519,106.128852 71.2633053,106.128852 L41.1456583,106.128852 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/cancel-button": {
"title": "$:/core/images/cancel-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-cancel-button tc-image-button\" viewBox=\"0 0 128 128\" width=\"22pt\" height=\"22pt\">\n\t<g fill-rule=\"evenodd\">\n\t <path d=\"M64,76.3137085 L47.0294734,93.2842351 C43.9038742,96.4098343 38.8399231,96.4084656 35.7157288,93.2842712 C32.5978915,90.166434 32.5915506,85.0947409 35.7157649,81.9705266 L52.6862915,65 L35.7157649,48.0294734 C32.5901657,44.9038742 32.5915344,39.8399231 35.7157288,36.7157288 C38.833566,33.5978915 43.9052591,33.5915506 47.0294734,36.7157649 L64,53.6862915 L80.9705266,36.7157649 C84.0961258,33.5901657 89.1600769,33.5915344 92.2842712,36.7157288 C95.4021085,39.833566 95.4084494,44.9052591 92.2842351,48.0294734 L75.3137085,65 L92.2842351,81.9705266 C95.4098343,85.0961258 95.4084656,90.1600769 92.2842712,93.2842712 C89.166434,96.4021085 84.0947409,96.4084494 80.9705266,93.2842351 L64,76.3137085 Z M64,129 C99.346224,129 128,100.346224 128,65 C128,29.653776 99.346224,1 64,1 C28.653776,1 1.13686838e-13,29.653776 1.13686838e-13,65 C1.13686838e-13,100.346224 28.653776,129 64,129 Z M64,113 C90.509668,113 112,91.509668 112,65 C112,38.490332 90.509668,17 64,17 C37.490332,17 16,38.490332 16,65 C16,91.509668 37.490332,113 64,113 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/chevron-down": {
"title": "$:/core/images/chevron-down",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-chevron-down tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n\t<g fill-rule=\"evenodd\" transform=\"translate(64.000000, 40.500000) rotate(-270.000000) translate(-64.000000, -40.500000) translate(-22.500000, -26.500000)\">\n <path d=\"M112.743107,112.12741 C111.310627,113.561013 109.331747,114.449239 107.145951,114.449239 L27.9777917,114.449239 C23.6126002,114.449239 20.0618714,110.904826 20.0618714,106.532572 C20.0618714,102.169214 23.6059497,98.6159054 27.9777917,98.6159054 L99.2285381,98.6159054 L99.2285381,27.365159 C99.2285381,22.9999675 102.77295,19.4492387 107.145205,19.4492387 C111.508562,19.4492387 115.061871,22.993317 115.061871,27.365159 L115.061871,106.533318 C115.061871,108.71579 114.175869,110.694669 112.743378,112.127981 Z\" transform=\"translate(67.561871, 66.949239) rotate(-45.000000) translate(-67.561871, -66.949239) \"></path>\n <path d=\"M151.35638,112.12741 C149.923899,113.561013 147.94502,114.449239 145.759224,114.449239 L66.5910645,114.449239 C62.225873,114.449239 58.6751442,110.904826 58.6751442,106.532572 C58.6751442,102.169214 62.2192225,98.6159054 66.5910645,98.6159054 L137.841811,98.6159054 L137.841811,27.365159 C137.841811,22.9999675 141.386223,19.4492387 145.758478,19.4492387 C150.121835,19.4492387 153.675144,22.993317 153.675144,27.365159 L153.675144,106.533318 C153.675144,108.71579 152.789142,110.694669 151.356651,112.127981 Z\" transform=\"translate(106.175144, 66.949239) rotate(-45.000000) translate(-106.175144, -66.949239) \"></path>\n\t</g>\n</svg>"
},
"$:/core/images/chevron-left": {
"title": "$:/core/images/chevron-left",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-chevron-left tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\" version=\"1.1\">\n <g fill-rule=\"evenodd\" transform=\"translate(92.500000, 64.000000) rotate(-180.000000) translate(-92.500000, -64.000000) translate(6.000000, -3.000000)\">\n <path d=\"M112.743107,112.12741 C111.310627,113.561013 109.331747,114.449239 107.145951,114.449239 L27.9777917,114.449239 C23.6126002,114.449239 20.0618714,110.904826 20.0618714,106.532572 C20.0618714,102.169214 23.6059497,98.6159054 27.9777917,98.6159054 L99.2285381,98.6159054 L99.2285381,27.365159 C99.2285381,22.9999675 102.77295,19.4492387 107.145205,19.4492387 C111.508562,19.4492387 115.061871,22.993317 115.061871,27.365159 L115.061871,106.533318 C115.061871,108.71579 114.175869,110.694669 112.743378,112.127981 Z\" transform=\"translate(67.561871, 66.949239) rotate(-45.000000) translate(-67.561871, -66.949239) \"></path>\n <path d=\"M151.35638,112.12741 C149.923899,113.561013 147.94502,114.449239 145.759224,114.449239 L66.5910645,114.449239 C62.225873,114.449239 58.6751442,110.904826 58.6751442,106.532572 C58.6751442,102.169214 62.2192225,98.6159054 66.5910645,98.6159054 L137.841811,98.6159054 L137.841811,27.365159 C137.841811,22.9999675 141.386223,19.4492387 145.758478,19.4492387 C150.121835,19.4492387 153.675144,22.993317 153.675144,27.365159 L153.675144,106.533318 C153.675144,108.71579 152.789142,110.694669 151.356651,112.127981 Z\" transform=\"translate(106.175144, 66.949239) rotate(-45.000000) translate(-106.175144, -66.949239) \"></path>\n </g>\n</svg>"
},
"$:/core/images/chevron-right": {
"title": "$:/core/images/chevron-right",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-chevron-right tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\" transform=\"translate(-48.000000, -3.000000)\">\n <path d=\"M112.743107,112.12741 C111.310627,113.561013 109.331747,114.449239 107.145951,114.449239 L27.9777917,114.449239 C23.6126002,114.449239 20.0618714,110.904826 20.0618714,106.532572 C20.0618714,102.169214 23.6059497,98.6159054 27.9777917,98.6159054 L99.2285381,98.6159054 L99.2285381,27.365159 C99.2285381,22.9999675 102.77295,19.4492387 107.145205,19.4492387 C111.508562,19.4492387 115.061871,22.993317 115.061871,27.365159 L115.061871,106.533318 C115.061871,108.71579 114.175869,110.694669 112.743378,112.127981 Z\" transform=\"translate(67.561871, 66.949239) rotate(-45.000000) translate(-67.561871, -66.949239) \"></path>\n <path d=\"M151.35638,112.12741 C149.923899,113.561013 147.94502,114.449239 145.759224,114.449239 L66.5910645,114.449239 C62.225873,114.449239 58.6751442,110.904826 58.6751442,106.532572 C58.6751442,102.169214 62.2192225,98.6159054 66.5910645,98.6159054 L137.841811,98.6159054 L137.841811,27.365159 C137.841811,22.9999675 141.386223,19.4492387 145.758478,19.4492387 C150.121835,19.4492387 153.675144,22.993317 153.675144,27.365159 L153.675144,106.533318 C153.675144,108.71579 152.789142,110.694669 151.356651,112.127981 Z\" transform=\"translate(106.175144, 66.949239) rotate(-45.000000) translate(-106.175144, -66.949239) \"></path>\n </g>\n</svg>"
},
"$:/core/images/chevron-up": {
"title": "$:/core/images/chevron-up",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-chevron-up tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n\t<g fill-rule=\"evenodd\" transform=\"translate(64.000000, 89.500000) rotate(-90.000000) translate(-64.000000, -89.500000) translate(-22.500000, 22.500000)\">\n <path d=\"M112.743107,112.12741 C111.310627,113.561013 109.331747,114.449239 107.145951,114.449239 L27.9777917,114.449239 C23.6126002,114.449239 20.0618714,110.904826 20.0618714,106.532572 C20.0618714,102.169214 23.6059497,98.6159054 27.9777917,98.6159054 L99.2285381,98.6159054 L99.2285381,27.365159 C99.2285381,22.9999675 102.77295,19.4492387 107.145205,19.4492387 C111.508562,19.4492387 115.061871,22.993317 115.061871,27.365159 L115.061871,106.533318 C115.061871,108.71579 114.175869,110.694669 112.743378,112.127981 Z\" transform=\"translate(67.561871, 66.949239) rotate(-45.000000) translate(-67.561871, -66.949239) \"></path>\n <path d=\"M151.35638,112.12741 C149.923899,113.561013 147.94502,114.449239 145.759224,114.449239 L66.5910645,114.449239 C62.225873,114.449239 58.6751442,110.904826 58.6751442,106.532572 C58.6751442,102.169214 62.2192225,98.6159054 66.5910645,98.6159054 L137.841811,98.6159054 L137.841811,27.365159 C137.841811,22.9999675 141.386223,19.4492387 145.758478,19.4492387 C150.121835,19.4492387 153.675144,22.993317 153.675144,27.365159 L153.675144,106.533318 C153.675144,108.71579 152.789142,110.694669 151.356651,112.127981 Z\" transform=\"translate(106.175144, 66.949239) rotate(-45.000000) translate(-106.175144, -66.949239) \"></path>\n\t</g>\n</svg>"
},
"$:/core/images/clone-button": {
"title": "$:/core/images/clone-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-clone-button tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M32.2650915,96 L32.2650915,120.002359 C32.2650915,124.419334 35.8432884,128 40.2627323,128 L120.002359,128 C124.419334,128 128,124.421803 128,120.002359 L128,40.2627323 C128,35.8457573 124.421803,32.2650915 120.002359,32.2650915 L96,32.2650915 L96,48 L108.858899,48 C110.519357,48 111.853018,49.3405131 111.853018,50.9941198 L111.853018,108.858899 C111.853018,110.519357 110.512505,111.853018 108.858899,111.853018 L50.9941198,111.853018 C49.333661,111.853018 48,110.512505 48,108.858899 L48,96 L32.2650915,96 Z\"></path>\n <path d=\"M40,56 L32.0070969,56 C27.5881712,56 24,52.418278 24,48 C24,43.5907123 27.5848994,40 32.0070969,40 L40,40 L40,32.0070969 C40,27.5881712 43.581722,24 48,24 C52.4092877,24 56,27.5848994 56,32.0070969 L56,40 L63.9929031,40 C68.4118288,40 72,43.581722 72,48 C72,52.4092877 68.4151006,56 63.9929031,56 L56,56 L56,63.9929031 C56,68.4118288 52.418278,72 48,72 C43.5907123,72 40,68.4151006 40,63.9929031 L40,56 Z M7.9992458,0 C3.58138434,0 0,3.5881049 0,7.9992458 L0,88.0007542 C0,92.4186157 3.5881049,96 7.9992458,96 L88.0007542,96 C92.4186157,96 96,92.4118951 96,88.0007542 L96,7.9992458 C96,3.58138434 92.4118951,0 88.0007542,0 L7.9992458,0 Z M19.0010118,16 C17.3435988,16 16,17.336731 16,19.0010118 L16,76.9989882 C16,78.6564012 17.336731,80 19.0010118,80 L76.9989882,80 C78.6564012,80 80,78.663269 80,76.9989882 L80,19.0010118 C80,17.3435988 78.663269,16 76.9989882,16 L19.0010118,16 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/close-all-button": {
"title": "$:/core/images/close-all-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-close-all-button tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\" transform=\"translate(-23.000000, -23.000000)\">\n <path d=\"M43,131 L22.9976794,131 C18.5827987,131 15,127.418278 15,123 C15,118.590712 18.5806831,115 22.9976794,115 L43,115 L43,94.9976794 C43,90.5827987 46.581722,87 51,87 C55.4092877,87 59,90.5806831 59,94.9976794 L59,115 L79.0023206,115 C83.4172013,115 87,118.581722 87,123 C87,127.409288 83.4193169,131 79.0023206,131 L59,131 L59,151.002321 C59,155.417201 55.418278,159 51,159 C46.5907123,159 43,155.419317 43,151.002321 L43,131 Z\" transform=\"translate(51.000000, 123.000000) rotate(-45.000000) translate(-51.000000, -123.000000) \"></path>\n <path d=\"M43,59 L22.9976794,59 C18.5827987,59 15,55.418278 15,51 C15,46.5907123 18.5806831,43 22.9976794,43 L43,43 L43,22.9976794 C43,18.5827987 46.581722,15 51,15 C55.4092877,15 59,18.5806831 59,22.9976794 L59,43 L79.0023206,43 C83.4172013,43 87,46.581722 87,51 C87,55.4092877 83.4193169,59 79.0023206,59 L59,59 L59,79.0023206 C59,83.4172013 55.418278,87 51,87 C46.5907123,87 43,83.4193169 43,79.0023206 L43,59 Z\" transform=\"translate(51.000000, 51.000000) rotate(-45.000000) translate(-51.000000, -51.000000) \"></path>\n <path d=\"M115,59 L94.9976794,59 C90.5827987,59 87,55.418278 87,51 C87,46.5907123 90.5806831,43 94.9976794,43 L115,43 L115,22.9976794 C115,18.5827987 118.581722,15 123,15 C127.409288,15 131,18.5806831 131,22.9976794 L131,43 L151.002321,43 C155.417201,43 159,46.581722 159,51 C159,55.4092877 155.419317,59 151.002321,59 L131,59 L131,79.0023206 C131,83.4172013 127.418278,87 123,87 C118.590712,87 115,83.4193169 115,79.0023206 L115,59 Z\" transform=\"translate(123.000000, 51.000000) rotate(-45.000000) translate(-123.000000, -51.000000) \"></path>\n <path d=\"M115,131 L94.9976794,131 C90.5827987,131 87,127.418278 87,123 C87,118.590712 90.5806831,115 94.9976794,115 L115,115 L115,94.9976794 C115,90.5827987 118.581722,87 123,87 C127.409288,87 131,90.5806831 131,94.9976794 L131,115 L151.002321,115 C155.417201,115 159,118.581722 159,123 C159,127.409288 155.419317,131 151.002321,131 L131,131 L131,151.002321 C131,155.417201 127.418278,159 123,159 C118.590712,159 115,155.419317 115,151.002321 L115,131 Z\" transform=\"translate(123.000000, 123.000000) rotate(-45.000000) translate(-123.000000, -123.000000) \"></path>\n </g>\n</svg>"
},
"$:/core/images/close-button": {
"title": "$:/core/images/close-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-close-button tc-image-button\" viewBox=\"0 0 128 128\" width=\"22pt\" height=\"22pt\">\n <g fill-rule=\"evenodd\">\n <path d=\"M65.0864256,75.4091629 L14.9727349,125.522854 C11.8515951,128.643993 6.78104858,128.64922 3.65685425,125.525026 C0.539017023,122.407189 0.5336324,117.334539 3.65902635,114.209145 L53.7727171,64.0954544 L3.65902635,13.9817637 C0.537886594,10.8606239 0.532659916,5.79007744 3.65685425,2.6658831 C6.77469148,-0.451954124 11.8473409,-0.457338747 14.9727349,2.66805521 L65.0864256,52.7817459 L115.200116,2.66805521 C118.321256,-0.453084553 123.391803,-0.458311231 126.515997,2.6658831 C129.633834,5.78372033 129.639219,10.8563698 126.513825,13.9817637 L76.4001341,64.0954544 L126.513825,114.209145 C129.634965,117.330285 129.640191,122.400831 126.515997,125.525026 C123.39816,128.642863 118.32551,128.648248 115.200116,125.522854 L65.0864256,75.4091629 L65.0864256,75.4091629 Z\"></path>\n </g>\n</svg>\n"
},
"$:/core/images/close-others-button": {
"title": "$:/core/images/close-others-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-close-others-button tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M64,128 C99.346224,128 128,99.346224 128,64 C128,28.653776 99.346224,0 64,0 C28.653776,0 0,28.653776 0,64 C0,99.346224 28.653776,128 64,128 Z M64,112 C90.509668,112 112,90.509668 112,64 C112,37.490332 90.509668,16 64,16 C37.490332,16 16,37.490332 16,64 C16,90.509668 37.490332,112 64,112 Z M64,96 C81.673112,96 96,81.673112 96,64 C96,46.326888 81.673112,32 64,32 C46.326888,32 32,46.326888 32,64 C32,81.673112 46.326888,96 64,96 Z M64,80 C72.836556,80 80,72.836556 80,64 C80,55.163444 72.836556,48 64,48 C55.163444,48 48,55.163444 48,64 C48,72.836556 55.163444,80 64,80 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/delete-button": {
"title": "$:/core/images/delete-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-delete-button tc-image-button\" viewBox=\"0 0 128 128\" width=\"22pt\" height=\"22pt\">\n <g fill-rule=\"evenodd\" transform=\"translate(12.000000, 0.000000)\">\n <rect x=\"0\" y=\"11\" width=\"105\" height=\"16\" rx=\"8\"></rect>\n <rect x=\"28\" y=\"0\" width=\"48\" height=\"16\" rx=\"8\"></rect>\n <rect x=\"8\" y=\"16\" width=\"16\" height=\"112\" rx=\"8\"></rect>\n <rect x=\"8\" y=\"112\" width=\"88\" height=\"16\" rx=\"8\"></rect>\n <rect x=\"80\" y=\"16\" width=\"16\" height=\"112\" rx=\"8\"></rect>\n <rect x=\"56\" y=\"16\" width=\"16\" height=\"112\" rx=\"8\"></rect>\n <rect x=\"32\" y=\"16\" width=\"16\" height=\"112\" rx=\"8\"></rect>\n </g>\n</svg>"
},
"$:/core/images/done-button": {
"title": "$:/core/images/done-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-done-button tc-image-button\" viewBox=\"0 0 128 128\" width=\"22pt\" height=\"22pt\">\n <g fill-rule=\"evenodd\">\n <path d=\"M3.52445141,76.8322939 C2.07397484,75.3828178 1.17514421,73.3795385 1.17514421,71.1666288 L1.17514421,23.1836596 C1.17514421,18.7531992 4.75686621,15.1751442 9.17514421,15.1751442 C13.5844319,15.1751442 17.1751442,18.7606787 17.1751442,23.1836596 L17.1751442,63.1751442 L119.173716,63.1751442 C123.590457,63.1751442 127.175144,66.7568662 127.175144,71.1751442 C127.175144,75.5844319 123.592783,79.1751442 119.173716,79.1751442 L9.17657227,79.1751442 C6.96796403,79.1751442 4.9674142,78.279521 3.51911285,76.8315312 Z\" id=\"Rectangle-285\" transform=\"translate(64.175144, 47.175144) rotate(-45.000000) translate(-64.175144, -47.175144) \"></path>\n </g>\n</svg>"
},
"$:/core/images/down-arrow": {
"title": "$:/core/images/down-arrow",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-down-arrow tc-image-button\" viewBox=\"0 0 128 128\" width=\"22pt\" height=\"22pt\">\n <path d=\"M109.35638,81.3533152 C107.923899,82.7869182 105.94502,83.6751442 103.759224,83.6751442 L24.5910645,83.6751442 C20.225873,83.6751442 16.6751442,80.1307318 16.6751442,75.7584775 C16.6751442,71.3951199 20.2192225,67.8418109 24.5910645,67.8418109 L95.8418109,67.8418109 L95.8418109,-3.40893546 C95.8418109,-7.77412698 99.3862233,-11.3248558 103.758478,-11.3248558 C108.121835,-11.3248558 111.675144,-7.78077754 111.675144,-3.40893546 L111.675144,75.7592239 C111.675144,77.9416955 110.789142,79.9205745 109.356651,81.3538862 Z\" transform=\"translate(64.175144, 36.175144) rotate(45.000000) translate(-64.175144, -36.175144) \"></path>\n</svg>"
},
"$:/core/images/download-button": {
"title": "$:/core/images/download-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-download-button tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\"><g fill-rule=\"evenodd\"><path class=\"tc-image-download-button-ring\" d=\"M64,128 C99.346224,128 128,99.346224 128,64 C128,28.653776 99.346224,0 64,0 C28.653776,0 0,28.653776 0,64 C0,99.346224 28.653776,128 64,128 Z M64,112 C90.509668,112 112,90.509668 112,64 C112,37.490332 90.509668,16 64,16 C37.490332,16 16,37.490332 16,64 C16,90.509668 37.490332,112 64,112 Z\"/><path d=\"M34.3496823,66.4308767 L61.2415823,93.634668 C63.0411536,95.4551107 65.9588502,95.4551107 67.7584215,93.634668 L94.6503215,66.4308767 C96.4498928,64.610434 96.4498928,61.6588981 94.6503215,59.8384554 C93.7861334,58.9642445 92.6140473,58.4731195 91.3919019,58.4731195 L82.9324098,58.4731195 C80.3874318,58.4731195 78.3243078,56.3860674 78.3243078,53.8115729 L78.3243078,38.6615466 C78.3243078,36.0870521 76.2611837,34 73.7162058,34 L55.283798,34 C52.7388201,34 50.675696,36.0870521 50.675696,38.6615466 L50.675696,38.6615466 L50.675696,53.8115729 C50.675696,56.3860674 48.612572,58.4731195 46.0675941,58.4731195 L37.608102,58.4731195 C35.063124,58.4731195 33,60.5601716 33,63.134666 C33,64.3709859 33.4854943,65.5566658 34.3496823,66.4308767 L34.3496823,66.4308767 Z\"/></g></svg>"
},
"$:/core/images/edit-button": {
"title": "$:/core/images/edit-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-edit-button tc-image-button\" viewBox=\"0 0 128 128\" width=\"22pt\" height=\"22pt\">\n <g fill-rule=\"evenodd\">\n <path d=\"M116.870058,45.3431458 L108.870058,45.3431458 L108.870058,45.3431458 L108.870058,61.3431458 L116.870058,61.3431458 L116.870058,45.3431458 Z M124.870058,45.3431458 L127.649881,45.3431458 C132.066101,45.3431458 135.656854,48.9248678 135.656854,53.3431458 C135.656854,57.7524334 132.07201,61.3431458 127.649881,61.3431458 L124.870058,61.3431458 L124.870058,45.3431458 Z M100.870058,45.3431458 L15.6638275,45.3431458 C15.5064377,45.3431458 15.3501085,45.3476943 15.1949638,45.3566664 L15.1949638,45.3566664 C15.0628002,45.3477039 14.928279,45.3431458 14.7913977,45.3431458 C6.68160973,45.3431458 -8.34314575,53.3431458 -8.34314575,53.3431458 C-8.34314575,53.3431458 6.85614548,61.3431458 14.7913977,61.3431458 C14.9266533,61.3431458 15.0596543,61.3384973 15.190398,61.3293588 C15.3470529,61.3385075 15.5049057,61.3431458 15.6638275,61.3431458 L100.870058,61.3431458 L100.870058,45.3431458 L100.870058,45.3431458 Z\" transform=\"translate(63.656854, 53.343146) rotate(-45.000000) translate(-63.656854, -53.343146) \"></path>\n <path d=\"M35.1714596,124.189544 C41.9594858,123.613403 49.068777,121.917633 58.85987,118.842282 C60.6854386,118.268877 62.4306907,117.705515 65.1957709,116.802278 C81.1962861,111.575575 87.0734839,109.994907 93.9414474,109.655721 C102.29855,109.242993 107.795169,111.785371 111.520478,118.355045 C112.610163,120.276732 115.051363,120.951203 116.97305,119.861518 C118.894737,118.771832 119.569207,116.330633 118.479522,114.408946 C113.146151,105.003414 104.734907,101.112919 93.5468356,101.66546 C85.6716631,102.054388 79.4899908,103.716944 62.7116783,109.197722 C59.9734132,110.092199 58.2519873,110.64787 56.4625698,111.20992 C37.002649,117.322218 25.6914684,118.282267 16.8654804,112.957098 C14.9739614,111.815848 12.5154166,112.424061 11.3741667,114.31558 C10.2329168,116.207099 10.84113,118.665644 12.7326489,119.806894 C19.0655164,123.627836 26.4866335,124.926678 35.1714596,124.189544 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/erase": {
"title": "$:/core/images/erase",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-erase tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M60.0870401,127.996166 L123.102318,64.980888 C129.636723,58.4464827 129.629513,47.8655877 123.098967,41.3350425 L99.4657866,17.7018617 C92.927448,11.1635231 82.3486358,11.1698163 75.8199411,17.698511 L4.89768189,88.6207702 C-1.63672343,95.1551755 -1.6295126,105.736071 4.90103262,112.266616 L20.6305829,127.996166 L60.0870401,127.996166 Z M25.1375576,120.682546 L10.812569,106.357558 C7.5455063,103.090495 7.54523836,97.793808 10.8048093,94.5342371 L46.2691086,59.0699377 L81.7308914,94.5317205 L55.5800654,120.682546 L25.1375576,120.682546 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/excise": {
"title": "$:/core/images/excise",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-excise tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M56,107.313709 L53.6568542,109.656854 C50.5326599,112.781049 45.4673401,112.781049 42.3431457,109.656854 C39.2189514,106.53266 39.2189514,101.46734 42.3431458,98.3431457 L58.3431458,82.3431457 C61.4673401,79.2189514 66.5326599,79.2189514 69.6568542,82.3431458 L85.6568542,98.3431458 C88.7810486,101.46734 88.7810486,106.53266 85.6568542,109.656854 C82.5326599,112.781049 77.4673401,112.781049 74.3431458,109.656854 L72,107.313708 L72,121.597798 C72,125.133636 68.418278,128 64,128 C59.581722,128 56,125.133636 56,121.597798 L56,107.313709 Z M0,40.0070969 C0,35.5848994 3.59071231,32 8,32 C12.418278,32 16,35.5881712 16,40.0070969 L16,71.9929031 C16,76.4151006 12.4092877,80 8,80 C3.581722,80 0,76.4118288 0,71.9929031 L0,40.0070969 Z M32,40.0070969 C32,35.5848994 35.5907123,32 40,32 C44.418278,32 48,35.5881712 48,40.0070969 L48,71.9929031 C48,76.4151006 44.4092877,80 40,80 C35.581722,80 32,76.4118288 32,71.9929031 L32,40.0070969 Z M80,40.0070969 C80,35.5848994 83.5907123,32 88,32 C92.418278,32 96,35.5881712 96,40.0070969 L96,71.9929031 C96,76.4151006 92.4092877,80 88,80 C83.581722,80 80,76.4118288 80,71.9929031 L80,40.0070969 Z M56,8.00709688 C56,3.58489938 59.5907123,0 64,0 C68.418278,0 72,3.58817117 72,8.00709688 L72,39.9929031 C72,44.4151006 68.4092877,48 64,48 C59.581722,48 56,44.4118288 56,39.9929031 L56,8.00709688 Z M112,40.0070969 C112,35.5848994 115.590712,32 120,32 C124.418278,32 128,35.5881712 128,40.0070969 L128,71.9929031 C128,76.4151006 124.409288,80 120,80 C115.581722,80 112,76.4118288 112,71.9929031 L112,40.0070969 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/export-button": {
"title": "$:/core/images/export-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-export-button tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M8.00348646,127.999999 C8.00464867,128 8.00581094,128 8.00697327,128 L119.993027,128 C122.205254,128 124.207939,127.101378 125.657096,125.651198 L125.656838,125.65759 C127.104563,124.210109 128,122.21009 128,119.999949 L128,56.0000511 C128,51.5817449 124.409288,48 120,48 C115.581722,48 112,51.5797863 112,56.0000511 L112,112 L16,112 L16,56.0000511 C16,51.5817449 12.4092877,48 8,48 C3.581722,48 7.10542736e-15,51.5797863 7.10542736e-15,56.0000511 L7.10542736e-15,119.999949 C7.10542736e-15,124.418255 3.59071231,128 8,128 C8.00116233,128 8.0023246,128 8.00348681,127.999999 Z M56.6235633,27.3113724 L47.6580188,36.2769169 C44.5333664,39.4015692 39.4634864,39.4061295 36.339292,36.2819351 C33.2214548,33.1640979 33.2173444,28.0901742 36.3443103,24.9632084 L58.9616908,2.34582788 C60.5248533,0.782665335 62.5748436,0.000361191261 64.624516,2.38225238e-14 L64.6193616,0.00151809229 C66.6695374,0.000796251595 68.7211167,0.781508799 70.2854358,2.34582788 L92.9028163,24.9632084 C96.0274686,28.0878607 96.0320289,33.1577408 92.9078345,36.2819351 C89.7899973,39.3997724 84.7160736,39.4038827 81.5891078,36.2769169 L72.6235633,27.3113724 L72.6235633,88.5669606 C72.6235633,92.9781015 69.0418413,96.5662064 64.6235633,96.5662064 C60.2142756,96.5662064 56.6235633,92.984822 56.6235633,88.5669606 L56.6235633,27.3113724 L56.6235633,27.3113724 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/file": {
"title": "$:/core/images/file",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-file tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"nonzero\">\n <path d=\"M111.96811,30.5 L112,30.5 L112,119.999079 C112,124.417866 108.419113,128 104.000754,128 L23.9992458,128 C19.5813843,128 16,124.417687 16,119.999079 L16,8.00092105 C16,3.58213437 19.5808867,0 23.9992458,0 L81,0 L81,0.0201838424 C83.1589869,-0.071534047 85.3482153,0.707077645 86.9982489,2.35711116 L109.625176,24.9840387 C111.151676,26.510538 111.932942,28.4998414 111.96811,30.5 L111.96811,30.5 Z M81,8 L24,8 L24,120 L104,120 L104,30.5 L89.0003461,30.5 C84.5818769,30.5 81,26.9216269 81,22.4996539 L81,8 Z\"></path>\n <rect x=\"32\" y=\"36\" width=\"64\" height=\"8\" rx=\"4\"></rect>\n <rect x=\"32\" y=\"52\" width=\"64\" height=\"8\" rx=\"4\"></rect>\n <rect x=\"32\" y=\"68\" width=\"64\" height=\"8\" rx=\"4\"></rect>\n <rect x=\"32\" y=\"84\" width=\"64\" height=\"8\" rx=\"4\"></rect>\n <rect x=\"32\" y=\"100\" width=\"64\" height=\"8\" rx=\"4\"></rect>\n <rect x=\"32\" y=\"20\" width=\"40\" height=\"8\" rx=\"4\"></rect>\n </g>\n</svg>"
},
"$:/core/images/fixed-height": {
"title": "$:/core/images/fixed-height",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-fixed-height tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M60,35.6568542 L50.8284271,44.8284271 C49.26633,46.3905243 46.73367,46.3905243 45.1715729,44.8284271 C43.6094757,43.26633 43.6094757,40.73367 45.1715729,39.1715729 L61.1715729,23.1715729 C62.73367,21.6094757 65.2663299,21.6094757 66.8284271,23.1715728 L82.8284278,39.1715728 C84.390525,40.7336699 84.390525,43.2663299 82.8284279,44.8284271 C81.2663308,46.3905243 78.7336709,46.3905243 77.1715737,44.8284272 L68,35.6568539 L68,93.3431461 L77.1715737,84.1715728 C78.7336709,82.6094757 81.2663308,82.6094757 82.8284279,84.1715729 C84.390525,85.7336701 84.390525,88.2663301 82.8284278,89.8284272 L66.8284271,105.828427 C65.2663299,107.390524 62.73367,107.390524 61.1715729,105.828427 L45.1715729,89.8284271 C43.6094757,88.26633 43.6094757,85.73367 45.1715729,84.1715729 C46.73367,82.6094757 49.26633,82.6094757 50.8284271,84.1715729 L60,93.3431458 L60,35.6568542 L60,35.6568542 Z M16,116 L112,116 C114.209139,116 116,114.209139 116,112 C116,109.790861 114.209139,108 112,108 L16,108 C13.790861,108 12,109.790861 12,112 C12,114.209139 13.790861,116 16,116 L16,116 Z M16,20 L112,20 C114.209139,20 116,18.209139 116,16 C116,13.790861 114.209139,12 112,12 L16,12 C13.790861,12 12,13.790861 12,16 C12,18.209139 13.790861,20 16,20 L16,20 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/fold-all-button": {
"title": "$:/core/images/fold-all-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-fold-all tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <rect x=\"0\" y=\"0\" width=\"128\" height=\"16\" rx=\"8\"></rect>\n <rect x=\"0\" y=\"64\" width=\"128\" height=\"16\" rx=\"8\"></rect>\n <path d=\"M64.0292774,58.6235628 C61.9791013,58.6242848 59.9275217,57.8435723 58.3632024,56.279253 L35.7458219,33.6618725 C32.6211696,30.5372202 32.6166093,25.4673401 35.7408036,22.3431458 C38.8586409,19.2253085 43.9325646,19.2211982 47.0595304,22.348164 L64.0250749,39.3137085 L80.9906194,22.348164 C84.1152717,19.2235117 89.1851518,19.2189514 92.3093461,22.3431458 C95.4271834,25.460983 95.4312937,30.5349067 92.3043279,33.6618725 L69.6869474,56.279253 C68.1237851,57.8424153 66.0737951,58.6247195 64.0241231,58.6250809 Z\" transform=\"translate(64.024316, 39.313708) scale(1, -1) translate(-64.024316, -39.313708) \"></path>\n <path d=\"M64.0292774,123.621227 C61.9791013,123.621949 59.9275217,122.841236 58.3632024,121.276917 L35.7458219,98.6595365 C32.6211696,95.5348842 32.6166093,90.4650041 35.7408036,87.3408098 C38.8586409,84.2229725 43.9325646,84.2188622 47.0595304,87.345828 L64.0250749,104.311373 L80.9906194,87.345828 C84.1152717,84.2211757 89.1851518,84.2166154 92.3093461,87.3408098 C95.4271834,90.458647 95.4312937,95.5325707 92.3043279,98.6595365 L69.6869474,121.276917 C68.1237851,122.840079 66.0737951,123.622383 64.0241231,123.622745 Z\" transform=\"translate(64.024316, 104.311372) scale(1, -1) translate(-64.024316, -104.311372) \"></path>\n </g>\n</svg>"
},
"$:/core/images/fold-button": {
"title": "$:/core/images/fold-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-fold tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <rect x=\"0\" y=\"0\" width=\"128\" height=\"16\" rx=\"8\"></rect>\n <path d=\"M64.0292774,63.6235628 C61.9791013,63.6242848 59.9275217,62.8435723 58.3632024,61.279253 L35.7458219,38.6618725 C32.6211696,35.5372202 32.6166093,30.4673401 35.7408036,27.3431458 C38.8586409,24.2253085 43.9325646,24.2211982 47.0595304,27.348164 L64.0250749,44.3137085 L80.9906194,27.348164 C84.1152717,24.2235117 89.1851518,24.2189514 92.3093461,27.3431458 C95.4271834,30.460983 95.4312937,35.5349067 92.3043279,38.6618725 L69.6869474,61.279253 C68.1237851,62.8424153 66.0737951,63.6247195 64.0241231,63.6250809 Z\" transform=\"translate(64.024316, 44.313708) scale(1, -1) translate(-64.024316, -44.313708) \"></path>\n <path d=\"M64.0049614,105.998482 C61.9547853,105.999204 59.9032057,105.218491 58.3388864,103.654172 L35.7215059,81.0367916 C32.5968535,77.9121393 32.5922933,72.8422592 35.7164876,69.7180649 C38.8343248,66.6002276 43.9082485,66.5961173 47.0352144,69.7230831 L64.0007589,86.6886276 L80.9663034,69.7230831 C84.0909557,66.5984308 89.1608358,66.5938705 92.2850301,69.7180649 C95.4028673,72.8359021 95.4069777,77.9098258 92.2800119,81.0367916 L69.6626314,103.654172 C68.099469,105.217334 66.0494791,105.999639 63.999807,106 Z\" transform=\"translate(64.000000, 86.688628) scale(1, -1) translate(-64.000000, -86.688628) \"></path>\n </g>\n</svg>"
},
"$:/core/images/fold-others-button": {
"title": "$:/core/images/fold-others-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-fold-others tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <rect x=\"0\" y=\"56.0314331\" width=\"128\" height=\"16\" rx=\"8\"></rect>\n <path d=\"M101.657101,104.948818 C100.207918,103.498614 98.2051847,102.599976 95.9929031,102.599976 L72,102.599976 L72,78.6070725 C72,76.3964271 71.1036108,74.3936927 69.6545293,72.9441002 L69.6571005,72.9488183 C68.2079177,71.4986143 66.2051847,70.5999756 63.9929031,70.5999756 L32.0070969,70.5999756 C27.5881712,70.5999756 24,74.1816976 24,78.5999756 C24,83.0092633 27.5848994,86.5999756 32.0070969,86.5999756 L56,86.5999756 L56,110.592879 C56,112.803524 56.8963895,114.806259 58.3454713,116.255852 L58.3429,116.251133 C59.7920828,117.701337 61.7948156,118.599976 64.0070969,118.599976 L88,118.599976 L88,142.592879 C88,147.011804 91.581722,150.599976 96,150.599976 C100.409288,150.599976 104,147.015076 104,142.592879 L104,110.607072 C104,108.396427 103.103611,106.393693 101.654529,104.9441 Z\" transform=\"translate(64.000000, 110.599976) rotate(-45.000000) translate(-64.000000, -110.599976) \"></path>\n <path d=\"M101.725643,11.7488671 C100.27646,10.2986632 98.2737272,9.40002441 96.0614456,9.40002441 L72.0685425,9.40002441 L72.0685425,-14.5928787 C72.0685425,-16.8035241 71.1721533,-18.8062584 69.7230718,-20.255851 L69.725643,-20.2511329 C68.2764602,-21.7013368 66.2737272,-22.5999756 64.0614456,-22.5999756 L32.0756394,-22.5999756 C27.6567137,-22.5999756 24.0685425,-19.0182536 24.0685425,-14.5999756 C24.0685425,-10.1906879 27.6534419,-6.59997559 32.0756394,-6.59997559 L56.0685425,-6.59997559 L56.0685425,17.3929275 C56.0685425,19.6035732 56.964932,21.6063078 58.4140138,23.0559004 L58.4114425,23.0511823 C59.8606253,24.5013859 61.8633581,25.4000244 64.0756394,25.4000244 L88.0685425,25.4000244 L88.0685425,49.3929275 C88.0685425,53.8118532 91.6502645,57.4000244 96.0685425,57.4000244 C100.47783,57.4000244 104.068542,53.815125 104.068542,49.3929275 L104.068542,17.4071213 C104.068542,15.1964759 103.172153,13.1937416 101.723072,11.744149 Z\" transform=\"translate(64.068542, 17.400024) scale(1, -1) rotate(-45.000000) translate(-64.068542, -17.400024) \"></path>\n </g>\n</svg>"
},
"$:/core/images/folder": {
"title": "$:/core/images/folder",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-folder tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M55.6943257,128.000004 L7.99859666,128.000004 C3.5810937,128.000004 0,124.413822 0,119.996384 L0,48.0036243 C0,43.5833471 3.58387508,40.0000044 7.99859666,40.0000044 L16,40.0000044 L16,31.9999914 C16,27.5817181 19.5783731,24 24.0003461,24 L55.9996539,24 C60.4181231,24 64,27.5800761 64,31.9999914 L64,40.0000044 L104.001403,40.0000044 C108.418906,40.0000044 112,43.5861868 112,48.0036243 L112,59.8298353 L104,59.7475921 L104,51.9994189 C104,49.7887607 102.207895,48.0000044 99.9972215,48.0000044 L56,48.0000044 L56,36.0000255 C56,33.7898932 54.2072328,32 51.9957423,32 L28.0042577,32 C25.7890275,32 24,33.7908724 24,36.0000255 L24,48.0000044 L12.0027785,48.0000044 C9.78987688,48.0000044 8,49.7906032 8,51.9994189 L8,116.00059 C8,118.211248 9.79210499,120.000004 12.0027785,120.000004 L58.7630167,120.000004 L55.6943257,128.000004 L55.6943257,128.000004 Z\"></path>\n <path d=\"M23.8728955,55.5 L119.875702,55.5 C124.293205,55.5 126.87957,59.5532655 125.650111,64.5630007 L112.305967,118.936999 C111.077582,123.942356 106.497904,128 102.083183,128 L6.08037597,128 C1.66287302,128 -0.923492342,123.946735 0.305967145,118.936999 L13.650111,64.5630007 C14.878496,59.5576436 19.4581739,55.5 23.8728955,55.5 L23.8728955,55.5 L23.8728955,55.5 Z M25.6530124,64 L113.647455,64 C115.858129,64 117.151473,66.0930612 116.538306,68.6662267 L105.417772,115.333773 C104.803671,117.910859 102.515967,120 100.303066,120 L12.3086228,120 C10.0979492,120 8.8046054,117.906939 9.41777189,115.333773 L20.5383062,68.6662267 C21.1524069,66.0891409 23.4401107,64 25.6530124,64 L25.6530124,64 L25.6530124,64 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/full-screen-button": {
"title": "$:/core/images/full-screen-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-full-screen-button tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g>\n <g>\n <path d=\"M5.29777586e-31,8 C1.59060409e-15,3.581722 3.581722,0 8,0 L40,0 C44.418278,0 48,3.581722 48,8 C48,12.418278 44.418278,16 40,16 L16,16 L16,40 C16,44.418278 12.418278,48 8,48 C3.581722,48 -3.55271368e-15,44.418278 0,40 L3.55271368e-15,8 Z\"></path>\n </g>\n <g transform=\"translate(104.000000, 104.000000) rotate(-180.000000) translate(-104.000000, -104.000000) translate(80.000000, 80.000000)\">\n <path d=\"M5.29777586e-31,8 C1.59060409e-15,3.581722 3.581722,0 8,0 L40,0 C44.418278,0 48,3.581722 48,8 C48,12.418278 44.418278,16 40,16 L16,16 L16,40 C16,44.418278 12.418278,48 8,48 C3.581722,48 -3.55271368e-15,44.418278 0,40 L3.55271368e-15,8 Z\"></path>\n </g>\n <g transform=\"translate(24.000000, 104.000000) rotate(-90.000000) translate(-24.000000, -104.000000) translate(0.000000, 80.000000)\">\n <path d=\"M5.29777586e-31,8 C1.59060409e-15,3.581722 3.581722,0 8,0 L40,0 C44.418278,0 48,3.581722 48,8 C48,12.418278 44.418278,16 40,16 L16,16 L16,40 C16,44.418278 12.418278,48 8,48 C3.581722,48 -3.55271368e-15,44.418278 0,40 L3.55271368e-15,8 Z\"></path>\n </g>\n <g transform=\"translate(104.000000, 24.000000) rotate(90.000000) translate(-104.000000, -24.000000) translate(80.000000, 0.000000)\">\n <path d=\"M5.29777586e-31,8 C1.59060409e-15,3.581722 3.581722,0 8,0 L40,0 C44.418278,0 48,3.581722 48,8 C48,12.418278 44.418278,16 40,16 L16,16 L16,40 C16,44.418278 12.418278,48 8,48 C3.581722,48 -3.55271368e-15,44.418278 0,40 L3.55271368e-15,8 Z\"></path>\n </g>\n </g>\n</svg>"
},
"$:/core/images/github": {
"title": "$:/core/images/github",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-github tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M63.9383506,1.60695328 C28.6017227,1.60695328 -0.055756057,30.2970814 -0.055756057,65.6906208 C-0.055756057,94.003092 18.2804728,118.019715 43.7123154,126.493393 C46.9143781,127.083482 48.0812647,125.104717 48.0812647,123.405261 C48.0812647,121.886765 48.02626,117.85449 47.9948287,112.508284 C30.1929317,116.379268 26.4368926,103.916587 26.4368926,103.916587 C23.5255693,96.5129372 19.3294921,94.5420399 19.3294921,94.5420399 C13.5186324,90.5687739 19.7695302,90.6474524 19.7695302,90.6474524 C26.1933001,91.099854 29.5721638,97.2525155 29.5721638,97.2525155 C35.2808718,107.044059 44.5531024,104.215566 48.1991321,102.575118 C48.7806109,98.4366275 50.4346826,95.612068 52.2616263,94.0109598 C38.0507543,92.3941159 23.1091047,86.8944862 23.1091047,62.3389152 C23.1091047,55.3443933 25.6039634,49.6205298 29.6978889,45.1437211 C29.0378318,43.5229433 26.8415704,37.0044266 30.3265147,28.1845627 C30.3265147,28.1845627 35.6973364,26.4615028 47.9241083,34.7542205 C53.027764,33.330139 58.5046663,32.6220321 63.9462084,32.5944947 C69.3838216,32.6220321 74.856795,33.330139 79.9683085,34.7542205 C92.1872225,26.4615028 97.5501864,28.1845627 97.5501864,28.1845627 C101.042989,37.0044266 98.8467271,43.5229433 98.190599,45.1437211 C102.292382,49.6205298 104.767596,55.3443933 104.767596,62.3389152 C104.767596,86.9574291 89.8023734,92.3744463 75.5482834,93.9598188 C77.8427675,95.9385839 79.8897303,99.8489072 79.8897303,105.828476 C79.8897303,114.392635 79.8111521,121.304544 79.8111521,123.405261 C79.8111521,125.120453 80.966252,127.114954 84.2115327,126.489459 C109.623731,117.996111 127.944244,93.9952241 127.944244,65.6906208 C127.944244,30.2970814 99.2867652,1.60695328 63.9383506,1.60695328\"></path>\n </g>\n </svg>\n"
},
"$:/core/images/globe": {
"title": "$:/core/images/globe",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-globe tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M72.8111354,37.1275855 C72.8111354,37.9789875 72.8111354,38.8303894 72.8111354,39.6817913 C72.8111354,41.8784743 73.7885604,46.5631866 72.8111354,48.5143758 C71.3445471,51.4420595 68.1617327,52.0543531 66.4170946,54.3812641 C65.2352215,55.9575873 61.7987417,64.9821523 62.7262858,67.3005778 C66.6959269,77.2228204 74.26087,70.4881886 80.6887657,76.594328 C81.5527211,77.415037 83.5758191,78.8666631 83.985137,79.8899578 C87.2742852,88.1128283 76.4086873,94.8989524 87.7419325,106.189751 C88.9872885,107.430443 91.555495,102.372895 91.8205061,101.575869 C92.6726866,99.0129203 98.5458765,96.1267309 100.908882,94.5234439 C102.928056,93.1534443 105.782168,91.8557166 107.236936,89.7775886 C109.507391,86.5342557 108.717505,82.2640435 110.334606,79.0328716 C112.473794,74.7585014 114.163418,69.3979002 116.332726,65.0674086 C120.230862,57.2857361 121.054075,67.1596684 121.400359,67.5059523 C121.757734,67.8633269 122.411167,67.5059523 122.916571,67.5059523 C123.011132,67.5059523 124.364019,67.6048489 124.432783,67.5059523 C125.0832,66.5705216 123.390209,49.5852316 123.114531,48.2089091 C121.710578,41.1996597 116.17083,32.4278331 111.249523,27.7092761 C104.975994,21.6942076 104.160516,11.5121686 92.9912146,12.7547535 C92.7872931,12.7774397 87.906794,22.9027026 85.2136766,26.2672064 C81.486311,30.9237934 82.7434931,22.1144904 78.6876623,22.1144904 C78.6065806,22.1144904 77.5045497,22.0107615 77.4353971,22.1144904 C76.8488637,22.9942905 75.9952305,26.0101404 75.1288269,26.5311533 C74.8635477,26.6906793 73.4071369,26.2924966 73.2826811,26.5311533 C71.0401728,30.8313939 81.5394677,28.7427264 79.075427,34.482926 C76.7225098,39.9642538 72.747373,32.4860199 72.747373,43.0434079\"></path>\n <path d=\"M44.4668556,7.01044608 C54.151517,13.1403033 45.1489715,19.2084878 47.1611905,23.2253896 C48.8157833,26.5283781 51.4021933,28.6198851 48.8753629,33.038878 C46.8123257,36.6467763 42.0052989,37.0050492 39.251679,39.7621111 C36.2115749,42.8060154 33.7884281,48.7028116 32.4624592,52.6732691 C30.8452419,57.5158356 47.0088721,59.5388126 44.5246867,63.6811917 C43.1386839,65.9923513 37.7785192,65.1466282 36.0880227,63.8791519 C34.9234453,63.0059918 32.4946425,63.3331166 31.6713597,62.0997342 C29.0575851,58.1839669 29.4107339,54.0758543 28.0457962,49.9707786 C27.1076833,47.1493864 21.732611,47.8501656 20.2022714,49.3776393 C19.6790362,49.8998948 19.8723378,51.1703278 19.8723378,51.8829111 C19.8723378,57.1682405 26.9914913,55.1986414 26.9914913,58.3421973 C26.9914913,72.9792302 30.9191897,64.8771867 38.1313873,69.6793121 C48.1678018,76.3618966 45.9763926,76.981595 53.0777543,84.0829567 C56.7511941,87.7563965 60.8192437,87.7689005 62.503478,93.3767069 C64.1046972,98.7081071 53.1759798,98.7157031 50.786754,100.825053 C49.663965,101.816317 47.9736094,104.970571 46.5680513,105.439676 C44.7757187,106.037867 43.334221,105.93607 41.6242359,107.219093 C39.1967302,109.040481 37.7241465,112.151588 37.6034934,112.030935 C35.4555278,109.88297 34.0848666,96.5511248 33.7147244,93.7726273 C33.1258872,89.3524817 28.1241923,88.2337027 26.7275443,84.7420826 C25.1572737,80.8164061 28.2518481,75.223612 25.599097,70.9819941 C19.0797019,60.557804 13.7775712,56.4811506 10.2493953,44.6896152 C9.3074899,41.5416683 13.5912267,38.1609942 15.1264825,35.8570308 C17.0029359,33.0410312 17.7876232,30.0028946 19.8723378,27.2224065 C22.146793,24.1888519 40.8551166,9.46076832 43.8574051,8.63490613 
L44.4668556,7.01044608 Z\"></path>\n <path d=\"M64,126 C98.2416545,126 126,98.2416545 126,64 C126,29.7583455 98.2416545,2 64,2 C29.7583455,2 2,29.7583455 2,64 C2,98.2416545 29.7583455,126 64,126 Z M64,120 C94.927946,120 120,94.927946 120,64 C120,33.072054 94.927946,8 64,8 C33.072054,8 8,33.072054 8,64 C8,94.927946 33.072054,120 64,120 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/heading-1": {
"title": "$:/core/images/heading-1",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-heading-1 tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M14,30 L27.25,30 L27.25,60.104 L61.7,60.104 L61.7,30 L74.95,30 L74.95,105.684 L61.7,105.684 L61.7,71.552 L27.25,71.552 L27.25,105.684 L14,105.684 L14,30 Z M84.3350766,43.78 C86.8790893,43.78 89.3523979,43.5680021 91.7550766,43.144 C94.1577553,42.7199979 96.3307336,42.0133383 98.2740766,41.024 C100.21742,40.0346617 101.87807,38.7626744 103.256077,37.208 C104.634084,35.6533256 105.535075,33.7453446 105.959077,31.484 L115.817077,31.484 L115.817077,105.684 L102.567077,105.684 L102.567077,53.32 L84.3350766,53.32 L84.3350766,43.78 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/heading-2": {
"title": "$:/core/images/heading-2",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-heading-2 tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M6,30 L19.25,30 L19.25,60.104 L53.7,60.104 L53.7,30 L66.95,30 L66.95,105.684 L53.7,105.684 L53.7,71.552 L19.25,71.552 L19.25,105.684 L6,105.684 L6,30 Z M125.519077,105.684 L74.8510766,105.684 C74.9217436,99.5359693 76.4057288,94.1653563 79.3030766,89.572 C82.2004244,84.9786437 86.1577182,80.986017 91.1750766,77.594 C93.5777553,75.8273245 96.0863969,74.113675 98.7010766,72.453 C101.315756,70.792325 103.718399,69.0080095 105.909077,67.1 C108.099754,65.1919905 109.901736,63.1250111 111.315077,60.899 C112.728417,58.6729889 113.47041,56.1113478 113.541077,53.214 C113.541077,51.8713266 113.382078,50.4403409 113.064077,48.921 C112.746075,47.4016591 112.127748,45.9883399 111.209077,44.681 C110.290405,43.3736601 109.018418,42.2783377 107.393077,41.395 C105.767735,40.5116622 103.647756,40.07 101.033077,40.07 C98.6303979,40.07 96.6340846,40.5469952 95.0440766,41.501 C93.4540687,42.4550048 92.1820814,43.762325 91.2280766,45.423 C90.2740719,47.083675 89.5674123,49.0446554 89.1080766,51.306 C88.648741,53.5673446 88.3837436,56.0053203 88.3130766,58.62 L76.2290766,58.62 C76.2290766,54.5213128 76.7767378,50.7230175 77.8720766,47.225 C78.9674154,43.7269825 80.610399,40.7060127 82.8010766,38.162 C84.9917542,35.6179873 87.6593942,33.6216739 90.8040766,32.173 C93.948759,30.7243261 97.6057224,30 101.775077,30 C106.297766,30 110.078395,30.7419926 113.117077,32.226 C116.155758,33.7100074 118.611401,35.5826554 120.484077,37.844 C122.356753,40.1053446 123.681739,42.5609868 124.459077,45.211 C125.236414,47.8610133 125.625077,50.3873213 125.625077,52.79 C125.625077,55.7580148 125.165748,58.4433213 124.247077,60.846 C123.328405,63.2486787 122.091751,65.4569899 120.537077,67.471 C118.982402,69.4850101 117.215753,71.3399915 115.237077,73.036 C113.2584,74.7320085 111.209087,76.3219926 109.089077,77.806 C106.969066,79.2900074 104.849087,80.7033266 102.729077,82.046 C100.609066,83.3886734 98.6480856,84.7313266 96.8460766,86.074 C95.0440676,87.4166734 93.47175,88.8123261 92.1290766,90.261 C90.7864032,91.7096739 89.8677458,93.2466585 89.3730766,94.872 L125.519077,94.872 L125.519077,105.684 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/heading-3": {
"title": "$:/core/images/heading-3",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-heading-3 tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M6,30 L19.25,30 L19.25,60.104 L53.7,60.104 L53.7,30 L66.95,30 L66.95,105.684 L53.7,105.684 L53.7,71.552 L19.25,71.552 L19.25,105.684 L6,105.684 L6,30 Z M94.8850766,62.224 C96.8637532,62.294667 98.8424001,62.1533351 100.821077,61.8 C102.799753,61.4466649 104.566402,60.8283378 106.121077,59.945 C107.675751,59.0616623 108.930072,57.8426744 109.884077,56.288 C110.838081,54.7333256 111.315077,52.8253446 111.315077,50.564 C111.315077,47.3839841 110.237421,44.8400095 108.082077,42.932 C105.926733,41.0239905 103.153094,40.07 99.7610766,40.07 C97.641066,40.07 95.8037511,40.4939958 94.2490766,41.342 C92.6944022,42.1900042 91.4047484,43.3383261 90.3800766,44.787 C89.3554048,46.2356739 88.5957458,47.860991 88.1010766,49.663 C87.6064075,51.465009 87.3944096,53.3199905 87.4650766,55.228 L75.3810766,55.228 C75.5224107,51.623982 76.1937373,48.2850154 77.3950766,45.211 C78.596416,42.1369846 80.2393995,39.4693446 82.3240766,37.208 C84.4087537,34.9466554 86.9350618,33.1800064 89.9030766,31.908 C92.8710915,30.6359936 96.2277246,30 99.9730766,30 C102.870424,30 105.714729,30.4239958 108.506077,31.272 C111.297424,32.1200042 113.806065,33.3566585 116.032077,34.982 C118.258088,36.6073415 120.042403,38.6743208 121.385077,41.183 C122.72775,43.6916792 123.399077,46.5713171 123.399077,49.822 C123.399077,53.5673521 122.551085,56.8356527 120.855077,59.627 C119.159068,62.4183473 116.509095,64.4499936 112.905077,65.722 L112.905077,65.934 C117.145098,66.7820042 120.448731,68.8843166 122.816077,72.241 C125.183422,75.5976835 126.367077,79.6786426 126.367077,84.484 C126.367077,88.017351 125.660417,91.1796527 124.247077,93.971 C122.833736,96.7623473 120.925755,99.129657 118.523077,101.073 C116.120398,103.016343 113.329093,104.517995 110.149077,105.578 C106.969061,106.638005 103.612428,107.168 100.079077,107.168 C95.7683884,107.168 92.005426,106.549673 88.7900766,105.313 C85.5747272,104.076327 82.8894207,102.327345 80.7340766,100.066 C78.5787325,97.8046554 76.9357489,95.0840159 75.8050766,91.904 C74.6744043,88.7239841 74.0737436,85.1906861 74.0030766,81.304 L86.0870766,81.304 C85.9457426,85.8266893 87.0587315,89.5896517 89.4260766,92.593 C91.7934218,95.5963483 95.3443863,97.098 100.079077,97.098 C104.107097,97.098 107.481396,95.9496782 110.202077,93.653 C112.922757,91.3563219 114.283077,88.0880212 114.283077,83.848 C114.283077,80.9506522 113.717749,78.6540085 112.587077,76.958 C111.456404,75.2619915 109.972419,73.9723378 108.135077,73.089 C106.297734,72.2056623 104.230755,71.6580011 101.934077,71.446 C99.6373985,71.2339989 97.2877553,71.163333 94.8850766,71.234 L94.8850766,62.224 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/heading-4": {
"title": "$:/core/images/heading-4",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-heading-4 tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M8,30 L21.25,30 L21.25,60.104 L55.7,60.104 L55.7,30 L68.95,30 L68.95,105.684 L55.7,105.684 L55.7,71.552 L21.25,71.552 L21.25,105.684 L8,105.684 L8,30 Z M84.5890766,78.548 L107.061077,78.548 L107.061077,45.9 L106.849077,45.9 L84.5890766,78.548 Z M128.049077,88.088 L118.509077,88.088 L118.509077,105.684 L107.061077,105.684 L107.061077,88.088 L75.2610766,88.088 L75.2610766,76.11 L107.061077,31.484 L118.509077,31.484 L118.509077,78.548 L128.049077,78.548 L128.049077,88.088 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/heading-5": {
"title": "$:/core/images/heading-5",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-heading-5 tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M6,30 L19.25,30 L19.25,60.104 L53.7,60.104 L53.7,30 L66.95,30 L66.95,105.684 L53.7,105.684 L53.7,71.552 L19.25,71.552 L19.25,105.684 L6,105.684 L6,30 Z M83.7550766,31.484 L122.127077,31.484 L122.127077,42.296 L92.7650766,42.296 L88.9490766,61.164 L89.1610766,61.376 C90.7864181,59.5386575 92.8533974,58.1430048 95.3620766,57.189 C97.8707558,56.2349952 100.361731,55.758 102.835077,55.758 C106.509762,55.758 109.795729,56.3763272 112.693077,57.613 C115.590424,58.8496729 118.0284,60.5809889 120.007077,62.807 C121.985753,65.0330111 123.487405,67.6653181 124.512077,70.704 C125.536748,73.7426819 126.049077,77.028649 126.049077,80.562 C126.049077,83.5300148 125.572081,86.5863176 124.618077,89.731 C123.664072,92.8756824 122.144754,95.7376538 120.060077,98.317 C117.9754,100.896346 115.30776,103.016325 112.057077,104.677 C108.806394,106.337675 104.919766,107.168 100.397077,107.168 C96.7930586,107.168 93.454092,106.691005 90.3800766,105.737 C87.3060613,104.782995 84.6030883,103.35201 82.2710766,101.444 C79.939065,99.5359905 78.0840835,97.1863473 76.7060766,94.395 C75.3280697,91.6036527 74.5684107,88.3353521 74.4270766,84.59 L86.5110766,84.59 C86.8644117,88.6180201 88.2423979,91.7096559 90.6450766,93.865 C93.0477553,96.0203441 96.2277235,97.098 100.185077,97.098 C102.729089,97.098 104.884401,96.6740042 106.651077,95.826 C108.417752,94.9779958 109.848738,93.8120074 110.944077,92.328 C112.039415,90.8439926 112.816741,89.1126766 113.276077,87.134 C113.735412,85.1553234 113.965077,83.0353446 113.965077,80.774 C113.965077,78.7246564 113.682413,76.763676 113.117077,74.891 C112.55174,73.018324 111.703749,71.3753404 110.573077,69.962 C109.442404,68.5486596 107.976086,67.4180042 106.174077,66.57 C104.372068,65.7219958 102.269755,65.298 99.8670766,65.298 C97.3230639,65.298 94.9380878,65.7749952 92.7120766,66.729 C90.4860655,67.6830048 88.8784149,69.4673203 87.8890766,72.082 L75.8050766,72.082 L83.7550766,31.484 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/heading-6": {
"title": "$:/core/images/heading-6",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-heading-6 tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M6,30 L19.25,30 L19.25,60.104 L53.7,60.104 L53.7,30 L66.95,30 L66.95,105.684 L53.7,105.684 L53.7,71.552 L19.25,71.552 L19.25,105.684 L6,105.684 L6,30 Z M112.587077,50.246 C112.304409,47.2073181 111.226753,44.751676 109.354077,42.879 C107.481401,41.006324 104.955093,40.07 101.775077,40.07 C99.584399,40.07 97.6940846,40.4763293 96.1040766,41.289 C94.5140687,42.1016707 93.1714154,43.1793266 92.0760766,44.522 C90.9807378,45.8646734 90.0974133,47.401658 89.4260766,49.133 C88.7547399,50.864342 88.2070787,52.6839905 87.7830766,54.592 C87.3590745,56.5000095 87.0587442,58.390324 86.8820766,60.263 C86.7054091,62.135676 86.5464107,63.8846585 86.4050766,65.51 L86.6170766,65.722 C88.2424181,62.7539852 90.4860623,60.5456739 93.3480766,59.097 C96.2100909,57.6483261 99.3017267,56.924 102.623077,56.924 C106.297762,56.924 109.583729,57.5599936 112.481077,58.832 C115.378424,60.1040064 117.834067,61.8529889 119.848077,64.079 C121.862087,66.3050111 123.399071,68.9373181 124.459077,71.976 C125.519082,75.0146819 126.049077,78.300649 126.049077,81.834 C126.049077,85.438018 125.466082,88.7769846 124.300077,91.851 C123.134071,94.9250154 121.455754,97.6103219 119.265077,99.907 C117.074399,102.203678 114.459758,103.987994 111.421077,105.26 C108.382395,106.532006 105.025762,107.168 101.351077,107.168 C95.9097161,107.168 91.4400941,106.16101 87.9420766,104.147 C84.4440591,102.13299 81.6880867,99.3770175 79.6740766,95.879 C77.6600666,92.3809825 76.2644138,88.2823568 75.4870766,83.583 C74.7097394,78.8836432 74.3210766,73.8133605 74.3210766,68.372 C74.3210766,63.9199777 74.7980719,59.4326893 75.7520766,54.91 C76.7060814,50.3873107 78.278399,46.2710186 80.4690766,42.561 C82.6597542,38.8509815 85.5393921,35.8300117 89.1080766,33.498 C92.6767611,31.1659883 97.0757171,30 102.305077,30 C105.273091,30 108.064397,30.4946617 110.679077,31.484 C113.293756,32.4733383 115.608067,33.8513245 117.622077,35.618 C119.636087,37.3846755 121.27907,39.5046543 122.551077,41.978 C123.823083,44.4513457 124.529743,47.2073181 124.671077,50.246 L112.587077,50.246 Z M100.927077,97.098 C103.117754,97.098 105.025735,96.6563378 106.651077,95.773 C108.276418,94.8896623 109.636738,93.7413404 110.732077,92.328 C111.827415,90.9146596 112.640074,89.271676 113.170077,87.399 C113.700079,85.526324 113.965077,83.6006766 113.965077,81.622 C113.965077,79.6433234 113.700079,77.7353425 113.170077,75.898 C112.640074,74.0606575 111.827415,72.4530069 110.732077,71.075 C109.636738,69.6969931 108.276418,68.5840042 106.651077,67.736 C105.025735,66.8879958 103.117754,66.464 100.927077,66.464 C98.736399,66.464 96.8107516,66.8703293 95.1500766,67.683 C93.4894017,68.4956707 92.0937489,69.5909931 90.9630766,70.969 C89.8324043,72.3470069 88.9844128,73.9546575 88.4190766,75.792 C87.8537405,77.6293425 87.5710766,79.5726564 87.5710766,81.622 C87.5710766,83.6713436 87.8537405,85.6146575 88.4190766,87.452 C88.9844128,89.2893425 89.8324043,90.9323261 90.9630766,92.381 C92.0937489,93.8296739 93.4894017,94.9779958 95.1500766,95.826 C96.8107516,96.6740042 98.736399,97.098 100.927077,97.098 L100.927077,97.098 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/help": {
"title": "$:/core/images/help",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-help tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M36.0548906,111.44117 C30.8157418,115.837088 20.8865444,118.803477 9.5,118.803477 C7.86465619,118.803477 6.25937294,118.742289 4.69372699,118.624467 C12.612543,115.984876 18.7559465,110.02454 21.0611049,102.609942 C8.74739781,92.845129 1.04940554,78.9359851 1.04940554,63.5 C1.04940554,33.9527659 29.2554663,10 64.0494055,10 C98.8433448,10 127.049406,33.9527659 127.049406,63.5 C127.049406,93.0472341 98.8433448,117 64.0494055,117 C53.9936953,117 44.48824,114.999337 36.0548906,111.44117 L36.0548906,111.44117 Z M71.4042554,77.5980086 C71.406883,77.2865764 71.4095079,76.9382011 71.4119569,76.5610548 C71.4199751,75.3262169 71.4242825,74.0811293 71.422912,72.9158546 C71.4215244,71.736154 71.4143321,70.709635 71.4001396,69.8743525 C71.4078362,68.5173028 71.9951951,67.7870427 75.1273009,65.6385471 C75.2388969,65.5619968 76.2124091,64.8981068 76.5126553,64.6910879 C79.6062455,62.5580654 81.5345849,60.9050204 83.2750652,58.5038955 C85.6146327,55.2762841 86.8327108,51.426982 86.8327108,46.8554323 C86.8327108,33.5625756 76.972994,24.9029551 65.3778484,24.9029551 C54.2752771,24.9029551 42.8794554,34.5115163 41.3121702,47.1975534 C40.9043016,50.4989536 43.2499725,53.50591 46.5513726,53.9137786 C49.8527728,54.3216471 52.8597292,51.9759763 53.2675978,48.6745761 C54.0739246,42.1479456 60.2395837,36.9492759 65.3778484,36.9492759 C70.6427674,36.9492759 74.78639,40.5885487 74.78639,46.8554323 C74.78639,50.4892974 73.6853224,52.008304 69.6746221,54.7736715 C69.4052605,54.9593956 68.448509,55.6118556 68.3131127,55.7047319 C65.6309785,57.5445655 64.0858213,58.803255 62.6123358,60.6352315 C60.5044618,63.2559399 59.3714208,66.3518252 59.3547527,69.9487679 C59.3684999,70.8407274 59.3752803,71.8084521 59.3765995,72.9300232 C59.3779294,74.0607297 59.3737237,75.2764258 59.36589,76.482835 C59.3634936,76.8518793 59.3609272,77.1924914 59.3583633,77.4963784 C59.3568319,77.6778944 59.3556368,77.8074256 59.3549845,77.8730928 C59.3219814,81.1994287 61.9917551,83.9227111 65.318091,83.9557142 C68.644427,83.9887173 71.3677093,81.3189435 71.4007124,77.9926076 C71.4014444,77.9187458 71.402672,77.7856841 71.4042554,77.5980086 Z M65.3778489,102.097045 C69.5359735,102.097045 72.9067994,98.7262189 72.9067994,94.5680944 C72.9067994,90.4099698 69.5359735,87.0391439 65.3778489,87.0391439 C61.2197243,87.0391439 57.8488984,90.4099698 57.8488984,94.5680944 C57.8488984,98.7262189 61.2197243,102.097045 65.3778489,102.097045 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/home-button": {
"title": "$:/core/images/home-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-home-button tc-image-button\" viewBox=\"0 0 128 128\" width=\"22pt\" height=\"22pt\">\n <g fill-rule=\"evenodd\">\n <path d=\"M112.9847,119.501583 C112.99485,119.336814 113,119.170705 113,119.003406 L113,67.56802 C116.137461,70.5156358 121.076014,70.4518569 124.133985,67.3938855 C127.25818,64.2696912 127.260618,59.2068102 124.131541,56.0777326 L70.3963143,2.34250601 C68.8331348,0.779326498 66.7828947,-0.000743167069 64.7337457,1.61675364e-05 C62.691312,-0.00409949529 60.6426632,0.777559815 59.077717,2.34250601 L33,28.420223 L33,28.420223 L33,8.00697327 C33,3.58484404 29.4092877,0 25,0 C20.581722,0 17,3.59075293 17,8.00697327 L17,44.420223 L5.3424904,56.0777326 C2.21694607,59.2032769 2.22220878,64.2760483 5.34004601,67.3938855 C8.46424034,70.5180798 13.5271213,70.5205187 16.6561989,67.3914411 L17,67.04764 L17,119.993027 C17,119.994189 17.0000002,119.995351 17.0000007,119.996514 C17.0000002,119.997675 17,119.998838 17,120 C17,124.418278 20.5881049,128 24.9992458,128 L105.000754,128 C109.418616,128 113,124.409288 113,120 C113,119.832611 112.99485,119.666422 112.9847,119.501583 Z M97,112 L97,51.5736087 L97,51.5736087 L64.7370156,19.3106244 L33,51.04764 L33,112 L97,112 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/import-button": {
"title": "$:/core/images/import-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-import-button tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M105.449437,94.2138951 C105.449437,94.2138951 110.049457,94.1897106 110.049457,99.4026111 C110.049457,104.615512 105.163246,104.615511 105.163246,104.615511 L45.0075072,105.157833 C45.0075072,105.157833 0.367531803,106.289842 0.367532368,66.6449212 C0.367532934,27.0000003 45.0428249,27.0000003 45.0428249,27.0000003 L105.532495,27.0000003 C105.532495,27.0000003 138.996741,25.6734987 138.996741,55.1771866 C138.996741,84.6808745 105.727102,82.8457535 105.727102,82.8457535 L56.1735087,82.8457535 C56.1735087,82.8457535 22.6899229,85.1500223 22.6899229,66.0913753 C22.6899229,47.0327282 56.1735087,49.3383013 56.1735087,49.3383013 L105.727102,49.3383013 C105.727102,49.3383013 111.245209,49.3383024 111.245209,54.8231115 C111.245209,60.3079206 105.727102,60.5074524 105.727102,60.5074524 L56.1735087,60.5074524 C56.1735087,60.5074524 37.48913,60.5074528 37.48913,66.6449195 C37.48913,72.7823862 56.1735087,71.6766023 56.1735087,71.6766023 L105.727102,71.6766029 C105.727102,71.6766029 127.835546,73.1411469 127.835546,55.1771866 C127.835546,35.5304025 105.727102,38.3035317 105.727102,38.3035317 L45.0428249,38.3035317 C45.0428249,38.3035317 11.5287276,38.3035313 11.5287276,66.6449208 C11.5287276,94.9863103 45.0428244,93.9579678 45.0428244,93.9579678 L105.449437,94.2138951 Z\" transform=\"translate(69.367532, 66.000000) rotate(-45.000000) translate(-69.367532, -66.000000) \"></path>\n </g>\n</svg>"
},
"$:/core/images/info-button": {
"title": "$:/core/images/info-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-info-button tc-image-button\" viewBox=\"0 0 128 128\" width=\"22pt\" height=\"22pt\">\n <g fill-rule=\"evenodd\">\n <g transform=\"translate(0.049406, 0.000000)\">\n <path d=\"M64,128 C99.346224,128 128,99.346224 128,64 C128,28.653776 99.346224,0 64,0 C28.653776,0 0,28.653776 0,64 C0,99.346224 28.653776,128 64,128 Z M64,112 C90.509668,112 112,90.509668 112,64 C112,37.490332 90.509668,16 64,16 C37.490332,16 16,37.490332 16,64 C16,90.509668 37.490332,112 64,112 Z\"></path>\n <circle cx=\"64\" cy=\"32\" r=\"8\"></circle>\n <rect x=\"56\" y=\"48\" width=\"16\" height=\"56\" rx=\"8\"></rect>\n </g>\n </g>\n</svg>"
},
"$:/core/images/italic": {
"title": "$:/core/images/italic",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-italic tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <polygon points=\"66.7114846 0 89.1204482 0 62.4089636 128 40 128\"></polygon>\n </g>\n</svg>"
},
"$:/core/images/left-arrow": {
"created": "20150315234410875",
"modified": "20150315235324760",
"tags": "$:/tags/Image",
"title": "$:/core/images/left-arrow",
"text": "<svg class=\"tc-image-left-arrow tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <path transform=\"rotate(135, 63.8945, 64.1752)\" d=\"m109.07576,109.35336c-1.43248,1.43361 -3.41136,2.32182 -5.59717,2.32182l-79.16816,0c-4.36519,0 -7.91592,-3.5444 -7.91592,-7.91666c0,-4.36337 3.54408,-7.91667 7.91592,-7.91667l71.25075,0l0,-71.25075c0,-4.3652 3.54442,-7.91592 7.91667,-7.91592c4.36336,0 7.91667,3.54408 7.91667,7.91592l0,79.16815c0,2.1825 -0.88602,4.16136 -2.3185,5.59467l-0.00027,-0.00056z\"/>\n</svg>\n"
},
"$:/core/images/line-width": {
"title": "$:/core/images/line-width",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-line-width tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M128,-97 L112.992786,-97 C112.452362,-97 112,-96.5522847 112,-96 C112,-95.4438648 112.444486,-95 112.992786,-95 L128,-95 L128,-97 Z M128,-78.6794919 L111.216185,-88.3696322 C110.748163,-88.6398444 110.132549,-88.4782926 109.856406,-88 C109.578339,-87.5183728 109.741342,-86.9117318 110.216185,-86.6375814 L128,-76.3700908 L128,-78.6794919 Z M78.6794919,-128 L88.3696322,-111.216185 C88.6437826,-110.741342 88.4816272,-110.134474 88,-109.856406 C87.5217074,-109.580264 86.9077936,-109.748163 86.6375814,-110.216185 L76.3700908,-128 L78.6794919,-128 Z M97,-128 L97,-112.992786 C97,-112.444486 96.5561352,-112 96,-112 C95.4477153,-112 95,-112.452362 95,-112.992786 L95,-128 L97,-128 Z M115.629909,-128 L105.362419,-110.216185 C105.088268,-109.741342 104.481627,-109.578339 104,-109.856406 C103.521707,-110.132549 103.360156,-110.748163 103.630368,-111.216185 L113.320508,-128 L115.629909,-128 Z M128,-113.320508 L111.216185,-103.630368 C110.741342,-103.356217 110.134474,-103.518373 109.856406,-104 C109.580264,-104.478293 109.748163,-105.092206 110.216185,-105.362419 L128,-115.629909 L128,-113.320508 Z M48,-96 C48,-96.5522847 48.4523621,-97 48.9927864,-97 L79.0072136,-97 C79.5555144,-97 80,-96.5561352 80,-96 C80,-95.4477153 79.5476379,-95 79.0072136,-95 L48.9927864,-95 C48.4444856,-95 48,-95.4438648 48,-96 Z M54.4307806,-120 C54.706923,-120.478293 55.3225377,-120.639844 55.7905589,-120.369632 L81.7838153,-105.362419 C82.2586577,-105.088268 82.4216611,-104.481627 82.1435935,-104 C81.8674512,-103.521707 81.2518365,-103.360156 80.7838153,-103.630368 L54.7905589,-118.637581 C54.3157165,-118.911732 54.152713,-119.518373 54.4307806,-120 Z M104,-82.1435935 C104.478293,-82.4197359 105.092206,-82.2518365 105.362419,-81.7838153 L120.369632,-55.7905589 C120.643783,-55.3157165 120.481627,-54.7088482 120,-54.4307806 C119.521707,-54.1546382 118.907794,-54.3225377 118.637581,-54.7905589 L103.630368,-80.7838153 C103.356217,-81.2586577 103.518373,-81.865526 104,-82.1435935 Z M96,-80 C96.5522847,-80 97,-79.5476379 97,-79.0072136 L97,-48.9927864 C97,-48.4444856 96.5561352,-48 96,-48 C95.4477153,-48 95,-48.4523621 95,-48.9927864 L95,-79.0072136 C95,-79.5555144 95.4438648,-80 96,-80 Z M88,-82.1435935 C88.4782926,-81.8674512 88.6398444,-81.2518365 88.3696322,-80.7838153 L73.3624186,-54.7905589 C73.0882682,-54.3157165 72.4816272,-54.152713 72,-54.4307806 C71.5217074,-54.706923 71.3601556,-55.3225377 71.6303678,-55.7905589 L86.6375814,-81.7838153 C86.9117318,-82.2586577 87.5183728,-82.4216611 88,-82.1435935 Z M82.1435935,-88 C82.4197359,-87.5217074 82.2518365,-86.9077936 81.7838153,-86.6375814 L55.7905589,-71.6303678 C55.3157165,-71.3562174 54.7088482,-71.5183728 54.4307806,-72 C54.1546382,-72.4782926 54.3225377,-73.0922064 54.7905589,-73.3624186 L80.7838153,-88.3696322 C81.2586577,-88.6437826 81.865526,-88.4816272 82.1435935,-88 Z M1.30626177e-08,-41.9868843 L15.0170091,-57.9923909 L20.7983821,-52.9749272 L44.7207091,-81.2095939 L73.4260467,-42.1002685 L85.984793,-56.6159488 L104.48741,-34.0310661 L127.969109,-47.4978019 L127.969109,7.99473128e-07 L1.30626177e-08,7.99473128e-07 L1.30626177e-08,-41.9868843 Z M96,-84 C102.627417,-84 108,-89.372583 108,-96 C108,-102.627417 102.627417,-108 96,-108 C89.372583,-108 84,-102.627417 84,-96 C84,-89.372583 89.372583,-84 96,-84 Z\"></path>\n <path d=\"M16,18 L112,18 C113.104569,18 114,17.1045695 114,16 C114,14.8954305 113.104569,14 112,14 
L16,14 C14.8954305,14 14,14.8954305 14,16 C14,17.1045695 14.8954305,18 16,18 L16,18 Z M16,35 L112,35 C114.209139,35 116,33.209139 116,31 C116,28.790861 114.209139,27 112,27 L16,27 C13.790861,27 12,28.790861 12,31 C12,33.209139 13.790861,35 16,35 L16,35 Z M16,56 L112,56 C115.313708,56 118,53.3137085 118,50 C118,46.6862915 115.313708,44 112,44 L16,44 C12.6862915,44 10,46.6862915 10,50 C10,53.3137085 12.6862915,56 16,56 L16,56 Z M16,85 L112,85 C117.522847,85 122,80.5228475 122,75 C122,69.4771525 117.522847,65 112,65 L16,65 C10.4771525,65 6,69.4771525 6,75 C6,80.5228475 10.4771525,85 16,85 L16,85 Z M16,128 L112,128 C120.836556,128 128,120.836556 128,112 C128,103.163444 120.836556,96 112,96 L16,96 C7.163444,96 0,103.163444 0,112 C0,120.836556 7.163444,128 16,128 L16,128 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/link": {
"title": "$:/core/images/link",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-link tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M128.719999,57.568543 C130.219553,53.8628171 131.045202,49.8121445 131.045202,45.5685425 C131.045202,27.8915447 116.718329,13.5685425 99.0452364,13.5685425 L67.0451674,13.5685425 C49.3655063,13.5685425 35.0452019,27.8954305 35.0452019,45.5685425 C35.0452019,63.2455403 49.3720745,77.5685425 67.0451674,77.5685425 L99.0452364,77.5685425 C100.406772,77.5685425 101.748384,77.4835732 103.065066,77.3186499 C96.4792444,73.7895096 91.1190212,68.272192 87.7873041,61.5685425 L67.0506214,61.5685425 C58.2110723,61.5685425 51.0452019,54.4070414 51.0452019,45.5685425 C51.0452019,36.7319865 58.2005234,29.5685425 67.0506214,29.5685425 L99.0397824,29.5685425 C107.879331,29.5685425 115.045202,36.7300436 115.045202,45.5685425 C115.045202,48.9465282 113.99957,52.0800164 112.21335,54.6623005 C114.314383,56.4735917 117.050039,57.5685425 120.041423,57.5685425 L128.720003,57.5685425 Z\" transform=\"translate(83.045202, 45.568542) rotate(-225.000000) translate(-83.045202, -45.568542)\"></path>\n <path d=\"M-0.106255113,71.0452019 C-1.60580855,74.7509276 -2.43145751,78.8016001 -2.43145751,83.0452019 C-2.43145751,100.7222 11.8954151,115.045202 29.568508,115.045202 L61.568577,115.045202 C79.2482381,115.045202 93.5685425,100.718314 93.5685425,83.0452019 C93.5685425,65.3682041 79.2416699,51.0452019 61.568577,51.0452019 L29.568508,51.0452019 C28.206973,51.0452019 26.8653616,51.1301711 25.5486799,51.2950943 C32.1345,54.8242347 37.4947231,60.3415524 40.8264403,67.0452019 L61.563123,67.0452019 C70.4026721,67.0452019 77.5685425,74.206703 77.5685425,83.0452019 C77.5685425,91.8817579 70.413221,99.0452019 61.563123,99.0452019 L29.573962,99.0452019 C20.7344129,99.0452019 13.5685425,91.8837008 13.5685425,83.0452019 C13.5685425,79.6672162 14.6141741,76.533728 16.4003949,73.9514439 C14.2993609,72.1401527 11.5637054,71.0452019 8.5723215,71.0452019 L-0.106255113,71.0452019 Z\" transform=\"translate(45.568542, 83.045202) rotate(-225.000000) translate(-45.568542, -83.045202)\"></path>\n </g>\n</svg>"
},
"$:/core/images/list-bullet": {
"title": "$:/core/images/list-bullet",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-list-bullet tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M11.6363636,40.2727273 C18.0629498,40.2727273 23.2727273,35.0629498 23.2727273,28.6363636 C23.2727273,22.2097775 18.0629498,17 11.6363636,17 C5.20977746,17 0,22.2097775 0,28.6363636 C0,35.0629498 5.20977746,40.2727273 11.6363636,40.2727273 Z M11.6363636,75.1818182 C18.0629498,75.1818182 23.2727273,69.9720407 23.2727273,63.5454545 C23.2727273,57.1188684 18.0629498,51.9090909 11.6363636,51.9090909 C5.20977746,51.9090909 0,57.1188684 0,63.5454545 C0,69.9720407 5.20977746,75.1818182 11.6363636,75.1818182 Z M11.6363636,110.090909 C18.0629498,110.090909 23.2727273,104.881132 23.2727273,98.4545455 C23.2727273,92.0279593 18.0629498,86.8181818 11.6363636,86.8181818 C5.20977746,86.8181818 0,92.0279593 0,98.4545455 C0,104.881132 5.20977746,110.090909 11.6363636,110.090909 Z M34.9090909,22.8181818 L128,22.8181818 L128,34.4545455 L34.9090909,34.4545455 L34.9090909,22.8181818 Z M34.9090909,57.7272727 L128,57.7272727 L128,69.3636364 L34.9090909,69.3636364 L34.9090909,57.7272727 Z M34.9090909,92.6363636 L128,92.6363636 L128,104.272727 L34.9090909,104.272727 L34.9090909,92.6363636 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/list-number": {
"title": "$:/core/images/list-number",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-list-number tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M33.8390805,22.3563218 L128,22.3563218 L128,34.1264368 L33.8390805,34.1264368 L33.8390805,22.3563218 Z M33.8390805,57.6666667 L128,57.6666667 L128,69.4367816 L33.8390805,69.4367816 L33.8390805,57.6666667 Z M33.8390805,92.9770115 L128,92.9770115 L128,104.747126 L33.8390805,104.747126 L33.8390805,92.9770115 Z M0.379509711,42.6307008 L0.379509711,40.4082314 L1.37821948,40.4082314 C2.20382368,40.4082314 2.82301754,40.268077 3.23581964,39.9877642 C3.64862174,39.7074513 3.85501969,39.0400498 3.85501969,37.9855395 L3.85501969,22.7686318 C3.85501969,21.3270228 3.66193774,20.4327047 3.27576803,20.0856507 C2.88959832,19.7385967 1.79768657,19.5650723 0,19.5650723 L0,17.4226919 C3.50215975,17.2758613 6.25191314,16.4683055 8.24934266,15 L10.3666074,15 L10.3666074,37.865406 C10.3666074,38.786434 10.5164123,39.4404875 10.8160268,39.8275862 C11.1156412,40.2146849 11.764796,40.4082314 12.7635108,40.4082314 L13.7622206,40.4082314 L13.7622206,42.6307008 L0.379509711,42.6307008 Z M0.0798967812,77.9873934 L0.0798967812,76.0852799 C7.27064304,69.5312983 10.8659622,63.5046623 10.8659622,58.005191 C10.8659622,56.4434479 10.5397203,55.195407 9.88722667,54.2610308 C9.23473303,53.3266546 8.36253522,52.8594735 7.27060709,52.8594735 C6.3784219,52.8594735 5.61608107,53.1764892 4.98356173,53.8105302 C4.35104238,54.4445712 4.03478745,55.1753759 4.03478745,56.0029663 C4.03478745,56.9773871 4.28113339,57.8316611 4.77383268,58.5658139 C4.88036225,58.7259926 4.93362624,58.8461249 4.93362624,58.9262143 C4.93362624,59.0730449 4.77383427,59.2065252 4.45424555,59.3266593 C4.2411864,59.4067486 3.70188852,59.6336652 2.83633573,60.0074156 C1.99741533,60.3811661 1.47809145,60.5680386 1.2783485,60.5680386 C1.03865696,60.5680386 0.765679018,60.1976307 0.459406492,59.4568039 C0.153133966,58.715977 0,57.9184322 0,57.0641453 C0,55.1153036 0.848894811,53.5202138 2.5467099,52.2788283 C4.24452499,51.0374428 6.34512352,50.4167594 8.84856852,50.4167594 C11.3120649,50.4167594 13.3793735,51.0874979 15.0505562,52.4289952 C16.7217389,53.7704924 17.5573177,55.5224215 17.5573177,57.684835 C17.5573177,58.9662652 17.2743527,60.2076321 16.7084144,61.4089729 C16.142476,62.6103138 14.7875733,64.4623531 12.6436656,66.9651465 C10.4997579,69.4679398 8.40914641,71.7804862 6.3717683,73.902855 L17.8169822,73.902855 L16.7982982,79.6292176 L14.6810335,79.6292176 C14.7609307,79.3489048 14.8008787,79.0952922 14.8008787,78.8683723 C14.8008787,78.4812736 14.7010087,78.237672 14.5012658,78.1375603 C14.3015228,78.0374485 13.9020429,77.9873934 13.3028141,77.9873934 L0.0798967812,77.9873934 Z M12.2042333,97.1935484 C13.9486551,97.2335931 15.4400468,97.8309175 16.6784531,98.9855395 C17.9168594,100.140162 18.5360532,101.75861 18.5360532,103.840934 C18.5360532,106.830938 17.4041935,109.233584 15.14044,111.048943 C12.8766866,112.864303 10.1402492,113.771969 6.93104577,113.771969 C4.92030005,113.771969 3.26245842,113.388213 1.95747114,112.62069 C0.652483855,111.853166 0,110.848727 0,109.607341 C0,108.833144 0.26964894,108.209124 0.808954909,107.735261 C1.34826088,107.261399 1.93749375,107.024472 2.57667119,107.024472 C3.21584864,107.024472 3.73850152,107.224692 4.14464552,107.625139 C4.55078953,108.025586 4.92696644,108.67964 5.27318756,109.587319 C5.73925445,110.855401 6.51158227,111.489433 7.59019421,111.489433 C8.85523291,111.489433 9.87723568,111.012241 10.6562332,110.057842 C11.4352307,109.103444 
11.8247236,107.371536 11.8247236,104.862069 C11.8247236,103.153495 11.7048796,101.838714 11.4651881,100.917686 C11.2254966,99.9966584 10.6728827,99.5361513 9.80732989,99.5361513 C9.22141723,99.5361513 8.62219737,99.843156 8.00965231,100.457175 C7.51695303,100.951059 7.07752513,101.197998 6.69135542,101.197998 C6.3584505,101.197998 6.08880156,101.051169 5.88240051,100.757508 C5.67599946,100.463847 5.57280049,100.183539 5.57280049,99.916574 C5.57280049,99.5962164 5.67599946,99.3225818 5.88240051,99.0956618 C6.08880156,98.8687419 6.57150646,98.5016711 7.33052967,97.9944383 C10.2068282,96.0722929 11.6449559,93.9766521 11.6449559,91.7074527 C11.6449559,90.5194601 11.3386879,89.615131 10.7261429,88.9944383 C10.1135978,88.3737455 9.37455999,88.0634038 8.5090072,88.0634038 C7.71003539,88.0634038 6.98431355,88.3270274 6.33181991,88.8542825 C5.67932627,89.3815377 5.35308434,90.0122321 5.35308434,90.7463849 C5.35308434,91.3871 5.60608828,91.9810874 6.11210376,92.5283648 C6.28521432,92.7285883 6.3717683,92.8954387 6.3717683,93.028921 C6.3717683,93.1490551 5.80250943,93.4560598 4.6639746,93.9499444 C3.52543978,94.4438289 2.80970494,94.6907675 2.51674861,94.6907675 C2.10394651,94.6907675 1.76771758,94.3570667 1.50805174,93.6896552 C1.24838591,93.0222436 1.11855494,92.4082342 1.11855494,91.8476085 C1.11855494,90.0989901 2.04734573,88.6240327 3.90495518,87.4226919 C5.76256463,86.2213511 7.86982116,85.6206897 10.226788,85.6206897 C12.2907985,85.6206897 14.0784711,86.0678487 15.5898594,86.9621802 C17.1012478,87.8565117 17.8569306,89.0778566 17.8569306,90.6262514 C17.8569306,91.987771 17.2876717,93.2491599 16.1491369,94.4104561 C15.0106021,95.5717522 13.6956474,96.4994404 12.2042333,97.1935484 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/locked-padlock": {
"title": "$:/core/images/locked-padlock",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-locked-padlock tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M96.4723753,64 L105,64 L105,96.0097716 C105,113.673909 90.6736461,128 73.001193,128 L55.998807,128 C38.3179793,128 24,113.677487 24,96.0097716 L24,64 L32.0000269,64 C32.0028554,48.2766389 32.3030338,16.2688026 64.1594984,16.2688041 C95.9543927,16.2688056 96.4648869,48.325931 96.4723753,64 Z M80.5749059,64 L48.4413579,64 C48.4426205,47.71306 48.5829272,31.9999996 64.1595001,31.9999996 C79.8437473,31.9999996 81.1369461,48.1359182 80.5749059,64 Z M67.7315279,92.3641717 C70.8232551,91.0923621 73,88.0503841 73,84.5 C73,79.8055796 69.1944204,76 64.5,76 C59.8055796,76 56,79.8055796 56,84.5 C56,87.947435 58.0523387,90.9155206 61.0018621,92.2491029 L55.9067479,115.020857 L72.8008958,115.020857 L67.7315279,92.3641717 L67.7315279,92.3641717 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/mail": {
"title": "$:/core/images/mail",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-mail tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M122.826782,104.894066 C121.945525,105.22777 120.990324,105.41043 119.993027,105.41043 L8.00697327,105.41043 C7.19458381,105.41043 6.41045219,105.289614 5.67161357,105.064967 L5.67161357,105.064967 L39.8346483,70.9019325 L60.6765759,91.7438601 C61.6118278,92.679112 62.8865166,93.0560851 64.0946097,92.8783815 C65.2975108,93.0473238 66.5641085,92.6696979 67.4899463,91.7438601 L88.5941459,70.6396605 C88.6693095,70.7292352 88.7490098,70.8162939 88.8332479,70.9005321 L122.826782,104.894066 Z M127.903244,98.6568194 C127.966933,98.2506602 128,97.8343714 128,97.4103789 L128,33.410481 C128,32.7414504 127.917877,32.0916738 127.763157,31.4706493 L94.2292399,65.0045665 C94.3188145,65.0797417 94.4058701,65.1594458 94.4901021,65.2436778 L127.903244,98.6568194 Z M0.205060636,99.2178117 C0.0709009529,98.6370366 0,98.0320192 0,97.4103789 L0,33.410481 C0,32.694007 0.0944223363,31.9995312 0.27147538,31.3387595 L0.27147538,31.3387595 L34.1777941,65.2450783 L0.205060636,99.2178117 L0.205060636,99.2178117 Z M5.92934613,25.6829218 C6.59211333,25.5051988 7.28862283,25.4104299 8.00697327,25.4104299 L119.993027,25.4104299 C120.759109,25.4104299 121.500064,25.5178649 122.201605,25.7184927 L122.201605,25.7184927 L64.0832611,83.8368368 L5.92934613,25.6829218 L5.92934613,25.6829218 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/menu-button": {
"title": "$:/core/images/menu-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-menu-button tc-image-button\" viewBox=\"0 0 128 128\" width=\"22pt\" height=\"22pt\">\n <rect x=\"0\" y=\"16\" width=\"128\" height=\"16\" rx=\"8\"></rect>\n <rect x=\"0\" y=\"56\" width=\"128\" height=\"16\" rx=\"8\"></rect>\n <rect x=\"0\" y=\"96\" width=\"128\" height=\"16\" rx=\"8\"></rect>\n</svg>"
},
"$:/core/images/mono-block": {
"title": "$:/core/images/mono-block",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-mono-block tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M23.9653488,32.9670593 L24.3217888,32.9670593 C25.0766067,32.9670593 25.6497006,33.1592554 26.0410876,33.5436534 C26.4324747,33.9280514 26.6281653,34.4906619 26.6281653,35.2315017 C26.6281653,36.0562101 26.4219913,36.6502709 26.009637,37.0137017 C25.5972828,37.3771326 24.9158602,37.5588453 23.9653488,37.5588453 L17.6542639,37.5588453 C16.6897744,37.5588453 16.0048573,37.380627 15.5994921,37.0241852 C15.1941269,36.6677435 14.9914474,36.0701882 14.9914474,35.2315017 C14.9914474,34.4207713 15.1941269,33.8406885 15.5994921,33.4912358 C16.0048573,33.141783 16.6897744,32.9670593 17.6542639,32.9670593 L18.388111,32.9670593 L17.5284616,30.5139133 L8.47069195,30.5139133 L7.5691084,32.9670593 L8.30295547,32.9670593 C9.25346691,32.9670593 9.93488953,33.1452775 10.3472438,33.5017193 C10.759598,33.8581611 10.965772,34.4347494 10.965772,35.2315017 C10.965772,36.0562101 10.759598,36.6502709 10.3472438,37.0137017 C9.93488953,37.3771326 9.25346691,37.5588453 8.30295547,37.5588453 L2.89345418,37.5588453 C1.92896463,37.5588453 1.24404754,37.3771326 0.838682371,37.0137017 C0.433317198,36.6502709 0.230637652,36.0562101 0.230637652,35.2315017 C0.230637652,34.4906619 0.426328248,33.9280514 0.817715312,33.5436534 C1.20910238,33.1592554 1.78219626,32.9670593 2.53701417,32.9670593 L2.89345418,32.9670593 L8.51262607,17.3256331 L6.83526132,17.3256331 C5.88474988,17.3256331 5.20332727,17.1439204 4.79097304,16.7804895 C4.37861882,16.4170587 4.1724448,15.8299869 4.1724448,15.0192565 C4.1724448,14.1945481 4.37861882,13.6004873 4.79097304,13.2370565 C5.20332727,12.8736257 5.88474988,12.691913 6.83526132,12.691913 L14.6979086,12.691913 C15.9419603,12.691913 16.815579,13.3628521 17.318791,14.7047506 L17.318791,14.7676518 L23.9653488,32.9670593 Z M12.9786097,17.3256331 L9.9383861,26.1737321 L16.0188333,26.1737321 L12.9786097,17.3256331 Z M35.3809383,26.6979086 L35.3809383,33.0928616 L38.5259972,33.0928616 C40.7485166,33.0928616 42.3140414,32.8482484 43.2226185,32.3590146 C44.1311956,31.8697807 44.5854773,31.0520736 44.5854773,29.9058686 C44.5854773,28.7456855 44.1521624,27.9209895 43.2855197,27.4317556 C42.4188769,26.9425218 40.9022748,26.6979086 38.7356678,26.6979086 L35.3809383,26.6979086 Z M46.0741385,24.370565 C47.5977525,24.9296893 48.7159844,25.6949794 49.428868,26.666458 C50.1417516,27.6379366 50.498188,28.8784752 50.498188,30.388111 C50.498188,31.6601189 50.1906743,32.8202846 49.5756374,33.8686428 C48.9606006,34.917001 48.0799929,35.7766419 46.933788,36.4475911 C46.2628387,36.8389782 45.5115266,37.1220307 44.6798291,37.296757 C43.8481316,37.4714834 42.6704935,37.5588453 41.1468796,37.5588453 L39.3856466,37.5588453 L30.2020747,37.5588453 C29.2795194,37.5588453 28.6190637,37.3771326 28.2206876,37.0137017 C27.8223114,36.6502709 27.6231264,36.0562101 27.6231264,35.2315017 C27.6231264,34.4906619 27.811828,33.9280514 28.189237,33.5436534 C28.5666459,33.1592554 29.118773,32.9670593 29.8456347,32.9670593 L30.2020747,32.9670593 L30.2020747,17.3256331 L29.8456347,17.3256331 C29.118773,17.3256331 28.5666459,17.1299425 28.189237,16.7385554 C27.811828,16.3471683 27.6231264,15.7740744 27.6231264,15.0192565 C27.6231264,14.2085262 27.8258059,13.6179599 28.2311711,13.24754 C28.6365363,12.8771201 29.2934976,12.691913 30.2020747,12.691913 L39.8469219,12.691913 C42.796303,12.691913 45.0362615,13.2650068 46.5668644,14.4112118 C48.0974674,15.5574168 48.8627574,17.2347648 
48.8627574,19.443306 C48.8627574,20.5335986 48.6286276,21.4945792 48.1603609,22.3262767 C47.6920943,23.1579742 46.9966938,23.8393968 46.0741385,24.370565 L46.0741385,24.370565 Z M35.3809383,17.1998307 L35.3809383,22.4835296 L38.2114913,22.4835296 C39.9307988,22.4835296 41.1433816,22.2808501 41.8492761,21.8754849 C42.5551706,21.4701197 42.9081126,20.7852027 42.9081126,19.8207131 C42.9081126,18.912136 42.5901154,18.2481858 41.9541114,17.8288425 C41.3181074,17.4094992 40.2872373,17.1998307 38.8614701,17.1998307 L35.3809383,17.1998307 Z M71.244119,13.3838259 C71.5236812,12.880614 71.8102281,12.5241775 72.1037684,12.3145059 C72.3973087,12.1048342 72.7677231,12 73.2150226,12 C73.8999499,12 74.3856819,12.1817127 74.6722332,12.5451435 C74.9587844,12.9085744 75.1020579,13.5305909 75.1020579,14.4112118 L75.143992,19.8626472 C75.143992,20.8271368 74.9867406,21.4771091 74.6722332,21.8125837 C74.3577257,22.1480584 73.7881263,22.3157932 72.9634178,22.3157932 C72.3763372,22.3157932 71.92555,22.1760142 71.6110425,21.896452 C71.2965351,21.6168898 71.0274605,21.0997075 70.8038107,20.3448896 C70.4403799,19.0169692 69.8602971,18.0629775 69.0635448,17.482886 C68.2667926,16.9027945 67.1625385,16.612753 65.7507494,16.612753 C63.5981206,16.612753 61.9487284,17.3396038 60.8025235,18.7933272 C59.6563185,20.2470506 59.0832246,22.3507245 59.0832246,25.104412 C59.0832246,27.8441215 59.6633074,29.9477954 60.8234905,31.4154969 C61.9836736,32.8831984 63.6400547,33.6170381 65.7926836,33.6170381 C67.2603851,33.6170381 68.878327,33.1278116 70.6465578,32.149344 C72.4147886,31.1708763 73.5295261,30.6816498 73.9908037,30.6816498 C74.53595,30.6816498 74.9937262,30.9122852 75.3641461,31.3735628 C75.734566,31.8348404 75.9197732,32.4079343 75.9197732,33.0928616 C75.9197732,34.3229353 74.836486,35.4831009 72.669879,36.5733935 C70.5032721,37.663686 68.0641285,38.2088241 65.3523753,38.2088241 C61.6901107,38.2088241 58.7267959,36.9997358 56.4623422,34.5815228 C54.1978885,32.1633099 53.0656786,29.0043046 53.0656786,25.104412 C53.0656786,21.3443006 54.2118664,18.22024 56.5042763,15.7321366 C58.7966863,13.2440331 61.7040894,12 65.226573,12 C66.2190187,12 67.1974717,12.1118232 68.1619613,12.3354729 C69.1264508,12.5591227 70.1538264,12.9085702 71.244119,13.3838259 L71.244119,13.3838259 Z M81.4645862,32.9670593 L81.4645862,17.3256331 L81.1081461,17.3256331 C80.3533282,17.3256331 79.7802344,17.1299425 79.3888473,16.7385554 C78.9974602,16.3471683 78.8017696,15.7740744 78.8017696,15.0192565 C78.8017696,14.2085262 79.0114381,13.6179599 79.4307814,13.24754 C79.8501247,12.8771201 80.5280528,12.691913 81.4645862,12.691913 L85.4063933,12.691913 L86.6434498,12.691913 C89.5648747,12.691913 91.7034933,12.8177141 93.0593699,13.06932 C94.4152465,13.320926 95.5684233,13.740263 96.5189347,14.3273436 C98.210286,15.3337675 99.5067362,16.7699967 100.408324,18.6360743 C101.309912,20.5021519 101.7607,22.6582429 101.7607,25.104412 C101.7607,27.6903623 101.247012,29.9512876 100.219621,31.8872557 C99.1922296,33.8232239 97.7350336,35.2874089 95.8479888,36.2798546 C94.9953241,36.7271541 93.9959043,37.0521403 92.8496993,37.2548229 C91.7034944,37.4575055 89.9981906,37.5588453 87.7337369,37.5588453 L85.4063933,37.5588453 L81.4645862,37.5588453 C80.5000966,37.5588453 79.8151795,37.380627 79.4098143,37.0241852 C79.0044492,36.6677435 78.8017696,36.0701882 78.8017696,35.2315017 C78.8017696,34.4906619 78.9974602,33.9280514 79.3888473,33.5436534 C79.7802344,33.1592554 80.3533282,32.9670593 81.1081461,32.9670593 L81.4645862,32.9670593 Z M86.8740874,17.2417648 
L86.8740874,32.9670593 L88.0692098,32.9670593 C90.7110725,32.9670593 92.6609895,32.3205814 93.9190194,31.0276063 C95.1770492,29.7346312 95.8060547,27.7462749 95.8060547,25.0624779 C95.8060547,22.4206153 95.1665658,20.4497314 93.8875688,19.1497672 C92.6085718,17.849803 90.6831161,17.1998307 88.1111439,17.1998307 C87.7756693,17.1998307 87.5205727,17.2033252 87.3458463,17.2103142 C87.1711199,17.2173033 87.0138685,17.2277867 86.8740874,17.2417648 L86.8740874,17.2417648 Z M121.94052,17.1159625 L112.190837,17.1159625 L112.190837,22.4835296 L115.88104,22.4835296 L115.88104,22.2319249 C115.88104,21.4351727 116.055763,20.841112 116.405216,20.4497249 C116.754669,20.0583378 117.285829,19.8626472 117.998713,19.8626472 C118.627728,19.8626472 119.141415,20.0408655 119.539792,20.3973072 C119.938168,20.753749 120.137353,21.2045363 120.137353,21.7496826 C120.137353,21.7776388 120.144342,21.8684951 120.15832,22.0222543 C120.172298,22.1760135 120.179287,22.3297704 120.179287,22.4835296 L120.179287,26.8237109 C120.179287,27.7602442 120.011552,28.4311834 119.676077,28.8365486 C119.340603,29.2419138 118.795465,29.4445933 118.040647,29.4445933 C117.327763,29.4445933 116.789614,29.2558917 116.426183,28.8784827 C116.062752,28.5010738 115.88104,27.9419578 115.88104,27.201118 L115.88104,26.8237109 L112.190837,26.8237109 L112.190837,33.0928616 L121.94052,33.0928616 L121.94052,30.5977816 C121.94052,29.6612482 122.118738,28.9903091 122.47518,28.5849439 C122.831622,28.1795787 123.415199,27.9768992 124.225929,27.9768992 C125.022682,27.9768992 125.592281,28.1760842 125.934745,28.5744604 C126.277208,28.9728365 126.448438,29.6472701 126.448438,30.5977816 L126.448438,35.6718099 C126.448438,36.4266278 126.30167,36.9298322 126.008129,37.1814382 C125.714589,37.4330442 125.134506,37.5588453 124.267863,37.5588453 L107.095842,37.5588453 C106.173287,37.5588453 105.512831,37.3771326 105.114455,37.0137017 C104.716079,36.6502709 104.516894,36.0562101 104.516894,35.2315017 C104.516894,34.4906619 104.705595,33.9280514 105.083004,33.5436534 C105.460413,33.1592554 106.01254,32.9670593 106.739402,32.9670593 L107.095842,32.9670593 L107.095842,17.3256331 L106.739402,17.3256331 C106.026518,17.3256331 105.477886,17.126448 105.093488,16.7280719 C104.70909,16.3296957 104.516894,15.7600963 104.516894,15.0192565 C104.516894,14.2085262 104.719573,13.6179599 105.124938,13.24754 C105.530304,12.8771201 106.187265,12.691913 107.095842,12.691913 L124.267863,12.691913 C125.120528,12.691913 125.697116,12.8212085 125.997646,13.0798036 C126.298175,13.3383986 126.448438,13.8520864 126.448438,14.6208824 L126.448438,19.3175037 C126.448438,20.2680151 126.273714,20.9494377 125.924261,21.361792 C125.574808,21.7741462 125.008703,21.9803202 124.225929,21.9803202 C123.415199,21.9803202 122.831622,21.7706517 122.47518,21.3513084 C122.118738,20.9319652 121.94052,20.254037 121.94052,19.3175037 L121.94052,17.1159625 Z M19.7719369,47.6405477 C20.037521,47.1373358 20.3205734,46.7808993 20.6211028,46.5712277 C20.9216322,46.361556 21.295541,46.2567218 21.7428405,46.2567218 C22.4277678,46.2567218 22.9134998,46.4384345 23.2000511,46.8018653 C23.4866023,47.1652962 23.6298758,47.7873127 23.6298758,48.6679336 L23.6718099,54.119369 C23.6718099,55.0838586 23.5145586,55.7338309 23.2000511,56.0693055 C22.8855436,56.4047802 22.3089553,56.572515 21.4702687,56.572515 C20.8831881,56.572515 20.4254119,56.4292415 20.0969263,56.1426902 C19.7684407,55.856139 19.4993662,55.3424512 19.2896945,54.6016114 C18.9122856,53.2597129 18.3322027,52.3022267 17.5494286,51.7291243 C16.7666545,51.1560218 
15.6693894,50.8694748 14.2576003,50.8694748 C12.1049715,50.8694748 10.4590738,51.5963256 9.31985785,53.050049 C8.18064193,54.5037724 7.61104252,56.6074463 7.61104252,59.3611338 C7.61104252,62.1148214 8.20859773,64.2429566 9.40372609,65.7456034 C10.5988544,67.2482501 12.2936748,67.9995623 14.488238,67.9995623 C14.9914499,67.9995623 15.5645438,67.9401562 16.2075368,67.8213423 C16.8505299,67.7025283 17.6053364,67.5173212 18.4719792,67.2657152 L18.4719792,63.9529198 L16.1027015,63.9529198 C15.1521901,63.9529198 14.4777564,63.7781961 14.0793803,63.4287433 C13.6810042,63.0792906 13.4818191,62.4992078 13.4818191,61.6884774 C13.4818191,60.8497908 13.6810042,60.2522356 14.0793803,59.8957938 C14.4777564,59.5393521 15.1521901,59.3611338 16.1027015,59.3611338 L23.6718099,59.3611338 C24.6502776,59.3611338 25.3386891,59.5358576 25.7370653,59.8853103 C26.1354414,60.2347631 26.3346265,60.8218348 26.3346265,61.6465433 C26.3346265,62.3873831 26.1354414,62.9569825 25.7370653,63.3553586 C25.3386891,63.7537347 24.7621008,63.9529198 24.0072829,63.9529198 L23.6718099,63.9529198 L23.6718099,68.9430799 L23.6718099,69.1946846 C23.6718099,69.6419841 23.6228873,69.9529924 23.5250405,70.1277188 C23.4271937,70.3024451 23.2315031,70.4806634 22.9379628,70.6623788 C22.1412106,71.1376345 20.8762107,71.5569715 19.1429251,71.9204023 C17.4096396,72.2838332 15.6554131,72.4655459 13.8801932,72.4655459 C10.2179286,72.4655459 7.25461383,71.2564576 4.99016011,68.8382446 C2.72570638,66.4200317 1.59349651,63.2610264 1.59349651,59.3611338 C1.59349651,55.6010224 2.73968428,52.4769618 5.03209423,49.9888583 C7.32450417,47.5007549 10.2319073,46.2567218 13.7543909,46.2567218 C14.7328585,46.2567218 15.7078171,46.368545 16.6792957,46.5921947 C17.6507743,46.8158445 18.6816444,47.165292 19.7719369,47.6405477 L19.7719369,47.6405477 Z M35.611576,51.5823548 L35.611576,56.4047785 L42.4678043,56.4047785 L42.4678043,51.5823548 L42.1323314,51.5823548 C41.3775135,51.5823548 40.8009251,51.3866642 40.402549,50.9952772 C40.0041729,50.6038901 39.8049878,50.0307962 39.8049878,49.2759783 C39.8049878,48.4512699 40.0111618,47.8572091 40.4235161,47.4937783 C40.8358703,47.1303474 41.5172929,46.9486347 42.4678043,46.9486347 L47.8773056,46.9486347 C48.8278171,46.9486347 49.5022507,47.1303474 49.9006269,47.4937783 C50.299003,47.8572091 50.498188,48.4512699 50.498188,49.2759783 C50.498188,50.0307962 50.3059919,50.6038901 49.9215939,50.9952772 C49.5371959,51.3866642 48.9745854,51.5823548 48.2337456,51.5823548 L47.8773056,51.5823548 L47.8773056,67.2237811 L48.2337456,67.2237811 C48.9885636,67.2237811 49.5616574,67.4159772 49.9530445,67.8003752 C50.3444316,68.1847732 50.5401222,68.7473837 50.5401222,69.4882235 C50.5401222,70.3129319 50.3374426,70.9069927 49.9320774,71.2704235 C49.5267123,71.6338543 48.8417952,71.815567 47.8773056,71.815567 L42.4678043,71.815567 C41.5033148,71.815567 40.8183977,71.6373488 40.4130325,71.280907 C40.0076674,70.9244652 39.8049878,70.32691 39.8049878,69.4882235 C39.8049878,68.7473837 40.0041729,68.1847732 40.402549,67.8003752 C40.8009251,67.4159772 41.3775135,67.2237811 42.1323314,67.2237811 L42.4678043,67.2237811 L42.4678043,61.0384986 L35.611576,61.0384986 L35.611576,67.2237811 L35.9470489,67.2237811 C36.7018668,67.2237811 37.2784552,67.4159772 37.6768313,67.8003752 C38.0752074,68.1847732 38.2743925,68.7473837 38.2743925,69.4882235 C38.2743925,70.3129319 38.0682185,70.9069927 37.6558642,71.2704235 C37.24351,71.6338543 36.5620874,71.815567 35.611576,71.815567 L30.2020747,71.815567 C29.2375851,71.815567 28.552668,71.6373488 
28.1473029,71.280907 C27.7419377,70.9244652 27.5392581,70.32691 27.5392581,69.4882235 C27.5392581,68.7473837 27.7349487,68.1847732 28.1263358,67.8003752 C28.5177229,67.4159772 29.0908168,67.2237811 29.8456347,67.2237811 L30.2020747,67.2237811 L30.2020747,51.5823548 L29.8456347,51.5823548 C29.1047949,51.5823548 28.5421844,51.3866642 28.1577864,50.9952772 C27.7733884,50.6038901 27.5811923,50.0307962 27.5811923,49.2759783 C27.5811923,48.4512699 27.7803773,47.8572091 28.1787534,47.4937783 C28.5771296,47.1303474 29.2515632,46.9486347 30.2020747,46.9486347 L35.611576,46.9486347 C36.5481093,46.9486347 37.2260374,47.1303474 37.6453807,47.4937783 C38.064724,47.8572091 38.2743925,48.4512699 38.2743925,49.2759783 C38.2743925,50.0307962 38.0752074,50.6038901 37.6768313,50.9952772 C37.2784552,51.3866642 36.7018668,51.5823548 35.9470489,51.5823548 L35.611576,51.5823548 Z M67.365213,51.5823548 L67.365213,67.2237811 L70.887679,67.2237811 C71.8381904,67.2237811 72.519613,67.4019993 72.9319673,67.7584411 C73.3443215,68.1148829 73.5504955,68.6914712 73.5504955,69.4882235 C73.5504955,70.2989538 73.340827,70.8895201 72.9214837,71.25994 C72.5021404,71.6303599 71.8242123,71.815567 70.887679,71.815567 L58.4332458,71.815567 C57.4827343,71.815567 56.8013117,71.6338543 56.3889575,71.2704235 C55.9766033,70.9069927 55.7704292,70.3129319 55.7704292,69.4882235 C55.7704292,68.6774931 55.9731088,68.0974103 56.378474,67.7479575 C56.7838391,67.3985048 57.4687562,67.2237811 58.4332458,67.2237811 L61.9557117,67.2237811 L61.9557117,51.5823548 L58.4332458,51.5823548 C57.4827343,51.5823548 56.8013117,51.4006421 56.3889575,51.0372113 C55.9766033,50.6737805 55.7704292,50.0867087 55.7704292,49.2759783 C55.7704292,48.4512699 55.9731088,47.8641981 56.378474,47.5147453 C56.7838391,47.1652926 57.4687562,46.9905689 58.4332458,46.9905689 L70.887679,46.9905689 C71.8801247,46.9905689 72.5720308,47.1652926 72.9634178,47.5147453 C73.3548049,47.8641981 73.5504955,48.4512699 73.5504955,49.2759783 C73.5504955,50.0867087 73.347816,50.6737805 72.9424508,51.0372113 C72.5370856,51.4006421 71.8521685,51.5823548 70.887679,51.5823548 L67.365213,51.5823548 Z M97.8608265,51.5823548 L97.8608265,63.1771386 L97.8608265,63.5755127 C97.8608265,65.4485794 97.7385199,66.8044357 97.493903,67.6431222 C97.2492861,68.4818088 96.8404325,69.2296264 96.26733,69.8865976 C95.5264902,70.7392623 94.4991146,71.3822457 93.1851723,71.815567 C91.87123,72.2488884 90.2917273,72.4655459 88.4466169,72.4655459 C87.1466527,72.4655459 85.8921362,72.3397448 84.6830298,72.0881388 C83.4739233,71.8365328 82.3102631,71.4591296 81.1920144,70.9559176 C80.5769776,70.6763554 80.175113,70.31293 79.9864085,69.8656305 C79.797704,69.418331 79.7033532,68.6914802 79.7033532,67.6850564 L79.7033532,63.3658422 C79.7033532,62.1637247 79.8780769,61.3250508 80.2275297,60.849795 C80.5769824,60.3745393 81.185021,60.136915 82.0516638,60.136915 C83.2957156,60.136915 83.9806326,61.0524675 84.1064356,62.8835998 C84.1204137,63.2050963 84.1413806,63.4497096 84.1693368,63.6174469 C84.3370741,65.2389076 84.7144774,66.3466561 85.301558,66.9407258 C85.8886386,67.5347954 86.8251579,67.8318258 88.1111439,67.8318258 C89.7046484,67.8318258 90.8263749,67.4089943 91.476357,66.5633187 C92.126339,65.7176431 92.4513252,64.1765796 92.4513252,61.9400821 L92.4513252,51.5823548 L88.9288593,51.5823548 C87.9783478,51.5823548 87.2969252,51.4006421 86.884571,51.0372113 C86.4722168,50.6737805 86.2660427,50.0867087 86.2660427,49.2759783 C86.2660427,48.4512699 86.4652278,47.8641981 86.8636039,47.5147453 C87.26198,47.1652926 
87.9503916,46.9905689 88.9288593,46.9905689 L99.6220595,46.9905689 C100.600527,46.9905689 101.288939,47.1652926 101.687315,47.5147453 C102.085691,47.8641981 102.284876,48.4512699 102.284876,49.2759783 C102.284876,50.0867087 102.078702,50.6737805 101.666348,51.0372113 C101.253994,51.4006421 100.572571,51.5823548 99.6220595,51.5823548 L97.8608265,51.5823548 Z M112.505343,51.5823548 L112.505343,57.9353738 L118.984165,51.4565525 C118.257303,51.3726838 117.747109,51.1665098 117.453569,50.8380242 C117.160029,50.5095387 117.013261,49.9888619 117.013261,49.2759783 C117.013261,48.4512699 117.212446,47.8572091 117.610822,47.4937783 C118.009198,47.1303474 118.683632,46.9486347 119.634143,46.9486347 L124.771073,46.9486347 C125.721584,46.9486347 126.396018,47.1303474 126.794394,47.4937783 C127.19277,47.8572091 127.391955,48.4512699 127.391955,49.2759783 C127.391955,50.0447743 127.19277,50.6213627 126.794394,51.0057607 C126.396018,51.3901587 125.812441,51.5823548 125.043645,51.5823548 L124.561402,51.5823548 L118.459988,57.641835 C119.592215,58.4805215 120.626579,59.5812811 121.563113,60.9441468 C122.499646,62.3070125 123.596911,64.400203 124.854941,67.2237811 L125.127513,67.2237811 L125.546854,67.2237811 C126.371563,67.2237811 126.98659,67.4124827 127.391955,67.7898917 C127.79732,68.1673006 128,68.7334056 128,69.4882235 C128,70.3129319 127.793826,70.9069927 127.381472,71.2704235 C126.969118,71.6338543 126.287695,71.815567 125.337183,71.815567 L122.758235,71.815567 C121.626008,71.815567 120.710456,71.0537715 120.01155,69.5301576 C119.885747,69.2505954 119.787902,69.026949 119.718012,68.8592117 C118.795456,66.9022764 117.949793,65.3926632 117.180997,64.3303269 C116.412201,63.2679906 115.510627,62.2965265 114.476247,61.4159056 L112.505343,63.302941 L112.505343,67.2237811 L112.840816,67.2237811 C113.595634,67.2237811 114.172222,67.4159772 114.570599,67.8003752 C114.968975,68.1847732 115.16816,68.7473837 115.16816,69.4882235 C115.16816,70.3129319 114.961986,70.9069927 114.549631,71.2704235 C114.137277,71.6338543 113.455855,71.815567 112.505343,71.815567 L107.095842,71.815567 C106.131352,71.815567 105.446435,71.6373488 105.04107,71.280907 C104.635705,70.9244652 104.433025,70.32691 104.433025,69.4882235 C104.433025,68.7473837 104.628716,68.1847732 105.020103,67.8003752 C105.41149,67.4159772 105.984584,67.2237811 106.739402,67.2237811 L107.095842,67.2237811 L107.095842,51.5823548 L106.739402,51.5823548 C105.998562,51.5823548 105.435952,51.3866642 105.051554,50.9952772 C104.667156,50.6038901 104.474959,50.0307962 104.474959,49.2759783 C104.474959,48.4512699 104.674145,47.8572091 105.072521,47.4937783 C105.470897,47.1303474 106.14533,46.9486347 107.095842,46.9486347 L112.505343,46.9486347 C113.441877,46.9486347 114.119805,47.1303474 114.539148,47.4937783 C114.958491,47.8572091 115.16816,48.4512699 115.16816,49.2759783 C115.16816,50.0307962 114.968975,50.6038901 114.570599,50.9952772 C114.172222,51.3866642 113.595634,51.5823548 112.840816,51.5823548 L112.505343,51.5823548 Z M13.439885,96.325622 L17.4445933,84.4372993 C17.6961993,83.6545252 18.0456468,83.0849258 18.4929463,82.728484 C18.9402458,82.3720422 19.5343065,82.193824 20.2751463,82.193824 L23.5460076,82.193824 C24.496519,82.193824 25.1779416,82.3755367 25.5902958,82.7389675 C26.0026501,83.1023984 26.2088241,83.6964591 26.2088241,84.5211676 C26.2088241,85.2759855 26.009639,85.8490794 25.6112629,86.2404664 C25.2128868,86.6318535 24.6362984,86.8275441 23.8814805,86.8275441 L23.5460076,86.8275441 L24.1330852,102.46897 L24.4895252,102.46897 C25.2443431,102.46897 
25.8104481,102.661166 26.187857,103.045564 C26.565266,103.429962 26.7539676,103.992573 26.7539676,104.733413 C26.7539676,105.558121 26.5547826,106.152182 26.1564064,106.515613 C25.7580303,106.879044 25.0835967,107.060756 24.1330852,107.060756 L19.4154969,107.060756 C18.4649855,107.060756 17.7905518,106.882538 17.3921757,106.526096 C16.9937996,106.169654 16.7946145,105.572099 16.7946145,104.733413 C16.7946145,103.992573 16.9868106,103.429962 17.3712086,103.045564 C17.7556066,102.661166 18.325206,102.46897 19.0800239,102.46897 L19.4154969,102.46897 L19.1219581,89.6790642 L16.0607674,99.1981091 C15.8371177,99.9109927 15.5191204,100.42468 15.1067662,100.739188 C14.694412,101.053695 14.1248126,101.210947 13.3979509,101.210947 C12.6710892,101.210947 12.0945008,101.053695 11.6681685,100.739188 C11.2418362,100.42468 10.91685,99.9109927 10.6932002,99.1981091 L7.65297664,89.6790642 L7.35943781,102.46897 L7.69491075,102.46897 C8.44972866,102.46897 9.01932808,102.661166 9.40372609,103.045564 C9.78812409,103.429962 9.98032022,103.992573 9.98032022,104.733413 C9.98032022,105.558121 9.77764067,106.152182 9.3722755,106.515613 C8.96691032,106.879044 8.29597114,107.060756 7.35943781,107.060756 L2.62088241,107.060756 C1.68434908,107.060756 1.01340989,106.879044 0.608044719,106.515613 C0.202679546,106.152182 0,105.558121 0,104.733413 C0,103.992573 0.192196121,103.429962 0.57659413,103.045564 C0.960992139,102.661166 1.53059155,102.46897 2.28540946,102.46897 L2.62088241,102.46897 L3.22892713,86.8275441 L2.89345418,86.8275441 C2.13863627,86.8275441 1.56204791,86.6318535 1.16367179,86.2404664 C0.765295672,85.8490794 0.5661106,85.2759855 0.5661106,84.5211676 C0.5661106,83.6964591 0.772284622,83.1023984 1.18463885,82.7389675 C1.59699308,82.3755367 2.27841569,82.193824 3.22892713,82.193824 L6.49978838,82.193824 C7.22665007,82.193824 7.81022738,82.3685477 8.25053783,82.7180005 C8.69084827,83.0674532 9.05077919,83.6405471 9.33034138,84.4372993 L13.439885,96.325622 Z M43.8935644,98.3803938 L43.8935644,86.8275441 L42.7403761,86.8275441 C41.8178209,86.8275441 41.1573651,86.6458314 40.758989,86.2824006 C40.3606129,85.9189697 40.1614278,85.3318979 40.1614278,84.5211676 C40.1614278,83.7104372 40.3606129,83.119871 40.758989,82.7494511 C41.1573651,82.3790312 41.8178209,82.193824 42.7403761,82.193824 L48.6950209,82.193824 C49.6035981,82.193824 50.2605593,82.3790312 50.6659245,82.7494511 C51.0712897,83.119871 51.2739692,83.7104372 51.2739692,84.5211676 C51.2739692,85.2620074 51.0817731,85.8316068 50.6973751,86.2299829 C50.3129771,86.628359 49.7643445,86.8275441 49.051461,86.8275441 L48.6950209,86.8275441 L48.6950209,105.865634 C48.6950209,106.522605 48.6251315,106.934953 48.4853504,107.10269 C48.3455693,107.270428 48.0310665,107.354295 47.5418327,107.354295 L45.4451268,107.354295 C44.7741775,107.354295 44.3024234,107.284406 44.0298503,107.144625 C43.7572771,107.004843 43.5231473,106.76023 43.3274538,106.410777 L34.6051571,91.0838571 L34.6051571,102.46897 L35.8212466,102.46897 C36.7298237,102.46897 37.379796,102.643694 37.7711831,102.993147 C38.1625701,103.3426 38.3582607,103.922682 38.3582607,104.733413 C38.3582607,105.558121 38.1590757,106.152182 37.7606995,106.515613 C37.3623234,106.879044 36.7158456,107.060756 35.8212466,107.060756 L29.8037005,107.060756 C28.8951234,107.060756 28.2381621,106.879044 27.832797,106.515613 C27.4274318,106.152182 27.2247522,105.558121 27.2247522,104.733413 C27.2247522,103.992573 27.4134539,103.429962 27.7908629,103.045564 C28.1682718,102.661166 28.7273878,102.46897 29.4682276,102.46897 
L29.8037005,102.46897 L29.8037005,86.8275441 L29.4682276,86.8275441 C28.755344,86.8275441 28.203217,86.628359 27.8118299,86.2299829 C27.4204428,85.8316068 27.2247522,85.2620074 27.2247522,84.5211676 C27.2247522,83.7104372 27.4309263,83.119871 27.8432805,82.7494511 C28.2556347,82.3790312 28.9091015,82.193824 29.8037005,82.193824 L33.2422983,82.193824 C34.0670067,82.193824 34.6261227,82.3021527 34.919663,82.5188134 C35.2132033,82.7354741 35.5416839,83.1722835 35.9051148,83.8292546 L43.8935644,98.3803938 Z M64.6604624,86.3662688 C62.8572863,86.3662688 61.4420239,87.0931196 60.4146329,88.546843 C59.3872418,90.0005663 58.873554,92.0203728 58.873554,94.6063231 C58.873554,97.1922733 59.3907363,99.2190688 60.4251164,100.68677 C61.4594965,102.154472 62.8712644,102.888312 64.6604624,102.888312 C66.4636385,102.888312 67.8823953,102.157966 68.9167754,100.697254 C69.9511555,99.2365414 70.4683378,97.2062514 70.4683378,94.6063231 C70.4683378,92.0203728 69.95465,90.0005663 68.9272589,88.546843 C67.8998679,87.0931196 66.4776166,86.3662688 64.6604624,86.3662688 L64.6604624,86.3662688 Z M64.6604624,81.501911 C68.0990773,81.501911 70.929602,82.7319662 73.1521214,85.1921135 C75.3746408,87.6522607 76.4858838,90.7902992 76.4858838,94.6063231 C76.4858838,98.4503032 75.3816297,101.595331 73.1730884,104.0415 C70.9645471,106.487669 68.1270335,107.710735 64.6604624,107.710735 C61.2358256,107.710735 58.4053009,106.477185 56.1688034,104.010049 C53.9323059,101.542913 52.8140739,98.4083688 52.8140739,94.6063231 C52.8140739,90.7763211 53.9218224,87.6347881 56.1373528,85.1816299 C58.3528831,82.7284717 61.1938912,81.501911 64.6604624,81.501911 L64.6604624,81.501911 Z M87.4611651,98.1707232 L87.4611651,102.46897 L89.6207722,102.46897 C90.5293493,102.46897 91.1758272,102.643694 91.5602252,102.993147 C91.9446232,103.3426 92.1368193,103.922682 92.1368193,104.733413 C92.1368193,105.558121 91.9411287,106.152182 91.5497417,106.515613 C91.1583546,106.879044 90.5153712,107.060756 89.6207722,107.060756 L82.3661697,107.060756 C81.4436145,107.060756 80.7831587,106.879044 80.3847826,106.515613 C79.9864065,106.152182 79.7872214,105.558121 79.7872214,104.733413 C79.7872214,103.992573 79.9759231,103.429962 80.353332,103.045564 C80.730741,102.661166 81.282868,102.46897 82.0097297,102.46897 L82.3661697,102.46897 L82.3661697,86.8275441 L82.0097297,86.8275441 C81.2968461,86.8275441 80.7482136,86.628359 80.3638155,86.2299829 C79.9794175,85.8316068 79.7872214,85.2620074 79.7872214,84.5211676 C79.7872214,83.7104372 79.989901,83.119871 80.3952661,82.7494511 C80.8006313,82.3790312 81.4575926,82.193824 82.3661697,82.193824 L91.0255652,82.193824 C94.450202,82.193824 97.0396079,82.8507853 98.7938606,84.1647276 C100.548113,85.4786699 101.425227,87.414609 101.425227,89.972603 C101.425227,92.6703781 100.551608,94.7111515 98.8043442,96.0949843 C97.0570805,97.4788171 94.4641801,98.1707232 91.0255652,98.1707232 L87.4611651,98.1707232 Z M87.4611651,86.8275441 L87.4611651,93.4531348 L90.4384875,93.4531348 C92.0879044,93.4531348 93.328443,93.1735768 94.1601405,92.6144525 C94.9918381,92.0553281 95.4076806,91.2166541 95.4076806,90.0984053 C95.4076806,89.0500471 94.9778602,88.2428234 94.1182064,87.67671 C93.2585527,87.1105966 92.031992,86.8275441 90.4384875,86.8275441 L87.4611651,86.8275441 Z M114.727851,107.396229 L113.092421,109.03166 C113.69348,108.835966 114.284046,108.689198 114.864137,108.591352 C115.444229,108.493505 116.013828,108.444582 116.572953,108.444582 C117.677223,108.444582 118.840883,108.608823 120.063968,108.937308 C121.287053,109.265794 
122.031376,109.430034 122.29696,109.430034 C122.744259,109.430034 123.327837,109.279772 124.047709,108.979242 C124.767582,108.678713 125.253314,108.52845 125.50492,108.52845 C126.02211,108.52845 126.45193,108.727636 126.794394,109.126012 C127.136858,109.524388 127.308087,110.024098 127.308087,110.625156 C127.308087,111.421909 126.836333,112.099837 125.892811,112.658961 C124.949288,113.218086 123.792617,113.497643 122.422762,113.497643 C121.486229,113.497643 120.28413,113.277492 118.816428,112.837181 C117.348727,112.396871 116.286406,112.176719 115.629435,112.176719 C114.636989,112.176719 113.518757,112.449288 112.274706,112.994434 C111.030654,113.53958 110.261869,113.812149 109.968329,113.812149 C109.36727,113.812149 108.857077,113.612964 108.437734,113.214588 C108.01839,112.816212 107.808722,112.337469 107.808722,111.778345 C107.808722,111.386958 107.941512,110.971115 108.207096,110.530805 C108.47268,110.090494 108.94094,109.520895 109.611889,108.821989 L111.729562,106.683349 C109.395218,105.830685 107.536157,104.29661 106.152324,102.08108 C104.768491,99.8655494 104.076585,97.3180772 104.076585,94.4385866 C104.076585,90.6365409 105.180839,87.5299526 107.389381,85.1187288 C109.597922,82.7075049 112.442425,81.501911 115.922974,81.501911 C119.389545,81.501911 122.227059,82.7109994 124.4356,85.1292123 C126.644141,87.5474252 127.748395,90.650519 127.748395,94.4385866 C127.748395,98.2126762 126.65113,101.322759 124.456567,103.768928 C122.262004,106.215097 119.480402,107.438163 116.111677,107.438163 C115.888028,107.438163 115.660887,107.434669 115.430248,107.42768 C115.199609,107.420691 114.965479,107.410207 114.727851,107.396229 L114.727851,107.396229 Z M115.922974,86.3662688 C114.119798,86.3662688 112.704535,87.0931196 111.677144,88.546843 C110.649753,90.0005663 110.136065,92.0203728 110.136065,94.6063231 C110.136065,97.1922733 110.653248,99.2190688 111.687628,100.68677 C112.722008,102.154472 114.133776,102.888312 115.922974,102.888312 C117.72615,102.888312 119.144907,102.157966 120.179287,100.697254 C121.213667,99.2365414 121.730849,97.2062514 121.730849,94.6063231 C121.730849,92.0203728 121.217161,90.0005663 120.18977,88.546843 C119.162379,87.0931196 117.740128,86.3662688 115.922974,86.3662688 L115.922974,86.3662688 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/mono-line": {
"title": "$:/core/images/mono-line",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-mono-line tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M60.4374591,84.522627 L61.3450888,84.522627 C63.2671377,84.522627 64.7264493,85.0120303 65.7230673,85.9908515 C66.7196852,86.9696727 67.2179868,88.4022896 67.2179868,90.288745 C67.2179868,92.3887615 66.6929905,93.9014625 65.6429823,94.8268935 C64.5929741,95.7523244 62.857817,96.215033 60.4374591,96.215033 L44.3670747,96.215033 C41.9111232,96.215033 40.1670679,95.7612227 39.1348565,94.8535884 C38.102645,93.9459542 37.586547,92.424355 37.586547,90.288745 C37.586547,88.2243221 38.102645,86.747214 39.1348565,85.8573766 C40.1670679,84.9675391 41.9111232,84.522627 44.3670747,84.522627 L46.235724,84.522627 L44.0467348,78.2759992 L20.9822627,78.2759992 L18.6864935,84.522627 L20.5551429,84.522627 C22.9755008,84.522627 24.7106579,84.9764373 25.7606661,85.8840716 C26.8106743,86.7917058 27.3356705,88.2599156 27.3356705,90.288745 C27.3356705,92.3887615 26.8106743,93.9014625 25.7606661,94.8268935 C24.7106579,95.7523244 22.9755008,96.215033 20.5551429,96.215033 L6.78052766,96.215033 C4.32457622,96.215033 2.58052094,95.7523244 1.54830946,94.8268935 C0.516097994,93.9014625 0,92.3887615 0,90.288745 C0,88.4022896 0.498301511,86.9696727 1.49491948,85.9908515 C2.49153745,85.0120303 3.95084902,84.522627 5.87289797,84.522627 L6.78052766,84.522627 L21.0890427,44.6937008 L16.8178442,44.6937008 C14.3974863,44.6937008 12.6623292,44.2309922 11.612321,43.3055613 C10.5623128,42.3801303 10.0373165,40.8852258 10.0373165,38.8208028 C10.0373165,36.7207864 10.5623128,35.2080854 11.612321,34.2826544 C12.6623292,33.3572234 14.3974863,32.8945149 16.8178442,32.8945149 L36.8390873,32.8945149 C40.0069087,32.8945149 42.231469,34.6029772 43.512835,38.0199531 L43.512835,38.180123 L60.4374591,84.522627 Z M32.4611088,44.6937008 L24.7195615,67.224273 L40.2026561,67.224273 L32.4611088,44.6937008 Z M89.5058233,68.5590225 L89.5058233,84.8429669 L97.5143205,84.8429669 C103.173687,84.8429669 107.160099,84.22009 109.473676,82.9743176 C111.787254,81.7285451 112.944025,79.6463566 112.944025,76.7276897 C112.944025,73.7734293 111.840643,71.6734444 109.633846,70.4276719 C107.427049,69.1818994 103.565213,68.5590225 98.0482204,68.5590225 L89.5058233,68.5590225 Z M116.734714,62.6327346 C120.614405,64.0564746 123.461842,66.0051894 125.277111,68.4789376 C127.092379,70.9526857 128,74.1115614 128,77.9556593 C128,81.1946677 127.216955,84.1488838 125.650841,86.8183962 C124.084727,89.4879087 121.84237,91.676876 118.923703,93.385364 C117.215215,94.3819819 115.302093,95.1027395 113.18428,95.5476582 C111.066467,95.9925769 108.06776,96.215033 104.188068,96.215033 L99.7033098,96.215033 L76.3184979,96.215033 C73.9693269,96.215033 72.2875593,95.7523244 71.2731446,94.8268935 C70.2587299,93.9014625 69.7515301,92.3887615 69.7515301,90.288745 C69.7515301,88.4022896 70.2320352,86.9696727 71.1930596,85.9908515 C72.1540841,85.0120303 73.5600062,84.522627 75.4108682,84.522627 L76.3184979,84.522627 L76.3184979,44.6937008 L75.4108682,44.6937008 C73.5600062,44.6937008 72.1540841,44.1953993 71.1930596,43.1987813 C70.2320352,42.2021633 69.7515301,40.7428518 69.7515301,38.8208028 C69.7515301,36.7563799 70.2676281,35.2525771 71.2998396,34.3093494 C72.3320511,33.3661217 74.0049204,32.8945149 76.3184979,32.8945149 L100.877889,32.8945149 C108.388118,32.8945149 114.09189,34.3538264 117.989378,37.2724934 C121.886867,40.1911603 123.835581,44.4623161 123.835581,50.0860889 C123.835581,52.8623819 123.239399,55.3093982 
122.047017,57.4272114 C120.854635,59.5450246 119.083885,61.2801816 116.734714,62.6327346 L116.734714,62.6327346 Z M89.5058233,44.3733609 L89.5058233,57.8276363 L96.7134708,57.8276363 C101.091471,57.8276363 104.179161,57.3115383 105.976633,56.2793268 C107.774104,55.2471153 108.672827,53.50306 108.672827,51.0471086 C108.672827,48.7335312 107.863087,47.0428653 106.243583,45.9750604 C104.624078,44.9072554 101.999097,44.3733609 98.3685602,44.3733609 L89.5058233,44.3733609 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/new-button": {
"title": "$:/core/images/new-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-new-button tc-image-button\" viewBox=\"0 0 128 128\" width=\"22pt\" height=\"22pt\">\n <g fill-rule=\"evenodd\">\n <path d=\"M56,72 L8.00697327,72 C3.59075293,72 0,68.418278 0,64 C0,59.5907123 3.58484404,56 8.00697327,56 L56,56 L56,8.00697327 C56,3.59075293 59.581722,0 64,0 C68.4092877,0 72,3.58484404 72,8.00697327 L72,56 L119.993027,56 C124.409247,56 128,59.581722 128,64 C128,68.4092877 124.415156,72 119.993027,72 L72,72 L72,119.993027 C72,124.409247 68.418278,128 64,128 C59.5907123,128 56,124.415156 56,119.993027 L56,72 L56,72 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/new-here-button": {
"title": "$:/core/images/new-here-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-new-here-button tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n \t<g transform=\"translate(52.233611, 64.389922) rotate(75.000000) translate(-52.233611, -64.389922) translate(-7.734417, 3.702450)\">\n\t <path d=\"M18.9270186,45.959338 L18.9080585,49.6521741 C18.8884833,53.4648378 21.0574548,58.7482162 23.7526408,61.4434022 L78.5671839,116.257945 C81.2617332,118.952495 85.6348701,118.950391 88.3334363,116.251825 L115.863237,88.7220241 C118.555265,86.0299959 118.564544,81.6509578 115.869358,78.9557717 L61.0548144,24.1412286 C58.3602652,21.4466794 53.0787224,19.2788426 49.2595808,19.3006519 L25.9781737,19.4336012 C22.1633003,19.4553862 19.0471195,22.5673232 19.0275223,26.3842526 L18.9871663,34.2443819 C19.0818862,34.255617 19.1779758,34.2665345 19.2754441,34.2771502 C22.6891275,34.6489512 27.0485594,34.2348566 31.513244,33.2285542 C31.7789418,32.8671684 32.075337,32.5211298 32.4024112,32.1940556 C34.8567584,29.7397084 38.3789778,29.0128681 41.4406288,30.0213822 C41.5958829,29.9543375 41.7503946,29.8866669 41.9041198,29.8183808 L42.1110981,30.2733467 C43.1114373,30.6972371 44.0473796,31.3160521 44.8614145,32.1300869 C48.2842088,35.5528813 48.2555691,41.130967 44.7974459,44.5890903 C41.4339531,47.952583 36.0649346,48.0717177 32.6241879,44.9262969 C27.8170558,45.8919233 23.0726921,46.2881596 18.9270186,45.959338 Z\"></path>\n\t <path d=\"M45.4903462,38.8768094 C36.7300141,42.6833154 26.099618,44.7997354 18.1909048,43.9383587 C7.2512621,42.7468685 1.50150083,35.8404432 4.66865776,24.7010202 C7.51507386,14.6896965 15.4908218,6.92103848 24.3842626,4.38423012 C34.1310219,1.60401701 42.4070208,6.15882777 42.4070209,16.3101169 L34.5379395,16.310117 C34.5379394,11.9285862 31.728784,10.3825286 26.5666962,11.8549876 C20.2597508,13.6540114 14.3453742,19.4148216 12.2444303,26.8041943 C10.4963869,32.9523565 12.6250796,35.5092726 19.0530263,36.2093718 C25.5557042,36.9176104 35.0513021,34.9907189 42.7038419,31.5913902 L42.7421786,31.6756595 C44.3874154,31.5384763 47.8846101,37.3706354 45.9274416,38.6772897 L45.9302799,38.6835285 C45.9166992,38.6895612 45.9031139,38.6955897 45.8895238,38.7016142 C45.8389288,38.7327898 45.7849056,38.7611034 45.7273406,38.7863919 C45.6506459,38.8200841 45.571574,38.8501593 45.4903462,38.8768094 Z\"></path>\n </g>\n <rect x=\"96\" y=\"80\" width=\"16\" height=\"48\" rx=\"8\"></rect>\n <rect x=\"80\" y=\"96\" width=\"48\" height=\"16\" rx=\"8\"></rect>\n </g>\n </g>\n</svg>"
},
"$:/core/images/new-image-button": {
"title": "$:/core/images/new-image-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-new-image-button tc-image-button\" viewBox=\"0 0 128 128\" width=\"22pt\" height=\"22pt\">\n <g fill-rule=\"evenodd\">\n <path d=\"M81.3619177,73.6270062 L97.1875317,46.2162388 C97.91364,44.9585822 97.4824378,43.3533085 96.2260476,42.6279312 L46.2162388,13.7547547 C44.9585822,13.0286463 43.3533085,13.4598485 42.6279312,14.7162388 L30.0575956,36.4886988 L40.0978909,31.2276186 C43.1404959,29.6333041 46.8692155,31.3421319 47.6479264,34.6877101 L51.2545483,52.3903732 L61.1353556,53.2399953 C63.2899974,53.4346096 65.1046382,54.9309951 65.706105,57.0091178 C65.7395572,57.1246982 65.8069154,57.3539875 65.9047035,57.6813669 C66.0696435,58.2335608 66.2581528,58.852952 66.4667073,59.5238092 C67.0618822,61.4383079 67.6960725,63.3742727 68.3393254,65.2021174 C68.5462918,65.7902259 68.7511789,66.3583016 68.953259,66.9034738 C69.5777086,68.5881157 70.1617856,70.0172008 70.6783305,71.110045 C70.9334784,71.6498566 71.1627732,72.0871602 71.4035746,72.5373068 C71.6178999,72.7492946 71.9508843,72.9623307 72.4151452,73.1586945 C73.5561502,73.6412938 75.1990755,73.899146 77.0720271,73.9171651 C77.9355886,73.9254732 78.7819239,73.8832103 79.5638842,73.8072782 C80.0123946,73.7637257 80.3172916,73.7224469 80.4352582,73.7027375 C80.7503629,73.6500912 81.0598053,73.6256267 81.3619177,73.6270062 L81.3619177,73.6270062 L81.3619177,73.6270062 L81.3619177,73.6270062 Z M37.4707881,2.64867269 C38.9217993,0.135447653 42.1388058,-0.723707984 44.6486727,0.725364314 L108.293614,37.4707881 C110.806839,38.9217993 111.665994,42.1388058 110.216922,44.6486727 L73.4714982,108.293614 C72.0204871,110.806839 68.8034805,111.665994 66.2936136,110.216922 L2.64867269,73.4714982 C0.135447653,72.0204871 -0.723707984,68.8034805 0.725364314,66.2936136 L37.4707881,2.64867269 L37.4707881,2.64867269 L37.4707881,2.64867269 L37.4707881,2.64867269 Z M80.3080975,53.1397764 C82.8191338,54.5895239 86.0299834,53.7291793 87.4797308,51.218143 C88.9294783,48.7071068 88.0691338,45.4962571 85.5580975,44.0465097 C83.0470612,42.5967622 79.8362116,43.4571068 78.3864641,45.968143 C76.9367166,48.4791793 77.7970612,51.6900289 80.3080975,53.1397764 L80.3080975,53.1397764 L80.3080975,53.1397764 L80.3080975,53.1397764 Z M96,112 L88.0070969,112 C83.5881712,112 80,108.418278 80,104 C80,99.5907123 83.5848994,96 88.0070969,96 L96,96 L96,88.0070969 C96,83.5881712 99.581722,80 104,80 C108.409288,80 112,83.5848994 112,88.0070969 L112,96 L119.992903,96 C124.411829,96 128,99.581722 128,104 C128,108.409288 124.415101,112 119.992903,112 L112,112 L112,119.992903 C112,124.411829 108.418278,128 104,128 C99.5907123,128 96,124.415101 96,119.992903 L96,112 L96,112 Z M33.3471097,51.7910932 C40.7754579,59.7394511 42.3564368,62.4818351 40.7958321,65.1848818 C39.2352273,67.8879286 26.9581062,62.8571718 24.7019652,66.7649227 C22.4458242,70.6726735 23.7947046,70.0228006 22.2648667,72.6725575 L41.9944593,84.0634431 C41.9944593,84.0634431 36.3904568,75.8079231 37.7602356,73.4353966 C40.2754811,69.0788636 46.5298923,72.1787882 48.1248275,69.4162793 C50.538989,65.234829 43.0222016,59.7770885 33.3471097,51.7910932 L33.3471097,51.7910932 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/new-journal-button": {
"title": "$:/core/images/new-journal-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-new-journal-button tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M102.545455,112.818182 L102.545455,124.636364 L102.545455,124.636364 L102.545455,124.636364 C102.545455,125.941761 103.630828,127 104.969697,127 L111.030303,127 C112.369172,127 113.454545,125.941761 113.454545,124.636364 L113.454545,112.818182 L125.575758,112.818182 C126.914626,112.818182 128,111.759982 128,110.454545 L128,104.545455 C128,103.240018 126.914626,102.181818 125.575758,102.181818 L113.454545,102.181818 L113.454545,90.3636364 C113.454545,89.0582 112.369172,88 111.030303,88 L104.969697,88 L104.969697,88 C103.630828,88 102.545455,89.0582 102.545455,90.3636364 L102.545455,102.181818 L90.4242424,102.181818 L90.4242424,102.181818 C89.0853705,102.181818 88,103.240018 88,104.545455 L88,110.454545 L88,110.454545 L88,110.454545 C88,111.759982 89.0853705,112.818182 90.4242424,112.818182 L102.545455,112.818182 Z\"></path>\n <g transform=\"translate(59.816987, 64.316987) rotate(30.000000) translate(-59.816987, -64.316987) translate(20.316987, 12.816987)\">\n <g transform=\"translate(0.000000, 0.000000)\">\n <path d=\"M9.99631148,0 C4.4755011,0 -2.27373675e-13,4.48070044 -2.27373675e-13,9.99759461 L-2.27373675e-13,91.6128884 C-2.27373675e-13,97.1344074 4.46966773,101.610483 9.99631148,101.610483 L68.9318917,101.610483 C74.4527021,101.610483 78.9282032,97.1297826 78.9282032,91.6128884 L78.9282032,9.99759461 C78.9282032,4.47607557 74.4585355,0 68.9318917,0 L9.99631148,0 Z M20.8885263,26 C24.2022348,26 26.8885263,23.3137085 26.8885263,20 C26.8885263,16.6862915 24.2022348,14 20.8885263,14 C17.5748178,14 14.8885263,16.6862915 14.8885263,20 C14.8885263,23.3137085 17.5748178,26 20.8885263,26 Z M57.3033321,25.6783342 C60.6170406,25.6783342 63.3033321,22.9920427 63.3033321,19.6783342 C63.3033321,16.3646258 60.6170406,13.6783342 57.3033321,13.6783342 C53.9896236,13.6783342 51.3033321,16.3646258 51.3033321,19.6783342 C51.3033321,22.9920427 53.9896236,25.6783342 57.3033321,25.6783342 Z\"></path>\n <text font-family=\"Helvetica\" font-size=\"47.1724138\" font-weight=\"bold\" fill=\"#FFFFFF\">\n <tspan x=\"42\" y=\"77.4847912\" text-anchor=\"middle\"><<now \"DD\">></tspan>\n </text>\n </g>\n </g>\n </g>\n</svg>"
},
"$:/core/images/opacity": {
"title": "$:/core/images/opacity",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-opacity tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M102.361773,65 C101.833691,67.051742 101.183534,69.0544767 100.419508,71 L82.5835324,71 C83.7602504,69.1098924 84.7666304,67.1027366 85.581205,65 L102.361773,65 Z M102.834311,63 C103.256674,61.0388326 103.568427,59.0365486 103.762717,57 L87.6555706,57 C87.3692052,59.0609452 86.9083652,61.0660782 86.2884493,63 L102.834311,63 Z M99.5852583,73 C98.6682925,75.0747721 97.6196148,77.0783056 96.4498253,79 L75.8124196,79 C77.8387053,77.2115633 79.6621163,75.1985844 81.2437158,73 L99.5852583,73 Z M95.1689122,81 C93.7449202,83.1155572 92.1695234,85.1207336 90.458251,87 L60.4614747,87 C65.1836162,85.86248 69.5430327,83.794147 73.3347255,81 L95.1689122,81 Z M87.6555706,47 L103.762717,47 C101.246684,20.6269305 79.0321807,0 52,0 C23.281193,0 0,23.281193 0,52 C0,77.2277755 17.9651296,98.2595701 41.8000051,103 L62.1999949,103 C67.8794003,101.870444 73.2255333,99.8158975 78.074754,97 L39,97 L39,95 L81.2493857,95 C83.8589242,93.2215015 86.2981855,91.2116653 88.5376609,89 L39,89 L39,87 L43.5385253,87 C27.7389671,83.1940333 16,68.967908 16,52 C16,32.117749 32.117749,16 52,16 C70.1856127,16 85.2217929,29.4843233 87.6555706,47 Z M87.8767787,49 L103.914907,49 C103.971379,49.9928025 104,50.9930589 104,52 C104,53.0069411 103.971379,54.0071975 103.914907,55 L87.8767787,55 C87.958386,54.0107999 88,53.0102597 88,52 C88,50.9897403 87.958386,49.9892001 87.8767787,49 Z\"></path>\n <path d=\"M76,128 C104.718807,128 128,104.718807 128,76 C128,47.281193 104.718807,24 76,24 C47.281193,24 24,47.281193 24,76 C24,104.718807 47.281193,128 76,128 L76,128 Z M76,112 C95.882251,112 112,95.882251 112,76 C112,56.117749 95.882251,40 76,40 C56.117749,40 40,56.117749 40,76 C40,95.882251 56.117749,112 76,112 L76,112 Z\"></path>\n <path d=\"M37,58 L90,58 L90,62 L37,62 L37,58 L37,58 Z M40,50 L93,50 L93,54 L40,54 L40,50 L40,50 Z M40,42 L93,42 L93,46 L40,46 L40,42 L40,42 Z M32,66 L85,66 L85,70 L32,70 L32,66 L32,66 Z M30,74 L83,74 L83,78 L30,78 L30,74 L30,74 Z M27,82 L80,82 L80,86 L27,86 L27,82 L27,82 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/open-window": {
"title": "$:/core/images/open-window",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-open-window tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M16,112 L104.993898,112 C108.863261,112 112,115.590712 112,120 C112,124.418278 108.858091,128 104.993898,128 L7.00610161,128 C3.13673853,128 0,124.409288 0,120 C0,119.998364 4.30952878e-07,119.996727 1.29273572e-06,119.995091 C4.89579306e-07,119.993456 0,119.99182 0,119.990183 L0,24.0098166 C0,19.586117 3.59071231,16 8,16 C12.418278,16 16,19.5838751 16,24.0098166 L16,112 Z\"></path>\n <path d=\"M96,43.1959595 L96,56 C96,60.418278 99.581722,64 104,64 C108.418278,64 112,60.418278 112,56 L112,24 C112,19.5907123 108.415101,16 103.992903,16 L72.0070969,16 C67.5881712,16 64,19.581722 64,24 C64,28.4092877 67.5848994,32 72.0070969,32 L84.5685425,32 L48.2698369,68.2987056 C45.1421332,71.4264093 45.1434327,76.4904296 48.267627,79.614624 C51.3854642,82.7324612 56.4581306,82.7378289 59.5835454,79.6124141 L96,43.1959595 Z M32,7.9992458 C32,3.58138434 35.5881049,0 39.9992458,0 L120.000754,0 C124.418616,0 128,3.5881049 128,7.9992458 L128,88.0007542 C128,92.4186157 124.411895,96 120.000754,96 L39.9992458,96 C35.5813843,96 32,92.4118951 32,88.0007542 L32,7.9992458 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/options-button": {
"title": "$:/core/images/options-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-options-button tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M110.48779,76.0002544 C109.354214,80.4045063 107.611262,84.5641217 105.354171,88.3838625 L105.354171,88.3838625 L112.07833,95.1080219 C115.20107,98.2307613 115.210098,103.299824 112.089164,106.420759 L106.420504,112.089418 C103.301049,115.208874 98.2346851,115.205502 95.1077675,112.078585 L88.3836082,105.354425 C84.5638673,107.611516 80.4042519,109.354468 76,110.488045 L76,110.488045 L76,119.993281 C76,124.409501 72.4220153,128.000254 68.0083475,128.000254 L59.9916525,128.000254 C55.5800761,128.000254 52,124.41541 52,119.993281 L52,110.488045 C47.5957481,109.354468 43.4361327,107.611516 39.6163918,105.354425 L32.8922325,112.078585 C29.7694931,115.201324 24.7004301,115.210353 21.5794957,112.089418 L15.9108363,106.420759 C12.7913807,103.301303 12.7947522,98.2349395 15.9216697,95.1080219 L22.6458291,88.3838625 C20.3887383,84.5641217 18.6457859,80.4045063 17.5122098,76.0002544 L8.00697327,76.0002544 C3.59075293,76.0002544 2.19088375e-16,72.4222697 4.89347582e-16,68.0086019 L9.80228577e-16,59.9919069 C1.25035972e-15,55.5803305 3.58484404,52.0002544 8.00697327,52.0002544 L17.5122098,52.0002544 C18.6457859,47.5960025 20.3887383,43.4363871 22.6458291,39.6166462 L15.9216697,32.8924868 C12.7989304,29.7697475 12.7899019,24.7006845 15.9108363,21.5797501 L21.5794957,15.9110907 C24.6989513,12.7916351 29.7653149,12.7950065 32.8922325,15.9219241 L39.6163918,22.6460835 C43.4361327,20.3889927 47.5957481,18.6460403 52,17.5124642 L52,8.00722764 C52,3.5910073 55.5779847,0.000254375069 59.9916525,0.000254375069 L68.0083475,0.000254375069 C72.4199239,0.000254375069 76,3.58509841 76,8.00722764 L76,17.5124642 C80.4042519,18.6460403 84.5638673,20.3889927 88.3836082,22.6460835 L95.1077675,15.9219241 C98.2305069,12.7991848 103.29957,12.7901562 106.420504,15.9110907 L112.089164,21.5797501 C115.208619,24.6992057 115.205248,29.7655693 112.07833,32.8924868 L105.354171,39.6166462 L105.354171,39.6166462 C107.611262,43.4363871 109.354214,47.5960025 110.48779,52.0002544 L119.993027,52.0002544 C124.409247,52.0002544 128,55.5782391 128,59.9919069 L128,68.0086019 C128,72.4201783 124.415156,76.0002544 119.993027,76.0002544 L110.48779,76.0002544 L110.48779,76.0002544 Z M64,96.0002544 C81.673112,96.0002544 96,81.6733664 96,64.0002544 C96,46.3271424 81.673112,32.0002544 64,32.0002544 C46.326888,32.0002544 32,46.3271424 32,64.0002544 C32,81.6733664 46.326888,96.0002544 64,96.0002544 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/paint": {
"title": "$:/core/images/paint",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-paint tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M83.5265806,76.1907935 C90.430962,69.2864121 91.8921169,59.0000433 87.9100453,50.6642209 L125.812763,12.7615036 C128.732035,9.84223095 128.72611,5.10322984 125.812796,2.18991592 C122.893542,-0.729338085 118.161775,-0.730617045 115.241209,2.18994966 L77.3384914,40.092667 C69.002669,36.1105954 58.7163002,37.5717503 51.8119188,44.4761317 L83.5265806,76.1907935 L83.5265806,76.1907935 L83.5265806,76.1907935 L83.5265806,76.1907935 Z M80.8836921,78.8336819 L49.1690303,47.1190201 C49.1690303,47.1190201 8.50573364,81.242543 0,80.2820711 C0,80.2820711 3.78222974,85.8744423 6.82737483,88.320684 C20.8514801,82.630792 44.1526049,63.720771 44.1526049,63.720771 L44.8144806,64.3803375 C44.8144806,64.3803375 19.450356,90.2231043 9.18040433,92.0477601 C10.4017154,93.4877138 13.5343883,96.1014812 15.4269991,97.8235871 C20.8439164,96.3356979 50.1595367,69.253789 50.1595367,69.253789 L50.8214124,69.9133555 L18.4136144,100.936036 L23.6993903,106.221812 L56.1060358,75.2002881 L56.7679115,75.8598546 C56.7679115,75.8598546 28.9040131,106.396168 28.0841366,108.291555 C28.0841366,108.291555 34.1159238,115.144621 35.6529617,116.115796 C36.3545333,113.280171 63.5365402,82.6307925 63.5365402,82.6307925 L64.1984159,83.290359 C64.1984159,83.290359 43.6013016,107.04575 39.2343772,120.022559 C42.443736,123.571575 46.7339155,125.159692 50.1595362,126.321151 C47.9699978,114.504469 80.8836921,78.8336819 80.8836921,78.8336819 L80.8836921,78.8336819 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/palette": {
"title": "$:/core/images/palette",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-palette tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M80.2470434,39.1821571 C75.0645698,38.2680897 69.6261555,37.7814854 64.0193999,37.7814854 C28.6624616,37.7814854 0,57.1324214 0,81.0030106 C0,90.644534 4.67604329,99.5487133 12.5805659,106.738252 C23.5031767,91.1899067 26.3405471,72.3946229 36.8885698,63.5622337 C52.0716764,50.8486559 63.4268694,55.7343343 63.4268694,55.7343343 L80.2470434,39.1821571 Z M106.781666,48.8370714 C119.830962,56.749628 128.0388,68.229191 128.0388,81.0030106 C128.0388,90.3534932 128.557501,98.4142085 116.165191,106.082518 C105.367708,112.763955 112.341384,99.546808 104.321443,95.1851533 C96.3015017,90.8234987 84.3749007,96.492742 86.1084305,103.091059 C89.3087234,115.272303 105.529892,114.54645 92.4224435,119.748569 C79.3149955,124.950687 74.2201582,124.224536 64.0193999,124.224536 C56.1979176,124.224536 48.7040365,123.277578 41.7755684,121.544216 C51.620343,117.347916 69.6563669,109.006202 75.129737,102.088562 C82.7876655,92.4099199 87.3713218,80.0000002 83.3235694,72.4837191 C83.1303943,72.1250117 94.5392656,60.81569 106.781666,48.8370714 Z M1.13430476,123.866563 C0.914084026,123.867944 0.693884185,123.868637 0.473712455,123.868637 C33.9526848,108.928928 22.6351223,59.642592 59.2924543,59.6425917 C59.6085574,61.0606542 59.9358353,62.5865065 60.3541977,64.1372318 C34.4465025,59.9707319 36.7873124,112.168427 1.13429588,123.866563 L1.13430476,123.866563 Z M1.84669213,123.859694 C40.7185279,123.354338 79.9985412,101.513051 79.9985401,79.0466836 C70.7284906,79.0466835 65.9257264,75.5670082 63.1833375,71.1051511 C46.585768,64.1019718 32.81846,116.819636 1.84665952,123.859695 L1.84669213,123.859694 Z M67.1980193,59.8524981 C62.748213,63.9666823 72.0838429,76.2846822 78.5155805,71.1700593 C89.8331416,59.8524993 112.468264,37.2173758 123.785825,25.8998146 C135.103386,14.5822535 123.785825,3.26469247 112.468264,14.5822535 C101.150703,25.8998144 78.9500931,48.9868127 67.1980193,59.8524981 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/permalink-button": {
"title": "$:/core/images/permalink-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-permalink-button tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M80.4834582,48 L73.0956761,80 L73.0956761,80 L47.5165418,80 L54.9043239,48 L80.4834582,48 Z M84.1773493,32 L89.8007299,7.64246248 C90.7941633,3.33942958 95.0918297,0.64641956 99.3968675,1.64031585 C103.693145,2.63218977 106.385414,6.93288901 105.390651,11.2416793 L100.598215,32 L104.000754,32 C108.411895,32 112,35.581722 112,40 C112,44.4092877 108.418616,48 104.000754,48 L96.9043239,48 L89.5165418,80 L104.000754,80 C108.411895,80 112,83.581722 112,88 C112,92.4092877 108.418616,96 104.000754,96 L85.8226507,96 L80.1992701,120.357538 C79.2058367,124.66057 74.9081703,127.35358 70.6031325,126.359684 C66.3068546,125.36781 63.6145865,121.067111 64.6093491,116.758321 L69.401785,96 L43.8226507,96 L38.1992701,120.357538 C37.2058367,124.66057 32.9081703,127.35358 28.6031325,126.359684 C24.3068546,125.36781 21.6145865,121.067111 22.6093491,116.758321 L27.401785,96 L23.9992458,96 C19.5881049,96 16,92.418278 16,88 C16,83.5907123 19.5813843,80 23.9992458,80 L31.0956761,80 L38.4834582,48 L23.9992458,48 C19.5881049,48 16,44.418278 16,40 C16,35.5907123 19.5813843,32 23.9992458,32 L42.1773493,32 L47.8007299,7.64246248 C48.7941633,3.33942958 53.0918297,0.64641956 57.3968675,1.64031585 C61.6931454,2.63218977 64.3854135,6.93288901 63.3906509,11.2416793 L58.598215,32 L84.1773493,32 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/permaview-button": {
"title": "$:/core/images/permaview-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-permaview-button tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M81.4834582,48 L79.6365127,56 L79.6365127,56 L74.0573784,56 L75.9043239,48 L81.4834582,48 Z M85.1773493,32 L90.8007299,7.64246248 C91.7941633,3.33942958 96.0918297,0.64641956 100.396867,1.64031585 C104.693145,2.63218977 107.385414,6.93288901 106.390651,11.2416793 L101.598215,32 L104.000754,32 C108.411895,32 112,35.581722 112,40 C112,44.4092877 108.418616,48 104.000754,48 L97.9043239,48 L96.0573784,56 L104.000754,56 C108.411895,56 112,59.581722 112,64 C112,68.4092877 108.418616,72 104.000754,72 L92.3634873,72 L90.5165418,80 L104.000754,80 C108.411895,80 112,83.581722 112,88 C112,92.4092877 108.418616,96 104.000754,96 L86.8226507,96 L81.1992701,120.357538 C80.2058367,124.66057 75.9081703,127.35358 71.6031325,126.359684 C67.3068546,125.36781 64.6145865,121.067111 65.6093491,116.758321 L70.401785,96 L64.8226507,96 L59.1992701,120.357538 C58.2058367,124.66057 53.9081703,127.35358 49.6031325,126.359684 C45.3068546,125.36781 42.6145865,121.067111 43.6093491,116.758321 L48.401785,96 L42.8226507,96 L37.1992701,120.357538 C36.2058367,124.66057 31.9081703,127.35358 27.6031325,126.359684 C23.3068546,125.36781 20.6145865,121.067111 21.6093491,116.758321 L26.401785,96 L23.9992458,96 C19.5881049,96 16,92.418278 16,88 C16,83.5907123 19.5813843,80 23.9992458,80 L30.0956761,80 L31.9426216,72 L23.9992458,72 C19.5881049,72 16,68.418278 16,64 C16,59.5907123 19.5813843,56 23.9992458,56 L35.6365127,56 L37.4834582,48 L23.9992458,48 C19.5881049,48 16,44.418278 16,40 C16,35.5907123 19.5813843,32 23.9992458,32 L41.1773493,32 L46.8007299,7.64246248 C47.7941633,3.33942958 52.0918297,0.64641956 56.3968675,1.64031585 C60.6931454,2.63218977 63.3854135,6.93288901 62.3906509,11.2416793 L57.598215,32 L63.1773493,32 L68.8007299,7.64246248 C69.7941633,3.33942958 74.0918297,0.64641956 78.3968675,1.64031585 C82.6931454,2.63218977 85.3854135,6.93288901 84.3906509,11.2416793 L79.598215,32 L85.1773493,32 Z M53.9043239,48 L52.0573784,56 L57.6365127,56 L59.4834582,48 L53.9043239,48 Z M75.9426216,72 L74.0956761,80 L74.0956761,80 L68.5165418,80 L70.3634873,72 L75.9426216,72 L75.9426216,72 Z M48.3634873,72 L46.5165418,80 L52.0956761,80 L53.9426216,72 L48.3634873,72 L48.3634873,72 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/picture": {
"title": "$:/core/images/picture",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-picture tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M112,68.2332211 L112,20.0027785 C112,17.7898769 110.207895,16 107.997221,16 L20.0027785,16 C17.7898769,16 16,17.792105 16,20.0027785 L16,58.312373 L25.2413115,43.7197989 C28.041793,39.297674 34.2643908,38.7118128 37.8410347,42.5335275 L56.0882845,63.1470817 L69.7748997,56.7400579 C72.766567,55.3552503 76.3013751,55.9473836 78.678437,58.2315339 C78.8106437,58.3585731 79.0742301,58.609836 79.4527088,58.9673596 C80.0910923,59.570398 80.8117772,60.2441563 81.598127,60.9705595 C83.8422198,63.043576 86.1541548,65.1151944 88.3956721,67.0372264 C89.1168795,67.6556396 89.8200801,68.2492007 90.5021258,68.8146755 C92.6097224,70.5620551 94.4693308,72.0029474 95.9836366,73.0515697 C96.7316295,73.5695379 97.3674038,73.9719282 98.0281481,74.3824999 C98.4724987,74.4989557 99.0742374,74.5263881 99.8365134,74.4317984 C101.709944,74.1993272 104.074502,73.2878514 106.559886,71.8846196 C107.705822,71.2376318 108.790494,70.5370325 109.764561,69.8410487 C110.323259,69.4418522 110.694168,69.1550757 110.834827,69.0391868 C111.210545,68.7296319 111.600264,68.4615815 112,68.2332211 L112,68.2332211 Z M0,8.00697327 C0,3.58484404 3.59075293,0 8.00697327,0 L119.993027,0 C124.415156,0 128,3.59075293 128,8.00697327 L128,119.993027 C128,124.415156 124.409247,128 119.993027,128 L8.00697327,128 C3.58484404,128 0,124.409247 0,119.993027 L0,8.00697327 L0,8.00697327 Z M95,42 C99.418278,42 103,38.418278 103,34 C103,29.581722 99.418278,26 95,26 C90.581722,26 87,29.581722 87,34 C87,38.418278 90.581722,42 95,42 L95,42 Z M32,76 C47.8587691,80.8294182 52.0345556,83.2438712 52.0345556,88 C52.0345556,92.7561288 32,95.4712486 32,102.347107 C32,109.222965 33.2849191,107.337637 33.2849191,112 L67.999999,112 C67.999999,112 54.3147136,105.375255 54.3147136,101.200691 C54.3147136,93.535181 64.9302432,92.860755 64.9302432,88 C64.9302432,80.6425555 50.8523779,79.167282 32,76 L32,76 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/plugin-generic-language": {
"title": "$:/core/images/plugin-generic-language",
"tags": "$:/tags/Image",
"text": "<svg width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M61.2072232,68.1369825 C56.8829239,70.9319564 54.2082892,74.793177 54.2082892,79.0581634 C54.2082892,86.9638335 63.3980995,93.4821994 75.2498076,94.3940006 C77.412197,98.2964184 83.8475284,101.178858 91.5684735,101.403106 C86.4420125,100.27851 82.4506393,97.6624107 80.9477167,94.3948272 C92.8046245,93.4861461 102,86.9662269 102,79.0581634 C102,70.5281905 91.3014611,63.6132813 78.1041446,63.6132813 C71.5054863,63.6132813 65.5315225,65.3420086 61.2072232,68.1369825 Z M74.001066,53.9793443 C69.6767667,56.7743182 63.7028029,58.5030456 57.1041446,58.5030456 C54.4851745,58.5030456 51.9646095,58.2307276 49.6065315,57.7275105 C46.2945155,59.9778212 41.2235699,61.4171743 35.5395922,61.4171743 C35.4545771,61.4171743 35.3696991,61.4168523 35.2849622,61.4162104 C39.404008,60.5235193 42.7961717,58.6691298 44.7630507,56.286533 C37.8379411,53.5817651 33.2082892,48.669413 33.2082892,43.0581634 C33.2082892,34.5281905 43.9068281,27.6132812 57.1041446,27.6132812 C70.3014611,27.6132812 81,34.5281905 81,43.0581634 C81,47.3231498 78.3253653,51.1843704 74.001066,53.9793443 Z M64,0 L118.5596,32 L118.5596,96 L64,128 L9.44039956,96 L9.44039956,32 L64,0 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/plugin-generic-plugin": {
"title": "$:/core/images/plugin-generic-plugin",
"tags": "$:/tags/Image",
"text": "<svg width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M40.3972881,76.4456988 L40.3972881,95.3404069 L54.5170166,95.3404069 L54.5170166,95.3404069 C54.5165526,95.3385183 54.516089,95.3366295 54.515626,95.3347404 C54.6093153,95.3385061 54.7034848,95.3404069 54.7980982,95.3404069 C58.6157051,95.3404069 61.710487,92.245625 61.710487,88.4280181 C61.710487,86.6197822 61.01617,84.9737128 59.8795929,83.7418666 L59.8795929,83.7418666 C59.8949905,83.7341665 59.9104102,83.7265043 59.925852,83.7188798 C58.8840576,82.5086663 58.2542926,80.9336277 58.2542926,79.2114996 C58.2542926,75.3938927 61.3490745,72.2991108 65.1666814,72.2991108 C68.9842884,72.2991108 72.0790703,75.3938927 72.0790703,79.2114996 C72.0790703,81.1954221 71.2432806,82.9841354 69.9045961,84.2447446 L69.9045961,84.2447446 C69.9333407,84.2629251 69.9619885,84.281245 69.9905383,84.2997032 L69.9905383,84.2997032 C69.1314315,85.4516923 68.6228758,86.8804654 68.6228758,88.4280181 C68.6228758,91.8584969 71.1218232,94.7053153 74.3986526,95.2474079 C74.3913315,95.2784624 74.3838688,95.3094624 74.3762652,95.3404069 L95.6963988,95.3404069 L95.6963988,75.5678578 L95.6963988,75.5678578 C95.6466539,75.5808558 95.5967614,75.5934886 95.5467242,75.6057531 C95.5504899,75.5120637 95.5523907,75.4178943 95.5523907,75.3232809 C95.5523907,71.505674 92.4576088,68.4108921 88.6400019,68.4108921 C86.831766,68.4108921 85.1856966,69.105209 83.9538504,70.2417862 L83.9538504,70.2417862 C83.9461503,70.2263886 83.938488,70.2109688 83.9308636,70.1955271 C82.7206501,71.2373215 81.1456115,71.8670865 79.4234834,71.8670865 C75.6058765,71.8670865 72.5110946,68.7723046 72.5110946,64.9546976 C72.5110946,61.1370907 75.6058765,58.0423088 79.4234834,58.0423088 C81.4074059,58.0423088 83.1961192,58.8780985 84.4567284,60.2167829 L84.4567284,60.2167829 C84.4749089,60.1880383 84.4932288,60.1593906 84.511687,60.1308407 L84.511687,60.1308407 C85.6636761,60.9899475 87.0924492,61.4985032 88.6400019,61.4985032 C92.0704807,61.4985032 94.9172991,58.9995558 95.4593917,55.7227265 C95.538755,55.7414363 95.6177614,55.761071 95.6963988,55.7816184 L95.6963988,40.0412962 L74.3762652,40.0412962 L74.3762652,40.0412962 C74.3838688,40.0103516 74.3913315,39.9793517 74.3986526,39.9482971 L74.3986526,39.9482971 C71.1218232,39.4062046 68.6228758,36.5593862 68.6228758,33.1289073 C68.6228758,31.5813547 69.1314315,30.1525815 69.9905383,29.0005925 C69.9619885,28.9821342 69.9333407,28.9638143 69.9045961,28.9456339 C71.2432806,27.6850247 72.0790703,25.8963113 72.0790703,23.9123888 C72.0790703,20.0947819 68.9842884,17 65.1666814,17 C61.3490745,17 58.2542926,20.0947819 58.2542926,23.9123888 C58.2542926,25.6345169 58.8840576,27.2095556 59.925852,28.419769 L59.925852,28.419769 C59.9104102,28.4273935 59.8949905,28.4350558 59.8795929,28.4427558 C61.01617,29.674602 61.710487,31.3206715 61.710487,33.1289073 C61.710487,36.9465143 58.6157051,40.0412962 54.7980982,40.0412962 C54.7034848,40.0412962 54.6093153,40.0393953 54.515626,40.0356296 L54.515626,40.0356296 C54.516089,40.0375187 54.5165526,40.0394075 54.5170166,40.0412962 L40.3972881,40.0412962 L40.3972881,52.887664 L40.3972881,52.887664 C40.4916889,53.3430132 40.5412962,53.8147625 40.5412962,54.2980982 C40.5412962,58.1157051 37.4465143,61.210487 33.6289073,61.210487 C32.0813547,61.210487 30.6525815,60.7019313 29.5005925,59.8428245 C29.4821342,59.8713744 29.4638143,59.9000221 29.4456339,59.9287667 C28.1850247,58.5900823 26.3963113,57.7542926 24.4123888,57.7542926 C20.5947819,57.7542926 17.5,60.8490745 
17.5,64.6666814 C17.5,68.4842884 20.5947819,71.5790703 24.4123888,71.5790703 C26.134517,71.5790703 27.7095556,70.9493053 28.919769,69.9075109 L28.919769,69.9075109 C28.9273935,69.9229526 28.9350558,69.9383724 28.9427558,69.95377 C30.174602,68.8171928 31.8206715,68.1228758 33.6289073,68.1228758 C37.4465143,68.1228758 40.5412962,71.2176578 40.5412962,75.0352647 C40.5412962,75.5186004 40.4916889,75.9903496 40.3972881,76.4456988 Z M64,0 L118.5596,32 L118.5596,96 L64,128 L9.44039956,96 L9.44039956,32 L64,0 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/plugin-generic-theme": {
"title": "$:/core/images/plugin-generic-theme",
"tags": "$:/tags/Image",
"text": "<svg width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M29.4078519,91.4716406 L51.4693474,69.4101451 L51.4646675,69.4054652 C50.5969502,68.5377479 50.5929779,67.1348725 51.4693474,66.2585029 C52.3396494,65.3882009 53.7499654,65.3874786 54.6163097,66.2538229 L64.0805963,75.7181095 C64.9483136,76.5858268 64.9522859,77.9887022 64.0759163,78.8650718 C63.2056143,79.7353737 61.7952984,79.736096 60.9289541,78.8697517 L60.9242741,78.8650718 L60.9242741,78.8650718 L38.8627786,100.926567 C36.2518727,103.537473 32.0187578,103.537473 29.4078519,100.926567 C26.796946,98.3156614 26.796946,94.0825465 29.4078519,91.4716406 Z M60.8017407,66.3810363 C58.3659178,63.6765806 56.3370667,61.2899536 54.9851735,59.5123615 C48.1295381,50.4979488 44.671561,55.2444054 40.7586738,59.5123614 C36.8457866,63.7803174 41.789473,67.2384487 38.0759896,70.2532832 C34.3625062,73.2681177 34.5917646,74.3131575 28.3243876,68.7977024 C22.0570105,63.2822473 21.6235306,61.7636888 24.5005999,58.6166112 C27.3776691,55.4695337 29.7823103,60.4247912 35.6595047,54.8320442 C41.5366991,49.2392972 36.5996215,44.2825646 36.5996215,44.2825646 C36.5996215,44.2825646 48.8365511,19.267683 65.1880231,21.1152173 C81.5394952,22.9627517 59.0022276,18.7228947 53.3962199,38.3410355 C50.9960082,46.7405407 53.8429162,44.7613399 58.3941742,48.3090467 C59.7875202,49.3951602 64.4244828,52.7100463 70.1884353,56.9943417 L90.8648751,36.3179019 L92.4795866,31.5515482 L100.319802,26.8629752 L103.471444,30.0146174 L98.782871,37.8548326 L94.0165173,39.4695441 L73.7934912,59.6925702 C86.4558549,69.2403631 102.104532,81.8392557 102.104532,86.4016913 C102.104533,93.6189834 99.0337832,97.9277545 92.5695848,95.5655717 C87.8765989,93.8506351 73.8015497,80.3744087 63.8173444,69.668717 L60.9242741,72.5617873 L57.7726319,69.4101451 L60.8017407,66.3810363 L60.8017407,66.3810363 Z M63.9533761,1.42108547e-13 L118.512977,32 L118.512977,96 L63.9533761,128 L9.39377563,96 L9.39377563,32 L63.9533761,1.42108547e-13 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/preview-closed": {
"title": "$:/core/images/preview-closed",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-preview-closed tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M0.0881363238,64 C-0.210292223,65.8846266 0.249135869,67.8634737 1.4664206,69.4579969 C16.2465319,88.8184886 39.1692554,100.414336 64,100.414336 C88.8307446,100.414336 111.753468,88.8184886 126.533579,69.4579969 C127.750864,67.8634737 128.210292,65.8846266 127.911864,64 C110.582357,78.4158332 88.3036732,87.0858436 64,87.0858436 C39.6963268,87.0858436 17.4176431,78.4158332 0.0881363238,64 Z\"></path>\n <rect x=\"62\" y=\"96\" width=\"4\" height=\"16\" rx=\"4\"></rect>\n <rect transform=\"translate(80.000000, 101.000000) rotate(-5.000000) translate(-80.000000, -101.000000) \" x=\"78\" y=\"93\" width=\"4\" height=\"16\" rx=\"4\"></rect>\n <rect transform=\"translate(48.000000, 101.000000) rotate(-355.000000) translate(-48.000000, -101.000000) \" x=\"46\" y=\"93\" width=\"4\" height=\"16\" rx=\"4\"></rect>\n <rect transform=\"translate(32.000000, 96.000000) rotate(-350.000000) translate(-32.000000, -96.000000) \" x=\"30\" y=\"88\" width=\"4\" height=\"16\" rx=\"4\"></rect>\n <rect transform=\"translate(96.000000, 96.000000) rotate(-10.000000) translate(-96.000000, -96.000000) \" x=\"94\" y=\"88\" width=\"4\" height=\"16\" rx=\"4\"></rect>\n <rect transform=\"translate(112.000000, 88.000000) rotate(-20.000000) translate(-112.000000, -88.000000) \" x=\"110\" y=\"80\" width=\"4\" height=\"16\" rx=\"4\"></rect>\n <rect transform=\"translate(16.000000, 88.000000) rotate(-340.000000) translate(-16.000000, -88.000000) \" x=\"14\" y=\"80\" width=\"4\" height=\"16\" rx=\"4\"></rect>\n </g>\n</svg>"
},
"$:/core/images/preview-open": {
"title": "$:/core/images/preview-open",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-preview-open tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M64.1099282,99.5876785 C39.2791836,99.5876785 16.3564602,87.9918313 1.57634884,68.6313396 C-0.378878622,66.070184 -0.378878622,62.5174945 1.57634884,59.9563389 C16.3564602,40.5958472 39.2791836,29 64.1099282,29 C88.9406729,29 111.863396,40.5958472 126.643508,59.9563389 C128.598735,62.5174945 128.598735,66.070184 126.643508,68.6313396 C111.863396,87.9918313 88.9406729,99.5876785 64.1099282,99.5876785 Z M110.213805,67.5808331 C111.654168,66.0569335 111.654168,63.9430665 110.213805,62.4191669 C99.3257042,50.8995835 82.4391647,44 64.1470385,44 C45.8549124,44 28.9683729,50.8995835 18.0802717,62.4191669 C16.6399094,63.9430665 16.6399094,66.0569335 18.0802717,67.5808331 C28.9683729,79.1004165 45.8549124,86 64.1470385,86 C82.4391647,86 99.3257042,79.1004165 110.213805,67.5808331 Z\"></path>\n <path d=\"M63.5,88 C76.4786916,88 87,77.4786916 87,64.5 C87,51.5213084 76.4786916,41 63.5,41 C50.5213084,41 40,51.5213084 40,64.5 C40,77.4786916 50.5213084,88 63.5,88 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/quote": {
"title": "$:/core/images/quote",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-quote tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M51.2188077,117.712501 L51.2188077,62.1993386 L27.4274524,62.1993386 C27.4274524,53.3075754 29.1096526,45.797753 32.4741035,39.669646 C35.8385544,33.541539 42.0867267,28.9154883 51.2188077,25.7913554 L51.2188077,2 C43.7689521,2.96127169 36.8599155,5.18417913 30.4914905,8.668789 C24.1230656,12.1533989 18.6559149,16.5391352 14.0898743,21.8261295 C9.52383382,27.1131238 5.97919764,33.2411389 3.45585945,40.2103586 C0.932521268,47.1795784 -0.208971741,54.6293222 0.0313461819,62.5598136 L0.0313461819,117.712501 L51.2188077,117.712501 Z M128,117.712501 L128,62.1993386 L104.208645,62.1993386 C104.208645,53.3075754 105.890845,45.797753 109.255296,39.669646 C112.619747,33.541539 118.867919,28.9154883 128,25.7913554 L128,2 C120.550144,2.96127169 113.641108,5.18417913 107.272683,8.668789 C100.904258,12.1533989 95.4371072,16.5391352 90.8710666,21.8261295 C86.3050261,27.1131238 82.7603899,33.2411389 80.2370517,40.2103586 C77.7137136,47.1795784 76.5722206,54.6293222 76.8125385,62.5598136 L76.8125385,117.712501 L128,117.712501 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/refresh-button": {
"title": "$:/core/images/refresh-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-refresh-button tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M106.369002,39.4325143 C116.529932,60.3119371 112.939592,86.1974934 95.5979797,103.539105 C73.7286194,125.408466 38.2713806,125.408466 16.4020203,103.539105 C-5.46734008,81.6697449 -5.46734008,46.2125061 16.4020203,24.3431458 C19.5262146,21.2189514 24.5915344,21.2189514 27.7157288,24.3431458 C30.8399231,27.4673401 30.8399231,32.5326599 27.7157288,35.6568542 C12.0947571,51.2778259 12.0947571,76.6044251 27.7157288,92.2253967 C43.3367004,107.846368 68.6632996,107.846368 84.2842712,92.2253967 C97.71993,78.7897379 99.5995262,58.1740623 89.9230597,42.729491 L83.4844861,54.9932839 C81.4307001,58.9052072 76.5945372,60.4115251 72.682614,58.3577391 C68.7706907,56.3039532 67.2643728,51.4677903 69.3181587,47.555867 L84.4354914,18.7613158 C86.4966389,14.8353707 91.3577499,13.3347805 95.273202,15.415792 L124.145886,30.7612457 C128.047354,32.8348248 129.52915,37.6785572 127.455571,41.5800249 C125.381992,45.4814927 120.53826,46.9632892 116.636792,44.8897102 L106.369002,39.4325143 Z M98.1470904,27.0648707 C97.9798954,26.8741582 97.811187,26.6843098 97.6409651,26.4953413 L98.6018187,26.1987327 L98.1470904,27.0648707 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/right-arrow": {
"title": "$:/core/images/right-arrow",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-right-arrow tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <path d=\"M80.3563798,109.353315 C78.9238993,110.786918 76.9450203,111.675144 74.7592239,111.675144 L-4.40893546,111.675144 C-8.77412698,111.675144 -12.3248558,108.130732 -12.3248558,103.758478 C-12.3248558,99.3951199 -8.78077754,95.8418109 -4.40893546,95.8418109 L66.8418109,95.8418109 L66.8418109,24.5910645 C66.8418109,20.225873 70.3862233,16.6751442 74.7584775,16.6751442 C79.1218352,16.6751442 82.6751442,20.2192225 82.6751442,24.5910645 L82.6751442,103.759224 C82.6751442,105.941695 81.7891419,107.920575 80.3566508,109.353886 Z\" transform=\"translate(35.175144, 64.175144) rotate(-45.000000) translate(-35.175144, -64.175144) \"></path>\n</svg>"
},
"$:/core/images/save-button": {
"title": "$:/core/images/save-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-save-button tc-image-button\" viewBox=\"0 0 128 128\" width=\"22pt\" height=\"22pt\">\n <g fill-rule=\"evenodd\">\n <path d=\"M120.78304,34.329058 C125.424287,43.1924006 128.049406,53.2778608 128.049406,63.9764502 C128.049406,99.3226742 99.3956295,127.97645 64.0494055,127.97645 C28.7031816,127.97645 0.0494055385,99.3226742 0.0494055385,63.9764502 C0.0494055385,28.6302262 28.7031816,-0.0235498012 64.0494055,-0.0235498012 C82.8568763,-0.0235498012 99.769563,8.08898558 111.479045,21.0056358 L114.159581,18.3250998 C117.289194,15.1954866 122.356036,15.1939641 125.480231,18.3181584 C128.598068,21.4359957 128.601317,26.5107804 125.473289,29.6388083 L120.78304,34.329058 Z M108.72451,46.3875877 C110.870571,51.8341374 112.049406,57.767628 112.049406,63.9764502 C112.049406,90.4861182 90.5590735,111.97645 64.0494055,111.97645 C37.5397375,111.97645 16.0494055,90.4861182 16.0494055,63.9764502 C16.0494055,37.4667822 37.5397375,15.9764502 64.0494055,15.9764502 C78.438886,15.9764502 91.3495036,22.308215 100.147097,32.3375836 L58.9411255,73.5435552 L41.975581,56.5780107 C38.8486152,53.4510448 33.7746915,53.4551552 30.6568542,56.5729924 C27.5326599,59.6971868 27.5372202,64.7670668 30.6618725,67.8917192 L53.279253,90.5090997 C54.8435723,92.073419 56.8951519,92.8541315 58.9380216,92.8558261 C60.987971,92.8559239 63.0389578,92.0731398 64.6049211,90.5071765 L108.72451,46.3875877 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/size": {
"title": "$:/core/images/size",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-size tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <path d=\"M92.3431458,26 L83.1715729,35.1715729 C81.6094757,36.73367 81.6094757,39.26633 83.1715729,40.8284271 C84.73367,42.3905243 87.26633,42.3905243 88.8284271,40.8284271 L104.828427,24.8284271 C106.390524,23.26633 106.390524,20.73367 104.828427,19.1715729 L88.8284271,3.17157288 C87.26633,1.60947571 84.73367,1.60947571 83.1715729,3.17157288 C81.6094757,4.73367004 81.6094757,7.26632996 83.1715729,8.82842712 L92.3431457,18 L22,18 C19.790861,18 18,19.790861 18,22 L18,92.3431458 L8.82842712,83.1715729 C7.26632996,81.6094757 4.73367004,81.6094757 3.17157288,83.1715729 C1.60947571,84.73367 1.60947571,87.26633 3.17157288,88.8284271 L19.1715729,104.828427 C20.73367,106.390524 23.26633,106.390524 24.8284271,104.828427 L40.8284271,88.8284271 C42.3905243,87.26633 42.3905243,84.73367 40.8284271,83.1715729 C39.26633,81.6094757 36.73367,81.6094757 35.1715729,83.1715729 L26,92.3431458 L26,22 L22,26 L92.3431458,26 L92.3431458,26 Z M112,52 L112,116 L116,112 L52,112 C49.790861,112 48,113.790861 48,116 C48,118.209139 49.790861,120 52,120 L116,120 C118.209139,120 120,118.209139 120,116 L120,52 C120,49.790861 118.209139,48 116,48 C113.790861,48 112,49.790861 112,52 L112,52 Z\"></path>\n</svg>"
},
"$:/core/images/spiral": {
"title": "$:/core/images/spiral",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-spiral tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"nonzero\">\n <path d=\"M64.534 68.348c3.39 0 6.097-2.62 6.476-5.968l-4.755-.538 4.75.583c.377-3.07-1.194-6.054-3.89-7.78-2.757-1.773-6.34-2.01-9.566-.7-3.46 1.403-6.14 4.392-7.35 8.148l-.01.026c-1.3 4.08-.72 8.64 1.58 12.52 2.5 4.2 6.77 7.2 11.76 8.27 5.37 1.15 11.11-.05 15.83-3.31 5.04-3.51 8.46-9.02 9.45-15.3 1.05-6.7-.72-13.63-4.92-19.19l.02.02c-4.42-5.93-11.2-9.82-18.78-10.78-7.96-1.01-16.13 1.31-22.59 6.43-6.81 5.39-11.18 13.41-12.11 22.26-.98 9.27 1.87 18.65 7.93 26.02 6.32 7.69 15.6 12.56 25.74 13.48 10.54.96 21.15-2.42 29.45-9.4l.01-.01c8.58-7.25 13.94-17.78 14.86-29.21.94-11.84-2.96-23.69-10.86-32.9-8.19-9.5-19.95-15.36-32.69-16.27-13.16-.94-26.24 3.49-36.34 12.34l.01-.01c-10.41 9.08-16.78 22.1-17.68 36.15-.93 14.44 4.03 28.77 13.79 39.78 10.03 11.32 24.28 18.2 39.6 19.09 15.73.92 31.31-4.56 43.24-15.234 12.23-10.954 19.61-26.44 20.5-43.074.14-2.64-1.89-4.89-4.52-5.03-2.64-.14-4.89 1.88-5.03 4.52-.75 14.1-7 27.2-17.33 36.45-10.03 8.98-23.11 13.58-36.3 12.81-12.79-.75-24.67-6.48-33-15.89-8.07-9.11-12.17-20.94-11.41-32.827.74-11.52 5.942-22.15 14.43-29.54l.01-.01c8.18-7.17 18.74-10.75 29.35-9.998 10.21.726 19.6 5.41 26.11 12.96 6.24 7.273 9.32 16.61 8.573 25.894-.718 8.9-4.88 17.064-11.504 22.66l.01-.007c-6.36 5.342-14.44 7.92-22.425 7.19-7.604-.68-14.52-4.314-19.21-10.027-4.44-5.4-6.517-12.23-5.806-18.94.67-6.3 3.76-11.977 8.54-15.766 4.46-3.54 10.05-5.128 15.44-4.44 5.03.63 9.46 3.18 12.32 7.01l.02.024c2.65 3.5 3.75 7.814 3.1 11.92-.59 3.71-2.58 6.925-5.45 8.924-2.56 1.767-5.61 2.403-8.38 1.81-2.42-.516-4.42-1.92-5.53-3.79-.93-1.56-1.15-3.3-.69-4.75l-4.56-1.446L59.325 65c.36-1.12 1.068-1.905 1.84-2.22.25-.103.48-.14.668-.13.06.006.11.015.14.025.01 0 .01 0-.01-.01-.02-.015-.054-.045-.094-.088-.06-.064-.12-.145-.17-.244-.15-.29-.23-.678-.18-1.11l-.005.04c.15-1.332 1.38-2.523 3.035-2.523-2.65 0-4.79 2.144-4.79 4.787s2.14 4.785 4.78 4.785z\"></path>\n </g>\n</svg>"
},
"$:/core/images/stamp": {
"title": "$:/core/images/stamp",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-stamp tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M49.7334301,64 L16.0098166,64 C11.5838751,64 8,67.5829053 8,72.002643 L8,74.4986785 L8,97 L120,97 L120,74.4986785 L120,72.002643 C120,67.5737547 116.413883,64 111.990183,64 L78.2665699,64 C76.502049,60.7519149 75.5,57.0311962 75.5,53.0769231 C75.5,46.6017951 78.1869052,40.7529228 82.5087769,36.5800577 C85.3313113,32.7688808 87,28.0549983 87,22.952183 C87,10.2760423 76.7025492,0 64,0 C51.2974508,0 41,10.2760423 41,22.952183 C41,28.0549983 42.6686887,32.7688808 45.4912231,36.5800577 C49.8130948,40.7529228 52.5,46.6017951 52.5,53.0769231 C52.5,57.0311962 51.497951,60.7519149 49.7334301,64 Z M8,104 L120,104 L120,112 L8,112 L8,104 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/star-filled": {
"title": "$:/core/images/star-filled",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-star-filled tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"nonzero\">\n <path d=\"M61.8361286,96.8228569 L99.1627704,124.110219 C101.883827,126.099427 105.541968,123.420868 104.505636,120.198072 L90.2895569,75.9887263 L89.0292911,79.8977279 L126.314504,52.5528988 C129.032541,50.5595011 127.635256,46.2255025 124.273711,46.2229134 L78.1610486,46.1873965 L81.4604673,48.6032923 L67.1773543,4.41589688 C66.1361365,1.19470104 61.6144265,1.19470104 60.5732087,4.41589688 L46.2900957,48.6032923 L49.5895144,46.1873965 L3.47685231,46.2229134 C0.115307373,46.2255025 -1.28197785,50.5595011 1.43605908,52.5528988 L38.7212719,79.8977279 L37.4610061,75.9887263 L23.2449266,120.198072 C22.2085954,123.420868 25.8667356,126.099427 28.5877926,124.110219 L65.9144344,96.8228569 L61.8361286,96.8228569 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/storyview-classic": {
"title": "$:/core/images/storyview-classic",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-storyview-classic tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M8.00697327,0 C3.58484404,0 0,3.59075293 0,8.00697327 L0,119.993027 C0,124.415156 3.59075293,128 8.00697327,128 L119.993027,128 C124.415156,128 128,124.409247 128,119.993027 L128,8.00697327 C128,3.58484404 124.409247,0 119.993027,0 L8.00697327,0 L8.00697327,0 Z M23.9992458,16 C19.5813843,16 16,19.5776607 16,23.9924054 L16,40.0075946 C16,44.4216782 19.5881049,48 23.9992458,48 L104.000754,48 C108.418616,48 112,44.4223393 112,40.0075946 L112,23.9924054 C112,19.5783218 108.411895,16 104.000754,16 L23.9992458,16 L23.9992458,16 Z M23.9992458,64 C19.5813843,64 16,67.5907123 16,72 C16,76.418278 19.5881049,80 23.9992458,80 L104.000754,80 C108.418616,80 112,76.4092877 112,72 C112,67.581722 108.411895,64 104.000754,64 L23.9992458,64 L23.9992458,64 Z M23.9992458,96 C19.5813843,96 16,99.5907123 16,104 C16,108.418278 19.5881049,112 23.9992458,112 L104.000754,112 C108.418616,112 112,108.409288 112,104 C112,99.581722 108.411895,96 104.000754,96 L23.9992458,96 L23.9992458,96 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/storyview-pop": {
"title": "$:/core/images/storyview-pop",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-storyview-pop tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M8.00697327,0 C3.58484404,0 0,3.59075293 0,8.00697327 L0,119.993027 C0,124.415156 3.59075293,128 8.00697327,128 L119.993027,128 C124.415156,128 128,124.409247 128,119.993027 L128,8.00697327 C128,3.58484404 124.409247,0 119.993027,0 L8.00697327,0 L8.00697327,0 Z M23.9992458,16 C19.5813843,16 16,19.5776607 16,23.9924054 L16,40.0075946 C16,44.4216782 19.5881049,48 23.9992458,48 L104.000754,48 C108.418616,48 112,44.4223393 112,40.0075946 L112,23.9924054 C112,19.5783218 108.411895,16 104.000754,16 L23.9992458,16 L23.9992458,16 Z M16.0098166,56 C11.586117,56 8,59.5776607 8,63.9924054 L8,80.0075946 C8,84.4216782 11.5838751,88 16.0098166,88 L111.990183,88 C116.413883,88 120,84.4223393 120,80.0075946 L120,63.9924054 C120,59.5783218 116.416125,56 111.990183,56 L16.0098166,56 L16.0098166,56 Z M23.9992458,96 C19.5813843,96 16,99.5907123 16,104 C16,108.418278 19.5881049,112 23.9992458,112 L104.000754,112 C108.418616,112 112,108.409288 112,104 C112,99.581722 108.411895,96 104.000754,96 L23.9992458,96 L23.9992458,96 Z M23.9992458,64 C19.5813843,64 16,67.5907123 16,72 C16,76.418278 19.5881049,80 23.9992458,80 L104.000754,80 C108.418616,80 112,76.4092877 112,72 C112,67.581722 108.411895,64 104.000754,64 L23.9992458,64 L23.9992458,64 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/storyview-zoomin": {
"title": "$:/core/images/storyview-zoomin",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-storyview-zoomin tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M8.00697327,0 C3.58484404,0 0,3.59075293 0,8.00697327 L0,119.993027 C0,124.415156 3.59075293,128 8.00697327,128 L119.993027,128 C124.415156,128 128,124.409247 128,119.993027 L128,8.00697327 C128,3.58484404 124.409247,0 119.993027,0 L8.00697327,0 L8.00697327,0 Z M23.9992458,16 C19.5813843,16 16,19.578055 16,24.0085154 L16,71.9914846 C16,76.4144655 19.5881049,80 23.9992458,80 L104.000754,80 C108.418616,80 112,76.421945 112,71.9914846 L112,24.0085154 C112,19.5855345 108.411895,16 104.000754,16 L23.9992458,16 L23.9992458,16 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/strikethrough": {
"title": "$:/core/images/strikethrough",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-strikethrough tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M92.793842,38.7255689 L108.215529,38.7255689 C107.987058,31.985687 106.70193,26.1883331 104.360107,21.3333333 C102.018284,16.4783336 98.8197436,12.4516001 94.7643909,9.25301205 C90.7090382,6.05442399 85.9969032,3.71263572 80.6278447,2.22757697 C75.2587862,0.742518233 69.4328739,0 63.1499331,0 C57.552404,0 52.0977508,0.713959839 46.7858099,2.14190094 C41.473869,3.56984203 36.7331757,5.74027995 32.5635877,8.65327979 C28.3939997,11.5662796 25.0526676,15.2788708 22.5394913,19.7911647 C20.026315,24.3034585 18.7697456,29.6438781 18.7697456,35.8125837 C18.7697456,41.4101128 19.883523,46.0651309 22.1111111,49.7777778 C24.3386992,53.4904246 27.3087722,56.5176144 31.021419,58.8594378 C34.7340659,61.2012612 38.9321497,63.0861151 43.6157965,64.5140562 C48.2994433,65.9419973 53.068695,67.1985666 57.9236948,68.2838019 C62.7786945,69.3690371 67.5479462,70.4256977 72.231593,71.4538153 C76.9152398,72.4819329 81.1133237,73.8241773 84.8259705,75.480589 C88.5386174,77.1370007 91.5086903,79.2788802 93.7362784,81.9062918 C95.9638666,84.5337035 97.0776439,87.9607107 97.0776439,92.1874163 C97.0776439,96.6425926 96.1637753,100.298067 94.3360107,103.153949 C92.5082461,106.009831 90.109341,108.265944 87.1392236,109.922356 C84.1691061,111.578768 80.827774,112.749662 77.1151272,113.435074 C73.4024803,114.120485 69.7184476,114.463186 66.0629183,114.463186 C61.4935068,114.463186 57.0383974,113.892018 52.6974565,112.749665 C48.3565156,111.607312 44.5582492,109.836692 41.3025435,107.437751 C38.0468378,105.03881 35.4194656,101.983062 33.4203481,98.270415 C31.4212305,94.5577681 30.4216867,90.1312171 30.4216867,84.9906292 L15,84.9906292 C15,92.4159229 16.3422445,98.8415614 19.0267738,104.267738 C21.711303,109.693914 25.3667774,114.149023 29.9933066,117.633199 C34.6198357,121.117376 39.9888137,123.71619 46.1004016,125.429719 C52.2119895,127.143248 58.6947448,128 65.5488621,128 C71.1463912,128 76.7723948,127.343157 82.4270415,126.029451 C88.0816882,124.715745 93.1936407,122.602424 97.7630522,119.689424 C102.332464,116.776425 106.073613,113.006717 108.986613,108.380187 C111.899613,103.753658 113.356091,98.1847715 113.356091,91.6733601 C113.356091,85.6188899 112.242314,80.5926126 110.014726,76.5943775 C107.787137,72.5961424 104.817065,69.2833688 101.104418,66.6559572 C97.3917708,64.0285455 93.193687,61.9437828 88.5100402,60.4016064 C83.8263934,58.85943 79.0571416,57.5171855 74.2021419,56.3748327 C69.3471422,55.2324798 64.5778904,54.1758192 59.8942436,53.2048193 C55.2105968,52.2338193 51.012513,51.0058084 47.2998661,49.5207497 C43.5872193,48.0356909 40.6171463,46.1222786 38.3895582,43.7804552 C36.1619701,41.4386318 35.0481928,38.3828836 35.0481928,34.6131191 C35.0481928,30.6148841 35.8192694,27.273552 37.3614458,24.5890228 C38.9036222,21.9044935 40.9598265,19.762614 43.5301205,18.1633199 C46.1004145,16.5640259 49.041929,15.4216902 52.3547523,14.7362784 C55.6675757,14.0508667 59.0374661,13.708166 62.4645248,13.708166 C70.9179361,13.708166 77.8576257,15.6786952 83.2838019,19.6198126 C88.709978,23.56093 91.8799597,29.9294518 92.793842,38.7255689 L92.793842,38.7255689 Z\"></path>\n <rect x=\"5\" y=\"54\" width=\"118\" height=\"16\"></rect>\n </g>\n</svg>"
},
"$:/core/images/subscript": {
"title": "$:/core/images/subscript",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-subscript tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M2.27170276,16 L22.1825093,16 L43.8305003,49.6746527 L66.4138983,16 L85.1220387,16 L53.5854592,61.9685735 L87.3937414,111.411516 L67.0820462,111.411516 L43.295982,74.9306422 L19.1090291,111.411516 L0,111.411516 L33.8082822,61.9685735 L2.27170276,16 Z M127.910914,128.411516 L85.3276227,128.411516 C85.3870139,123.24448 86.6342108,118.730815 89.0692508,114.870386 C91.5042907,111.009956 94.8301491,107.654403 99.0469256,104.803624 C101.066227,103.318844 103.174584,101.878629 105.372059,100.482935 C107.569534,99.0872413 109.588805,97.5876355 111.429933,95.9840726 C113.271061,94.3805097 114.785514,92.6433426 115.973338,90.7725192 C117.161163,88.9016958 117.784761,86.7487964 117.844152,84.3137564 C117.844152,83.1853233 117.710524,81.9826691 117.443264,80.7057579 C117.176003,79.4288467 116.656338,78.2410402 115.884252,77.1423026 C115.112166,76.0435651 114.04314,75.123015 112.677142,74.3806248 C111.311144,73.6382345 109.529434,73.267045 107.331959,73.267045 C105.312658,73.267045 103.634881,73.6679297 102.298579,74.4697112 C100.962276,75.2714926 99.8932503,76.3702137 99.0914688,77.7659073 C98.2896874,79.161601 97.6957841,80.8096826 97.3097412,82.7102016 C96.9236982,84.6107206 96.7009845,86.6596869 96.6415933,88.857162 L86.4857457,88.857162 C86.4857457,85.4124713 86.9460207,82.2202411 87.8665846,79.2803758 C88.7871485,76.3405105 90.1679736,73.801574 92.0091014,71.6634901 C93.8502292,69.5254062 96.092214,67.8476295 98.7351233,66.6301095 C101.378033,65.4125895 104.451482,64.8038386 107.955564,64.8038386 C111.756602,64.8038386 114.933984,65.4274371 117.487807,66.6746527 C120.041629,67.9218683 122.105443,69.4957119 123.67931,71.3962309 C125.253178,73.2967499 126.366746,75.3605638 127.02005,77.5877345 C127.673353,79.8149053 128,81.9381095 128,83.9574109 C128,86.4518421 127.613963,88.7086746 126.841877,90.727976 C126.069791,92.7472774 125.03046,94.6032252 123.723854,96.2958749 C122.417247,97.9885247 120.932489,99.5475208 119.269534,100.97291 C117.60658,102.398299 115.884261,103.734582 114.102524,104.981797 C112.320788,106.229013 110.539078,107.416819 108.757341,108.545253 C106.975605,109.673686 105.327523,110.802102 103.813047,111.930535 C102.298571,113.058968 100.977136,114.231927 99.8487031,115.449447 C98.7202699,116.666967 97.9481956,117.958707 97.5324571,119.324705 L127.910914,119.324705 L127.910914,128.411516 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/superscript": {
"title": "$:/core/images/superscript",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-superscript tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M2.27170276,16 L22.1825093,16 L43.8305003,49.6746527 L66.4138983,16 L85.1220387,16 L53.5854592,61.9685735 L87.3937414,111.411516 L67.0820462,111.411516 L43.295982,74.9306422 L19.1090291,111.411516 L0,111.411516 L33.8082822,61.9685735 L2.27170276,16 Z M127.910914,63.4115159 L85.3276227,63.4115159 C85.3870139,58.2444799 86.6342108,53.7308149 89.0692508,49.8703857 C91.5042907,46.0099565 94.8301491,42.654403 99.0469256,39.8036245 C101.066227,38.318844 103.174584,36.8786285 105.372059,35.4829349 C107.569534,34.0872413 109.588805,32.5876355 111.429933,30.9840726 C113.271061,29.3805097 114.785514,27.6433426 115.973338,25.7725192 C117.161163,23.9016958 117.784761,21.7487964 117.844152,19.3137564 C117.844152,18.1853233 117.710524,16.9826691 117.443264,15.7057579 C117.176003,14.4288467 116.656338,13.2410402 115.884252,12.1423026 C115.112166,11.0435651 114.04314,10.123015 112.677142,9.38062477 C111.311144,8.63823453 109.529434,8.26704499 107.331959,8.26704499 C105.312658,8.26704499 103.634881,8.6679297 102.298579,9.46971115 C100.962276,10.2714926 99.8932503,11.3702137 99.0914688,12.7659073 C98.2896874,14.161601 97.6957841,15.8096826 97.3097412,17.7102016 C96.9236982,19.6107206 96.7009845,21.6596869 96.6415933,23.857162 L86.4857457,23.857162 C86.4857457,20.4124713 86.9460207,17.2202411 87.8665846,14.2803758 C88.7871485,11.3405105 90.1679736,8.80157397 92.0091014,6.6634901 C93.8502292,4.52540622 96.092214,2.84762946 98.7351233,1.63010947 C101.378033,0.412589489 104.451482,-0.196161372 107.955564,-0.196161372 C111.756602,-0.196161372 114.933984,0.427437071 117.487807,1.67465266 C120.041629,2.92186826 122.105443,4.49571195 123.67931,6.39623095 C125.253178,8.29674995 126.366746,10.3605638 127.02005,12.5877345 C127.673353,14.8149053 128,16.9381095 128,18.9574109 C128,21.4518421 127.613963,23.7086746 126.841877,25.727976 C126.069791,27.7472774 125.03046,29.6032252 123.723854,31.2958749 C122.417247,32.9885247 120.932489,34.5475208 119.269534,35.97291 C117.60658,37.3982993 115.884261,38.7345816 114.102524,39.9817972 C112.320788,41.2290128 110.539078,42.4168194 108.757341,43.5452525 C106.975605,44.6736857 105.327523,45.8021019 103.813047,46.9305351 C102.298571,48.0589682 100.977136,49.2319272 99.8487031,50.4494472 C98.7202699,51.6669672 97.9481956,52.9587068 97.5324571,54.3247048 L127.910914,54.3247048 L127.910914,63.4115159 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/tag-button": {
"title": "$:/core/images/tag-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-tag-button tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M18.1643182,47.6600756 L18.1677196,51.7651887 C18.1708869,55.5878829 20.3581578,60.8623899 23.0531352,63.5573673 L84.9021823,125.406414 C87.5996731,128.103905 91.971139,128.096834 94.6717387,125.396234 L125.766905,94.3010679 C128.473612,91.5943612 128.472063,87.2264889 125.777085,84.5315115 L63.9280381,22.6824644 C61.2305472,19.9849735 55.9517395,17.801995 52.1318769,17.8010313 L25.0560441,17.7942007 C21.2311475,17.7932358 18.1421354,20.8872832 18.1452985,24.7049463 L18.1535504,34.6641936 C18.2481119,34.6754562 18.3439134,34.6864294 18.4409623,34.6971263 C22.1702157,35.1081705 26.9295004,34.6530132 31.806204,33.5444844 C32.1342781,33.0700515 32.5094815,32.6184036 32.9318197,32.1960654 C35.6385117,29.4893734 39.5490441,28.718649 42.94592,29.8824694 C43.0432142,29.8394357 43.1402334,29.7961748 43.2369683,29.7526887 L43.3646982,30.0368244 C44.566601,30.5115916 45.6933052,31.2351533 46.6655958,32.2074439 C50.4612154,36.0030635 50.4663097,42.1518845 46.6769742,45.94122 C43.0594074,49.5587868 37.2914155,49.7181264 33.4734256,46.422636 C28.1082519,47.5454734 22.7987486,48.0186448 18.1643182,47.6600756 Z\"></path>\n <path d=\"M47.6333528,39.5324628 L47.6562932,39.5834939 C37.9670934,43.9391617 26.0718874,46.3819521 17.260095,45.4107025 C5.27267473,44.0894301 -1.02778744,36.4307276 2.44271359,24.0779512 C5.56175386,12.9761516 14.3014034,4.36129832 24.0466405,1.54817001 C34.7269254,-1.53487574 43.7955833,3.51606438 43.7955834,14.7730751 L35.1728168,14.7730752 C35.1728167,9.91428944 32.0946059,8.19982862 26.4381034,9.83267419 C19.5270911,11.8276553 13.046247,18.2159574 10.7440788,26.4102121 C8.82861123,33.2280582 11.161186,36.0634845 18.2047888,36.8398415 C25.3302805,37.6252244 35.7353482,35.4884477 44.1208333,31.7188498 L44.1475077,31.7781871 C44.159701,31.7725635 44.1718402,31.7671479 44.1839238,31.7619434 C45.9448098,31.0035157 50.4503245,38.3109156 47.7081571,39.5012767 C47.6834429,39.512005 47.6585061,39.5223987 47.6333528,39.5324628 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/theme-button": {
"title": "$:/core/images/theme-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-theme-button tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M55.854113,66.9453198 C54.3299482,65.1432292 53.0133883,63.518995 51.9542746,62.1263761 C40.8899947,47.578055 35.3091807,55.2383404 28.9941893,62.1263758 C22.6791979,69.0144112 30.6577916,74.5954741 24.6646171,79.4611023 C18.6714426,84.3267304 19.0414417,86.0133155 8.92654943,77.1119468 C-1.18834284,68.2105781 -1.88793412,65.7597832 2.7553553,60.6807286 C7.39864472,55.601674 11.2794845,63.5989423 20.7646627,54.5728325 C30.2498409,45.5467226 22.2819131,37.5470737 22.2819131,37.5470737 C22.2819131,37.5470737 42.0310399,-2.82433362 68.4206088,0.157393922 C94.8101776,3.13912147 58.4373806,-3.70356506 49.3898693,27.958066 C45.5161782,41.5139906 50.1107906,38.3197672 57.4560458,44.0453955 C59.1625767,45.3756367 63.8839488,48.777453 70.127165,53.3625321 C63.9980513,59.2416709 58.9704753,64.0315459 55.854113,66.9453198 Z M67.4952439,79.8919946 C83.5082212,96.9282402 105.237121,117.617674 112.611591,120.312493 C123.044132,124.12481 128.000001,117.170903 128,105.522947 C127.999999,98.3705516 104.170675,78.980486 84.0760493,63.7529565 C76.6683337,70.9090328 70.7000957,76.7055226 67.4952439,79.8919946 Z\"></path>\n <path d=\"M58.2852966,138.232794 L58.2852966,88.3943645 C56.318874,88.3923153 54.7254089,86.7952906 54.7254089,84.8344788 C54.7254089,82.8684071 56.3175932,81.2745911 58.2890859,81.2745911 L79.6408336,81.2745911 C81.608998,81.2745911 83.2045105,82.8724076 83.2045105,84.8344788 C83.2045105,86.7992907 81.614366,88.3923238 79.6446228,88.3943645 L79.6446228,88.3943646 L79.6446228,138.232794 C79.6446228,144.131009 74.8631748,148.912457 68.9649597,148.912457 C63.0667446,148.912457 58.2852966,144.131009 58.2852966,138.232794 Z M65.405072,-14.8423767 L72.5248474,-14.8423767 L76.0847351,-0.690681892 L72.5248474,6.51694947 L72.5248474,81.2745911 L65.405072,81.2745911 L65.405072,6.51694947 L61.8451843,-0.690681892 L65.405072,-14.8423767 Z\" transform=\"translate(68.964960, 67.035040) rotate(45.000000) translate(-68.964960, -67.035040) \"></path>\n </g>\n</svg>"
},
"$:/core/images/tip": {
"title": "$:/core/images/tip",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-tip tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M64,128.241818 C99.346224,128.241818 128,99.5880417 128,64.2418177 C128,28.8955937 99.346224,0.241817675 64,0.241817675 C28.653776,0.241817675 0,28.8955937 0,64.2418177 C0,99.5880417 28.653776,128.241818 64,128.241818 Z M75.9358659,91.4531941 C75.3115438,95.581915 70.2059206,98.8016748 64,98.8016748 C57.7940794,98.8016748 52.6884562,95.581915 52.0641341,91.4531941 C54.3299053,94.0502127 58.8248941,95.8192805 64,95.8192805 C69.1751059,95.8192805 73.6700947,94.0502127 75.9358659,91.4531941 L75.9358659,91.4531941 Z M75.9358659,95.9453413 C75.3115438,100.074062 70.2059206,103.293822 64,103.293822 C57.7940794,103.293822 52.6884562,100.074062 52.0641341,95.9453413 C54.3299053,98.5423599 58.8248941,100.311428 64,100.311428 C69.1751059,100.311428 73.6700947,98.5423599 75.9358659,95.9453413 L75.9358659,95.9453413 Z M75.9358659,100.40119 C75.3115438,104.529911 70.2059206,107.74967 64,107.74967 C57.7940794,107.74967 52.6884562,104.529911 52.0641341,100.40119 C54.3299053,102.998208 58.8248941,104.767276 64,104.767276 C69.1751059,104.767276 73.6700947,102.998208 75.9358659,100.40119 L75.9358659,100.40119 Z M75.9358659,104.893337 C75.3115438,109.022058 70.2059206,112.241818 64,112.241818 C57.7940794,112.241818 52.6884562,109.022058 52.0641341,104.893337 C54.3299053,107.490356 58.8248941,109.259423 64,109.259423 C69.1751059,109.259423 73.6700947,107.490356 75.9358659,104.893337 L75.9358659,104.893337 Z M64.3010456,24.2418177 C75.9193117,24.2418188 88.0000013,32.0619847 88,48.4419659 C87.9999987,64.8219472 75.9193018,71.7540963 75.9193021,83.5755932 C75.9193022,89.4486648 70.0521957,92.8368862 63.9999994,92.8368862 C57.947803,92.8368862 51.9731007,89.8295115 51.9731007,83.5755932 C51.9731007,71.1469799 39.9999998,65.4700602 40,48.4419647 C40.0000002,31.4138691 52.6827796,24.2418166 64.3010456,24.2418177 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/twitter": {
"title": "$:/core/images/twitter",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-twitter tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M41.6263422,115.803477 C27.0279663,115.803477 13.4398394,111.540813 1.99987456,104.234833 C4.02221627,104.472643 6.08004574,104.594302 8.16644978,104.594302 C20.277456,104.594302 31.4238403,100.47763 40.270894,93.5715185 C28.9590538,93.3635501 19.4123842,85.9189246 16.1230832,75.6885328 C17.7011365,75.9892376 19.320669,76.1503787 20.9862896,76.1503787 C23.344152,76.1503787 25.6278127,75.8359011 27.7971751,75.247346 C15.9709927,72.8821073 7.06079851,62.4745062 7.06079851,49.9982394 C7.06079851,49.8898938 7.06079851,49.7820074 7.06264203,49.67458 C10.5482779,51.6032228 14.5339687,52.7615103 18.7717609,52.8951059 C11.8355159,48.277565 7.2714207,40.3958845 7.2714207,31.4624258 C7.2714207,26.7434257 8.54621495,22.3200804 10.7713439,18.5169676 C23.5211299,34.0957738 42.568842,44.3472839 64.0532269,45.4210985 C63.6126256,43.5365285 63.3835682,41.5711584 63.3835682,39.5529928 C63.3835682,25.3326379 74.95811,13.8034766 89.2347917,13.8034766 C96.6697089,13.8034766 103.387958,16.930807 108.103682,21.9353619 C113.991886,20.780288 119.52429,18.6372496 124.518847,15.6866694 C122.588682,21.6993889 118.490075,26.7457211 113.152623,29.9327334 C118.381769,29.3102055 123.363882,27.926045 127.999875,25.8780385 C124.534056,31.0418981 120.151087,35.5772616 115.100763,39.2077561 C115.150538,40.3118708 115.175426,41.4224128 115.175426,42.538923 C115.175426,76.5663154 89.1744164,115.803477 41.6263422,115.803477\"></path>\n </g>\n</svg>\n"
},
"$:/core/images/underline": {
"title": "$:/core/images/underline",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-underline tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M7,117.421488 L121.247934,117.421488 L121.247934,128 L7,128 L7,117.421488 Z M104.871212,98.8958333 L104.871212,0 L88.6117424,0 L88.6117424,55.8560606 C88.6117424,60.3194668 88.0060035,64.432115 86.7945076,68.1941288 C85.5830116,71.9561425 83.7657949,75.239885 81.342803,78.0454545 C78.9198111,80.8510241 75.8911167,83.0189317 72.2566288,84.5492424 C68.6221409,86.0795531 64.3182067,86.844697 59.344697,86.844697 C53.0959284,86.844697 48.1862552,85.0593613 44.6155303,81.4886364 C41.0448054,77.9179114 39.2594697,73.0720003 39.2594697,66.9507576 L39.2594697,0 L23,0 L23,65.0378788 C23,70.3939662 23.5419769,75.2717583 24.625947,79.6714015 C25.709917,84.0710447 27.5908957,87.864883 30.2689394,91.0530303 C32.9469831,94.2411776 36.4538925,96.6960141 40.7897727,98.4176136 C45.125653,100.139213 50.545422,101 57.0492424,101 C64.3182182,101 70.630655,99.5653553 75.9867424,96.6960227 C81.3428298,93.8266902 85.742407,89.33147 89.1856061,83.2102273 L89.5681818,83.2102273 L89.5681818,98.8958333 L104.871212,98.8958333 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/unfold-all-button": {
"title": "$:/core/images/unfold-all-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-unfold-all tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <rect x=\"0\" y=\"0\" width=\"128\" height=\"16\" rx=\"8\"></rect>\n <rect x=\"0\" y=\"64\" width=\"128\" height=\"16\" rx=\"8\"></rect>\n <path d=\"M85.598226,8.34884273 C84.1490432,6.89863875 82.1463102,6 79.9340286,6 L47.9482224,6 C43.5292967,6 39.9411255,9.581722 39.9411255,14 C39.9411255,18.4092877 43.5260249,22 47.9482224,22 L71.9411255,22 L71.9411255,45.9929031 C71.9411255,50.4118288 75.5228475,54 79.9411255,54 C84.3504132,54 87.9411255,50.4151006 87.9411255,45.9929031 L87.9411255,14.0070969 C87.9411255,11.7964515 87.0447363,9.79371715 85.5956548,8.34412458 Z\" transform=\"translate(63.941125, 30.000000) scale(1, -1) rotate(-45.000000) translate(-63.941125, -30.000000) \"></path>\n <path d=\"M85.6571005,72.2899682 C84.2079177,70.8397642 82.2051847,69.9411255 79.9929031,69.9411255 L48.0070969,69.9411255 C43.5881712,69.9411255 40,73.5228475 40,77.9411255 C40,82.3504132 43.5848994,85.9411255 48.0070969,85.9411255 L72,85.9411255 L72,109.934029 C72,114.352954 75.581722,117.941125 80,117.941125 C84.4092877,117.941125 88,114.356226 88,109.934029 L88,77.9482224 C88,75.737577 87.1036108,73.7348426 85.6545293,72.2852501 Z\" transform=\"translate(64.000000, 93.941125) scale(1, -1) rotate(-45.000000) translate(-64.000000, -93.941125) \"></path>\n </g>\n</svg>"
},
"$:/core/images/unfold-button": {
"title": "$:/core/images/unfold-button",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-unfold tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <rect x=\"0\" y=\"0\" width=\"128\" height=\"16\" rx=\"8\"></rect>\n <path d=\"M85.598226,11.3488427 C84.1490432,9.89863875 82.1463102,9 79.9340286,9 L47.9482224,9 C43.5292967,9 39.9411255,12.581722 39.9411255,17 C39.9411255,21.4092877 43.5260249,25 47.9482224,25 L71.9411255,25 L71.9411255,48.9929031 C71.9411255,53.4118288 75.5228475,57 79.9411255,57 C84.3504132,57 87.9411255,53.4151006 87.9411255,48.9929031 L87.9411255,17.0070969 C87.9411255,14.7964515 87.0447363,12.7937171 85.5956548,11.3441246 Z\" transform=\"translate(63.941125, 33.000000) scale(1, -1) rotate(-45.000000) translate(-63.941125, -33.000000) \"></path>\n <path d=\"M85.6571005,53.4077172 C84.2079177,51.9575133 82.2051847,51.0588745 79.9929031,51.0588745 L48.0070969,51.0588745 C43.5881712,51.0588745 40,54.6405965 40,59.0588745 C40,63.4681622 43.5848994,67.0588745 48.0070969,67.0588745 L72,67.0588745 L72,91.0517776 C72,95.4707033 75.581722,99.0588745 80,99.0588745 C84.4092877,99.0588745 88,95.4739751 88,91.0517776 L88,59.0659714 C88,56.855326 87.1036108,54.8525917 85.6545293,53.4029991 Z\" transform=\"translate(64.000000, 75.058875) scale(1, -1) rotate(-45.000000) translate(-64.000000, -75.058875) \"></path>\n </g>\n</svg>"
},
"$:/core/images/unlocked-padlock": {
"title": "$:/core/images/unlocked-padlock",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-unlocked-padlock tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M48.6266053,64 L105,64 L105,96.0097716 C105,113.673909 90.6736461,128 73.001193,128 L55.998807,128 C38.3179793,128 24,113.677487 24,96.0097716 L24,64 L30.136303,64 C19.6806213,51.3490406 2.77158986,28.2115132 25.8366966,8.85759246 C50.4723026,-11.8141335 71.6711028,13.2108337 81.613302,25.0594855 C91.5555012,36.9081373 78.9368488,47.4964439 69.1559674,34.9513593 C59.375086,22.4062748 47.9893192,10.8049522 35.9485154,20.9083862 C23.9077117,31.0118202 34.192312,43.2685325 44.7624679,55.8655518 C47.229397,58.805523 48.403443,61.5979188 48.6266053,64 Z M67.7315279,92.3641717 C70.8232551,91.0923621 73,88.0503841 73,84.5 C73,79.8055796 69.1944204,76 64.5,76 C59.8055796,76 56,79.8055796 56,84.5 C56,87.947435 58.0523387,90.9155206 61.0018621,92.2491029 L55.9067479,115.020857 L72.8008958,115.020857 L67.7315279,92.3641717 L67.7315279,92.3641717 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/up-arrow": {
"created": "20150316000544368",
"modified": "20150316000831867",
"tags": "$:/tags/Image",
"title": "$:/core/images/up-arrow",
"text": "<svg class=\"tc-image-up-arrow tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n<path transform=\"rotate(-135, 63.8945, 64.1752)\" d=\"m109.07576,109.35336c-1.43248,1.43361 -3.41136,2.32182 -5.59717,2.32182l-79.16816,0c-4.36519,0 -7.91592,-3.5444 -7.91592,-7.91666c0,-4.36337 3.54408,-7.91667 7.91592,-7.91667l71.25075,0l0,-71.25074c0,-4.3652 3.54442,-7.91592 7.91667,-7.91592c4.36336,0 7.91667,3.54408 7.91667,7.91592l0,79.16815c0,2.1825 -0.88602,4.16136 -2.3185,5.59467l-0.00027,-0.00056l0.00001,-0.00001z\" />\n</svg>\n \n"
},
"$:/core/images/video": {
"title": "$:/core/images/video",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-video tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M64,12 C29.0909091,12 8.72727273,14.9166667 5.81818182,17.8333333 C2.90909091,20.75 1.93784382e-15,41.1666667 0,64.5 C1.93784382e-15,87.8333333 2.90909091,108.25 5.81818182,111.166667 C8.72727273,114.083333 29.0909091,117 64,117 C98.9090909,117 119.272727,114.083333 122.181818,111.166667 C125.090909,108.25 128,87.8333333 128,64.5 C128,41.1666667 125.090909,20.75 122.181818,17.8333333 C119.272727,14.9166667 98.9090909,12 64,12 Z M54.9161194,44.6182253 C51.102648,42.0759111 48.0112186,43.7391738 48.0112186,48.3159447 L48.0112186,79.6840553 C48.0112186,84.2685636 51.109784,85.9193316 54.9161194,83.3817747 L77.0838806,68.6032672 C80.897352,66.0609529 80.890216,61.9342897 77.0838806,59.3967328 L54.9161194,44.6182253 Z\"></path>\n </g>\n</svg>"
},
"$:/core/images/warning": {
"title": "$:/core/images/warning",
"tags": "$:/tags/Image",
"text": "<svg class=\"tc-image-warning tc-image-button\" width=\"22pt\" height=\"22pt\" viewBox=\"0 0 128 128\">\n <g fill-rule=\"evenodd\">\n <path d=\"M57.0717968,11 C60.1509982,5.66666667 67.8490018,5.66666667 70.9282032,11 L126.353829,107 C129.433031,112.333333 125.584029,119 119.425626,119 L8.57437416,119 C2.41597129,119 -1.43303051,112.333333 1.64617093,107 L57.0717968,11 Z M64,37 C59.581722,37 56,40.5820489 56,44.9935776 L56,73.0064224 C56,77.4211534 59.5907123,81 64,81 C68.418278,81 72,77.4179511 72,73.0064224 L72,44.9935776 C72,40.5788466 68.4092877,37 64,37 Z M64,104 C68.418278,104 72,100.418278 72,96 C72,91.581722 68.418278,88 64,88 C59.581722,88 56,91.581722 56,96 C56,100.418278 59.581722,104 64,104 Z\"></path>\n </g>\n</svg>"
},
"$:/language/Buttons/AdvancedSearch/Caption": {
"title": "$:/language/Buttons/AdvancedSearch/Caption",
"text": "advanced search"
},
"$:/language/Buttons/AdvancedSearch/Hint": {
"title": "$:/language/Buttons/AdvancedSearch/Hint",
"text": "Advanced search"
},
"$:/language/Buttons/Cancel/Caption": {
"title": "$:/language/Buttons/Cancel/Caption",
"text": "cancel"
},
"$:/language/Buttons/Cancel/Hint": {
"title": "$:/language/Buttons/Cancel/Hint",
"text": "Discard changes to this tiddler"
},
"$:/language/Buttons/Clone/Caption": {
"title": "$:/language/Buttons/Clone/Caption",
"text": "clone"
},
"$:/language/Buttons/Clone/Hint": {
"title": "$:/language/Buttons/Clone/Hint",
"text": "Clone this tiddler"
},
"$:/language/Buttons/Close/Caption": {
"title": "$:/language/Buttons/Close/Caption",
"text": "close"
},
"$:/language/Buttons/Close/Hint": {
"title": "$:/language/Buttons/Close/Hint",
"text": "Close this tiddler"
},
"$:/language/Buttons/CloseAll/Caption": {
"title": "$:/language/Buttons/CloseAll/Caption",
"text": "close all"
},
"$:/language/Buttons/CloseAll/Hint": {
"title": "$:/language/Buttons/CloseAll/Hint",
"text": "Close all tiddlers"
},
"$:/language/Buttons/CloseOthers/Caption": {
"title": "$:/language/Buttons/CloseOthers/Caption",
"text": "close others"
},
"$:/language/Buttons/CloseOthers/Hint": {
"title": "$:/language/Buttons/CloseOthers/Hint",
"text": "Close other tiddlers"
},
"$:/language/Buttons/ControlPanel/Caption": {
"title": "$:/language/Buttons/ControlPanel/Caption",
"text": "control panel"
},
"$:/language/Buttons/ControlPanel/Hint": {
"title": "$:/language/Buttons/ControlPanel/Hint",
"text": "Open control panel"
},
"$:/language/Buttons/Delete/Caption": {
"title": "$:/language/Buttons/Delete/Caption",
"text": "delete"
},
"$:/language/Buttons/Delete/Hint": {
"title": "$:/language/Buttons/Delete/Hint",
"text": "Delete this tiddler"
},
"$:/language/Buttons/Edit/Caption": {
"title": "$:/language/Buttons/Edit/Caption",
"text": "edit"
},
"$:/language/Buttons/Edit/Hint": {
"title": "$:/language/Buttons/Edit/Hint",
"text": "Edit this tiddler"
},
"$:/language/Buttons/Encryption/Caption": {
"title": "$:/language/Buttons/Encryption/Caption",
"text": "encryption"
},
"$:/language/Buttons/Encryption/Hint": {
"title": "$:/language/Buttons/Encryption/Hint",
"text": "Set or clear a password for saving this wiki"
},
"$:/language/Buttons/Encryption/ClearPassword/Caption": {
"title": "$:/language/Buttons/Encryption/ClearPassword/Caption",
"text": "clear password"
},
"$:/language/Buttons/Encryption/ClearPassword/Hint": {
"title": "$:/language/Buttons/Encryption/ClearPassword/Hint",
"text": "Clear the password and save this wiki without encryption"
},
"$:/language/Buttons/Encryption/SetPassword/Caption": {
"title": "$:/language/Buttons/Encryption/SetPassword/Caption",
"text": "set password"
},
"$:/language/Buttons/Encryption/SetPassword/Hint": {
"title": "$:/language/Buttons/Encryption/SetPassword/Hint",
"text": "Set a password for saving this wiki with encryption"
},
"$:/language/Buttons/ExportPage/Caption": {
"title": "$:/language/Buttons/ExportPage/Caption",
"text": "export all"
},
"$:/language/Buttons/ExportPage/Hint": {
"title": "$:/language/Buttons/ExportPage/Hint",
"text": "Export all tiddlers"
},
"$:/language/Buttons/ExportTiddler/Caption": {
"title": "$:/language/Buttons/ExportTiddler/Caption",
"text": "export tiddler"
},
"$:/language/Buttons/ExportTiddler/Hint": {
"title": "$:/language/Buttons/ExportTiddler/Hint",
"text": "Export tiddler"
},
"$:/language/Buttons/ExportTiddlers/Caption": {
"title": "$:/language/Buttons/ExportTiddlers/Caption",
"text": "export tiddlers"
},
"$:/language/Buttons/ExportTiddlers/Hint": {
"title": "$:/language/Buttons/ExportTiddlers/Hint",
"text": "Export tiddlers"
},
"$:/language/Buttons/Fold/Caption": {
"title": "$:/language/Buttons/Fold/Caption",
"text": "fold tiddler"
},
"$:/language/Buttons/Fold/Hint": {
"title": "$:/language/Buttons/Fold/Hint",
"text": "Fold the body of this tiddler"
},
"$:/language/Buttons/Fold/FoldBar/Caption": {
"title": "$:/language/Buttons/Fold/FoldBar/Caption",
"text": "fold-bar"
},
"$:/language/Buttons/Fold/FoldBar/Hint": {
"title": "$:/language/Buttons/Fold/FoldBar/Hint",
"text": "Optional bars to fold and unfold tiddlers"
},
"$:/language/Buttons/Unfold/Caption": {
"title": "$:/language/Buttons/Unfold/Caption",
"text": "unfold tiddler"
},
"$:/language/Buttons/Unfold/Hint": {
"title": "$:/language/Buttons/Unfold/Hint",
"text": "Unfold the body of this tiddler"
},
"$:/language/Buttons/FoldOthers/Caption": {
"title": "$:/language/Buttons/FoldOthers/Caption",
"text": "fold other tiddlers"
},
"$:/language/Buttons/FoldOthers/Hint": {
"title": "$:/language/Buttons/FoldOthers/Hint",
"text": "Fold the bodies of other opened tiddlers"
},
"$:/language/Buttons/FoldAll/Caption": {
"title": "$:/language/Buttons/FoldAll/Caption",
"text": "fold all tiddlers"
},
"$:/language/Buttons/FoldAll/Hint": {
"title": "$:/language/Buttons/FoldAll/Hint",
"text": "Fold the bodies of all opened tiddlers"
},
"$:/language/Buttons/UnfoldAll/Caption": {
"title": "$:/language/Buttons/UnfoldAll/Caption",
"text": "unfold all tiddlers"
},
"$:/language/Buttons/UnfoldAll/Hint": {
"title": "$:/language/Buttons/UnfoldAll/Hint",
"text": "Unfold the bodies of all opened tiddlers"
},
"$:/language/Buttons/FullScreen/Caption": {
"title": "$:/language/Buttons/FullScreen/Caption",
"text": "full-screen"
},
"$:/language/Buttons/FullScreen/Hint": {
"title": "$:/language/Buttons/FullScreen/Hint",
"text": "Enter or leave full-screen mode"
},
"$:/language/Buttons/Help/Caption": {
"title": "$:/language/Buttons/Help/Caption",
"text": "help"
},
"$:/language/Buttons/Help/Hint": {
"title": "$:/language/Buttons/Help/Hint",
"text": "Show help panel"
},
"$:/language/Buttons/Import/Caption": {
"title": "$:/language/Buttons/Import/Caption",
"text": "import"
},
"$:/language/Buttons/Import/Hint": {
"title": "$:/language/Buttons/Import/Hint",
"text": "Import many types of file including text, image, TiddlyWiki or JSON"
},
"$:/language/Buttons/Info/Caption": {
"title": "$:/language/Buttons/Info/Caption",
"text": "info"
},
"$:/language/Buttons/Info/Hint": {
"title": "$:/language/Buttons/Info/Hint",
"text": "Show information for this tiddler"
},
"$:/language/Buttons/Home/Caption": {
"title": "$:/language/Buttons/Home/Caption",
"text": "home"
},
"$:/language/Buttons/Home/Hint": {
"title": "$:/language/Buttons/Home/Hint",
"text": "Open the default tiddlers"
},
"$:/language/Buttons/Language/Caption": {
"title": "$:/language/Buttons/Language/Caption",
"text": "language"
},
"$:/language/Buttons/Language/Hint": {
"title": "$:/language/Buttons/Language/Hint",
"text": "Choose the user interface language"
},
"$:/language/Buttons/More/Caption": {
"title": "$:/language/Buttons/More/Caption",
"text": "more"
},
"$:/language/Buttons/More/Hint": {
"title": "$:/language/Buttons/More/Hint",
"text": "More actions"
},
"$:/language/Buttons/NewHere/Caption": {
"title": "$:/language/Buttons/NewHere/Caption",
"text": "new here"
},
"$:/language/Buttons/NewHere/Hint": {
"title": "$:/language/Buttons/NewHere/Hint",
"text": "Create a new tiddler tagged with this one"
},
"$:/language/Buttons/NewJournal/Caption": {
"title": "$:/language/Buttons/NewJournal/Caption",
"text": "new journal"
},
"$:/language/Buttons/NewJournal/Hint": {
"title": "$:/language/Buttons/NewJournal/Hint",
"text": "Create a new journal tiddler"
},
"$:/language/Buttons/NewJournalHere/Caption": {
"title": "$:/language/Buttons/NewJournalHere/Caption",
"text": "new journal here"
},
"$:/language/Buttons/NewJournalHere/Hint": {
"title": "$:/language/Buttons/NewJournalHere/Hint",
"text": "Create a new journal tiddler tagged with this one"
},
"$:/language/Buttons/NewImage/Caption": {
"title": "$:/language/Buttons/NewImage/Caption",
"text": "new image"
},
"$:/language/Buttons/NewImage/Hint": {
"title": "$:/language/Buttons/NewImage/Hint",
"text": "Create a new image tiddler"
},
"$:/language/Buttons/NewMarkdown/Caption": {
"title": "$:/language/Buttons/NewMarkdown/Caption",
"text": "new Markdown tiddler"
},
"$:/language/Buttons/NewMarkdown/Hint": {
"title": "$:/language/Buttons/NewMarkdown/Hint",
"text": "Create a new Markdown tiddler"
},
"$:/language/Buttons/NewTiddler/Caption": {
"title": "$:/language/Buttons/NewTiddler/Caption",
"text": "new tiddler"
},
"$:/language/Buttons/NewTiddler/Hint": {
"title": "$:/language/Buttons/NewTiddler/Hint",
"text": "Create a new tiddler"
},
"$:/language/Buttons/OpenWindow/Caption": {
"title": "$:/language/Buttons/OpenWindow/Caption",
"text": "open in new window"
},
"$:/language/Buttons/OpenWindow/Hint": {
"title": "$:/language/Buttons/OpenWindow/Hint",
"text": "Open tiddler in new window"
},
"$:/language/Buttons/Palette/Caption": {
"title": "$:/language/Buttons/Palette/Caption",
"text": "palette"
},
"$:/language/Buttons/Palette/Hint": {
"title": "$:/language/Buttons/Palette/Hint",
"text": "Choose the colour palette"
},
"$:/language/Buttons/Permalink/Caption": {
"title": "$:/language/Buttons/Permalink/Caption",
"text": "permalink"
},
"$:/language/Buttons/Permalink/Hint": {
"title": "$:/language/Buttons/Permalink/Hint",
"text": "Set browser address bar to a direct link to this tiddler"
},
"$:/language/Buttons/Permaview/Caption": {
"title": "$:/language/Buttons/Permaview/Caption",
"text": "permaview"
},
"$:/language/Buttons/Permaview/Hint": {
"title": "$:/language/Buttons/Permaview/Hint",
"text": "Set browser address bar to a direct link to all the tiddlers in this story"
},
"$:/language/Buttons/Refresh/Caption": {
"title": "$:/language/Buttons/Refresh/Caption",
"text": "refresh"
},
"$:/language/Buttons/Refresh/Hint": {
"title": "$:/language/Buttons/Refresh/Hint",
"text": "Perform a full refresh of the wiki"
},
"$:/language/Buttons/Save/Caption": {
"title": "$:/language/Buttons/Save/Caption",
"text": "ok"
},
"$:/language/Buttons/Save/Hint": {
"title": "$:/language/Buttons/Save/Hint",
"text": "Confirm changes to this tiddler"
},
"$:/language/Buttons/SaveWiki/Caption": {
"title": "$:/language/Buttons/SaveWiki/Caption",
"text": "save changes"
},
"$:/language/Buttons/SaveWiki/Hint": {
"title": "$:/language/Buttons/SaveWiki/Hint",
"text": "Save changes"
},
"$:/language/Buttons/StoryView/Caption": {
"title": "$:/language/Buttons/StoryView/Caption",
"text": "storyview"
},
"$:/language/Buttons/StoryView/Hint": {
"title": "$:/language/Buttons/StoryView/Hint",
"text": "Choose the story visualisation"
},
"$:/language/Buttons/HideSideBar/Caption": {
"title": "$:/language/Buttons/HideSideBar/Caption",
"text": "hide sidebar"
},
"$:/language/Buttons/HideSideBar/Hint": {
"title": "$:/language/Buttons/HideSideBar/Hint",
"text": "Hide sidebar"
},
"$:/language/Buttons/ShowSideBar/Caption": {
"title": "$:/language/Buttons/ShowSideBar/Caption",
"text": "show sidebar"
},
"$:/language/Buttons/ShowSideBar/Hint": {
"title": "$:/language/Buttons/ShowSideBar/Hint",
"text": "Show sidebar"
},
"$:/language/Buttons/TagManager/Caption": {
"title": "$:/language/Buttons/TagManager/Caption",
"text": "tag manager"
},
"$:/language/Buttons/TagManager/Hint": {
"title": "$:/language/Buttons/TagManager/Hint",
"text": "Open tag manager"
},
"$:/language/Buttons/Theme/Caption": {
"title": "$:/language/Buttons/Theme/Caption",
"text": "theme"
},
"$:/language/Buttons/Theme/Hint": {
"title": "$:/language/Buttons/Theme/Hint",
"text": "Choose the display theme"
},
"$:/language/Buttons/Bold/Caption": {
"title": "$:/language/Buttons/Bold/Caption",
"text": "bold"
},
"$:/language/Buttons/Bold/Hint": {
"title": "$:/language/Buttons/Bold/Hint",
"text": "Apply bold formatting to selection"
},
"$:/language/Buttons/Clear/Caption": {
"title": "$:/language/Buttons/Clear/Caption",
"text": "clear"
},
"$:/language/Buttons/Clear/Hint": {
"title": "$:/language/Buttons/Clear/Hint",
"text": "Clear image to solid colour"
},
"$:/language/Buttons/EditorHeight/Caption": {
"title": "$:/language/Buttons/EditorHeight/Caption",
"text": "editor height"
},
"$:/language/Buttons/EditorHeight/Caption/Auto": {
"title": "$:/language/Buttons/EditorHeight/Caption/Auto",
"text": "Automatically adjust height to fit content"
},
"$:/language/Buttons/EditorHeight/Caption/Fixed": {
"title": "$:/language/Buttons/EditorHeight/Caption/Fixed",
"text": "Fixed height:"
},
"$:/language/Buttons/EditorHeight/Hint": {
"title": "$:/language/Buttons/EditorHeight/Hint",
"text": "Choose the height of the text editor"
},
"$:/language/Buttons/Excise/Caption": {
"title": "$:/language/Buttons/Excise/Caption",
"text": "excise"
},
"$:/language/Buttons/Excise/Caption/Excise": {
"title": "$:/language/Buttons/Excise/Caption/Excise",
"text": "Perform excision"
},
"$:/language/Buttons/Excise/Caption/MacroName": {
"title": "$:/language/Buttons/Excise/Caption/MacroName",
"text": "Macro name:"
},
"$:/language/Buttons/Excise/Caption/NewTitle": {
"title": "$:/language/Buttons/Excise/Caption/NewTitle",
"text": "Title of new tiddler:"
},
"$:/language/Buttons/Excise/Caption/Replace": {
"title": "$:/language/Buttons/Excise/Caption/Replace",
"text": "Replace excised text with:"
},
"$:/language/Buttons/Excise/Caption/Replace/Macro": {
"title": "$:/language/Buttons/Excise/Caption/Replace/Macro",
"text": "macro"
},
"$:/language/Buttons/Excise/Caption/Replace/Link": {
"title": "$:/language/Buttons/Excise/Caption/Replace/Link",
"text": "link"
},
"$:/language/Buttons/Excise/Caption/Replace/Transclusion": {
"title": "$:/language/Buttons/Excise/Caption/Replace/Transclusion",
"text": "transclusion"
},
"$:/language/Buttons/Excise/Caption/Tag": {
"title": "$:/language/Buttons/Excise/Caption/Tag",
"text": "Tag new tiddler with the title of this tiddler"
},
"$:/language/Buttons/Excise/Caption/TiddlerExists": {
"title": "$:/language/Buttons/Excise/Caption/TiddlerExists",
"text": "Warning: tiddler already exists"
},
"$:/language/Buttons/Excise/Hint": {
"title": "$:/language/Buttons/Excise/Hint",
"text": "Excise the selected text into a new tiddler"
},
"$:/language/Buttons/Heading1/Caption": {
"title": "$:/language/Buttons/Heading1/Caption",
"text": "heading 1"
},
"$:/language/Buttons/Heading1/Hint": {
"title": "$:/language/Buttons/Heading1/Hint",
"text": "Apply heading level 1 formatting to lines containing selection"
},
"$:/language/Buttons/Heading2/Caption": {
"title": "$:/language/Buttons/Heading2/Caption",
"text": "heading 2"
},
"$:/language/Buttons/Heading2/Hint": {
"title": "$:/language/Buttons/Heading2/Hint",
"text": "Apply heading level 2 formatting to lines containing selection"
},
"$:/language/Buttons/Heading3/Caption": {
"title": "$:/language/Buttons/Heading3/Caption",
"text": "heading 3"
},
"$:/language/Buttons/Heading3/Hint": {
"title": "$:/language/Buttons/Heading3/Hint",
"text": "Apply heading level 3 formatting to lines containing selection"
},
"$:/language/Buttons/Heading4/Caption": {
"title": "$:/language/Buttons/Heading4/Caption",
"text": "heading 4"
},
"$:/language/Buttons/Heading4/Hint": {
"title": "$:/language/Buttons/Heading4/Hint",
"text": "Apply heading level 4 formatting to lines containing selection"
},
"$:/language/Buttons/Heading5/Caption": {
"title": "$:/language/Buttons/Heading5/Caption",
"text": "heading 5"
},
"$:/language/Buttons/Heading5/Hint": {
"title": "$:/language/Buttons/Heading5/Hint",
"text": "Apply heading level 5 formatting to lines containing selection"
},
"$:/language/Buttons/Heading6/Caption": {
"title": "$:/language/Buttons/Heading6/Caption",
"text": "heading 6"
},
"$:/language/Buttons/Heading6/Hint": {
"title": "$:/language/Buttons/Heading6/Hint",
"text": "Apply heading level 6 formatting to lines containing selection"
},
"$:/language/Buttons/Italic/Caption": {
"title": "$:/language/Buttons/Italic/Caption",
"text": "italic"
},
"$:/language/Buttons/Italic/Hint": {
"title": "$:/language/Buttons/Italic/Hint",
"text": "Apply italic formatting to selection"
},
"$:/language/Buttons/LineWidth/Caption": {
"title": "$:/language/Buttons/LineWidth/Caption",
"text": "line width"
},
"$:/language/Buttons/LineWidth/Hint": {
"title": "$:/language/Buttons/LineWidth/Hint",
"text": "Set line width for painting"
},
"$:/language/Buttons/Link/Caption": {
"title": "$:/language/Buttons/Link/Caption",
"text": "link"
},
"$:/language/Buttons/Link/Hint": {
"title": "$:/language/Buttons/Link/Hint",
"text": "Create wikitext link"
},
"$:/language/Buttons/ListBullet/Caption": {
"title": "$:/language/Buttons/ListBullet/Caption",
"text": "bulleted list"
},
"$:/language/Buttons/ListBullet/Hint": {
"title": "$:/language/Buttons/ListBullet/Hint",
"text": "Apply bulleted list formatting to lines containing selection"
},
"$:/language/Buttons/ListNumber/Caption": {
"title": "$:/language/Buttons/ListNumber/Caption",
"text": "numbered list"
},
"$:/language/Buttons/ListNumber/Hint": {
"title": "$:/language/Buttons/ListNumber/Hint",
"text": "Apply numbered list formatting to lines containing selection"
},
"$:/language/Buttons/MonoBlock/Caption": {
"title": "$:/language/Buttons/MonoBlock/Caption",
"text": "monospaced block"
},
"$:/language/Buttons/MonoBlock/Hint": {
"title": "$:/language/Buttons/MonoBlock/Hint",
"text": "Apply monospaced block formatting to lines containing selection"
},
"$:/language/Buttons/MonoLine/Caption": {
"title": "$:/language/Buttons/MonoLine/Caption",
"text": "monospaced"
},
"$:/language/Buttons/MonoLine/Hint": {
"title": "$:/language/Buttons/MonoLine/Hint",
"text": "Apply monospaced character formatting to selection"
},
"$:/language/Buttons/Opacity/Caption": {
"title": "$:/language/Buttons/Opacity/Caption",
"text": "opacity"
},
"$:/language/Buttons/Opacity/Hint": {
"title": "$:/language/Buttons/Opacity/Hint",
"text": "Set painting opacity"
},
"$:/language/Buttons/Paint/Caption": {
"title": "$:/language/Buttons/Paint/Caption",
"text": "paint colour"
},
"$:/language/Buttons/Paint/Hint": {
"title": "$:/language/Buttons/Paint/Hint",
"text": "Set painting colour"
},
"$:/language/Buttons/Picture/Caption": {
"title": "$:/language/Buttons/Picture/Caption",
"text": "picture"
},
"$:/language/Buttons/Picture/Hint": {
"title": "$:/language/Buttons/Picture/Hint",
"text": "Insert picture"
},
"$:/language/Buttons/Preview/Caption": {
"title": "$:/language/Buttons/Preview/Caption",
"text": "preview"
},
"$:/language/Buttons/Preview/Hint": {
"title": "$:/language/Buttons/Preview/Hint",
"text": "Show preview pane"
},
"$:/language/Buttons/PreviewType/Caption": {
"title": "$:/language/Buttons/PreviewType/Caption",
"text": "preview type"
},
"$:/language/Buttons/PreviewType/Hint": {
"title": "$:/language/Buttons/PreviewType/Hint",
"text": "Choose preview type"
},
"$:/language/Buttons/Quote/Caption": {
"title": "$:/language/Buttons/Quote/Caption",
"text": "quote"
},
"$:/language/Buttons/Quote/Hint": {
"title": "$:/language/Buttons/Quote/Hint",
"text": "Apply quoted text formatting to lines containing selection"
},
"$:/language/Buttons/Size/Caption": {
"title": "$:/language/Buttons/Size/Caption",
"text": "image size"
},
"$:/language/Buttons/Size/Caption/Height": {
"title": "$:/language/Buttons/Size/Caption/Height",
"text": "Height:"
},
"$:/language/Buttons/Size/Caption/Resize": {
"title": "$:/language/Buttons/Size/Caption/Resize",
"text": "Resize image"
},
"$:/language/Buttons/Size/Caption/Width": {
"title": "$:/language/Buttons/Size/Caption/Width",
"text": "Width:"
},
"$:/language/Buttons/Size/Hint": {
"title": "$:/language/Buttons/Size/Hint",
"text": "Set image size"
},
"$:/language/Buttons/Stamp/Caption": {
"title": "$:/language/Buttons/Stamp/Caption",
"text": "stamp"
},
"$:/language/Buttons/Stamp/Caption/New": {
"title": "$:/language/Buttons/Stamp/Caption/New",
"text": "Add your own"
},
"$:/language/Buttons/Stamp/Hint": {
"title": "$:/language/Buttons/Stamp/Hint",
"text": "Insert a preconfigured snippet of text"
},
"$:/language/Buttons/Stamp/New/Title": {
"title": "$:/language/Buttons/Stamp/New/Title",
"text": "Name as shown in menu"
},
"$:/language/Buttons/Stamp/New/Text": {
"title": "$:/language/Buttons/Stamp/New/Text",
"text": "Text of snippet. (Remember to add a descriptive title in the caption field)."
},
"$:/language/Buttons/Strikethrough/Caption": {
"title": "$:/language/Buttons/Strikethrough/Caption",
"text": "strikethrough"
},
"$:/language/Buttons/Strikethrough/Hint": {
"title": "$:/language/Buttons/Strikethrough/Hint",
"text": "Apply strikethrough formatting to selection"
},
"$:/language/Buttons/Subscript/Caption": {
"title": "$:/language/Buttons/Subscript/Caption",
"text": "subscript"
},
"$:/language/Buttons/Subscript/Hint": {
"title": "$:/language/Buttons/Subscript/Hint",
"text": "Apply subscript formatting to selection"
},
"$:/language/Buttons/Superscript/Caption": {
"title": "$:/language/Buttons/Superscript/Caption",
"text": "superscript"
},
"$:/language/Buttons/Superscript/Hint": {
"title": "$:/language/Buttons/Superscript/Hint",
"text": "Apply superscript formatting to selection"
},
"$:/language/Buttons/Underline/Caption": {
"title": "$:/language/Buttons/Underline/Caption",
"text": "underline"
},
"$:/language/Buttons/Underline/Hint": {
"title": "$:/language/Buttons/Underline/Hint",
"text": "Apply underline formatting to selection"
},
"$:/language/ControlPanel/Advanced/Caption": {
"title": "$:/language/ControlPanel/Advanced/Caption",
"text": "Advanced"
},
"$:/language/ControlPanel/Advanced/Hint": {
"title": "$:/language/ControlPanel/Advanced/Hint",
"text": "Internal information about this TiddlyWiki"
},
"$:/language/ControlPanel/Appearance/Caption": {
"title": "$:/language/ControlPanel/Appearance/Caption",
"text": "Appearance"
},
"$:/language/ControlPanel/Appearance/Hint": {
"title": "$:/language/ControlPanel/Appearance/Hint",
"text": "Ways to customise the appearance of your TiddlyWiki."
},
"$:/language/ControlPanel/Basics/AnimDuration/Prompt": {
"title": "$:/language/ControlPanel/Basics/AnimDuration/Prompt",
"text": "Animation duration:"
},
"$:/language/ControlPanel/Basics/Caption": {
"title": "$:/language/ControlPanel/Basics/Caption",
"text": "Basics"
},
"$:/language/ControlPanel/Basics/DefaultTiddlers/BottomHint": {
"title": "$:/language/ControlPanel/Basics/DefaultTiddlers/BottomHint",
"text": "Use [[double square brackets]] for titles with spaces. Or you can choose to <$button set=\"$:/DefaultTiddlers\" setTo=\"[list[$:/StoryList]]\">retain story ordering</$button>"
},
"$:/language/ControlPanel/Basics/DefaultTiddlers/Prompt": {
"title": "$:/language/ControlPanel/Basics/DefaultTiddlers/Prompt",
"text": "Default tiddlers:"
},
"$:/language/ControlPanel/Basics/DefaultTiddlers/TopHint": {
"title": "$:/language/ControlPanel/Basics/DefaultTiddlers/TopHint",
"text": "Choose which tiddlers are displayed at startup:"
},
"$:/language/ControlPanel/Basics/Language/Prompt": {
"title": "$:/language/ControlPanel/Basics/Language/Prompt",
"text": "Hello! Current language:"
},
"$:/language/ControlPanel/Basics/NewJournal/Title/Prompt": {
"title": "$:/language/ControlPanel/Basics/NewJournal/Title/Prompt",
"text": "Title of new journal tiddlers"
},
"$:/language/ControlPanel/Basics/NewJournal/Tags/Prompt": {
"title": "$:/language/ControlPanel/Basics/NewJournal/Tags/Prompt",
"text": "Tags for new journal tiddlers"
},
"$:/language/ControlPanel/Basics/OverriddenShadowTiddlers/Prompt": {
"title": "$:/language/ControlPanel/Basics/OverriddenShadowTiddlers/Prompt",
"text": "Number of overridden shadow tiddlers:"
},
"$:/language/ControlPanel/Basics/ShadowTiddlers/Prompt": {
"title": "$:/language/ControlPanel/Basics/ShadowTiddlers/Prompt",
"text": "Number of shadow tiddlers:"
},
"$:/language/ControlPanel/Basics/Subtitle/Prompt": {
"title": "$:/language/ControlPanel/Basics/Subtitle/Prompt",
"text": "Subtitle:"
},
"$:/language/ControlPanel/Basics/SystemTiddlers/Prompt": {
"title": "$:/language/ControlPanel/Basics/SystemTiddlers/Prompt",
"text": "Number of system tiddlers:"
},
"$:/language/ControlPanel/Basics/Tags/Prompt": {
"title": "$:/language/ControlPanel/Basics/Tags/Prompt",
"text": "Number of tags:"
},
"$:/language/ControlPanel/Basics/Tiddlers/Prompt": {
"title": "$:/language/ControlPanel/Basics/Tiddlers/Prompt",
"text": "Number of tiddlers:"
},
"$:/language/ControlPanel/Basics/Title/Prompt": {
"title": "$:/language/ControlPanel/Basics/Title/Prompt",
"text": "Title of this ~TiddlyWiki:"
},
"$:/language/ControlPanel/Basics/Username/Prompt": {
"title": "$:/language/ControlPanel/Basics/Username/Prompt",
"text": "Username for signing edits:"
},
"$:/language/ControlPanel/Basics/Version/Prompt": {
"title": "$:/language/ControlPanel/Basics/Version/Prompt",
"text": "~TiddlyWiki version:"
},
"$:/language/ControlPanel/EditorTypes/Caption": {
"title": "$:/language/ControlPanel/EditorTypes/Caption",
"text": "Editor Types"
},
"$:/language/ControlPanel/EditorTypes/Editor/Caption": {
"title": "$:/language/ControlPanel/EditorTypes/Editor/Caption",
"text": "Editor"
},
"$:/language/ControlPanel/EditorTypes/Hint": {
"title": "$:/language/ControlPanel/EditorTypes/Hint",
"text": "These tiddlers determine which editor is used to edit specific tiddler types."
},
"$:/language/ControlPanel/EditorTypes/Type/Caption": {
"title": "$:/language/ControlPanel/EditorTypes/Type/Caption",
"text": "Type"
},
"$:/language/ControlPanel/Info/Caption": {
"title": "$:/language/ControlPanel/Info/Caption",
"text": "Info"
},
"$:/language/ControlPanel/Info/Hint": {
"title": "$:/language/ControlPanel/Info/Hint",
"text": "Information about this TiddlyWiki"
},
"$:/language/ControlPanel/KeyboardShortcuts/Add/Prompt": {
"title": "$:/language/ControlPanel/KeyboardShortcuts/Add/Prompt",
"text": "Type shortcut here"
},
"$:/language/ControlPanel/KeyboardShortcuts/Add/Caption": {
"title": "$:/language/ControlPanel/KeyboardShortcuts/Add/Caption",
"text": "add shortcut"
},
"$:/language/ControlPanel/KeyboardShortcuts/Caption": {
"title": "$:/language/ControlPanel/KeyboardShortcuts/Caption",
"text": "Keyboard Shortcuts"
},
"$:/language/ControlPanel/KeyboardShortcuts/Hint": {
"title": "$:/language/ControlPanel/KeyboardShortcuts/Hint",
"text": "Manage keyboard shortcut assignments"
},
"$:/language/ControlPanel/KeyboardShortcuts/NoShortcuts/Caption": {
"title": "$:/language/ControlPanel/KeyboardShortcuts/NoShortcuts/Caption",
"text": "No keyboard shortcuts assigned"
},
"$:/language/ControlPanel/KeyboardShortcuts/Remove/Hint": {
"title": "$:/language/ControlPanel/KeyboardShortcuts/Remove/Hint",
"text": "remove keyboard shortcut"
},
"$:/language/ControlPanel/KeyboardShortcuts/Platform/All": {
"title": "$:/language/ControlPanel/KeyboardShortcuts/Platform/All",
"text": "All platforms"
},
"$:/language/ControlPanel/KeyboardShortcuts/Platform/Mac": {
"title": "$:/language/ControlPanel/KeyboardShortcuts/Platform/Mac",
"text": "Macintosh platform only"
},
"$:/language/ControlPanel/KeyboardShortcuts/Platform/NonMac": {
"title": "$:/language/ControlPanel/KeyboardShortcuts/Platform/NonMac",
"text": "Non-Macintosh platforms only"
},
"$:/language/ControlPanel/KeyboardShortcuts/Platform/Linux": {
"title": "$:/language/ControlPanel/KeyboardShortcuts/Platform/Linux",
"text": "Linux platform only"
},
"$:/language/ControlPanel/KeyboardShortcuts/Platform/NonLinux": {
"title": "$:/language/ControlPanel/KeyboardShortcuts/Platform/NonLinux",
"text": "Non-Linux platforms only"
},
"$:/language/ControlPanel/KeyboardShortcuts/Platform/Windows": {
"title": "$:/language/ControlPanel/KeyboardShortcuts/Platform/Windows",
"text": "Windows platform only"
},
"$:/language/ControlPanel/KeyboardShortcuts/Platform/NonWindows": {
"title": "$:/language/ControlPanel/KeyboardShortcuts/Platform/NonWindows",
"text": "Non-Windows platforms only"
},
"$:/language/ControlPanel/LoadedModules/Caption": {
"title": "$:/language/ControlPanel/LoadedModules/Caption",
"text": "Loaded Modules"
},
"$:/language/ControlPanel/LoadedModules/Hint": {
"title": "$:/language/ControlPanel/LoadedModules/Hint",
"text": "These are the currently loaded tiddler modules linked to their source tiddlers. Any italicised modules lack a source tiddler, typically because they were setup during the boot process."
},
"$:/language/ControlPanel/Palette/Caption": {
"title": "$:/language/ControlPanel/Palette/Caption",
"text": "Palette"
},
"$:/language/ControlPanel/Palette/Editor/Clone/Caption": {
"title": "$:/language/ControlPanel/Palette/Editor/Clone/Caption",
"text": "clone"
},
"$:/language/ControlPanel/Palette/Editor/Clone/Prompt": {
"title": "$:/language/ControlPanel/Palette/Editor/Clone/Prompt",
"text": "It is recommended that you clone this shadow palette before editing it"
},
"$:/language/ControlPanel/Palette/Editor/Prompt/Modified": {
"title": "$:/language/ControlPanel/Palette/Editor/Prompt/Modified",
"text": "This shadow palette has been modified"
},
"$:/language/ControlPanel/Palette/Editor/Prompt": {
"title": "$:/language/ControlPanel/Palette/Editor/Prompt",
"text": "Editing"
},
"$:/language/ControlPanel/Palette/Editor/Reset/Caption": {
"title": "$:/language/ControlPanel/Palette/Editor/Reset/Caption",
"text": "reset"
},
"$:/language/ControlPanel/Palette/HideEditor/Caption": {
"title": "$:/language/ControlPanel/Palette/HideEditor/Caption",
"text": "hide editor"
},
"$:/language/ControlPanel/Palette/Prompt": {
"title": "$:/language/ControlPanel/Palette/Prompt",
"text": "Current palette:"
},
"$:/language/ControlPanel/Palette/ShowEditor/Caption": {
"title": "$:/language/ControlPanel/Palette/ShowEditor/Caption",
"text": "show editor"
},
"$:/language/ControlPanel/Parsing/Caption": {
"title": "$:/language/ControlPanel/Parsing/Caption",
"text": "Parsing"
},
"$:/language/ControlPanel/Parsing/Hint": {
"title": "$:/language/ControlPanel/Parsing/Hint",
"text": "Here you can globally disable individual wiki parser rules. Take care as disabling some parser rules can prevent ~TiddlyWiki functioning correctly (you can restore normal operation with [[safe mode|http://tiddlywiki.com/#SafeMode]] )"
},
"$:/language/ControlPanel/Parsing/Block/Caption": {
"title": "$:/language/ControlPanel/Parsing/Block/Caption",
"text": "Block Parse Rules"
},
"$:/language/ControlPanel/Parsing/Inline/Caption": {
"title": "$:/language/ControlPanel/Parsing/Inline/Caption",
"text": "Inline Parse Rules"
},
"$:/language/ControlPanel/Parsing/Pragma/Caption": {
"title": "$:/language/ControlPanel/Parsing/Pragma/Caption",
"text": "Pragma Parse Rules"
},
"$:/language/ControlPanel/Plugins/Add/Caption": {
"title": "$:/language/ControlPanel/Plugins/Add/Caption",
"text": "Get more plugins"
},
"$:/language/ControlPanel/Plugins/Add/Hint": {
"title": "$:/language/ControlPanel/Plugins/Add/Hint",
"text": "Install plugins from the official library"
},
"$:/language/ControlPanel/Plugins/AlreadyInstalled/Hint": {
"title": "$:/language/ControlPanel/Plugins/AlreadyInstalled/Hint",
"text": "This plugin is already installed at version <$text text=<<installedVersion>>/>"
},
"$:/language/ControlPanel/Plugins/Caption": {
"title": "$:/language/ControlPanel/Plugins/Caption",
"text": "Plugins"
},
"$:/language/ControlPanel/Plugins/Disable/Caption": {
"title": "$:/language/ControlPanel/Plugins/Disable/Caption",
"text": "disable"
},
"$:/language/ControlPanel/Plugins/Disable/Hint": {
"title": "$:/language/ControlPanel/Plugins/Disable/Hint",
"text": "Disable this plugin when reloading page"
},
"$:/language/ControlPanel/Plugins/Disabled/Status": {
"title": "$:/language/ControlPanel/Plugins/Disabled/Status",
"text": "(disabled)"
},
"$:/language/ControlPanel/Plugins/Empty/Hint": {
"title": "$:/language/ControlPanel/Plugins/Empty/Hint",
"text": "None"
},
"$:/language/ControlPanel/Plugins/Enable/Caption": {
"title": "$:/language/ControlPanel/Plugins/Enable/Caption",
"text": "enable"
},
"$:/language/ControlPanel/Plugins/Enable/Hint": {
"title": "$:/language/ControlPanel/Plugins/Enable/Hint",
"text": "Enable this plugin when reloading page"
},
"$:/language/ControlPanel/Plugins/Install/Caption": {
"title": "$:/language/ControlPanel/Plugins/Install/Caption",
"text": "install"
},
"$:/language/ControlPanel/Plugins/Installed/Hint": {
"title": "$:/language/ControlPanel/Plugins/Installed/Hint",
"text": "Currently installed plugins:"
},
"$:/language/ControlPanel/Plugins/Languages/Caption": {
"title": "$:/language/ControlPanel/Plugins/Languages/Caption",
"text": "Languages"
},
"$:/language/ControlPanel/Plugins/Languages/Hint": {
"title": "$:/language/ControlPanel/Plugins/Languages/Hint",
"text": "Language pack plugins"
},
"$:/language/ControlPanel/Plugins/NoInfoFound/Hint": {
"title": "$:/language/ControlPanel/Plugins/NoInfoFound/Hint",
"text": "No ''\"<$text text=<<currentTab>>/>\"'' found"
},
"$:/language/ControlPanel/Plugins/NoInformation/Hint": {
"title": "$:/language/ControlPanel/Plugins/NoInformation/Hint",
"text": "No information provided"
},
"$:/language/ControlPanel/Plugins/NotInstalled/Hint": {
"title": "$:/language/ControlPanel/Plugins/NotInstalled/Hint",
"text": "This plugin is not currently installed"
},
"$:/language/ControlPanel/Plugins/OpenPluginLibrary": {
"title": "$:/language/ControlPanel/Plugins/OpenPluginLibrary",
"text": "open plugin library"
},
"$:/language/ControlPanel/Plugins/Plugins/Caption": {
"title": "$:/language/ControlPanel/Plugins/Plugins/Caption",
"text": "Plugins"
},
"$:/language/ControlPanel/Plugins/Plugins/Hint": {
"title": "$:/language/ControlPanel/Plugins/Plugins/Hint",
"text": "Plugins"
},
"$:/language/ControlPanel/Plugins/Reinstall/Caption": {
"title": "$:/language/ControlPanel/Plugins/Reinstall/Caption",
"text": "reinstall"
},
"$:/language/ControlPanel/Plugins/Themes/Caption": {
"title": "$:/language/ControlPanel/Plugins/Themes/Caption",
"text": "Themes"
},
"$:/language/ControlPanel/Plugins/Themes/Hint": {
"title": "$:/language/ControlPanel/Plugins/Themes/Hint",
"text": "Theme plugins"
},
"$:/language/ControlPanel/Saving/Caption": {
"title": "$:/language/ControlPanel/Saving/Caption",
"text": "Saving"
},
"$:/language/ControlPanel/Saving/Heading": {
"title": "$:/language/ControlPanel/Saving/Heading",
"text": "Saving"
},
"$:/language/ControlPanel/Saving/TiddlySpot/Advanced/Heading": {
"title": "$:/language/ControlPanel/Saving/TiddlySpot/Advanced/Heading",
"text": "Advanced Settings"
},
"$:/language/ControlPanel/Saving/TiddlySpot/BackupDir": {
"title": "$:/language/ControlPanel/Saving/TiddlySpot/BackupDir",
"text": "Backup Directory"
},
"$:/language/ControlPanel/Saving/TiddlySpot/Backups": {
"title": "$:/language/ControlPanel/Saving/TiddlySpot/Backups",
"text": "Backups"
},
"$:/language/ControlPanel/Saving/TiddlySpot/Description": {
"title": "$:/language/ControlPanel/Saving/TiddlySpot/Description",
"text": "These settings are only used when saving to http://tiddlyspot.com or a compatible remote server"
},
"$:/language/ControlPanel/Saving/TiddlySpot/Filename": {
"title": "$:/language/ControlPanel/Saving/TiddlySpot/Filename",
"text": "Upload Filename"
},
"$:/language/ControlPanel/Saving/TiddlySpot/Heading": {
"title": "$:/language/ControlPanel/Saving/TiddlySpot/Heading",
"text": "~TiddlySpot"
},
"$:/language/ControlPanel/Saving/TiddlySpot/Hint": {
"title": "$:/language/ControlPanel/Saving/TiddlySpot/Hint",
"text": "//The server URL defaults to `http://<wikiname>.tiddlyspot.com/store.cgi` and can be changed to use a custom server address, e.g. `http://example.com/store.php`.//"
},
"$:/language/ControlPanel/Saving/TiddlySpot/Password": {
"title": "$:/language/ControlPanel/Saving/TiddlySpot/Password",
"text": "Password"
},
"$:/language/ControlPanel/Saving/TiddlySpot/ServerURL": {
"title": "$:/language/ControlPanel/Saving/TiddlySpot/ServerURL",
"text": "Server URL"
},
"$:/language/ControlPanel/Saving/TiddlySpot/UploadDir": {
"title": "$:/language/ControlPanel/Saving/TiddlySpot/UploadDir",
"text": "Upload Directory"
},
"$:/language/ControlPanel/Saving/TiddlySpot/UserName": {
"title": "$:/language/ControlPanel/Saving/TiddlySpot/UserName",
"text": "Wiki Name"
},
"$:/language/ControlPanel/Settings/AutoSave/Caption": {
"title": "$:/language/ControlPanel/Settings/AutoSave/Caption",
"text": "Autosave"
},
"$:/language/ControlPanel/Settings/AutoSave/Disabled/Description": {
"title": "$:/language/ControlPanel/Settings/AutoSave/Disabled/Description",
"text": "Do not save changes automatically"
},
"$:/language/ControlPanel/Settings/AutoSave/Enabled/Description": {
"title": "$:/language/ControlPanel/Settings/AutoSave/Enabled/Description",
"text": "Save changes automatically"
},
"$:/language/ControlPanel/Settings/AutoSave/Hint": {
"title": "$:/language/ControlPanel/Settings/AutoSave/Hint",
"text": "Automatically save changes during editing"
},
"$:/language/ControlPanel/Settings/CamelCase/Caption": {
"title": "$:/language/ControlPanel/Settings/CamelCase/Caption",
"text": "Camel Case Wiki Links"
},
"$:/language/ControlPanel/Settings/CamelCase/Hint": {
"title": "$:/language/ControlPanel/Settings/CamelCase/Hint",
"text": "You can globally disable automatic linking of ~CamelCase phrases. Requires reload to take effect"
},
"$:/language/ControlPanel/Settings/CamelCase/Description": {
"title": "$:/language/ControlPanel/Settings/CamelCase/Description",
"text": "Enable automatic ~CamelCase linking"
},
"$:/language/ControlPanel/Settings/Caption": {
"title": "$:/language/ControlPanel/Settings/Caption",
"text": "Settings"
},
"$:/language/ControlPanel/Settings/EditorToolbar/Caption": {
"title": "$:/language/ControlPanel/Settings/EditorToolbar/Caption",
"text": "Editor Toolbar"
},
"$:/language/ControlPanel/Settings/EditorToolbar/Hint": {
"title": "$:/language/ControlPanel/Settings/EditorToolbar/Hint",
"text": "Enable or disable the editor toolbar:"
},
"$:/language/ControlPanel/Settings/EditorToolbar/Description": {
"title": "$:/language/ControlPanel/Settings/EditorToolbar/Description",
"text": "Show editor toolbar"
},
"$:/language/ControlPanel/Settings/Hint": {
"title": "$:/language/ControlPanel/Settings/Hint",
"text": "These settings let you customise the behaviour of TiddlyWiki."
},
"$:/language/ControlPanel/Settings/NavigationAddressBar/Caption": {
"title": "$:/language/ControlPanel/Settings/NavigationAddressBar/Caption",
"text": "Navigation Address Bar"
},
"$:/language/ControlPanel/Settings/NavigationAddressBar/Hint": {
"title": "$:/language/ControlPanel/Settings/NavigationAddressBar/Hint",
"text": "Behaviour of the browser address bar when navigating to a tiddler:"
},
"$:/language/ControlPanel/Settings/NavigationAddressBar/No/Description": {
"title": "$:/language/ControlPanel/Settings/NavigationAddressBar/No/Description",
"text": "Do not update the address bar"
},
"$:/language/ControlPanel/Settings/NavigationAddressBar/Permalink/Description": {
"title": "$:/language/ControlPanel/Settings/NavigationAddressBar/Permalink/Description",
"text": "Include the target tiddler"
},
"$:/language/ControlPanel/Settings/NavigationAddressBar/Permaview/Description": {
"title": "$:/language/ControlPanel/Settings/NavigationAddressBar/Permaview/Description",
"text": "Include the target tiddler and the current story sequence"
},
"$:/language/ControlPanel/Settings/NavigationHistory/Caption": {
"title": "$:/language/ControlPanel/Settings/NavigationHistory/Caption",
"text": "Navigation History"
},
"$:/language/ControlPanel/Settings/NavigationHistory/Hint": {
"title": "$:/language/ControlPanel/Settings/NavigationHistory/Hint",
"text": "Update browser history when navigating to a tiddler:"
},
"$:/language/ControlPanel/Settings/NavigationHistory/No/Description": {
"title": "$:/language/ControlPanel/Settings/NavigationHistory/No/Description",
"text": "Do not update history"
},
"$:/language/ControlPanel/Settings/NavigationHistory/Yes/Description": {
"title": "$:/language/ControlPanel/Settings/NavigationHistory/Yes/Description",
"text": "Update history"
},
"$:/language/ControlPanel/Settings/PerformanceInstrumentation/Caption": {
"title": "$:/language/ControlPanel/Settings/PerformanceInstrumentation/Caption",
"text": "Performance Instrumentation"
},
"$:/language/ControlPanel/Settings/PerformanceInstrumentation/Hint": {
"title": "$:/language/ControlPanel/Settings/PerformanceInstrumentation/Hint",
"text": "Displays performance statistics in the browser developer console. Requires reload to take effect"
},
"$:/language/ControlPanel/Settings/PerformanceInstrumentation/Description": {
"title": "$:/language/ControlPanel/Settings/PerformanceInstrumentation/Description",
"text": "Enable performance instrumentation"
},
"$:/language/ControlPanel/Settings/ToolbarButtonStyle/Caption": {
"title": "$:/language/ControlPanel/Settings/ToolbarButtonStyle/Caption",
"text": "Toolbar Button Style"
},
"$:/language/ControlPanel/Settings/ToolbarButtonStyle/Hint": {
"title": "$:/language/ControlPanel/Settings/ToolbarButtonStyle/Hint",
"text": "Choose the style for toolbar buttons:"
},
"$:/language/ControlPanel/Settings/ToolbarButtonStyle/Styles/Borderless": {
"title": "$:/language/ControlPanel/Settings/ToolbarButtonStyle/Styles/Borderless",
"text": "Borderless"
},
"$:/language/ControlPanel/Settings/ToolbarButtonStyle/Styles/Boxed": {
"title": "$:/language/ControlPanel/Settings/ToolbarButtonStyle/Styles/Boxed",
"text": "Boxed"
},
"$:/language/ControlPanel/Settings/ToolbarButtonStyle/Styles/Rounded": {
"title": "$:/language/ControlPanel/Settings/ToolbarButtonStyle/Styles/Rounded",
"text": "Rounded"
},
"$:/language/ControlPanel/Settings/ToolbarButtons/Caption": {
"title": "$:/language/ControlPanel/Settings/ToolbarButtons/Caption",
"text": "Toolbar Buttons"
},
"$:/language/ControlPanel/Settings/ToolbarButtons/Hint": {
"title": "$:/language/ControlPanel/Settings/ToolbarButtons/Hint",
"text": "Default toolbar button appearance:"
},
"$:/language/ControlPanel/Settings/ToolbarButtons/Icons/Description": {
"title": "$:/language/ControlPanel/Settings/ToolbarButtons/Icons/Description",
"text": "Include icon"
},
"$:/language/ControlPanel/Settings/ToolbarButtons/Text/Description": {
"title": "$:/language/ControlPanel/Settings/ToolbarButtons/Text/Description",
"text": "Include text"
},
"$:/language/ControlPanel/Settings/DefaultSidebarTab/Caption": {
"title": "$:/language/ControlPanel/Settings/DefaultSidebarTab/Caption",
"text": "Default Sidebar Tab"
},
"$:/language/ControlPanel/Settings/DefaultSidebarTab/Hint": {
"title": "$:/language/ControlPanel/Settings/DefaultSidebarTab/Hint",
"text": "Specify which sidebar tab is displayed by default"
},
"$:/language/ControlPanel/Settings/LinkToBehaviour/Caption": {
"title": "$:/language/ControlPanel/Settings/LinkToBehaviour/Caption",
"text": "Tiddler Opening Behaviour"
},
"$:/language/ControlPanel/Settings/LinkToBehaviour/InsideRiver/Hint": {
"title": "$:/language/ControlPanel/Settings/LinkToBehaviour/InsideRiver/Hint",
"text": "Navigation from //within// the story river"
},
"$:/language/ControlPanel/Settings/LinkToBehaviour/OutsideRiver/Hint": {
"title": "$:/language/ControlPanel/Settings/LinkToBehaviour/OutsideRiver/Hint",
"text": "Navigation from //outside// the story river"
},
"$:/language/ControlPanel/Settings/LinkToBehaviour/OpenAbove": {
"title": "$:/language/ControlPanel/Settings/LinkToBehaviour/OpenAbove",
"text": "Open above the current tiddler"
},
"$:/language/ControlPanel/Settings/LinkToBehaviour/OpenBelow": {
"title": "$:/language/ControlPanel/Settings/LinkToBehaviour/OpenBelow",
"text": "Open below the current tiddler"
},
"$:/language/ControlPanel/Settings/LinkToBehaviour/OpenAtTop": {
"title": "$:/language/ControlPanel/Settings/LinkToBehaviour/OpenAtTop",
"text": "Open at the top of the story river"
},
"$:/language/ControlPanel/Settings/LinkToBehaviour/OpenAtBottom": {
"title": "$:/language/ControlPanel/Settings/LinkToBehaviour/OpenAtBottom",
"text": "Open at the bottom of the story river"
},
"$:/language/ControlPanel/Settings/TitleLinks/Caption": {
"title": "$:/language/ControlPanel/Settings/TitleLinks/Caption",
"text": "Tiddler Titles"
},
"$:/language/ControlPanel/Settings/TitleLinks/Hint": {
"title": "$:/language/ControlPanel/Settings/TitleLinks/Hint",
"text": "Optionally display tiddler titles as links"
},
"$:/language/ControlPanel/Settings/TitleLinks/No/Description": {
"title": "$:/language/ControlPanel/Settings/TitleLinks/No/Description",
"text": "Do not display tiddler titles as links"
},
"$:/language/ControlPanel/Settings/TitleLinks/Yes/Description": {
"title": "$:/language/ControlPanel/Settings/TitleLinks/Yes/Description",
"text": "Display tiddler titles as links"
},
"$:/language/ControlPanel/Settings/MissingLinks/Caption": {
"title": "$:/language/ControlPanel/Settings/MissingLinks/Caption",
"text": "Wiki Links"
},
"$:/language/ControlPanel/Settings/MissingLinks/Hint": {
"title": "$:/language/ControlPanel/Settings/MissingLinks/Hint",
"text": "Choose whether to link to tiddlers that do not exist yet"
},
"$:/language/ControlPanel/Settings/MissingLinks/Description": {
"title": "$:/language/ControlPanel/Settings/MissingLinks/Description",
"text": "Enable links to missing tiddlers"
},
"$:/language/ControlPanel/StoryView/Caption": {
"title": "$:/language/ControlPanel/StoryView/Caption",
"text": "Story View"
},
"$:/language/ControlPanel/StoryView/Prompt": {
"title": "$:/language/ControlPanel/StoryView/Prompt",
"text": "Current view:"
},
"$:/language/ControlPanel/Theme/Caption": {
"title": "$:/language/ControlPanel/Theme/Caption",
"text": "Theme"
},
"$:/language/ControlPanel/Theme/Prompt": {
"title": "$:/language/ControlPanel/Theme/Prompt",
"text": "Current theme:"
},
"$:/language/ControlPanel/TiddlerFields/Caption": {
"title": "$:/language/ControlPanel/TiddlerFields/Caption",
"text": "Tiddler Fields"
},
"$:/language/ControlPanel/TiddlerFields/Hint": {
"title": "$:/language/ControlPanel/TiddlerFields/Hint",
"text": "This is the full set of TiddlerFields in use in this wiki (including system tiddlers but excluding shadow tiddlers)."
},
"$:/language/ControlPanel/Toolbars/Caption": {
"title": "$:/language/ControlPanel/Toolbars/Caption",
"text": "Toolbars"
},
"$:/language/ControlPanel/Toolbars/EditToolbar/Caption": {
"title": "$:/language/ControlPanel/Toolbars/EditToolbar/Caption",
"text": "Edit Toolbar"
},
"$:/language/ControlPanel/Toolbars/EditToolbar/Hint": {
"title": "$:/language/ControlPanel/Toolbars/EditToolbar/Hint",
"text": "Choose which buttons are displayed for tiddlers in edit mode"
},
"$:/language/ControlPanel/Toolbars/Hint": {
"title": "$:/language/ControlPanel/Toolbars/Hint",
"text": "Select which toolbar buttons are displayed"
},
"$:/language/ControlPanel/Toolbars/PageControls/Caption": {
"title": "$:/language/ControlPanel/Toolbars/PageControls/Caption",
"text": "Page Toolbar"
},
"$:/language/ControlPanel/Toolbars/PageControls/Hint": {
"title": "$:/language/ControlPanel/Toolbars/PageControls/Hint",
"text": "Choose which buttons are displayed on the main page toolbar"
},
"$:/language/ControlPanel/Toolbars/EditorToolbar/Caption": {
"title": "$:/language/ControlPanel/Toolbars/EditorToolbar/Caption",
"text": "Editor Toolbar"
},
"$:/language/ControlPanel/Toolbars/EditorToolbar/Hint": {
"title": "$:/language/ControlPanel/Toolbars/EditorToolbar/Hint",
"text": "Choose which buttons are displayed in the editor toolbar. Note that some buttons will only appear when editing tiddlers of a certain type"
},
"$:/language/ControlPanel/Toolbars/ViewToolbar/Caption": {
"title": "$:/language/ControlPanel/Toolbars/ViewToolbar/Caption",
"text": "View Toolbar"
},
"$:/language/ControlPanel/Toolbars/ViewToolbar/Hint": {
"title": "$:/language/ControlPanel/Toolbars/ViewToolbar/Hint",
"text": "Choose which buttons are displayed for tiddlers in view mode"
},
"$:/language/ControlPanel/Tools/Download/Full/Caption": {
"title": "$:/language/ControlPanel/Tools/Download/Full/Caption",
"text": "Download full wiki"
},
"$:/language/Date/DaySuffix/1": {
"title": "$:/language/Date/DaySuffix/1",
"text": "st"
},
"$:/language/Date/DaySuffix/2": {
"title": "$:/language/Date/DaySuffix/2",
"text": "nd"
},
"$:/language/Date/DaySuffix/3": {
"title": "$:/language/Date/DaySuffix/3",
"text": "rd"
},
"$:/language/Date/DaySuffix/4": {
"title": "$:/language/Date/DaySuffix/4",
"text": "th"
},
"$:/language/Date/DaySuffix/5": {
"title": "$:/language/Date/DaySuffix/5",
"text": "th"
},
"$:/language/Date/DaySuffix/6": {
"title": "$:/language/Date/DaySuffix/6",
"text": "th"
},
"$:/language/Date/DaySuffix/7": {
"title": "$:/language/Date/DaySuffix/7",
"text": "th"
},
"$:/language/Date/DaySuffix/8": {
"title": "$:/language/Date/DaySuffix/8",
"text": "th"
},
"$:/language/Date/DaySuffix/9": {
"title": "$:/language/Date/DaySuffix/9",
"text": "th"
},
"$:/language/Date/DaySuffix/10": {
"title": "$:/language/Date/DaySuffix/10",
"text": "th"
},
"$:/language/Date/DaySuffix/11": {
"title": "$:/language/Date/DaySuffix/11",
"text": "th"
},
"$:/language/Date/DaySuffix/12": {
"title": "$:/language/Date/DaySuffix/12",
"text": "th"
},
"$:/language/Date/DaySuffix/13": {
"title": "$:/language/Date/DaySuffix/13",
"text": "th"
},
"$:/language/Date/DaySuffix/14": {
"title": "$:/language/Date/DaySuffix/14",
"text": "th"
},
"$:/language/Date/DaySuffix/15": {
"title": "$:/language/Date/DaySuffix/15",
"text": "th"
},
"$:/language/Date/DaySuffix/16": {
"title": "$:/language/Date/DaySuffix/16",
"text": "th"
},
"$:/language/Date/DaySuffix/17": {
"title": "$:/language/Date/DaySuffix/17",
"text": "th"
},
"$:/language/Date/DaySuffix/18": {
"title": "$:/language/Date/DaySuffix/18",
"text": "th"
},
"$:/language/Date/DaySuffix/19": {
"title": "$:/language/Date/DaySuffix/19",
"text": "th"
},
"$:/language/Date/DaySuffix/20": {
"title": "$:/language/Date/DaySuffix/20",
"text": "th"
},
"$:/language/Date/DaySuffix/21": {
"title": "$:/language/Date/DaySuffix/21",
"text": "st"
},
"$:/language/Date/DaySuffix/22": {
"title": "$:/language/Date/DaySuffix/22",
"text": "nd"
},
"$:/language/Date/DaySuffix/23": {
"title": "$:/language/Date/DaySuffix/23",
"text": "rd"
},
"$:/language/Date/DaySuffix/24": {
"title": "$:/language/Date/DaySuffix/24",
"text": "th"
},
"$:/language/Date/DaySuffix/25": {
"title": "$:/language/Date/DaySuffix/25",
"text": "th"
},
"$:/language/Date/DaySuffix/26": {
"title": "$:/language/Date/DaySuffix/26",
"text": "th"
},
"$:/language/Date/DaySuffix/27": {
"title": "$:/language/Date/DaySuffix/27",
"text": "th"
},
"$:/language/Date/DaySuffix/28": {
"title": "$:/language/Date/DaySuffix/28",
"text": "th"
},
"$:/language/Date/DaySuffix/29": {
"title": "$:/language/Date/DaySuffix/29",
"text": "th"
},
"$:/language/Date/DaySuffix/30": {
"title": "$:/language/Date/DaySuffix/30",
"text": "th"
},
"$:/language/Date/DaySuffix/31": {
"title": "$:/language/Date/DaySuffix/31",
"text": "st"
},
"$:/language/Date/Long/Day/0": {
"title": "$:/language/Date/Long/Day/0",
"text": "Sunday"
},
"$:/language/Date/Long/Day/1": {
"title": "$:/language/Date/Long/Day/1",
"text": "Monday"
},
"$:/language/Date/Long/Day/2": {
"title": "$:/language/Date/Long/Day/2",
"text": "Tuesday"
},
"$:/language/Date/Long/Day/3": {
"title": "$:/language/Date/Long/Day/3",
"text": "Wednesday"
},
"$:/language/Date/Long/Day/4": {
"title": "$:/language/Date/Long/Day/4",
"text": "Thursday"
},
"$:/language/Date/Long/Day/5": {
"title": "$:/language/Date/Long/Day/5",
"text": "Friday"
},
"$:/language/Date/Long/Day/6": {
"title": "$:/language/Date/Long/Day/6",
"text": "Saturday"
},
"$:/language/Date/Long/Month/1": {
"title": "$:/language/Date/Long/Month/1",
"text": "January"
},
"$:/language/Date/Long/Month/2": {
"title": "$:/language/Date/Long/Month/2",
"text": "February"
},
"$:/language/Date/Long/Month/3": {
"title": "$:/language/Date/Long/Month/3",
"text": "March"
},
"$:/language/Date/Long/Month/4": {
"title": "$:/language/Date/Long/Month/4",
"text": "April"
},
"$:/language/Date/Long/Month/5": {
"title": "$:/language/Date/Long/Month/5",
"text": "May"
},
"$:/language/Date/Long/Month/6": {
"title": "$:/language/Date/Long/Month/6",
"text": "June"
},
"$:/language/Date/Long/Month/7": {
"title": "$:/language/Date/Long/Month/7",
"text": "July"
},
"$:/language/Date/Long/Month/8": {
"title": "$:/language/Date/Long/Month/8",
"text": "August"
},
"$:/language/Date/Long/Month/9": {
"title": "$:/language/Date/Long/Month/9",
"text": "September"
},
"$:/language/Date/Long/Month/10": {
"title": "$:/language/Date/Long/Month/10",
"text": "October"
},
"$:/language/Date/Long/Month/11": {
"title": "$:/language/Date/Long/Month/11",
"text": "November"
},
"$:/language/Date/Long/Month/12": {
"title": "$:/language/Date/Long/Month/12",
"text": "December"
},
"$:/language/Date/Period/am": {
"title": "$:/language/Date/Period/am",
"text": "am"
},
"$:/language/Date/Period/pm": {
"title": "$:/language/Date/Period/pm",
"text": "pm"
},
"$:/language/Date/Short/Day/0": {
"title": "$:/language/Date/Short/Day/0",
"text": "Sun"
},
"$:/language/Date/Short/Day/1": {
"title": "$:/language/Date/Short/Day/1",
"text": "Mon"
},
"$:/language/Date/Short/Day/2": {
"title": "$:/language/Date/Short/Day/2",
"text": "Tue"
},
"$:/language/Date/Short/Day/3": {
"title": "$:/language/Date/Short/Day/3",
"text": "Wed"
},
"$:/language/Date/Short/Day/4": {
"title": "$:/language/Date/Short/Day/4",
"text": "Thu"
},
"$:/language/Date/Short/Day/5": {
"title": "$:/language/Date/Short/Day/5",
"text": "Fri"
},
"$:/language/Date/Short/Day/6": {
"title": "$:/language/Date/Short/Day/6",
"text": "Sat"
},
"$:/language/Date/Short/Month/1": {
"title": "$:/language/Date/Short/Month/1",
"text": "Jan"
},
"$:/language/Date/Short/Month/2": {
"title": "$:/language/Date/Short/Month/2",
"text": "Feb"
},
"$:/language/Date/Short/Month/3": {
"title": "$:/language/Date/Short/Month/3",
"text": "Mar"
},
"$:/language/Date/Short/Month/4": {
"title": "$:/language/Date/Short/Month/4",
"text": "Apr"
},
"$:/language/Date/Short/Month/5": {
"title": "$:/language/Date/Short/Month/5",
"text": "May"
},
"$:/language/Date/Short/Month/6": {
"title": "$:/language/Date/Short/Month/6",
"text": "Jun"
},
"$:/language/Date/Short/Month/7": {
"title": "$:/language/Date/Short/Month/7",
"text": "Jul"
},
"$:/language/Date/Short/Month/8": {
"title": "$:/language/Date/Short/Month/8",
"text": "Aug"
},
"$:/language/Date/Short/Month/9": {
"title": "$:/language/Date/Short/Month/9",
"text": "Sep"
},
"$:/language/Date/Short/Month/10": {
"title": "$:/language/Date/Short/Month/10",
"text": "Oct"
},
"$:/language/Date/Short/Month/11": {
"title": "$:/language/Date/Short/Month/11",
"text": "Nov"
},
"$:/language/Date/Short/Month/12": {
"title": "$:/language/Date/Short/Month/12",
"text": "Dec"
},
"$:/language/RelativeDate/Future/Days": {
"title": "$:/language/RelativeDate/Future/Days",
"text": "<<period>> days from now"
},
"$:/language/RelativeDate/Future/Hours": {
"title": "$:/language/RelativeDate/Future/Hours",
"text": "<<period>> hours from now"
},
"$:/language/RelativeDate/Future/Minutes": {
"title": "$:/language/RelativeDate/Future/Minutes",
"text": "<<period>> minutes from now"
},
"$:/language/RelativeDate/Future/Months": {
"title": "$:/language/RelativeDate/Future/Months",
"text": "<<period>> months from now"
},
"$:/language/RelativeDate/Future/Second": {
"title": "$:/language/RelativeDate/Future/Second",
"text": "1 second from now"
},
"$:/language/RelativeDate/Future/Seconds": {
"title": "$:/language/RelativeDate/Future/Seconds",
"text": "<<period>> seconds from now"
},
"$:/language/RelativeDate/Future/Years": {
"title": "$:/language/RelativeDate/Future/Years",
"text": "<<period>> years from now"
},
"$:/language/RelativeDate/Past/Days": {
"title": "$:/language/RelativeDate/Past/Days",
"text": "<<period>> days ago"
},
"$:/language/RelativeDate/Past/Hours": {
"title": "$:/language/RelativeDate/Past/Hours",
"text": "<<period>> hours ago"
},
"$:/language/RelativeDate/Past/Minutes": {
"title": "$:/language/RelativeDate/Past/Minutes",
"text": "<<period>> minutes ago"
},
"$:/language/RelativeDate/Past/Months": {
"title": "$:/language/RelativeDate/Past/Months",
"text": "<<period>> months ago"
},
"$:/language/RelativeDate/Past/Second": {
"title": "$:/language/RelativeDate/Past/Second",
"text": "1 second ago"
},
"$:/language/RelativeDate/Past/Seconds": {
"title": "$:/language/RelativeDate/Past/Seconds",
"text": "<<period>> seconds ago"
},
"$:/language/RelativeDate/Past/Years": {
"title": "$:/language/RelativeDate/Past/Years",
"text": "<<period>> years ago"
},
"$:/language/Docs/ModuleTypes/animation": {
"title": "$:/language/Docs/ModuleTypes/animation",
"text": "Animations that may be used with the RevealWidget."
},
"$:/language/Docs/ModuleTypes/command": {
"title": "$:/language/Docs/ModuleTypes/command",
"text": "Commands that can be executed under Node.js."
},
"$:/language/Docs/ModuleTypes/config": {
"title": "$:/language/Docs/ModuleTypes/config",
"text": "Data to be inserted into `$tw.config`."
},
"$:/language/Docs/ModuleTypes/filteroperator": {
"title": "$:/language/Docs/ModuleTypes/filteroperator",
"text": "Individual filter operator methods."
},
"$:/language/Docs/ModuleTypes/global": {
"title": "$:/language/Docs/ModuleTypes/global",
"text": "Global data to be inserted into `$tw`."
},
"$:/language/Docs/ModuleTypes/isfilteroperator": {
"title": "$:/language/Docs/ModuleTypes/isfilteroperator",
"text": "Operands for the ''is'' filter operator."
},
"$:/language/Docs/ModuleTypes/macro": {
"title": "$:/language/Docs/ModuleTypes/macro",
"text": "JavaScript macro definitions."
},
"$:/language/Docs/ModuleTypes/parser": {
"title": "$:/language/Docs/ModuleTypes/parser",
"text": "Parsers for different content types."
},
"$:/language/Docs/ModuleTypes/saver": {
"title": "$:/language/Docs/ModuleTypes/saver",
"text": "Savers handle different methods for saving files from the browser."
},
"$:/language/Docs/ModuleTypes/startup": {
"title": "$:/language/Docs/ModuleTypes/startup",
"text": "Startup functions."
},
"$:/language/Docs/ModuleTypes/storyview": {
"title": "$:/language/Docs/ModuleTypes/storyview",
"text": "Story views customise the animation and behaviour of list widgets."
},
"$:/language/Docs/ModuleTypes/tiddlerdeserializer": {
"title": "$:/language/Docs/ModuleTypes/tiddlerdeserializer",
"text": "Converts different content types into tiddlers."
},
"$:/language/Docs/ModuleTypes/tiddlerfield": {
"title": "$:/language/Docs/ModuleTypes/tiddlerfield",
"text": "Defines the behaviour of an individual tiddler field."
},
"$:/language/Docs/ModuleTypes/tiddlermethod": {
"title": "$:/language/Docs/ModuleTypes/tiddlermethod",
"text": "Adds methods to the `$tw.Tiddler` prototype."
},
"$:/language/Docs/ModuleTypes/upgrader": {
"title": "$:/language/Docs/ModuleTypes/upgrader",
"text": "Applies upgrade processing to tiddlers during an upgrade/import."
},
"$:/language/Docs/ModuleTypes/utils": {
"title": "$:/language/Docs/ModuleTypes/utils",
"text": "Adds methods to `$tw.utils`."
},
"$:/language/Docs/ModuleTypes/utils-node": {
"title": "$:/language/Docs/ModuleTypes/utils-node",
"text": "Adds Node.js-specific methods to `$tw.utils`."
},
"$:/language/Docs/ModuleTypes/widget": {
"title": "$:/language/Docs/ModuleTypes/widget",
"text": "Widgets encapsulate DOM rendering and refreshing."
},
"$:/language/Docs/ModuleTypes/wikimethod": {
"title": "$:/language/Docs/ModuleTypes/wikimethod",
"text": "Adds methods to `$tw.Wiki`."
},
"$:/language/Docs/ModuleTypes/wikirule": {
"title": "$:/language/Docs/ModuleTypes/wikirule",
"text": "Individual parser rules for the main WikiText parser."
},
"$:/language/Docs/PaletteColours/alert-background": {
"title": "$:/language/Docs/PaletteColours/alert-background",
"text": "Alert background"
},
"$:/language/Docs/PaletteColours/alert-border": {
"title": "$:/language/Docs/PaletteColours/alert-border",
"text": "Alert border"
},
"$:/language/Docs/PaletteColours/alert-highlight": {
"title": "$:/language/Docs/PaletteColours/alert-highlight",
"text": "Alert highlight"
},
"$:/language/Docs/PaletteColours/alert-muted-foreground": {
"title": "$:/language/Docs/PaletteColours/alert-muted-foreground",
"text": "Alert muted foreground"
},
"$:/language/Docs/PaletteColours/background": {
"title": "$:/language/Docs/PaletteColours/background",
"text": "General background"
},
"$:/language/Docs/PaletteColours/blockquote-bar": {
"title": "$:/language/Docs/PaletteColours/blockquote-bar",
"text": "Blockquote bar"
},
"$:/language/Docs/PaletteColours/button-background": {
"title": "$:/language/Docs/PaletteColours/button-background",
"text": "Default button background"
},
"$:/language/Docs/PaletteColours/button-border": {
"title": "$:/language/Docs/PaletteColours/button-border",
"text": "Default button border"
},
"$:/language/Docs/PaletteColours/button-foreground": {
"title": "$:/language/Docs/PaletteColours/button-foreground",
"text": "Default button foreground"
},
"$:/language/Docs/PaletteColours/dirty-indicator": {
"title": "$:/language/Docs/PaletteColours/dirty-indicator",
"text": "Unsaved changes indicator"
},
"$:/language/Docs/PaletteColours/code-background": {
"title": "$:/language/Docs/PaletteColours/code-background",
"text": "Code background"
},
"$:/language/Docs/PaletteColours/code-border": {
"title": "$:/language/Docs/PaletteColours/code-border",
"text": "Code border"
},
"$:/language/Docs/PaletteColours/code-foreground": {
"title": "$:/language/Docs/PaletteColours/code-foreground",
"text": "Code foreground"
},
"$:/language/Docs/PaletteColours/download-background": {
"title": "$:/language/Docs/PaletteColours/download-background",
"text": "Download button background"
},
"$:/language/Docs/PaletteColours/download-foreground": {
"title": "$:/language/Docs/PaletteColours/download-foreground",
"text": "Download button foreground"
},
"$:/language/Docs/PaletteColours/dragger-background": {
"title": "$:/language/Docs/PaletteColours/dragger-background",
"text": "Dragger background"
},
"$:/language/Docs/PaletteColours/dragger-foreground": {
"title": "$:/language/Docs/PaletteColours/dragger-foreground",
"text": "Dragger foreground"
},
"$:/language/Docs/PaletteColours/dropdown-background": {
"title": "$:/language/Docs/PaletteColours/dropdown-background",
"text": "Dropdown background"
},
"$:/language/Docs/PaletteColours/dropdown-border": {
"title": "$:/language/Docs/PaletteColours/dropdown-border",
"text": "Dropdown border"
},
"$:/language/Docs/PaletteColours/dropdown-tab-background-selected": {
"title": "$:/language/Docs/PaletteColours/dropdown-tab-background-selected",
"text": "Dropdown tab background for selected tabs"
},
"$:/language/Docs/PaletteColours/dropdown-tab-background": {
"title": "$:/language/Docs/PaletteColours/dropdown-tab-background",
"text": "Dropdown tab background"
},
"$:/language/Docs/PaletteColours/dropzone-background": {
"title": "$:/language/Docs/PaletteColours/dropzone-background",
"text": "Dropzone background"
},
"$:/language/Docs/PaletteColours/external-link-background-hover": {
"title": "$:/language/Docs/PaletteColours/external-link-background-hover",
"text": "External link background hover"
},
"$:/language/Docs/PaletteColours/external-link-background-visited": {
"title": "$:/language/Docs/PaletteColours/external-link-background-visited",
"text": "External link background visited"
},
"$:/language/Docs/PaletteColours/external-link-background": {
"title": "$:/language/Docs/PaletteColours/external-link-background",
"text": "External link background"
},
"$:/language/Docs/PaletteColours/external-link-foreground-hover": {
"title": "$:/language/Docs/PaletteColours/external-link-foreground-hover",
"text": "External link foreground hover"
},
"$:/language/Docs/PaletteColours/external-link-foreground-visited": {
"title": "$:/language/Docs/PaletteColours/external-link-foreground-visited",
"text": "External link foreground visited"
},
"$:/language/Docs/PaletteColours/external-link-foreground": {
"title": "$:/language/Docs/PaletteColours/external-link-foreground",
"text": "External link foreground"
},
"$:/language/Docs/PaletteColours/foreground": {
"title": "$:/language/Docs/PaletteColours/foreground",
"text": "General foreground"
},
"$:/language/Docs/PaletteColours/message-background": {
"title": "$:/language/Docs/PaletteColours/message-background",
"text": "Message box background"
},
"$:/language/Docs/PaletteColours/message-border": {
"title": "$:/language/Docs/PaletteColours/message-border",
"text": "Message box border"
},
"$:/language/Docs/PaletteColours/message-foreground": {
"title": "$:/language/Docs/PaletteColours/message-foreground",
"text": "Message box foreground"
},
"$:/language/Docs/PaletteColours/modal-backdrop": {
"title": "$:/language/Docs/PaletteColours/modal-backdrop",
"text": "Modal backdrop"
},
"$:/language/Docs/PaletteColours/modal-background": {
"title": "$:/language/Docs/PaletteColours/modal-background",
"text": "Modal background"
},
"$:/language/Docs/PaletteColours/modal-border": {
"title": "$:/language/Docs/PaletteColours/modal-border",
"text": "Modal border"
},
"$:/language/Docs/PaletteColours/modal-footer-background": {
"title": "$:/language/Docs/PaletteColours/modal-footer-background",
"text": "Modal footer background"
},
"$:/language/Docs/PaletteColours/modal-footer-border": {
"title": "$:/language/Docs/PaletteColours/modal-footer-border",
"text": "Modal footer border"
},
"$:/language/Docs/PaletteColours/modal-header-border": {
"title": "$:/language/Docs/PaletteColours/modal-header-border",
"text": "Modal header border"
},
"$:/language/Docs/PaletteColours/muted-foreground": {
"title": "$:/language/Docs/PaletteColours/muted-foreground",
"text": "General muted foreground"
},
"$:/language/Docs/PaletteColours/notification-background": {
"title": "$:/language/Docs/PaletteColours/notification-background",
"text": "Notification background"
},
"$:/language/Docs/PaletteColours/notification-border": {
"title": "$:/language/Docs/PaletteColours/notification-border",
"text": "Notification border"
},
"$:/language/Docs/PaletteColours/page-background": {
"title": "$:/language/Docs/PaletteColours/page-background",
"text": "Page background"
},
"$:/language/Docs/PaletteColours/pre-background": {
"title": "$:/language/Docs/PaletteColours/pre-background",
"text": "Preformatted code background"
},
"$:/language/Docs/PaletteColours/pre-border": {
"title": "$:/language/Docs/PaletteColours/pre-border",
"text": "Preformatted code border"
},
"$:/language/Docs/PaletteColours/primary": {
"title": "$:/language/Docs/PaletteColours/primary",
"text": "General primary"
},
"$:/language/Docs/PaletteColours/sidebar-button-foreground": {
"title": "$:/language/Docs/PaletteColours/sidebar-button-foreground",
"text": "Sidebar button foreground"
},
"$:/language/Docs/PaletteColours/sidebar-controls-foreground-hover": {
"title": "$:/language/Docs/PaletteColours/sidebar-controls-foreground-hover",
"text": "Sidebar controls foreground hover"
},
"$:/language/Docs/PaletteColours/sidebar-controls-foreground": {
"title": "$:/language/Docs/PaletteColours/sidebar-controls-foreground",
"text": "Sidebar controls foreground"
},
"$:/language/Docs/PaletteColours/sidebar-foreground-shadow": {
"title": "$:/language/Docs/PaletteColours/sidebar-foreground-shadow",
"text": "Sidebar foreground shadow"
},
"$:/language/Docs/PaletteColours/sidebar-foreground": {
"title": "$:/language/Docs/PaletteColours/sidebar-foreground",
"text": "Sidebar foreground"
},
"$:/language/Docs/PaletteColours/sidebar-muted-foreground-hover": {
"title": "$:/language/Docs/PaletteColours/sidebar-muted-foreground-hover",
"text": "Sidebar muted foreground hover"
},
"$:/language/Docs/PaletteColours/sidebar-muted-foreground": {
"title": "$:/language/Docs/PaletteColours/sidebar-muted-foreground",
"text": "Sidebar muted foreground"
},
"$:/language/Docs/PaletteColours/sidebar-tab-background-selected": {
"title": "$:/language/Docs/PaletteColours/sidebar-tab-background-selected",
"text": "Sidebar tab background for selected tabs"
},
"$:/language/Docs/PaletteColours/sidebar-tab-background": {
"title": "$:/language/Docs/PaletteColours/sidebar-tab-background",
"text": "Sidebar tab background"
},
"$:/language/Docs/PaletteColours/sidebar-tab-border-selected": {
"title": "$:/language/Docs/PaletteColours/sidebar-tab-border-selected",
"text": "Sidebar tab border for selected tabs"
},
"$:/language/Docs/PaletteColours/sidebar-tab-border": {
"title": "$:/language/Docs/PaletteColours/sidebar-tab-border",
"text": "Sidebar tab border"
},
"$:/language/Docs/PaletteColours/sidebar-tab-divider": {
"title": "$:/language/Docs/PaletteColours/sidebar-tab-divider",
"text": "Sidebar tab divider"
},
"$:/language/Docs/PaletteColours/sidebar-tab-foreground-selected": {
"title": "$:/language/Docs/PaletteColours/sidebar-tab-foreground-selected",
"text": "Sidebar tab foreground for selected tabs"
},
"$:/language/Docs/PaletteColours/sidebar-tab-foreground": {
"title": "$:/language/Docs/PaletteColours/sidebar-tab-foreground",
"text": "Sidebar tab foreground"
},
"$:/language/Docs/PaletteColours/sidebar-tiddler-link-foreground-hover": {
"title": "$:/language/Docs/PaletteColours/sidebar-tiddler-link-foreground-hover",
"text": "Sidebar tiddler link foreground hover"
},
"$:/language/Docs/PaletteColours/sidebar-tiddler-link-foreground": {
"title": "$:/language/Docs/PaletteColours/sidebar-tiddler-link-foreground",
"text": "Sidebar tiddler link foreground"
},
"$:/language/Docs/PaletteColours/site-title-foreground": {
"title": "$:/language/Docs/PaletteColours/site-title-foreground",
"text": "Site title foreground"
},
"$:/language/Docs/PaletteColours/static-alert-foreground": {
"title": "$:/language/Docs/PaletteColours/static-alert-foreground",
"text": "Static alert foreground"
},
"$:/language/Docs/PaletteColours/tab-background-selected": {
"title": "$:/language/Docs/PaletteColours/tab-background-selected",
"text": "Tab background for selected tabs"
},
"$:/language/Docs/PaletteColours/tab-background": {
"title": "$:/language/Docs/PaletteColours/tab-background",
"text": "Tab background"
},
"$:/language/Docs/PaletteColours/tab-border-selected": {
"title": "$:/language/Docs/PaletteColours/tab-border-selected",
"text": "Tab border for selected tabs"
},
"$:/language/Docs/PaletteColours/tab-border": {
"title": "$:/language/Docs/PaletteColours/tab-border",
"text": "Tab border"
},
"$:/language/Docs/PaletteColours/tab-divider": {
"title": "$:/language/Docs/PaletteColours/tab-divider",
"text": "Tab divider"
},
"$:/language/Docs/PaletteColours/tab-foreground-selected": {
"title": "$:/language/Docs/PaletteColours/tab-foreground-selected",
"text": "Tab foreground for selected tabs"
},
"$:/language/Docs/PaletteColours/tab-foreground": {
"title": "$:/language/Docs/PaletteColours/tab-foreground",
"text": "Tab foreground"
},
"$:/language/Docs/PaletteColours/table-border": {
"title": "$:/language/Docs/PaletteColours/table-border",
"text": "Table border"
},
"$:/language/Docs/PaletteColours/table-footer-background": {
"title": "$:/language/Docs/PaletteColours/table-footer-background",
"text": "Table footer background"
},
"$:/language/Docs/PaletteColours/table-header-background": {
"title": "$:/language/Docs/PaletteColours/table-header-background",
"text": "Table header background"
},
"$:/language/Docs/PaletteColours/tag-background": {
"title": "$:/language/Docs/PaletteColours/tag-background",
"text": "Tag background"
},
"$:/language/Docs/PaletteColours/tag-foreground": {
"title": "$:/language/Docs/PaletteColours/tag-foreground",
"text": "Tag foreground"
},
"$:/language/Docs/PaletteColours/tiddler-background": {
"title": "$:/language/Docs/PaletteColours/tiddler-background",
"text": "Tiddler background"
},
"$:/language/Docs/PaletteColours/tiddler-border": {
"title": "$:/language/Docs/PaletteColours/tiddler-border",
"text": "Tiddler border"
},
"$:/language/Docs/PaletteColours/tiddler-controls-foreground-hover": {
"title": "$:/language/Docs/PaletteColours/tiddler-controls-foreground-hover",
"text": "Tiddler controls foreground hover"
},
"$:/language/Docs/PaletteColours/tiddler-controls-foreground-selected": {
"title": "$:/language/Docs/PaletteColours/tiddler-controls-foreground-selected",
"text": "Tiddler controls foreground for selected controls"
},
"$:/language/Docs/PaletteColours/tiddler-controls-foreground": {
"title": "$:/language/Docs/PaletteColours/tiddler-controls-foreground",
"text": "Tiddler controls foreground"
},
"$:/language/Docs/PaletteColours/tiddler-editor-background": {
"title": "$:/language/Docs/PaletteColours/tiddler-editor-background",
"text": "Tiddler editor background"
},
"$:/language/Docs/PaletteColours/tiddler-editor-border-image": {
"title": "$:/language/Docs/PaletteColours/tiddler-editor-border-image",
"text": "Tiddler editor border image"
},
"$:/language/Docs/PaletteColours/tiddler-editor-border": {
"title": "$:/language/Docs/PaletteColours/tiddler-editor-border",
"text": "Tiddler editor border"
},
"$:/language/Docs/PaletteColours/tiddler-editor-fields-even": {
"title": "$:/language/Docs/PaletteColours/tiddler-editor-fields-even",
"text": "Tiddler editor background for even fields"
},
"$:/language/Docs/PaletteColours/tiddler-editor-fields-odd": {
"title": "$:/language/Docs/PaletteColours/tiddler-editor-fields-odd",
"text": "Tiddler editor background for odd fields"
},
"$:/language/Docs/PaletteColours/tiddler-info-background": {
"title": "$:/language/Docs/PaletteColours/tiddler-info-background",
"text": "Tiddler info panel background"
},
"$:/language/Docs/PaletteColours/tiddler-info-border": {
"title": "$:/language/Docs/PaletteColours/tiddler-info-border",
"text": "Tiddler info panel border"
},
"$:/language/Docs/PaletteColours/tiddler-info-tab-background": {
"title": "$:/language/Docs/PaletteColours/tiddler-info-tab-background",
"text": "Tiddler info panel tab background"
},
"$:/language/Docs/PaletteColours/tiddler-link-background": {
"title": "$:/language/Docs/PaletteColours/tiddler-link-background",
"text": "Tiddler link background"
},
"$:/language/Docs/PaletteColours/tiddler-link-foreground": {
"title": "$:/language/Docs/PaletteColours/tiddler-link-foreground",
"text": "Tiddler link foreground"
},
"$:/language/Docs/PaletteColours/tiddler-subtitle-foreground": {
"title": "$:/language/Docs/PaletteColours/tiddler-subtitle-foreground",
"text": "Tiddler subtitle foreground"
},
"$:/language/Docs/PaletteColours/tiddler-title-foreground": {
"title": "$:/language/Docs/PaletteColours/tiddler-title-foreground",
"text": "Tiddler title foreground"
},
"$:/language/Docs/PaletteColours/toolbar-new-button": {
"title": "$:/language/Docs/PaletteColours/toolbar-new-button",
"text": "Toolbar 'new tiddler' button foreground"
},
"$:/language/Docs/PaletteColours/toolbar-options-button": {
"title": "$:/language/Docs/PaletteColours/toolbar-options-button",
"text": "Toolbar 'options' button foreground"
},
"$:/language/Docs/PaletteColours/toolbar-save-button": {
"title": "$:/language/Docs/PaletteColours/toolbar-save-button",
"text": "Toolbar 'save' button foreground"
},
"$:/language/Docs/PaletteColours/toolbar-info-button": {
"title": "$:/language/Docs/PaletteColours/toolbar-info-button",
"text": "Toolbar 'info' button foreground"
},
"$:/language/Docs/PaletteColours/toolbar-edit-button": {
"title": "$:/language/Docs/PaletteColours/toolbar-edit-button",
"text": "Toolbar 'edit' button foreground"
},
"$:/language/Docs/PaletteColours/toolbar-close-button": {
"title": "$:/language/Docs/PaletteColours/toolbar-close-button",
"text": "Toolbar 'close' button foreground"
},
"$:/language/Docs/PaletteColours/toolbar-delete-button": {
"title": "$:/language/Docs/PaletteColours/toolbar-delete-button",
"text": "Toolbar 'delete' button foreground"
},
"$:/language/Docs/PaletteColours/toolbar-cancel-button": {
"title": "$:/language/Docs/PaletteColours/toolbar-cancel-button",
"text": "Toolbar 'cancel' button foreground"
},
"$:/language/Docs/PaletteColours/toolbar-done-button": {
"title": "$:/language/Docs/PaletteColours/toolbar-done-button",
"text": "Toolbar 'done' button foreground"
},
"$:/language/Docs/PaletteColours/untagged-background": {
"title": "$:/language/Docs/PaletteColours/untagged-background",
"text": "Untagged pill background"
},
"$:/language/Docs/PaletteColours/very-muted-foreground": {
"title": "$:/language/Docs/PaletteColours/very-muted-foreground",
"text": "Very muted foreground"
},
"$:/language/EditTemplate/Body/External/Hint": {
"title": "$:/language/EditTemplate/Body/External/Hint",
"text": "This is an external tiddler stored outside of the main TiddlyWiki file. You can edit the tags and fields but cannot directly edit the content itself"
},
"$:/language/EditTemplate/Body/Placeholder": {
"title": "$:/language/EditTemplate/Body/Placeholder",
"text": "Type the text for this tiddler"
},
"$:/language/EditTemplate/Body/Preview/Type/Output": {
"title": "$:/language/EditTemplate/Body/Preview/Type/Output",
"text": "output"
},
"$:/language/EditTemplate/Field/Remove/Caption": {
"title": "$:/language/EditTemplate/Field/Remove/Caption",
"text": "remove field"
},
"$:/language/EditTemplate/Field/Remove/Hint": {
"title": "$:/language/EditTemplate/Field/Remove/Hint",
"text": "Remove field"
},
"$:/language/EditTemplate/Fields/Add/Button": {
"title": "$:/language/EditTemplate/Fields/Add/Button",
"text": "add"
},
"$:/language/EditTemplate/Fields/Add/Name/Placeholder": {
"title": "$:/language/EditTemplate/Fields/Add/Name/Placeholder",
"text": "field name"
},
"$:/language/EditTemplate/Fields/Add/Prompt": {
"title": "$:/language/EditTemplate/Fields/Add/Prompt",
"text": "Add a new field:"
},
"$:/language/EditTemplate/Fields/Add/Value/Placeholder": {
"title": "$:/language/EditTemplate/Fields/Add/Value/Placeholder",
"text": "field value"
},
"$:/language/EditTemplate/Fields/Add/Dropdown/System": {
"title": "$:/language/EditTemplate/Fields/Add/Dropdown/System",
"text": "System fields"
},
"$:/language/EditTemplate/Fields/Add/Dropdown/User": {
"title": "$:/language/EditTemplate/Fields/Add/Dropdown/User",
"text": "User fields"
},
"$:/language/EditTemplate/Shadow/Warning": {
"title": "$:/language/EditTemplate/Shadow/Warning",
"text": "This is a shadow tiddler. Any changes you make will override the default version from the plugin <<pluginLink>>"
},
"$:/language/EditTemplate/Shadow/OverriddenWarning": {
"title": "$:/language/EditTemplate/Shadow/OverriddenWarning",
"text": "This is a modified shadow tiddler. You can revert to the default version in the plugin <<pluginLink>> by deleting this tiddler"
},
"$:/language/EditTemplate/Tags/Add/Button": {
"title": "$:/language/EditTemplate/Tags/Add/Button",
"text": "add"
},
"$:/language/EditTemplate/Tags/Add/Placeholder": {
"title": "$:/language/EditTemplate/Tags/Add/Placeholder",
"text": "tag name"
},
"$:/language/EditTemplate/Tags/Dropdown/Caption": {
"title": "$:/language/EditTemplate/Tags/Dropdown/Caption",
"text": "tag list"
},
"$:/language/EditTemplate/Tags/Dropdown/Hint": {
"title": "$:/language/EditTemplate/Tags/Dropdown/Hint",
"text": "Show tag list"
},
"$:/language/EditTemplate/Title/BadCharacterWarning": {
"title": "$:/language/EditTemplate/Title/BadCharacterWarning",
"text": "Warning: avoid using any of the characters <<bad-chars>> in tiddler titles"
},
"$:/language/EditTemplate/Type/Dropdown/Caption": {
"title": "$:/language/EditTemplate/Type/Dropdown/Caption",
"text": "content type list"
},
"$:/language/EditTemplate/Type/Dropdown/Hint": {
"title": "$:/language/EditTemplate/Type/Dropdown/Hint",
"text": "Show content type list"
},
"$:/language/EditTemplate/Type/Delete/Caption": {
"title": "$:/language/EditTemplate/Type/Delete/Caption",
"text": "delete content type"
},
"$:/language/EditTemplate/Type/Delete/Hint": {
"title": "$:/language/EditTemplate/Type/Delete/Hint",
"text": "Delete content type"
},
"$:/language/EditTemplate/Type/Placeholder": {
"title": "$:/language/EditTemplate/Type/Placeholder",
"text": "content type"
},
"$:/language/EditTemplate/Type/Prompt": {
"title": "$:/language/EditTemplate/Type/Prompt",
"text": "Type:"
},
"$:/language/Exporters/StaticRiver": {
"title": "$:/language/Exporters/StaticRiver",
"text": "Static HTML"
},
"$:/language/Exporters/JsonFile": {
"title": "$:/language/Exporters/JsonFile",
"text": "JSON file"
},
"$:/language/Exporters/CsvFile": {
"title": "$:/language/Exporters/CsvFile",
"text": "CSV file"
},
"$:/language/Exporters/TidFile": {
"title": "$:/language/Exporters/TidFile",
"text": "\".tid\" file"
},
"$:/language/Docs/Fields/_canonical_uri": {
"title": "$:/language/Docs/Fields/_canonical_uri",
"text": "The full URI of an external image tiddler"
},
"$:/language/Docs/Fields/bag": {
"title": "$:/language/Docs/Fields/bag",
"text": "The name of the bag from which a tiddler came"
},
"$:/language/Docs/Fields/caption": {
"title": "$:/language/Docs/Fields/caption",
"text": "The text to be displayed on a tab or button"
},
"$:/language/Docs/Fields/color": {
"title": "$:/language/Docs/Fields/color",
"text": "The CSS color value associated with a tiddler"
},
"$:/language/Docs/Fields/component": {
"title": "$:/language/Docs/Fields/component",
"text": "The name of the component responsible for an [[alert tiddler|AlertMechanism]]"
},
"$:/language/Docs/Fields/current-tiddler": {
"title": "$:/language/Docs/Fields/current-tiddler",
"text": "Used to cache the top tiddler in a [[history list|HistoryMechanism]]"
},
"$:/language/Docs/Fields/created": {
"title": "$:/language/Docs/Fields/created",
"text": "The date a tiddler was created"
},
"$:/language/Docs/Fields/creator": {
"title": "$:/language/Docs/Fields/creator",
"text": "The name of the person who created a tiddler"
},
"$:/language/Docs/Fields/dependents": {
"title": "$:/language/Docs/Fields/dependents",
"text": "For a plugin, lists the dependent plugin titles"
},
"$:/language/Docs/Fields/description": {
"title": "$:/language/Docs/Fields/description",
"text": "The descriptive text for a plugin, or a modal dialogue"
},
"$:/language/Docs/Fields/draft.of": {
"title": "$:/language/Docs/Fields/draft.of",
"text": "For draft tiddlers, contains the title of the tiddler of which this is a draft"
},
"$:/language/Docs/Fields/draft.title": {
"title": "$:/language/Docs/Fields/draft.title",
"text": "For draft tiddlers, contains the proposed new title of the tiddler"
},
"$:/language/Docs/Fields/footer": {
"title": "$:/language/Docs/Fields/footer",
"text": "The footer text for a wizard"
},
"$:/language/Docs/Fields/hack-to-give-us-something-to-compare-against": {
"title": "$:/language/Docs/Fields/hack-to-give-us-something-to-compare-against",
"text": "A temporary storage field used in [[$:/core/templates/static.content]]"
},
"$:/language/Docs/Fields/icon": {
"title": "$:/language/Docs/Fields/icon",
"text": "The title of the tiddler containing the icon associated with a tiddler"
},
"$:/language/Docs/Fields/library": {
"title": "$:/language/Docs/Fields/library",
"text": "If set to \"yes\" indicates that a tiddler should be saved as a JavaScript library"
},
"$:/language/Docs/Fields/list": {
"title": "$:/language/Docs/Fields/list",
"text": "An ordered list of tiddler titles associated with a tiddler"
},
"$:/language/Docs/Fields/list-before": {
"title": "$:/language/Docs/Fields/list-before",
"text": "If set, the title of a tiddler before which this tiddler should be added to the ordered list of tiddler titles, or at the start of the list if this field is present but empty"
},
"$:/language/Docs/Fields/list-after": {
"title": "$:/language/Docs/Fields/list-after",
"text": "If set, the title of the tiddler after which this tiddler should be added to the ordered list of tiddler titles"
},
"$:/language/Docs/Fields/modified": {
"title": "$:/language/Docs/Fields/modified",
"text": "The date and time at which a tiddler was last modified"
},
"$:/language/Docs/Fields/modifier": {
"title": "$:/language/Docs/Fields/modifier",
"text": "The tiddler title associated with the person who last modified a tiddler"
},
"$:/language/Docs/Fields/name": {
"title": "$:/language/Docs/Fields/name",
"text": "The human readable name associated with a plugin tiddler"
},
"$:/language/Docs/Fields/plugin-priority": {
"title": "$:/language/Docs/Fields/plugin-priority",
"text": "A numerical value indicating the priority of a plugin tiddler"
},
"$:/language/Docs/Fields/plugin-type": {
"title": "$:/language/Docs/Fields/plugin-type",
"text": "The type of plugin in a plugin tiddler"
},
"$:/language/Docs/Fields/revision": {
"title": "$:/language/Docs/Fields/revision",
"text": "The revision of the tiddler held at the server"
},
"$:/language/Docs/Fields/released": {
"title": "$:/language/Docs/Fields/released",
"text": "Date of a TiddlyWiki release"
},
"$:/language/Docs/Fields/source": {
"title": "$:/language/Docs/Fields/source",
"text": "The source URL associated with a tiddler"
},
"$:/language/Docs/Fields/subtitle": {
"title": "$:/language/Docs/Fields/subtitle",
"text": "The subtitle text for a wizard"
},
"$:/language/Docs/Fields/tags": {
"title": "$:/language/Docs/Fields/tags",
"text": "A list of tags associated with a tiddler"
},
"$:/language/Docs/Fields/text": {
"title": "$:/language/Docs/Fields/text",
"text": "The body text of a tiddler"
},
"$:/language/Docs/Fields/title": {
"title": "$:/language/Docs/Fields/title",
"text": "The unique name of a tiddler"
},
"$:/language/Docs/Fields/type": {
"title": "$:/language/Docs/Fields/type",
"text": "The content type of a tiddler"
},
"$:/language/Docs/Fields/version": {
"title": "$:/language/Docs/Fields/version",
"text": "Version information for a plugin"
},
"$:/language/Filters/AllTiddlers": {
"title": "$:/language/Filters/AllTiddlers",
"text": "All tiddlers except system tiddlers"
},
"$:/language/Filters/RecentSystemTiddlers": {
"title": "$:/language/Filters/RecentSystemTiddlers",
"text": "Recently modified tiddlers, including system tiddlers"
},
"$:/language/Filters/RecentTiddlers": {
"title": "$:/language/Filters/RecentTiddlers",
"text": "Recently modified tiddlers"
},
"$:/language/Filters/AllTags": {
"title": "$:/language/Filters/AllTags",
"text": "All tags except system tags"
},
"$:/language/Filters/Missing": {
"title": "$:/language/Filters/Missing",
"text": "Missing tiddlers"
},
"$:/language/Filters/Drafts": {
"title": "$:/language/Filters/Drafts",
"text": "Draft tiddlers"
},
"$:/language/Filters/Orphans": {
"title": "$:/language/Filters/Orphans",
"text": "Orphan tiddlers"
},
"$:/language/Filters/SystemTiddlers": {
"title": "$:/language/Filters/SystemTiddlers",
"text": "System tiddlers"
},
"$:/language/Filters/ShadowTiddlers": {
"title": "$:/language/Filters/ShadowTiddlers",
"text": "Shadow tiddlers"
},
"$:/language/Filters/OverriddenShadowTiddlers": {
"title": "$:/language/Filters/OverriddenShadowTiddlers",
"text": "Overridden shadow tiddlers"
},
"$:/language/Filters/SystemTags": {
"title": "$:/language/Filters/SystemTags",
"text": "System tags"
},
"$:/language/Filters/TypedTiddlers": {
"title": "$:/language/Filters/TypedTiddlers",
"text": "Non wiki-text tiddlers"
},
"GettingStarted": {
"title": "GettingStarted",
"text": "\\define lingo-base() $:/language/ControlPanel/Basics/\nWelcome to ~TiddlyWiki and the ~TiddlyWiki community\n\nBefore you start storing important information in ~TiddlyWiki it is important to make sure that you can reliably save changes. See http://tiddlywiki.com/#GettingStarted for details\n\n!! Set up this ~TiddlyWiki\n\n<div class=\"tc-control-panel\">\n\n|<$link to=\"$:/SiteTitle\"><<lingo Title/Prompt>></$link> |<$edit-text tiddler=\"$:/SiteTitle\" default=\"\" tag=\"input\"/> |\n|<$link to=\"$:/SiteSubtitle\"><<lingo Subtitle/Prompt>></$link> |<$edit-text tiddler=\"$:/SiteSubtitle\" default=\"\" tag=\"input\"/> |\n|<$link to=\"$:/DefaultTiddlers\"><<lingo DefaultTiddlers/Prompt>></$link> |<<lingo DefaultTiddlers/TopHint>><br> <$edit tag=\"textarea\" tiddler=\"$:/DefaultTiddlers\"/><br>//<<lingo DefaultTiddlers/BottomHint>>// |\n</div>\n\nSee the [[control panel|$:/ControlPanel]] for more options.\n"
},
"$:/language/Help/build": {
"title": "$:/language/Help/build",
"description": "Automatically run configured commands",
"text": "Build the specified build targets for the current wiki. If no build targets are specified then all available targets will be built.\n\n```\n--build <target> [<target> ...]\n```\n\nBuild targets are defined in the `tiddlywiki.info` file of a wiki folder.\n\n"
},
"$:/language/Help/clearpassword": {
"title": "$:/language/Help/clearpassword",
"description": "Clear a password for subsequent crypto operations",
"text": "Clear the password for subsequent crypto operations\n\n```\n--clearpassword\n```\n"
},
"$:/language/Help/default": {
"title": "$:/language/Help/default",
"text": "\\define commandTitle()\n$:/language/Help/$(command)$\n\\end\n```\nusage: tiddlywiki [<wikifolder>] [--<command> [<args>...]...]\n```\n\nAvailable commands:\n\n<ul>\n<$list filter=\"[commands[]sort[title]]\" variable=\"command\">\n<li><$link to=<<commandTitle>>><$macrocall $name=\"command\" $type=\"text/plain\" $output=\"text/plain\"/></$link>: <$transclude tiddler=<<commandTitle>> field=\"description\"/></li>\n</$list>\n</ul>\n\nTo get detailed help on a command:\n\n```\ntiddlywiki --help <command>\n```\n"
},
"$:/language/Help/editions": {
"title": "$:/language/Help/editions",
"description": "Lists the available editions of TiddlyWiki",
"text": "Lists the names and descriptions of the available editions. You can create a new wiki of a specified edition with the `--init` command.\n\n```\n--editions\n```\n"
},
"$:/language/Help/help": {
"title": "$:/language/Help/help",
"description": "Display help for TiddlyWiki commands",
"text": "Displays help text for a command:\n\n```\n--help [<command>]\n```\n\nIf the command name is omitted then a list of available commands is displayed.\n"
},
"$:/language/Help/init": {
"title": "$:/language/Help/init",
"description": "Initialise a new wiki folder",
"text": "Initialise an empty [[WikiFolder|WikiFolders]] with a copy of the specified edition.\n\n```\n--init <edition> [<edition> ...]\n```\n\nFor example:\n\n```\ntiddlywiki ./MyWikiFolder --init empty\n```\n\nNote:\n\n* The wiki folder directory will be created if necessary\n* The \"edition\" defaults to ''empty''\n* The init command will fail if the wiki folder is not empty\n* The init command removes any `includeWikis` definitions in the edition's `tiddlywiki.info` file\n* When multiple editions are specified, editions initialised later will overwrite any files shared with earlier editions (so, the final `tiddlywiki.info` file will be copied from the last edition)\n* `--editions` returns a list of available editions\n"
},
"$:/language/Help/load": {
"title": "$:/language/Help/load",
"description": "Load tiddlers from a file",
"text": "Load tiddlers from 2.x.x TiddlyWiki files (`.html`), `.tiddler`, `.tid`, `.json` or other files\n\n```\n--load <filepath>\n```\n\nTo load tiddlers from an encrypted TiddlyWiki file you should first specify the password with the PasswordCommand. For example:\n\n```\ntiddlywiki ./MyWiki --password pa55w0rd --load my_encrypted_wiki.html\n```\n\nNote that TiddlyWiki will not load an older version of an already loaded plugin.\n"
},
"$:/language/Help/makelibrary": {
"title": "$:/language/Help/makelibrary",
"description": "Construct library plugin required by upgrade process",
"text": "Constructs the `$:/UpgradeLibrary` tiddler for the upgrade process.\n\nThe upgrade library is formatted as an ordinary plugin tiddler with the plugin type `library`. It contains a copy of each of the plugins, themes and language packs available within the TiddlyWiki5 repository.\n\nThis command is intended for internal use; it is only relevant to users constructing a custom upgrade procedure.\n\n```\n--makelibrary <title>\n```\n\nThe title argument defaults to `$:/UpgradeLibrary`.\n"
},
"$:/language/Help/notfound": {
"title": "$:/language/Help/notfound",
"text": "No such help item"
},
"$:/language/Help/output": {
"title": "$:/language/Help/output",
"description": "Set the base output directory for subsequent commands",
"text": "Sets the base output directory for subsequent commands. The default output directory is the `output` subdirectory of the edition directory.\n\n```\n--output <pathname>\n```\n\nIf the specified pathname is relative then it is resolved relative to the current working directory. For example `--output .` sets the output directory to the current working directory.\n\n"
},
"$:/language/Help/password": {
"title": "$:/language/Help/password",
"description": "Set a password for subsequent crypto operations",
"text": "Set a password for subsequent crypto operations\n\n```\n--password <password>\n```\n\n''Note'': This should not be used for serving TiddlyWiki with password protection. Instead, see the password option under the [[ServerCommand]].\n"
},
"$:/language/Help/rendertiddler": {
"title": "$:/language/Help/rendertiddler",
"description": "Render an individual tiddler as a specified ContentType",
"text": "Render an individual tiddler as a specified ContentType, defaulting to `text/html` and save it to the specified filename. Optionally a template can be specified, in which case the template tiddler is rendered with the \"currentTiddler\" variable set to the tiddler that is being rendered (the first parameter value).\n\n```\n--rendertiddler <title> <filename> [<type>] [<template>]\n```\n\nBy default, the filename is resolved relative to the `output` subdirectory of the edition directory. The `--output` command can be used to direct output to a different directory.\n\nAny missing directories in the path to the filename are automatically created.\n"
},
"$:/language/Help/rendertiddlers": {
"title": "$:/language/Help/rendertiddlers",
"description": "Render tiddlers matching a filter to a specified ContentType",
"text": "Render a set of tiddlers matching a filter to separate files of a specified ContentType (defaults to `text/html`) and extension (defaults to `.html`).\n\n```\n--rendertiddlers <filter> <template> <pathname> [<type>] [<extension>] [\"noclean\"]\n```\n\nFor example:\n\n```\n--rendertiddlers [!is[system]] $:/core/templates/static.tiddler.html ./static text/plain\n```\n\nBy default, the pathname is resolved relative to the `output` subdirectory of the edition directory. The `--output` command can be used to direct output to a different directory.\n\nAny files in the target directory are deleted unless the ''noclean'' flag is specified. The target directory is recursively created if it is missing.\n"
},
"$:/language/Help/savetiddler": {
"title": "$:/language/Help/savetiddler",
"description": "Saves a raw tiddler to a file",
"text": "Saves an individual tiddler in its raw text or binary format to the specified filename.\n\n```\n--savetiddler <title> <filename>\n```\n\nBy default, the filename is resolved relative to the `output` subdirectory of the edition directory. The `--output` command can be used to direct output to a different directory.\n\nAny missing directories in the path to the filename are automatically created.\n"
},
"$:/language/Help/savetiddlers": {
"title": "$:/language/Help/savetiddlers",
"description": "Saves a group of raw tiddlers to a directory",
"text": "Saves a group of tiddlers in their raw text or binary format to the specified directory.\n\n```\n--savetiddlers <filter> <pathname> [\"noclean\"]\n```\n\nBy default, the pathname is resolved relative to the `output` subdirectory of the edition directory. The `--output` command can be used to direct output to a different directory.\n\nThe output directory is cleared of existing files before saving the specified files. The deletion can be disabled by specifying the ''noclean'' flag.\n\nAny missing directories in the pathname are automatically created.\n"
},
"$:/language/Help/server": {
"title": "$:/language/Help/server",
"description": "Provides an HTTP server interface to TiddlyWiki",
"text": "The server built in to TiddlyWiki5 is very simple. Although compatible with TiddlyWeb it doesn't support many of the features needed for robust Internet-facing usage.\n\nAt the root, it serves a rendering of a specified tiddler. Away from the root, it serves individual tiddlers encoded in JSON, and supports the basic HTTP operations for `GET`, `PUT` and `DELETE`.\n\n```\n--server <port> <roottiddler> <rendertype> <servetype> <username> <password> <host> <pathprefix>\n```\n\nThe parameters are:\n\n* ''port'' - port number to serve from (defaults to \"8080\")\n* ''roottiddler'' - the tiddler to serve at the root (defaults to \"$:/core/save/all\")\n* ''rendertype'' - the content type to which the root tiddler should be rendered (defaults to \"text/plain\")\n* ''servetype'' - the content type with which the root tiddler should be served (defaults to \"text/html\")\n* ''username'' - the default username for signing edits\n* ''password'' - optional password for basic authentication\n* ''host'' - optional hostname to serve from (defaults to \"127.0.0.1\" aka \"localhost\")\n* ''pathprefix'' - optional prefix for paths\n\nIf the password parameter is specified then the browser will prompt the user for the username and password. Note that the password is transmitted in plain text so this implementation isn't suitable for general use.\n\nFor example:\n\n```\n--server 8080 $:/core/save/all text/plain text/html MyUserName passw0rd\n```\n\nThe username and password can be specified as empty strings if you need to set the hostname or pathprefix and don't want to require a password:\n\n```\n--server 8080 $:/core/save/all text/plain text/html \"\" \"\" 192.168.0.245\n```\n\nTo run multiple TiddlyWiki servers at the same time you'll need to put each one on a different port.\n"
},
"$:/language/Help/setfield": {
"title": "$:/language/Help/setfield",
"description": "Prepares external tiddlers for use",
"text": "//Note that this command is experimental and may change or be replaced before being finalised//\n\nSets the specified field of a group of tiddlers to the result of wikifying a template tiddler with the `currentTiddler` variable set to the tiddler.\n\n```\n--setfield <filter> <fieldname> <templatetitle> <rendertype>\n```\n\nThe parameters are:\n\n* ''filter'' - filter identifying the tiddlers to be affected\n* ''fieldname'' - the field to modify (defaults to \"text\")\n* ''templatetitle'' - the tiddler to wikify into the specified field. If blank or missing then the specified field is deleted\n* ''rendertype'' - the text type to render (defaults to \"text/plain\"; \"text/html\" can be used to include HTML tags)\n"
},
"$:/language/Help/unpackplugin": {
"title": "$:/language/Help/unpackplugin",
"description": "Unpack the payload tiddlers from a plugin",
"text": "Extract the payload tiddlers from a plugin, creating them as ordinary tiddlers:\n\n```\n--unpackplugin <title>\n```\n"
},
"$:/language/Help/verbose": {
"title": "$:/language/Help/verbose",
"description": "Triggers verbose output mode",
"text": "Triggers verbose output, useful for debugging\n\n```\n--verbose\n```\n"
},
"$:/language/Help/version": {
"title": "$:/language/Help/version",
"description": "Displays the version number of TiddlyWiki",
"text": "Displays the version number of TiddlyWiki.\n\n```\n--version\n```\n"
},
"$:/languages/en-GB/icon": {
"title": "$:/languages/en-GB/icon",
"type": "image/svg+xml",
"text": "<svg xmlns=\"http://www.w3.org/2000/svg\" viewBox=\"0 0 60 30\" width=\"1200\" height=\"600\">\n<clipPath id=\"t\">\n\t<path d=\"M30,15 h30 v15 z v15 h-30 z h-30 v-15 z v-15 h30 z\"/>\n</clipPath>\n<path d=\"M0,0 v30 h60 v-30 z\" fill=\"#00247d\"/>\n<path d=\"M0,0 L60,30 M60,0 L0,30\" stroke=\"#fff\" stroke-width=\"6\"/>\n<path d=\"M0,0 L60,30 M60,0 L0,30\" clip-path=\"url(#t)\" stroke=\"#cf142b\" stroke-width=\"4\"/>\n<path d=\"M30,0 v30 M0,15 h60\" stroke=\"#fff\" stroke-width=\"10\"/>\n<path d=\"M30,0 v30 M0,15 h60\" stroke=\"#cf142b\" stroke-width=\"6\"/>\n</svg>\n"
},
"$:/language/Import/Imported/Hint": {
"title": "$:/language/Import/Imported/Hint",
"text": "The following tiddlers were imported:"
},
"$:/language/Import/Listing/Cancel/Caption": {
"title": "$:/language/Import/Listing/Cancel/Caption",
"text": "Cancel"
},
"$:/language/Import/Listing/Hint": {
"title": "$:/language/Import/Listing/Hint",
"text": "These tiddlers are ready to import:"
},
"$:/language/Import/Listing/Import/Caption": {
"title": "$:/language/Import/Listing/Import/Caption",
"text": "Import"
},
"$:/language/Import/Listing/Select/Caption": {
"title": "$:/language/Import/Listing/Select/Caption",
"text": "Select"
},
"$:/language/Import/Listing/Status/Caption": {
"title": "$:/language/Import/Listing/Status/Caption",
"text": "Status"
},
"$:/language/Import/Listing/Title/Caption": {
"title": "$:/language/Import/Listing/Title/Caption",
"text": "Title"
},
"$:/language/Import/Upgrader/Plugins/Suppressed/Incompatible": {
"title": "$:/language/Import/Upgrader/Plugins/Suppressed/Incompatible",
"text": "Blocked incompatible or obsolete plugin"
},
"$:/language/Import/Upgrader/Plugins/Suppressed/Version": {
"title": "$:/language/Import/Upgrader/Plugins/Suppressed/Version",
"text": "Blocked plugin (due to incoming <<incoming>> being older than existing <<existing>>)"
},
"$:/language/Import/Upgrader/Plugins/Upgraded": {
"title": "$:/language/Import/Upgrader/Plugins/Upgraded",
"text": "Upgraded plugin from <<incoming>> to <<upgraded>>"
},
"$:/language/Import/Upgrader/State/Suppressed": {
"title": "$:/language/Import/Upgrader/State/Suppressed",
"text": "Blocked temporary state tiddler"
},
"$:/language/Import/Upgrader/System/Suppressed": {
"title": "$:/language/Import/Upgrader/System/Suppressed",
"text": "Blocked system tiddler"
},
"$:/language/Import/Upgrader/ThemeTweaks/Created": {
"title": "$:/language/Import/Upgrader/ThemeTweaks/Created",
"text": "Migrated theme tweak from <$text text=<<from>>/>"
},
"$:/language/AboveStory/ClassicPlugin/Warning": {
"title": "$:/language/AboveStory/ClassicPlugin/Warning",
"text": "It looks like you are trying to load a plugin designed for ~TiddlyWiki Classic. Please note that [[these plugins do not work with TiddlyWiki version 5.x.x|http://tiddlywiki.com/#TiddlyWikiClassic]]. ~TiddlyWiki Classic plugins detected:"
},
"$:/language/BinaryWarning/Prompt": {
"title": "$:/language/BinaryWarning/Prompt",
"text": "This tiddler contains binary data"
},
"$:/language/ClassicWarning/Hint": {
"title": "$:/language/ClassicWarning/Hint",
"text": "This tiddler is written in TiddlyWiki Classic wiki text format, which is not fully compatible with TiddlyWiki version 5. See http://tiddlywiki.com/static/Upgrading.html for more details."
},
"$:/language/ClassicWarning/Upgrade/Caption": {
"title": "$:/language/ClassicWarning/Upgrade/Caption",
"text": "upgrade"
},
"$:/language/CloseAll/Button": {
"title": "$:/language/CloseAll/Button",
"text": "close all"
},
"$:/language/ColourPicker/Recent": {
"title": "$:/language/ColourPicker/Recent",
"text": "Recent:"
},
"$:/language/ConfirmCancelTiddler": {
"title": "$:/language/ConfirmCancelTiddler",
"text": "Do you wish to discard changes to the tiddler \"<$text text=<<title>>/>\"?"
},
"$:/language/ConfirmDeleteTiddler": {
"title": "$:/language/ConfirmDeleteTiddler",
"text": "Do you wish to delete the tiddler \"<$text text=<<title>>/>\"?"
},
"$:/language/ConfirmOverwriteTiddler": {
"title": "$:/language/ConfirmOverwriteTiddler",
"text": "Do you wish to overwrite the tiddler \"<$text text=<<title>>/>\"?"
},
"$:/language/ConfirmEditShadowTiddler": {
"title": "$:/language/ConfirmEditShadowTiddler",
"text": "You are about to edit a ShadowTiddler. Any changes will override the default system making future upgrades non-trivial. Are you sure you want to edit \"<$text text=<<title>>/>\"?"
},
"$:/language/Count": {
"title": "$:/language/Count",
"text": "count"
},
"$:/language/DefaultNewTiddlerTitle": {
"title": "$:/language/DefaultNewTiddlerTitle",
"text": "New Tiddler"
},
"$:/language/DropMessage": {
"title": "$:/language/DropMessage",
"text": "Drop here (or use the 'Escape' key to cancel)"
},
"$:/language/Encryption/Cancel": {
"title": "$:/language/Encryption/Cancel",
"text": "Cancel"
},
"$:/language/Encryption/ConfirmClearPassword": {
"title": "$:/language/Encryption/ConfirmClearPassword",
"text": "Do you wish to clear the password? This will remove the encryption applied when saving this wiki"
},
"$:/language/Encryption/PromptSetPassword": {
"title": "$:/language/Encryption/PromptSetPassword",
"text": "Set a new password for this TiddlyWiki"
},
"$:/language/Encryption/Username": {
"title": "$:/language/Encryption/Username",
"text": "Username"
},
"$:/language/Encryption/Password": {
"title": "$:/language/Encryption/Password",
"text": "Password"
},
"$:/language/Encryption/RepeatPassword": {
"title": "$:/language/Encryption/RepeatPassword",
"text": "Repeat password"
},
"$:/language/Encryption/PasswordNoMatch": {
"title": "$:/language/Encryption/PasswordNoMatch",
"text": "Passwords do not match"
},
"$:/language/Encryption/SetPassword": {
"title": "$:/language/Encryption/SetPassword",
"text": "Set password"
},
"$:/language/Error/Caption": {
"title": "$:/language/Error/Caption",
"text": "Error"
},
"$:/language/Error/Filter": {
"title": "$:/language/Error/Filter",
"text": "Filter error"
},
"$:/language/Error/FilterSyntax": {
"title": "$:/language/Error/FilterSyntax",
"text": "Syntax error in filter expression"
},
"$:/language/Error/IsFilterOperator": {
"title": "$:/language/Error/IsFilterOperator",
"text": "Filter Error: Unknown operand for the 'is' filter operator"
},
"$:/language/Error/LoadingPluginLibrary": {
"title": "$:/language/Error/LoadingPluginLibrary",
"text": "Error loading plugin library"
},
"$:/language/Error/RecursiveTransclusion": {
"title": "$:/language/Error/RecursiveTransclusion",
"text": "Recursive transclusion error in transclude widget"
},
"$:/language/Error/RetrievingSkinny": {
"title": "$:/language/Error/RetrievingSkinny",
"text": "Error retrieving skinny tiddler list"
},
"$:/language/Error/SavingToTWEdit": {
"title": "$:/language/Error/SavingToTWEdit",
"text": "Error saving to TWEdit"
},
"$:/language/Error/WhileSaving": {
"title": "$:/language/Error/WhileSaving",
"text": "Error while saving"
},
"$:/language/Error/XMLHttpRequest": {
"title": "$:/language/Error/XMLHttpRequest",
"text": "XMLHttpRequest error code"
},
"$:/language/InternalJavaScriptError/Title": {
"title": "$:/language/InternalJavaScriptError/Title",
"text": "Internal JavaScript Error"
},
"$:/language/InternalJavaScriptError/Hint": {
"title": "$:/language/InternalJavaScriptError/Hint",
"text": "Well, this is embarrassing. It is recommended that you restart TiddlyWiki by refreshing your browser"
},
"$:/language/InvalidFieldName": {
"title": "$:/language/InvalidFieldName",
"text": "Illegal characters in field name \"<$text text=<<fieldName>>/>\". Fields can only contain lowercase letters, digits and the characters underscore (`_`), hyphen (`-`) and period (`.`)"
},
"$:/language/LazyLoadingWarning": {
"title": "$:/language/LazyLoadingWarning",
"text": "<p>Loading external text from ''<$text text={{!!_canonical_uri}}/>''</p><p>If this message doesn't disappear you may be using a browser that doesn't support external text in this configuration. See http://tiddlywiki.com/#ExternalText</p>"
},
"$:/language/LoginToTiddlySpace": {
"title": "$:/language/LoginToTiddlySpace",
"text": "Login to TiddlySpace"
},
"$:/language/MissingTiddler/Hint": {
"title": "$:/language/MissingTiddler/Hint",
"text": "Missing tiddler \"<$text text=<<currentTiddler>>/>\" - click {{$:/core/images/edit-button}} to create"
},
"$:/language/No": {
"title": "$:/language/No",
"text": "No"
},
"$:/language/OfficialPluginLibrary": {
"title": "$:/language/OfficialPluginLibrary",
"text": "Official ~TiddlyWiki Plugin Library"
},
"$:/language/OfficialPluginLibrary/Hint": {
"title": "$:/language/OfficialPluginLibrary/Hint",
"text": "The official ~TiddlyWiki plugin library at tiddlywiki.com. Plugins, themes and language packs are maintained by the core team."
},
"$:/language/PluginReloadWarning": {
"title": "$:/language/PluginReloadWarning",
"text": "Please save {{$:/core/ui/Buttons/save-wiki}} and reload {{$:/core/ui/Buttons/refresh}} to allow changes to plugins to take effect"
},
"$:/language/RecentChanges/DateFormat": {
"title": "$:/language/RecentChanges/DateFormat",
"text": "DDth MMM YYYY"
},
"$:/language/SystemTiddler/Tooltip": {
"title": "$:/language/SystemTiddler/Tooltip",
"text": "This is a system tiddler"
},
"$:/language/TagManager/Colour/Heading": {
"title": "$:/language/TagManager/Colour/Heading",
"text": "Colour"
},
"$:/language/TagManager/Count/Heading": {
"title": "$:/language/TagManager/Count/Heading",
"text": "Count"
},
"$:/language/TagManager/Icon/Heading": {
"title": "$:/language/TagManager/Icon/Heading",
"text": "Icon"
},
"$:/language/TagManager/Info/Heading": {
"title": "$:/language/TagManager/Info/Heading",
"text": "Info"
},
"$:/language/TagManager/Tag/Heading": {
"title": "$:/language/TagManager/Tag/Heading",
"text": "Tag"
},
"$:/language/Tiddler/DateFormat": {
"title": "$:/language/Tiddler/DateFormat",
"text": "DDth MMM YYYY at hh12:0mmam"
},
"$:/language/UnsavedChangesWarning": {
"title": "$:/language/UnsavedChangesWarning",
"text": "You have unsaved changes in TiddlyWiki"
},
"$:/language/Yes": {
"title": "$:/language/Yes",
"text": "Yes"
},
"$:/language/Modals/Download": {
"title": "$:/language/Modals/Download",
"type": "text/vnd.tiddlywiki",
"subtitle": "Download changes",
"footer": "<$button message=\"tm-close-tiddler\">Close</$button>",
"help": "http://tiddlywiki.com/static/DownloadingChanges.html",
"text": "Your browser only supports manual saving.\n\nTo save your modified wiki, right click on the download link below and select \"Download file\" or \"Save file\", and then choose the folder and filename.\n\n//You can marginally speed things up by clicking the link with the control key (Windows) or the options/alt key (Mac OS X). You will not be prompted for the folder or filename, but your browser is likely to give it an unrecognisable name -- you may need to rename the file to include an `.html` extension before you can do anything useful with it.//\n\nOn smartphones that do not allow files to be downloaded you can instead bookmark the link, and then sync your bookmarks to a desktop computer from where the wiki can be saved normally.\n"
},
"$:/language/Modals/SaveInstructions": {
"title": "$:/language/Modals/SaveInstructions",
"type": "text/vnd.tiddlywiki",
"subtitle": "Save your work",
"footer": "<$button message=\"tm-close-tiddler\">Close</$button>",
"help": "http://tiddlywiki.com/static/SavingChanges.html",
"text": "Your changes to this wiki need to be saved as a ~TiddlyWiki HTML file.\n\n!!! Desktop browsers\n\n# Select ''Save As'' from the ''File'' menu\n# Choose a filename and location\n#* Some browsers also require you to explicitly specify the file saving format as ''Webpage, HTML only'' or similar\n# Close this tab\n\n!!! Smartphone browsers\n\n# Create a bookmark to this page\n#* If you've got iCloud or Google Sync set up then the bookmark will automatically sync to your desktop where you can open it and save it as above\n# Close this tab\n\n//If you open the bookmark again in Mobile Safari you will see this message again. If you want to go ahead and use the file, just click the ''close'' button below//\n"
},
"$:/config/NewJournal/Title": {
"title": "$:/config/NewJournal/Title",
"text": "DDth MMM YYYY"
},
"$:/config/NewJournal/Tags": {
"title": "$:/config/NewJournal/Tags",
"text": "Journal"
},
"$:/language/Notifications/Save/Done": {
"title": "$:/language/Notifications/Save/Done",
"text": "Saved wiki"
},
"$:/language/Notifications/Save/Starting": {
"title": "$:/language/Notifications/Save/Starting",
"text": "Starting to save wiki"
},
"$:/language/Search/DefaultResults/Caption": {
"title": "$:/language/Search/DefaultResults/Caption",
"text": "List"
},
"$:/language/Search/Filter/Caption": {
"title": "$:/language/Search/Filter/Caption",
"text": "Filter"
},
"$:/language/Search/Filter/Hint": {
"title": "$:/language/Search/Filter/Hint",
"text": "Search via a [[filter expression|http://tiddlywiki.com/static/Filters.html]]"
},
"$:/language/Search/Filter/Matches": {
"title": "$:/language/Search/Filter/Matches",
"text": "//<small><<resultCount>> matches</small>//"
},
"$:/language/Search/Matches": {
"title": "$:/language/Search/Matches",
"text": "//<small><<resultCount>> matches</small>//"
},
"$:/language/Search/Matches/All": {
"title": "$:/language/Search/Matches/All",
"text": "All matches:"
},
"$:/language/Search/Matches/Title": {
"title": "$:/language/Search/Matches/Title",
"text": "Title matches:"
},
"$:/language/Search/Search": {
"title": "$:/language/Search/Search",
"text": "Search"
},
"$:/language/Search/Shadows/Caption": {
"title": "$:/language/Search/Shadows/Caption",
"text": "Shadows"
},
"$:/language/Search/Shadows/Hint": {
"title": "$:/language/Search/Shadows/Hint",
"text": "Search for shadow tiddlers"
},
"$:/language/Search/Shadows/Matches": {
"title": "$:/language/Search/Shadows/Matches",
"text": "//<small><<resultCount>> matches</small>//"
},
"$:/language/Search/Standard/Caption": {
"title": "$:/language/Search/Standard/Caption",
"text": "Standard"
},
"$:/language/Search/Standard/Hint": {
"title": "$:/language/Search/Standard/Hint",
"text": "Search for standard tiddlers"
},
"$:/language/Search/Standard/Matches": {
"title": "$:/language/Search/Standard/Matches",
"text": "//<small><<resultCount>> matches</small>//"
},
"$:/language/Search/System/Caption": {
"title": "$:/language/Search/System/Caption",
"text": "System"
},
"$:/language/Search/System/Hint": {
"title": "$:/language/Search/System/Hint",
"text": "Search for system tiddlers"
},
"$:/language/Search/System/Matches": {
"title": "$:/language/Search/System/Matches",
"text": "//<small><<resultCount>> matches</small>//"
},
"$:/language/SideBar/All/Caption": {
"title": "$:/language/SideBar/All/Caption",
"text": "All"
},
"$:/language/SideBar/Contents/Caption": {
"title": "$:/language/SideBar/Contents/Caption",
"text": "Contents"
},
"$:/language/SideBar/Drafts/Caption": {
"title": "$:/language/SideBar/Drafts/Caption",
"text": "Drafts"
},
"$:/language/SideBar/Missing/Caption": {
"title": "$:/language/SideBar/Missing/Caption",
"text": "Missing"
},
"$:/language/SideBar/More/Caption": {
"title": "$:/language/SideBar/More/Caption",
"text": "More"
},
"$:/language/SideBar/Open/Caption": {
"title": "$:/language/SideBar/Open/Caption",
"text": "Open"
},
"$:/language/SideBar/Orphans/Caption": {
"title": "$:/language/SideBar/Orphans/Caption",
"text": "Orphans"
},
"$:/language/SideBar/Recent/Caption": {
"title": "$:/language/SideBar/Recent/Caption",
"text": "Recent"
},
"$:/language/SideBar/Shadows/Caption": {
"title": "$:/language/SideBar/Shadows/Caption",
"text": "Shadows"
},
"$:/language/SideBar/System/Caption": {
"title": "$:/language/SideBar/System/Caption",
"text": "System"
},
"$:/language/SideBar/Tags/Caption": {
"title": "$:/language/SideBar/Tags/Caption",
"text": "Tags"
},
"$:/language/SideBar/Tags/Untagged/Caption": {
"title": "$:/language/SideBar/Tags/Untagged/Caption",
"text": "untagged"
},
"$:/language/SideBar/Tools/Caption": {
"title": "$:/language/SideBar/Tools/Caption",
"text": "Tools"
},
"$:/language/SideBar/Types/Caption": {
"title": "$:/language/SideBar/Types/Caption",
"text": "Types"
},
"$:/SiteSubtitle": {
"title": "$:/SiteSubtitle",
"text": "a non-linear personal web notebook"
},
"$:/SiteTitle": {
"title": "$:/SiteTitle",
"text": "My ~TiddlyWiki"
},
"$:/language/Snippets/ListByTag": {
"title": "$:/language/Snippets/ListByTag",
"tags": "$:/tags/TextEditor/Snippet",
"caption": "List of tiddlers by tag",
"text": "<<list-links \"[tag[task]sort[title]]\">>\n"
},
"$:/language/Snippets/MacroDefinition": {
"title": "$:/language/Snippets/MacroDefinition",
"tags": "$:/tags/TextEditor/Snippet",
"caption": "Macro definition",
"text": "\\define macroName(param1:\"default value\",param2)\nText of the macro\n\\end\n"
},
"$:/language/Snippets/Table4x3": {
"title": "$:/language/Snippets/Table4x3",
"tags": "$:/tags/TextEditor/Snippet",
"caption": "Table with 4 columns by 3 rows",
"text": "|! |!Alpha |!Beta |!Gamma |!Delta |\n|!One | | | | |\n|!Two | | | | |\n|!Three | | | | |\n"
},
"$:/language/Snippets/TableOfContents": {
"title": "$:/language/Snippets/TableOfContents",
"tags": "$:/tags/TextEditor/Snippet",
"caption": "Table of Contents",
"text": "<div class=\"tc-table-of-contents\">\n\n<<toc-selective-expandable 'TableOfContents'>>\n\n</div>"
},
"$:/language/ThemeTweaks/ThemeTweaks": {
"title": "$:/language/ThemeTweaks/ThemeTweaks",
"text": "Theme Tweaks"
},
"$:/language/ThemeTweaks/ThemeTweaks/Hint": {
"title": "$:/language/ThemeTweaks/ThemeTweaks/Hint",
"text": "You can tweak certain aspects of the ''Vanilla'' theme."
},
"$:/language/ThemeTweaks/Options": {
"title": "$:/language/ThemeTweaks/Options",
"text": "Options"
},
"$:/language/ThemeTweaks/Options/SidebarLayout": {
"title": "$:/language/ThemeTweaks/Options/SidebarLayout",
"text": "Sidebar layout"
},
"$:/language/ThemeTweaks/Options/SidebarLayout/Fixed-Fluid": {
"title": "$:/language/ThemeTweaks/Options/SidebarLayout/Fixed-Fluid",
"text": "Fixed story, fluid sidebar"
},
"$:/language/ThemeTweaks/Options/SidebarLayout/Fluid-Fixed": {
"title": "$:/language/ThemeTweaks/Options/SidebarLayout/Fluid-Fixed",
"text": "Fluid story, fixed sidebar"
},
"$:/language/ThemeTweaks/Options/StickyTitles": {
"title": "$:/language/ThemeTweaks/Options/StickyTitles",
"text": "Sticky titles"
},
"$:/language/ThemeTweaks/Options/StickyTitles/Hint": {
"title": "$:/language/ThemeTweaks/Options/StickyTitles/Hint",
"text": "Causes tiddler titles to \"stick\" to the top of the browser window. Caution: Does not work at all with Chrome, and causes some layout issues in Firefox"
},
"$:/language/ThemeTweaks/Options/CodeWrapping": {
"title": "$:/language/ThemeTweaks/Options/CodeWrapping",
"text": "Wrap long lines in code blocks"
},
"$:/language/ThemeTweaks/Settings": {
"title": "$:/language/ThemeTweaks/Settings",
"text": "Settings"
},
"$:/language/ThemeTweaks/Settings/FontFamily": {
"title": "$:/language/ThemeTweaks/Settings/FontFamily",
"text": "Font family"
},
"$:/language/ThemeTweaks/Settings/CodeFontFamily": {
"title": "$:/language/ThemeTweaks/Settings/CodeFontFamily",
"text": "Code font family"
},
"$:/language/ThemeTweaks/Settings/BackgroundImage": {
"title": "$:/language/ThemeTweaks/Settings/BackgroundImage",
"text": "Page background image"
},
"$:/language/ThemeTweaks/Settings/BackgroundImageAttachment": {
"title": "$:/language/ThemeTweaks/Settings/BackgroundImageAttachment",
"text": "Page background image attachment"
},
"$:/language/ThemeTweaks/Settings/BackgroundImageAttachment/Scroll": {
"title": "$:/language/ThemeTweaks/Settings/BackgroundImageAttachment/Scroll",
"text": "Scroll with tiddlers"
},
"$:/language/ThemeTweaks/Settings/BackgroundImageAttachment/Fixed": {
"title": "$:/language/ThemeTweaks/Settings/BackgroundImageAttachment/Fixed",
"text": "Fixed to window"
},
"$:/language/ThemeTweaks/Settings/BackgroundImageSize": {
"title": "$:/language/ThemeTweaks/Settings/BackgroundImageSize",
"text": "Page background image size"
},
"$:/language/ThemeTweaks/Settings/BackgroundImageSize/Auto": {
"title": "$:/language/ThemeTweaks/Settings/BackgroundImageSize/Auto",
"text": "Auto"
},
"$:/language/ThemeTweaks/Settings/BackgroundImageSize/Cover": {
"title": "$:/language/ThemeTweaks/Settings/BackgroundImageSize/Cover",
"text": "Cover"
},
"$:/language/ThemeTweaks/Settings/BackgroundImageSize/Contain": {
"title": "$:/language/ThemeTweaks/Settings/BackgroundImageSize/Contain",
"text": "Contain"
},
"$:/language/ThemeTweaks/Metrics": {
"title": "$:/language/ThemeTweaks/Metrics",
"text": "Sizes"
},
"$:/language/ThemeTweaks/Metrics/FontSize": {
"title": "$:/language/ThemeTweaks/Metrics/FontSize",
"text": "Font size"
},
"$:/language/ThemeTweaks/Metrics/LineHeight": {
"title": "$:/language/ThemeTweaks/Metrics/LineHeight",
"text": "Line height"
},
"$:/language/ThemeTweaks/Metrics/BodyFontSize": {
"title": "$:/language/ThemeTweaks/Metrics/BodyFontSize",
"text": "Font size for tiddler body"
},
"$:/language/ThemeTweaks/Metrics/BodyLineHeight": {
"title": "$:/language/ThemeTweaks/Metrics/BodyLineHeight",
"text": "Line height for tiddler body"
},
"$:/language/ThemeTweaks/Metrics/StoryLeft": {
"title": "$:/language/ThemeTweaks/Metrics/StoryLeft",
"text": "Story left position"
},
"$:/language/ThemeTweaks/Metrics/StoryLeft/Hint": {
"title": "$:/language/ThemeTweaks/Metrics/StoryLeft/Hint",
"text": "how far the left margin of the story river<br>(tiddler area) is from the left of the page"
},
"$:/language/ThemeTweaks/Metrics/StoryTop": {
"title": "$:/language/ThemeTweaks/Metrics/StoryTop",
"text": "Story top position"
},
"$:/language/ThemeTweaks/Metrics/StoryTop/Hint": {
"title": "$:/language/ThemeTweaks/Metrics/StoryTop/Hint",
"text": "how far the top margin of the story river<br>is from the top of the page"
},
"$:/language/ThemeTweaks/Metrics/StoryRight": {
"title": "$:/language/ThemeTweaks/Metrics/StoryRight",
"text": "Story right"
},
"$:/language/ThemeTweaks/Metrics/StoryRight/Hint": {
"title": "$:/language/ThemeTweaks/Metrics/StoryRight/Hint",
"text": "how far the left margin of the sidebar <br>is from the left of the page"
},
"$:/language/ThemeTweaks/Metrics/StoryWidth": {
"title": "$:/language/ThemeTweaks/Metrics/StoryWidth",
"text": "Story width"
},
"$:/language/ThemeTweaks/Metrics/StoryWidth/Hint": {
"title": "$:/language/ThemeTweaks/Metrics/StoryWidth/Hint",
"text": "the overall width of the story river"
},
"$:/language/ThemeTweaks/Metrics/TiddlerWidth": {
"title": "$:/language/ThemeTweaks/Metrics/TiddlerWidth",
"text": "Tiddler width"
},
"$:/language/ThemeTweaks/Metrics/TiddlerWidth/Hint": {
"title": "$:/language/ThemeTweaks/Metrics/TiddlerWidth/Hint",
"text": "within the story river"
},
"$:/language/ThemeTweaks/Metrics/SidebarBreakpoint": {
"title": "$:/language/ThemeTweaks/Metrics/SidebarBreakpoint",
"text": "Sidebar breakpoint"
},
"$:/language/ThemeTweaks/Metrics/SidebarBreakpoint/Hint": {
"title": "$:/language/ThemeTweaks/Metrics/SidebarBreakpoint/Hint",
"text": "the minimum page width at which the story<br>river and sidebar will appear side by side"
},
"$:/language/ThemeTweaks/Metrics/SidebarWidth": {
"title": "$:/language/ThemeTweaks/Metrics/SidebarWidth",
"text": "Sidebar width"
},
"$:/language/ThemeTweaks/Metrics/SidebarWidth/Hint": {
"title": "$:/language/ThemeTweaks/Metrics/SidebarWidth/Hint",
"text": "the width of the sidebar in fluid-fixed layout"
},
"$:/language/TiddlerInfo/Advanced/Caption": {
"title": "$:/language/TiddlerInfo/Advanced/Caption",
"text": "Advanced"
},
"$:/language/TiddlerInfo/Advanced/PluginInfo/Empty/Hint": {
"title": "$:/language/TiddlerInfo/Advanced/PluginInfo/Empty/Hint",
"text": "none"
},
"$:/language/TiddlerInfo/Advanced/PluginInfo/Heading": {
"title": "$:/language/TiddlerInfo/Advanced/PluginInfo/Heading",
"text": "Plugin Details"
},
"$:/language/TiddlerInfo/Advanced/PluginInfo/Hint": {
"title": "$:/language/TiddlerInfo/Advanced/PluginInfo/Hint",
"text": "This plugin contains the following shadow tiddlers:"
},
"$:/language/TiddlerInfo/Advanced/ShadowInfo/Heading": {
"title": "$:/language/TiddlerInfo/Advanced/ShadowInfo/Heading",
"text": "Shadow Status"
},
"$:/language/TiddlerInfo/Advanced/ShadowInfo/NotShadow/Hint": {
"title": "$:/language/TiddlerInfo/Advanced/ShadowInfo/NotShadow/Hint",
"text": "The tiddler <$link to=<<infoTiddler>>><$text text=<<infoTiddler>>/></$link> is not a shadow tiddler"
},
"$:/language/TiddlerInfo/Advanced/ShadowInfo/Shadow/Hint": {
"title": "$:/language/TiddlerInfo/Advanced/ShadowInfo/Shadow/Hint",
"text": "The tiddler <$link to=<<infoTiddler>>><$text text=<<infoTiddler>>/></$link> is a shadow tiddler"
},
"$:/language/TiddlerInfo/Advanced/ShadowInfo/Shadow/Source": {
"title": "$:/language/TiddlerInfo/Advanced/ShadowInfo/Shadow/Source",
"text": "It is defined in the plugin <$link to=<<pluginTiddler>>><$text text=<<pluginTiddler>>/></$link>"
},
"$:/language/TiddlerInfo/Advanced/ShadowInfo/OverriddenShadow/Hint": {
"title": "$:/language/TiddlerInfo/Advanced/ShadowInfo/OverriddenShadow/Hint",
"text": "It is overridden by an ordinary tiddler"
},
"$:/language/TiddlerInfo/Fields/Caption": {
"title": "$:/language/TiddlerInfo/Fields/Caption",
"text": "Fields"
},
"$:/language/TiddlerInfo/List/Caption": {
"title": "$:/language/TiddlerInfo/List/Caption",
"text": "List"
},
"$:/language/TiddlerInfo/List/Empty": {
"title": "$:/language/TiddlerInfo/List/Empty",
"text": "This tiddler does not have a list"
},
"$:/language/TiddlerInfo/Listed/Caption": {
"title": "$:/language/TiddlerInfo/Listed/Caption",
"text": "Listed"
},
"$:/language/TiddlerInfo/Listed/Empty": {
"title": "$:/language/TiddlerInfo/Listed/Empty",
"text": "This tiddler is not listed by any others"
},
"$:/language/TiddlerInfo/References/Caption": {
"title": "$:/language/TiddlerInfo/References/Caption",
"text": "References"
},
"$:/language/TiddlerInfo/References/Empty": {
"title": "$:/language/TiddlerInfo/References/Empty",
"text": "No tiddlers link to this one"
},
"$:/language/TiddlerInfo/Tagging/Caption": {
"title": "$:/language/TiddlerInfo/Tagging/Caption",
"text": "Tagging"
},
"$:/language/TiddlerInfo/Tagging/Empty": {
"title": "$:/language/TiddlerInfo/Tagging/Empty",
"text": "No tiddlers are tagged with this one"
},
"$:/language/TiddlerInfo/Tools/Caption": {
"title": "$:/language/TiddlerInfo/Tools/Caption",
"text": "Tools"
},
"$:/language/Docs/Types/application/javascript": {
"title": "$:/language/Docs/Types/application/javascript",
"description": "JavaScript code",
"name": "application/javascript",
"group": "Developer"
},
"$:/language/Docs/Types/application/json": {
"title": "$:/language/Docs/Types/application/json",
"description": "JSON data",
"name": "application/json",
"group": "Developer"
},
"$:/language/Docs/Types/application/x-tiddler-dictionary": {
"title": "$:/language/Docs/Types/application/x-tiddler-dictionary",
"description": "Data dictionary",
"name": "application/x-tiddler-dictionary",
"group": "Developer"
},
"$:/language/Docs/Types/image/gif": {
"title": "$:/language/Docs/Types/image/gif",
"description": "GIF image",
"name": "image/gif",
"group": "Image"
},
"$:/language/Docs/Types/image/jpeg": {
"title": "$:/language/Docs/Types/image/jpeg",
"description": "JPEG image",
"name": "image/jpeg",
"group": "Image"
},
"$:/language/Docs/Types/image/png": {
"title": "$:/language/Docs/Types/image/png",
"description": "PNG image",
"name": "image/png",
"group": "Image"
},
"$:/language/Docs/Types/image/svg+xml": {
"title": "$:/language/Docs/Types/image/svg+xml",
"description": "Structured Vector Graphics image",
"name": "image/svg+xml",
"group": "Image"
},
"$:/language/Docs/Types/image/x-icon": {
"title": "$:/language/Docs/Types/image/x-icon",
"description": "ICO format icon file",
"name": "image/x-icon",
"group": "Image"
},
"$:/language/Docs/Types/text/css": {
"title": "$:/language/Docs/Types/text/css",
"description": "Static stylesheet",
"name": "text/css",
"group": "Developer"
},
"$:/language/Docs/Types/text/html": {
"title": "$:/language/Docs/Types/text/html",
"description": "HTML markup",
"name": "text/html",
"group": "Text"
},
"$:/language/Docs/Types/text/plain": {
"title": "$:/language/Docs/Types/text/plain",
"description": "Plain text",
"name": "text/plain",
"group": "Text"
},
"$:/language/Docs/Types/text/vnd.tiddlywiki": {
"title": "$:/language/Docs/Types/text/vnd.tiddlywiki",
"description": "TiddlyWiki 5",
"name": "text/vnd.tiddlywiki",
"group": "Text"
},
"$:/language/Docs/Types/text/x-tiddlywiki": {
"title": "$:/language/Docs/Types/text/x-tiddlywiki",
"description": "TiddlyWiki Classic",
"name": "text/x-tiddlywiki",
"group": "Text"
},
"$:/languages/en-GB": {
"title": "$:/languages/en-GB",
"name": "en-GB",
"description": "English (British)",
"author": "JeremyRuston",
"core-version": ">=5.0.0\"",
"text": "Stub pseudo-plugin for the default language"
},
"$:/core/modules/commander.js": {
"text": "/*\\\ntitle: $:/core/modules/commander.js\ntype: application/javascript\nmodule-type: global\n\nThe $tw.Commander class is a command interpreter\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nParse a sequence of commands\n\tcommandTokens: an array of command string tokens\n\twiki: reference to the wiki store object\n\tstreams: {output:, error:}, each of which has a write(string) method\n\tcallback: a callback invoked as callback(err) where err is null if there was no error\n*/\nvar Commander = function(commandTokens,callback,wiki,streams) {\n\tvar path = require(\"path\");\n\tthis.commandTokens = commandTokens;\n\tthis.nextToken = 0;\n\tthis.callback = callback;\n\tthis.wiki = wiki;\n\tthis.streams = streams;\n\tthis.outputPath = path.resolve($tw.boot.wikiPath,$tw.config.wikiOutputSubDir);\n};\n\n/*\nAdd a string of tokens to the command queue\n*/\nCommander.prototype.addCommandTokens = function(commandTokens) {\n\tvar params = commandTokens.slice(0);\n\tparams.unshift(0);\n\tparams.unshift(this.nextToken);\n\tArray.prototype.splice.apply(this.commandTokens,params);\n};\n\n/*\nExecute the sequence of commands and invoke a callback on completion\n*/\nCommander.prototype.execute = function() {\n\tthis.executeNextCommand();\n};\n\n/*\nExecute the next command in the sequence\n*/\nCommander.prototype.executeNextCommand = function() {\n\tvar self = this;\n\t// Invoke the callback if there are no more commands\n\tif(this.nextToken >= this.commandTokens.length) {\n\t\tthis.callback(null);\n\t} else {\n\t\t// Get and check the command token\n\t\tvar commandName = this.commandTokens[this.nextToken++];\n\t\tif(commandName.substr(0,2) !== \"--\") {\n\t\t\tthis.callback(\"Missing command: \" + commandName);\n\t\t} else {\n\t\t\tcommandName = commandName.substr(2); // Trim off the --\n\t\t\t// Accumulate the parameters to the command\n\t\t\tvar params = [];\n\t\t\twhile(this.nextToken < this.commandTokens.length && \n\t\t\t\tthis.commandTokens[this.nextToken].substr(0,2) !== \"--\") {\n\t\t\t\tparams.push(this.commandTokens[this.nextToken++]);\n\t\t\t}\n\t\t\t// Get the command info\n\t\t\tvar command = $tw.commands[commandName],\n\t\t\t\tc,err;\n\t\t\tif(!command) {\n\t\t\t\tthis.callback(\"Unknown command: \" + commandName);\n\t\t\t} else {\n\t\t\t\tif(this.verbose) {\n\t\t\t\t\tthis.streams.output.write(\"Executing command: \" + commandName + \" \" + params.join(\" \") + \"\\n\");\n\t\t\t\t}\n\t\t\t\tif(command.info.synchronous) {\n\t\t\t\t\t// Synchronous command\n\t\t\t\t\tc = new command.Command(params,this);\n\t\t\t\t\terr = c.execute();\n\t\t\t\t\tif(err) {\n\t\t\t\t\t\tthis.callback(err);\n\t\t\t\t\t} else {\n\t\t\t\t\t\tthis.executeNextCommand();\n\t\t\t\t\t}\n\t\t\t\t} else {\n\t\t\t\t\t// Asynchronous command\n\t\t\t\t\tc = new command.Command(params,this,function(err) {\n\t\t\t\t\t\tif(err) {\n\t\t\t\t\t\t\tself.callback(err);\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\tself.executeNextCommand();\n\t\t\t\t\t\t}\n\t\t\t\t\t});\n\t\t\t\t\terr = c.execute();\n\t\t\t\t\tif(err) {\n\t\t\t\t\t\tthis.callback(err);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n};\n\nCommander.initCommands = function(moduleType) {\n\tmoduleType = moduleType || \"command\";\n\t$tw.commands = {};\n\t$tw.modules.forEachModuleOfType(moduleType,function(title,module) {\n\t\tvar c = $tw.commands[module.info.name] = {};\n\t\t// Add the methods defined by the module\n\t\tfor(var f in module) {\n\t\t\tif($tw.utils.hop(module,f)) {\n\t\t\t\tc[f] = 
module[f];\n\t\t\t}\n\t\t}\n\t});\n};\n\nexports.Commander = Commander;\n\n})();\n",
"title": "$:/core/modules/commander.js",
"type": "application/javascript",
"module-type": "global"
},
"$:/core/modules/commands/build.js": {
"text": "/*\\\ntitle: $:/core/modules/commands/build.js\ntype: application/javascript\nmodule-type: command\n\nCommand to build a build target\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.info = {\n\tname: \"build\",\n\tsynchronous: true\n};\n\nvar Command = function(params,commander) {\n\tthis.params = params;\n\tthis.commander = commander;\n};\n\nCommand.prototype.execute = function() {\n\t// Get the build targets defined in the wiki\n\tvar buildTargets = $tw.boot.wikiInfo.build;\n\tif(!buildTargets) {\n\t\treturn \"No build targets defined\";\n\t}\n\t// Loop through each of the specified targets\n\tvar targets;\n\tif(this.params.length > 0) {\n\t\ttargets = this.params;\n\t} else {\n\t\ttargets = Object.keys(buildTargets);\n\t}\n\tfor(var targetIndex=0; targetIndex<targets.length; targetIndex++) {\n\t\tvar target = targets[targetIndex],\n\t\t\tcommands = buildTargets[target];\n\t\tif(!commands) {\n\t\t\treturn \"Build target '\" + target + \"' not found\";\n\t\t}\n\t\t// Add the commands to the queue\n\t\tthis.commander.addCommandTokens(commands);\n\t}\n\treturn null;\n};\n\nexports.Command = Command;\n\n})();\n",
"title": "$:/core/modules/commands/build.js",
"type": "application/javascript",
"module-type": "command"
},
"$:/core/modules/commands/clearpassword.js": {
"text": "/*\\\ntitle: $:/core/modules/commands/clearpassword.js\ntype: application/javascript\nmodule-type: command\n\nClear password for crypto operations\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.info = {\n\tname: \"clearpassword\",\n\tsynchronous: true\n};\n\nvar Command = function(params,commander,callback) {\n\tthis.params = params;\n\tthis.commander = commander;\n\tthis.callback = callback;\n};\n\nCommand.prototype.execute = function() {\n\t$tw.crypto.setPassword(null);\n\treturn null;\n};\n\nexports.Command = Command;\n\n})();\n",
"title": "$:/core/modules/commands/clearpassword.js",
"type": "application/javascript",
"module-type": "command"
},
"$:/core/modules/commands/editions.js": {
"text": "/*\\\ntitle: $:/core/modules/commands/editions.js\ntype: application/javascript\nmodule-type: command\n\nCommand to list the available editions\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.info = {\n\tname: \"editions\",\n\tsynchronous: true\n};\n\nvar Command = function(params,commander) {\n\tthis.params = params;\n\tthis.commander = commander;\n};\n\nCommand.prototype.execute = function() {\n\tvar self = this;\n\t// Output the list\n\tthis.commander.streams.output.write(\"Available editions:\\n\\n\");\n\tvar editionInfo = $tw.utils.getEditionInfo();\n\t$tw.utils.each(editionInfo,function(info,name) {\n\t\tself.commander.streams.output.write(\" \" + name + \": \" + info.description + \"\\n\");\n\t});\n\tthis.commander.streams.output.write(\"\\n\");\n\treturn null;\n};\n\nexports.Command = Command;\n\n})();\n",
"title": "$:/core/modules/commands/editions.js",
"type": "application/javascript",
"module-type": "command"
},
"$:/core/modules/commands/help.js": {
"text": "/*\\\ntitle: $:/core/modules/commands/help.js\ntype: application/javascript\nmodule-type: command\n\nHelp command\n\n\\*/\n(function(){\n\n/*jshint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.info = {\n\tname: \"help\",\n\tsynchronous: true\n};\n\nvar Command = function(params,commander) {\n\tthis.params = params;\n\tthis.commander = commander;\n};\n\nCommand.prototype.execute = function() {\n\tvar subhelp = this.params[0] || \"default\",\n\t\thelpBase = \"$:/language/Help/\",\n\t\ttext;\n\tif(!this.commander.wiki.getTiddler(helpBase + subhelp)) {\n\t\tsubhelp = \"notfound\";\n\t}\n\t// Wikify the help as formatted text (ie block elements generate newlines)\n\ttext = this.commander.wiki.renderTiddler(\"text/plain-formatted\",helpBase + subhelp);\n\t// Remove any leading linebreaks\n\ttext = text.replace(/^(\\r?\\n)*/g,\"\");\n\tthis.commander.streams.output.write(text);\n};\n\nexports.Command = Command;\n\n})();\n",
"title": "$:/core/modules/commands/help.js",
"type": "application/javascript",
"module-type": "command"
},
"$:/core/modules/commands/init.js": {
"text": "/*\\\ntitle: $:/core/modules/commands/init.js\ntype: application/javascript\nmodule-type: command\n\nCommand to initialise an empty wiki folder\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.info = {\n\tname: \"init\",\n\tsynchronous: true\n};\n\nvar Command = function(params,commander) {\n\tthis.params = params;\n\tthis.commander = commander;\n};\n\nCommand.prototype.execute = function() {\n\tvar fs = require(\"fs\"),\n\t\tpath = require(\"path\");\n\t// Check that we don't already have a valid wiki folder\n\tif($tw.boot.wikiTiddlersPath || ($tw.utils.isDirectory($tw.boot.wikiPath) && !$tw.utils.isDirectoryEmpty($tw.boot.wikiPath))) {\n\t\treturn \"Wiki folder is not empty\";\n\t}\n\t// Loop through each of the specified editions\n\tvar editions = this.params.length > 0 ? this.params : [\"empty\"];\n\tfor(var editionIndex=0; editionIndex<editions.length; editionIndex++) {\n\t\tvar editionName = editions[editionIndex];\n\t\t// Check the edition exists\n\t\tvar editionPath = $tw.findLibraryItem(editionName,$tw.getLibraryItemSearchPaths($tw.config.editionsPath,$tw.config.editionsEnvVar));\n\t\tif(!$tw.utils.isDirectory(editionPath)) {\n\t\t\treturn \"Edition '\" + editionName + \"' not found\";\n\t\t}\n\t\t// Copy the edition content\n\t\tvar err = $tw.utils.copyDirectory(editionPath,$tw.boot.wikiPath);\n\t\tif(!err) {\n\t\t\tthis.commander.streams.output.write(\"Copied edition '\" + editionName + \"' to \" + $tw.boot.wikiPath + \"\\n\");\n\t\t} else {\n\t\t\treturn err;\n\t\t}\n\t}\n\t// Tweak the tiddlywiki.info to remove any included wikis\n\tvar packagePath = $tw.boot.wikiPath + \"/tiddlywiki.info\",\n\t\tpackageJson = JSON.parse(fs.readFileSync(packagePath));\n\tdelete packageJson.includeWikis;\n\tfs.writeFileSync(packagePath,JSON.stringify(packageJson,null,$tw.config.preferences.jsonSpaces));\n\treturn null;\n};\n\nexports.Command = Command;\n\n})();\n",
"title": "$:/core/modules/commands/init.js",
"type": "application/javascript",
"module-type": "command"
},
"$:/core/modules/commands/load.js": {
"text": "/*\\\ntitle: $:/core/modules/commands/load.js\ntype: application/javascript\nmodule-type: command\n\nCommand to load tiddlers from a file\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.info = {\n\tname: \"load\",\n\tsynchronous: false\n};\n\nvar Command = function(params,commander,callback) {\n\tthis.params = params;\n\tthis.commander = commander;\n\tthis.callback = callback;\n};\n\nCommand.prototype.execute = function() {\n\tvar self = this,\n\t\tfs = require(\"fs\"),\n\t\tpath = require(\"path\");\n\tif(this.params.length < 1) {\n\t\treturn \"Missing filename\";\n\t}\n\tvar ext = path.extname(self.params[0]);\n\tfs.readFile(this.params[0],$tw.utils.getTypeEncoding(ext),function(err,data) {\n\t\tif (err) {\n\t\t\tself.callback(err);\n\t\t} else {\n\t\t\tvar fields = {title: self.params[0]},\n\t\t\t\ttype = path.extname(self.params[0]);\n\t\t\tvar tiddlers = self.commander.wiki.deserializeTiddlers(type,data,fields);\n\t\t\tif(!tiddlers) {\n\t\t\t\tself.callback(\"No tiddlers found in file \\\"\" + self.params[0] + \"\\\"\");\n\t\t\t} else {\n\t\t\t\tfor(var t=0; t<tiddlers.length; t++) {\n\t\t\t\t\tself.commander.wiki.importTiddler(new $tw.Tiddler(tiddlers[t]));\n\t\t\t\t}\n\t\t\t\tself.callback(null);\t\n\t\t\t}\n\t\t}\n\t});\n\treturn null;\n};\n\nexports.Command = Command;\n\n})();\n",
"title": "$:/core/modules/commands/load.js",
"type": "application/javascript",
"module-type": "command"
},
"$:/core/modules/commands/makelibrary.js": {
"text": "/*\\\ntitle: $:/core/modules/commands/makelibrary.js\ntype: application/javascript\nmodule-type: command\n\nCommand to pack all of the plugins in the library into a plugin tiddler of type \"library\"\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.info = {\n\tname: \"makelibrary\",\n\tsynchronous: true\n};\n\nvar UPGRADE_LIBRARY_TITLE = \"$:/UpgradeLibrary\";\n\nvar Command = function(params,commander,callback) {\n\tthis.params = params;\n\tthis.commander = commander;\n\tthis.callback = callback;\n};\n\nCommand.prototype.execute = function() {\n\tvar wiki = this.commander.wiki,\n\t\tfs = require(\"fs\"),\n\t\tpath = require(\"path\"),\n\t\tupgradeLibraryTitle = this.params[0] || UPGRADE_LIBRARY_TITLE,\n\t\ttiddlers = {};\n\t// Collect up the library plugins\n\tvar collectPlugins = function(folder) {\n\t\t\tvar pluginFolders = fs.readdirSync(folder);\n\t\t\tfor(var p=0; p<pluginFolders.length; p++) {\n\t\t\t\tif(!$tw.boot.excludeRegExp.test(pluginFolders[p])) {\n\t\t\t\t\tpluginFields = $tw.loadPluginFolder(path.resolve(folder,\"./\" + pluginFolders[p]));\n\t\t\t\t\tif(pluginFields && pluginFields.title) {\n\t\t\t\t\t\ttiddlers[pluginFields.title] = pluginFields;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t},\n\t\tcollectPublisherPlugins = function(folder) {\n\t\t\tvar publisherFolders = fs.readdirSync(folder);\n\t\t\tfor(var t=0; t<publisherFolders.length; t++) {\n\t\t\t\tif(!$tw.boot.excludeRegExp.test(publisherFolders[t])) {\n\t\t\t\t\tcollectPlugins(path.resolve(folder,\"./\" + publisherFolders[t]));\n\t\t\t\t}\n\t\t\t}\n\t\t};\n\tcollectPublisherPlugins(path.resolve($tw.boot.corePath,$tw.config.pluginsPath));\n\tcollectPublisherPlugins(path.resolve($tw.boot.corePath,$tw.config.themesPath));\n\tcollectPlugins(path.resolve($tw.boot.corePath,$tw.config.languagesPath));\n\t// Save the upgrade library tiddler\n\tvar pluginFields = {\n\t\ttitle: upgradeLibraryTitle,\n\t\ttype: \"application/json\",\n\t\t\"plugin-type\": \"library\",\n\t\t\"text\": JSON.stringify({tiddlers: tiddlers},null,$tw.config.preferences.jsonSpaces)\n\t};\n\twiki.addTiddler(new $tw.Tiddler(pluginFields));\n\treturn null;\n};\n\nexports.Command = Command;\n\n})();\n",
"title": "$:/core/modules/commands/makelibrary.js",
"type": "application/javascript",
"module-type": "command"
},
"$:/core/modules/commands/output.js": {
"text": "/*\\\ntitle: $:/core/modules/commands/output.js\ntype: application/javascript\nmodule-type: command\n\nCommand to set the default output location (defaults to current working directory)\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.info = {\n\tname: \"output\",\n\tsynchronous: true\n};\n\nvar Command = function(params,commander,callback) {\n\tthis.params = params;\n\tthis.commander = commander;\n\tthis.callback = callback;\n};\n\nCommand.prototype.execute = function() {\n\tvar fs = require(\"fs\"),\n\t\tpath = require(\"path\");\n\tif(this.params.length < 1) {\n\t\treturn \"Missing output path\";\n\t}\n\tthis.commander.outputPath = path.resolve(process.cwd(),this.params[0]);\n\treturn null;\n};\n\nexports.Command = Command;\n\n})();\n",
"title": "$:/core/modules/commands/output.js",
"type": "application/javascript",
"module-type": "command"
},
"$:/core/modules/commands/password.js": {
"text": "/*\\\ntitle: $:/core/modules/commands/password.js\ntype: application/javascript\nmodule-type: command\n\nSave password for crypto operations\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.info = {\n\tname: \"password\",\n\tsynchronous: true\n};\n\nvar Command = function(params,commander,callback) {\n\tthis.params = params;\n\tthis.commander = commander;\n\tthis.callback = callback;\n};\n\nCommand.prototype.execute = function() {\n\tif(this.params.length < 1) {\n\t\treturn \"Missing password\";\n\t}\n\t$tw.crypto.setPassword(this.params[0]);\n\treturn null;\n};\n\nexports.Command = Command;\n\n})();\n",
"title": "$:/core/modules/commands/password.js",
"type": "application/javascript",
"module-type": "command"
},
"$:/core/modules/commands/rendertiddler.js": {
"text": "/*\\\ntitle: $:/core/modules/commands/rendertiddler.js\ntype: application/javascript\nmodule-type: command\n\nCommand to render a tiddler and save it to a file\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.info = {\n\tname: \"rendertiddler\",\n\tsynchronous: false\n};\n\nvar Command = function(params,commander,callback) {\n\tthis.params = params;\n\tthis.commander = commander;\n\tthis.callback = callback;\n};\n\nCommand.prototype.execute = function() {\n\tif(this.params.length < 2) {\n\t\treturn \"Missing filename\";\n\t}\n\tvar self = this,\n\t\tfs = require(\"fs\"),\n\t\tpath = require(\"path\"),\n\t\ttitle = this.params[0],\n\t\tfilename = path.resolve(this.commander.outputPath,this.params[1]),\n\t\ttype = this.params[2] || \"text/html\",\n\t\ttemplate = this.params[3],\n\t\tvariables = {};\n\t$tw.utils.createFileDirectories(filename);\n\tif(template) {\n\t\tvariables.currentTiddler = title;\n\t\ttitle = template;\n\t}\n\tfs.writeFile(filename,this.commander.wiki.renderTiddler(type,title,{variables: variables}),\"utf8\",function(err) {\n\t\tself.callback(err);\n\t});\n\treturn null;\n};\n\nexports.Command = Command;\n\n})();\n",
"title": "$:/core/modules/commands/rendertiddler.js",
"type": "application/javascript",
"module-type": "command"
},
"$:/core/modules/commands/rendertiddlers.js": {
"text": "/*\\\ntitle: $:/core/modules/commands/rendertiddlers.js\ntype: application/javascript\nmodule-type: command\n\nCommand to render several tiddlers to a folder of files\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar widget = require(\"$:/core/modules/widgets/widget.js\");\n\nexports.info = {\n\tname: \"rendertiddlers\",\n\tsynchronous: true\n};\n\nvar Command = function(params,commander,callback) {\n\tthis.params = params;\n\tthis.commander = commander;\n\tthis.callback = callback;\n};\n\nCommand.prototype.execute = function() {\n\tif(this.params.length < 2) {\n\t\treturn \"Missing filename\";\n\t}\n\tvar self = this,\n\t\tfs = require(\"fs\"),\n\t\tpath = require(\"path\"),\n\t\twiki = this.commander.wiki,\n\t\tfilter = this.params[0],\n\t\ttemplate = this.params[1],\n\t\toutputPath = this.commander.outputPath,\n\t\tpathname = path.resolve(outputPath,this.params[2]),\t\t\n\t\ttype = this.params[3] || \"text/html\",\n\t\textension = this.params[4] || \".html\",\n\t\tdeleteDirectory = (this.params[5] || \"\").toLowerCase() !== \"noclean\",\n\t\ttiddlers = wiki.filterTiddlers(filter);\n\tif(deleteDirectory) {\n\t\t$tw.utils.deleteDirectory(pathname);\n\t}\n\t$tw.utils.each(tiddlers,function(title) {\n\t\tvar parser = wiki.parseTiddler(template),\n\t\t\twidgetNode = wiki.makeWidget(parser,{variables: {currentTiddler: title}}),\n\t\t\tcontainer = $tw.fakeDocument.createElement(\"div\");\n\t\twidgetNode.render(container,null);\n\t\tvar text = type === \"text/html\" ? container.innerHTML : container.textContent,\n\t\t\texportPath = null;\n\t\tif($tw.utils.hop($tw.macros,\"tv-get-export-path\")) {\n\t\t\tvar macroPath = $tw.macros[\"tv-get-export-path\"].run.apply(self,[title]);\n\t\t\tif(macroPath) {\n\t\t\t\texportPath = path.resolve(outputPath,macroPath + extension);\n\t\t\t}\n\t\t}\n\t\tvar finalPath = exportPath || path.resolve(pathname,encodeURIComponent(title) + extension);\n\t\t$tw.utils.createFileDirectories(finalPath);\n\t\tfs.writeFileSync(finalPath,text,\"utf8\");\n\t});\n\treturn null;\n};\n\nexports.Command = Command;\n\n})();\n",
"title": "$:/core/modules/commands/rendertiddlers.js",
"type": "application/javascript",
"module-type": "command"
},
"$:/core/modules/commands/savelibrarytiddlers.js": {
"text": "/*\\\ntitle: $:/core/modules/commands/savelibrarytiddlers.js\ntype: application/javascript\nmodule-type: command\n\nCommand to save the subtiddlers of a bundle tiddler as a series of JSON files\n\n--savelibrarytiddlers <tiddler> <pathname> <skinnylisting>\n\nThe tiddler identifies the bundle tiddler that contains the subtiddlers.\n\nThe pathname specifies the pathname to the folder in which the JSON files should be saved. The filename is the URL encoded title of the subtiddler.\n\nThe skinnylisting specifies the title of the tiddler to which a JSON catalogue of the subtiddlers will be saved. The JSON file contains the same data as the bundle tiddler but with the `text` field removed.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.info = {\n\tname: \"savelibrarytiddlers\",\n\tsynchronous: true\n};\n\nvar Command = function(params,commander,callback) {\n\tthis.params = params;\n\tthis.commander = commander;\n\tthis.callback = callback;\n};\n\nCommand.prototype.execute = function() {\n\tif(this.params.length < 2) {\n\t\treturn \"Missing filename\";\n\t}\n\tvar self = this,\n\t\tfs = require(\"fs\"),\n\t\tpath = require(\"path\"),\n\t\tcontainerTitle = this.params[0],\n\t\tfilter = this.params[1],\n\t\tbasepath = this.params[2],\n\t\tskinnyListTitle = this.params[3];\n\t// Get the container tiddler as data\n\tvar containerData = self.commander.wiki.getTiddlerDataCached(containerTitle,undefined);\n\tif(!containerData) {\n\t\treturn \"'\" + containerTitle + \"' is not a tiddler bundle\";\n\t}\n\t// Filter the list of plugins\n\tvar pluginList = [];\n\t$tw.utils.each(containerData.tiddlers,function(tiddler,title) {\n\t\tpluginList.push(title);\n\t});\n\tvar filteredPluginList;\n\tif(filter) {\n\t\tfilteredPluginList = self.commander.wiki.filterTiddlers(filter,null,self.commander.wiki.makeTiddlerIterator(pluginList));\n\t} else {\n\t\tfilteredPluginList = pluginList;\n\t}\n\t// Iterate through the plugins\n\tvar skinnyList = [];\n\t$tw.utils.each(filteredPluginList,function(title) {\n\t\tvar tiddler = containerData.tiddlers[title];\n\t\t// Save each JSON file and collect the skinny data\n\t\tvar pathname = path.resolve(self.commander.outputPath,basepath + encodeURIComponent(title) + \".json\");\n\t\t$tw.utils.createFileDirectories(pathname);\n\t\tfs.writeFileSync(pathname,JSON.stringify(tiddler,null,$tw.config.preferences.jsonSpaces),\"utf8\");\n\t\t// Collect the skinny list data\n\t\tvar pluginTiddlers = JSON.parse(tiddler.text),\n\t\t\treadmeContent = (pluginTiddlers.tiddlers[title + \"/readme\"] || {}).text,\n\t\t\ticonTiddler = pluginTiddlers.tiddlers[title + \"/icon\"] || {},\n\t\t\ticonType = iconTiddler.type,\n\t\t\ticonText = iconTiddler.text,\n\t\t\ticonContent;\n\t\tif(iconType && iconText) {\n\t\t\ticonContent = $tw.utils.makeDataUri(iconText,iconType);\n\t\t}\n\t\tskinnyList.push($tw.utils.extend({},tiddler,{text: undefined, readme: readmeContent, icon: iconContent}));\n\t});\n\t// Save the catalogue tiddler\n\tif(skinnyListTitle) {\n\t\tself.commander.wiki.setTiddlerData(skinnyListTitle,skinnyList);\n\t}\n\treturn null;\n};\n\nexports.Command = Command;\n\n})();\n",
"title": "$:/core/modules/commands/savelibrarytiddlers.js",
"type": "application/javascript",
"module-type": "command"
},
"$:/core/modules/commands/savetiddler.js": {
"text": "/*\\\ntitle: $:/core/modules/commands/savetiddler.js\ntype: application/javascript\nmodule-type: command\n\nCommand to save the content of a tiddler to a file\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.info = {\n\tname: \"savetiddler\",\n\tsynchronous: false\n};\n\nvar Command = function(params,commander,callback) {\n\tthis.params = params;\n\tthis.commander = commander;\n\tthis.callback = callback;\n};\n\nCommand.prototype.execute = function() {\n\tif(this.params.length < 2) {\n\t\treturn \"Missing filename\";\n\t}\n\tvar self = this,\n\t\tfs = require(\"fs\"),\n\t\tpath = require(\"path\"),\n\t\ttitle = this.params[0],\n\t\tfilename = path.resolve(this.commander.outputPath,this.params[1]),\n\t\ttiddler = this.commander.wiki.getTiddler(title);\n\tif(tiddler) {\n\t\tvar type = tiddler.fields.type || \"text/vnd.tiddlywiki\",\n\t\t\tcontentTypeInfo = $tw.config.contentTypeInfo[type] || {encoding: \"utf8\"};\n\t\t$tw.utils.createFileDirectories(filename);\n\t\tfs.writeFile(filename,tiddler.fields.text,contentTypeInfo.encoding,function(err) {\n\t\t\tself.callback(err);\n\t\t});\n\t} else {\n\t\treturn \"Missing tiddler: \" + title;\n\t}\n\treturn null;\n};\n\nexports.Command = Command;\n\n})();\n",
"title": "$:/core/modules/commands/savetiddler.js",
"type": "application/javascript",
"module-type": "command"
},
"$:/core/modules/commands/savetiddlers.js": {
"text": "/*\\\ntitle: $:/core/modules/commands/savetiddlers.js\ntype: application/javascript\nmodule-type: command\n\nCommand to save several tiddlers to a folder of files\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar widget = require(\"$:/core/modules/widgets/widget.js\");\n\nexports.info = {\n\tname: \"savetiddlers\",\n\tsynchronous: true\n};\n\nvar Command = function(params,commander,callback) {\n\tthis.params = params;\n\tthis.commander = commander;\n\tthis.callback = callback;\n};\n\nCommand.prototype.execute = function() {\n\tif(this.params.length < 1) {\n\t\treturn \"Missing filename\";\n\t}\n\tvar self = this,\n\t\tfs = require(\"fs\"),\n\t\tpath = require(\"path\"),\n\t\twiki = this.commander.wiki,\n\t\tfilter = this.params[0],\n\t\tpathname = path.resolve(this.commander.outputPath,this.params[1]),\n\t\tdeleteDirectory = (this.params[2] || \"\").toLowerCase() !== \"noclean\",\n\t\ttiddlers = wiki.filterTiddlers(filter);\n\tif(deleteDirectory) {\n\t\t$tw.utils.deleteDirectory(pathname);\n\t}\n\t$tw.utils.createDirectory(pathname);\n\t$tw.utils.each(tiddlers,function(title) {\n\t\tvar tiddler = self.commander.wiki.getTiddler(title),\n\t\t\ttype = tiddler.fields.type || \"text/vnd.tiddlywiki\",\n\t\t\tcontentTypeInfo = $tw.config.contentTypeInfo[type] || {encoding: \"utf8\"},\n\t\t\tfilename = path.resolve(pathname,encodeURIComponent(title));\n\t\tfs.writeFileSync(filename,tiddler.fields.text,contentTypeInfo.encoding);\n\t});\n\treturn null;\n};\n\nexports.Command = Command;\n\n})();\n",
"title": "$:/core/modules/commands/savetiddlers.js",
"type": "application/javascript",
"module-type": "command"
},
"$:/core/modules/commands/server.js": {
"text": "/*\\\ntitle: $:/core/modules/commands/server.js\ntype: application/javascript\nmodule-type: command\n\nServe tiddlers over http\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nif($tw.node) {\n\tvar util = require(\"util\"),\n\t\tfs = require(\"fs\"),\n\t\turl = require(\"url\"),\n\t\tpath = require(\"path\"),\n\t\thttp = require(\"http\");\n}\n\nexports.info = {\n\tname: \"server\",\n\tsynchronous: true\n};\n\n/*\nA simple HTTP server with regexp-based routes\n*/\nfunction SimpleServer(options) {\n\tthis.routes = options.routes || [];\n\tthis.wiki = options.wiki;\n\tthis.variables = options.variables || {};\n}\n\nSimpleServer.prototype.set = function(obj) {\n\tvar self = this;\n\t$tw.utils.each(obj,function(value,name) {\n\t\tself.variables[name] = value;\n\t});\n};\n\nSimpleServer.prototype.get = function(name) {\n\treturn this.variables[name];\n};\n\nSimpleServer.prototype.addRoute = function(route) {\n\tthis.routes.push(route);\n};\n\nSimpleServer.prototype.findMatchingRoute = function(request,state) {\n\tvar pathprefix = this.get(\"pathprefix\") || \"\";\n\tfor(var t=0; t<this.routes.length; t++) {\n\t\tvar potentialRoute = this.routes[t],\n\t\t\tpathRegExp = potentialRoute.path,\n\t\t\tpathname = state.urlInfo.pathname,\n\t\t\tmatch;\n\t\tif(pathprefix) {\n\t\t\tif(pathname.substr(0,pathprefix.length) === pathprefix) {\n\t\t\t\tpathname = pathname.substr(pathprefix.length);\n\t\t\t\tmatch = potentialRoute.path.exec(pathname);\n\t\t\t} else {\n\t\t\t\tmatch = false;\n\t\t\t}\n\t\t} else {\n\t\t\tmatch = potentialRoute.path.exec(pathname);\n\t\t}\n\t\tif(match && request.method === potentialRoute.method) {\n\t\t\tstate.params = [];\n\t\t\tfor(var p=1; p<match.length; p++) {\n\t\t\t\tstate.params.push(match[p]);\n\t\t\t}\n\t\t\treturn potentialRoute;\n\t\t}\n\t}\n\treturn null;\n};\n\nSimpleServer.prototype.checkCredentials = function(request,incomingUsername,incomingPassword) {\n\tvar header = request.headers.authorization || \"\",\n\t\ttoken = header.split(/\\s+/).pop() || \"\",\n\t\tauth = $tw.utils.base64Decode(token),\n\t\tparts = auth.split(/:/),\n\t\tusername = parts[0],\n\t\tpassword = parts[1];\n\tif(incomingUsername === username && incomingPassword === password) {\n\t\treturn \"ALLOWED\";\n\t} else {\n\t\treturn \"DENIED\";\n\t}\n};\n\nSimpleServer.prototype.listen = function(port,host) {\n\tvar self = this;\n\thttp.createServer(function(request,response) {\n\t\t// Compose the state object\n\t\tvar state = {};\n\t\tstate.wiki = self.wiki;\n\t\tstate.server = self;\n\t\tstate.urlInfo = url.parse(request.url);\n\t\t// Find the route that matches this path\n\t\tvar route = self.findMatchingRoute(request,state);\n\t\t// Check for the username and password if we've got one\n\t\tvar username = self.get(\"username\"),\n\t\t\tpassword = self.get(\"password\");\n\t\tif(username && password) {\n\t\t\t// Check they match\n\t\t\tif(self.checkCredentials(request,username,password) !== \"ALLOWED\") {\n\t\t\t\tvar servername = state.wiki.getTiddlerText(\"$:/SiteTitle\") || \"TiddlyWiki5\";\n\t\t\t\tresponse.writeHead(401,\"Authentication required\",{\n\t\t\t\t\t\"WWW-Authenticate\": 'Basic realm=\"Please provide your username and password to login to ' + servername + '\"'\n\t\t\t\t});\n\t\t\t\tresponse.end();\n\t\t\t\treturn;\n\t\t\t}\n\t\t}\n\t\t// Return a 404 if we didn't find a route\n\t\tif(!route) {\n\t\t\tresponse.writeHead(404);\n\t\t\tresponse.end();\n\t\t\treturn;\n\t\t}\n\t\t// Set the encoding for the incoming 
request\n\t\t// TODO: Presumably this would need tweaking if we supported PUTting binary tiddlers\n\t\trequest.setEncoding(\"utf8\");\n\t\t// Dispatch the appropriate method\n\t\tswitch(request.method) {\n\t\t\tcase \"GET\": // Intentional fall-through\n\t\t\tcase \"DELETE\":\n\t\t\t\troute.handler(request,response,state);\n\t\t\t\tbreak;\n\t\t\tcase \"PUT\":\n\t\t\t\tvar data = \"\";\n\t\t\t\trequest.on(\"data\",function(chunk) {\n\t\t\t\t\tdata += chunk.toString();\n\t\t\t\t});\n\t\t\t\trequest.on(\"end\",function() {\n\t\t\t\t\tstate.data = data;\n\t\t\t\t\troute.handler(request,response,state);\n\t\t\t\t});\n\t\t\t\tbreak;\n\t\t}\n\t}).listen(port,host);\n};\n\nvar Command = function(params,commander,callback) {\n\tthis.params = params;\n\tthis.commander = commander;\n\tthis.callback = callback;\n\t// Set up server\n\tthis.server = new SimpleServer({\n\t\twiki: this.commander.wiki\n\t});\n\t// Add route handlers\n\tthis.server.addRoute({\n\t\tmethod: \"PUT\",\n\t\tpath: /^\\/recipes\\/default\\/tiddlers\\/(.+)$/,\n\t\thandler: function(request,response,state) {\n\t\t\tvar title = decodeURIComponent(state.params[0]),\n\t\t\t\tfields = JSON.parse(state.data);\n\t\t\t// Pull up any subfields in the `fields` object\n\t\t\tif(fields.fields) {\n\t\t\t\t$tw.utils.each(fields.fields,function(field,name) {\n\t\t\t\t\tfields[name] = field;\n\t\t\t\t});\n\t\t\t\tdelete fields.fields;\n\t\t\t}\n\t\t\t// Remove any revision field\n\t\t\tif(fields.revision) {\n\t\t\t\tdelete fields.revision;\n\t\t\t}\n\t\t\tstate.wiki.addTiddler(new $tw.Tiddler(state.wiki.getCreationFields(),fields,{title: title},state.wiki.getModificationFields()));\n\t\t\tvar changeCount = state.wiki.getChangeCount(title).toString();\n\t\t\tresponse.writeHead(204, \"OK\",{\n\t\t\t\tEtag: \"\\\"default/\" + encodeURIComponent(title) + \"/\" + changeCount + \":\\\"\",\n\t\t\t\t\"Content-Type\": \"text/plain\"\n\t\t\t});\n\t\t\tresponse.end();\n\t\t}\n\t});\n\tthis.server.addRoute({\n\t\tmethod: \"DELETE\",\n\t\tpath: /^\\/bags\\/default\\/tiddlers\\/(.+)$/,\n\t\thandler: function(request,response,state) {\n\t\t\tvar title = decodeURIComponent(state.params[0]);\n\t\t\tstate.wiki.deleteTiddler(title);\n\t\t\tresponse.writeHead(204, \"OK\", {\n\t\t\t\t\"Content-Type\": \"text/plain\"\n\t\t\t});\n\t\t\tresponse.end();\n\t\t}\n\t});\n\tthis.server.addRoute({\n\t\tmethod: \"GET\",\n\t\tpath: /^\\/$/,\n\t\thandler: function(request,response,state) {\n\t\t\tresponse.writeHead(200, {\"Content-Type\": state.server.get(\"serveType\")});\n\t\t\tvar text = state.wiki.renderTiddler(state.server.get(\"renderType\"),state.server.get(\"rootTiddler\"));\n\t\t\tresponse.end(text,\"utf8\");\n\t\t}\n\t});\n\tthis.server.addRoute({\n\t\tmethod: \"GET\",\n\t\tpath: /^\\/status$/,\n\t\thandler: function(request,response,state) {\n\t\t\tresponse.writeHead(200, {\"Content-Type\": \"application/json\"});\n\t\t\tvar text = JSON.stringify({\n\t\t\t\tusername: state.server.get(\"username\"),\n\t\t\t\tspace: {\n\t\t\t\t\trecipe: \"default\"\n\t\t\t\t},\n\t\t\t\ttiddlywiki_version: $tw.version\n\t\t\t});\n\t\t\tresponse.end(text,\"utf8\");\n\t\t}\n\t});\n\tthis.server.addRoute({\n\t\tmethod: \"GET\",\n\t\tpath: /^\\/favicon.ico$/,\n\t\thandler: function(request,response,state) {\n\t\t\tresponse.writeHead(200, {\"Content-Type\": \"image/x-icon\"});\n\t\t\tvar buffer = state.wiki.getTiddlerText(\"$:/favicon.ico\",\"\");\n\t\t\tresponse.end(buffer,\"base64\");\n\t\t}\n\t});\n\tthis.server.addRoute({\n\t\tmethod: \"GET\",\n\t\tpath: 
/^\\/recipes\\/default\\/tiddlers.json$/,\n\t\thandler: function(request,response,state) {\n\t\t\tresponse.writeHead(200, {\"Content-Type\": \"application/json\"});\n\t\t\tvar tiddlers = [];\n\t\t\tstate.wiki.forEachTiddler({sortField: \"title\"},function(title,tiddler) {\n\t\t\t\tvar tiddlerFields = {};\n\t\t\t\t$tw.utils.each(tiddler.fields,function(field,name) {\n\t\t\t\t\tif(name !== \"text\") {\n\t\t\t\t\t\ttiddlerFields[name] = tiddler.getFieldString(name);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\ttiddlerFields.revision = state.wiki.getChangeCount(title);\n\t\t\t\ttiddlerFields.type = tiddlerFields.type || \"text/vnd.tiddlywiki\";\n\t\t\t\ttiddlers.push(tiddlerFields);\n\t\t\t});\n\t\t\tvar text = JSON.stringify(tiddlers);\n\t\t\tresponse.end(text,\"utf8\");\n\t\t}\n\t});\n\tthis.server.addRoute({\n\t\tmethod: \"GET\",\n\t\tpath: /^\\/recipes\\/default\\/tiddlers\\/(.+)$/,\n\t\thandler: function(request,response,state) {\n\t\t\tvar title = decodeURIComponent(state.params[0]),\n\t\t\t\ttiddler = state.wiki.getTiddler(title),\n\t\t\t\ttiddlerFields = {},\n\t\t\t\tknownFields = [\n\t\t\t\t\t\"bag\", \"created\", \"creator\", \"modified\", \"modifier\", \"permissions\", \"recipe\", \"revision\", \"tags\", \"text\", \"title\", \"type\", \"uri\"\n\t\t\t\t];\n\t\t\tif(tiddler) {\n\t\t\t\t$tw.utils.each(tiddler.fields,function(field,name) {\n\t\t\t\t\tvar value = tiddler.getFieldString(name);\n\t\t\t\t\tif(knownFields.indexOf(name) !== -1) {\n\t\t\t\t\t\ttiddlerFields[name] = value;\n\t\t\t\t\t} else {\n\t\t\t\t\t\ttiddlerFields.fields = tiddlerFields.fields || {};\n\t\t\t\t\t\ttiddlerFields.fields[name] = value;\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\ttiddlerFields.revision = state.wiki.getChangeCount(title);\n\t\t\t\ttiddlerFields.type = tiddlerFields.type || \"text/vnd.tiddlywiki\";\n\t\t\t\tresponse.writeHead(200, {\"Content-Type\": \"application/json\"});\n\t\t\t\tresponse.end(JSON.stringify(tiddlerFields),\"utf8\");\n\t\t\t} else {\n\t\t\t\tresponse.writeHead(404);\n\t\t\t\tresponse.end();\n\t\t\t}\n\t\t}\n\t});\n};\n\nCommand.prototype.execute = function() {\n\tif(!$tw.boot.wikiTiddlersPath) {\n\t\t$tw.utils.warning(\"Warning: Wiki folder '\" + $tw.boot.wikiPath + \"' does not exist or is missing a tiddlywiki.info file\");\n\t}\n\tvar port = this.params[0] || \"8080\",\n\t\trootTiddler = this.params[1] || \"$:/core/save/all\",\n\t\trenderType = this.params[2] || \"text/plain\",\n\t\tserveType = this.params[3] || \"text/html\",\n\t\tusername = this.params[4],\n\t\tpassword = this.params[5],\n\t\thost = this.params[6] || \"127.0.0.1\",\n\t\tpathprefix = this.params[7];\n\tthis.server.set({\n\t\trootTiddler: rootTiddler,\n\t\trenderType: renderType,\n\t\tserveType: serveType,\n\t\tusername: username,\n\t\tpassword: password,\n\t\tpathprefix: pathprefix\n\t});\n\tthis.server.listen(port,host);\n\tconsole.log(\"Serving on \" + host + \":\" + port);\n\tconsole.log(\"(press ctrl-C to exit)\");\n\t// Warn if required plugins are missing\n\tif(!$tw.wiki.getTiddler(\"$:/plugins/tiddlywiki/tiddlyweb\") || !$tw.wiki.getTiddler(\"$:/plugins/tiddlywiki/filesystem\")) {\n\t\t$tw.utils.warning(\"Warning: Plugins required for client-server operation (\\\"tiddlywiki/filesystem\\\" and \\\"tiddlywiki/tiddlyweb\\\") are missing from tiddlywiki.info file\");\n\t}\n\treturn null;\n};\n\nexports.Command = Command;\n\n})();\n",
"title": "$:/core/modules/commands/server.js",
"type": "application/javascript",
"module-type": "command"
},
"$:/core/modules/commands/setfield.js": {
"text": "/*\\\ntitle: $:/core/modules/commands/setfield.js\ntype: application/javascript\nmodule-type: command\n\nCommand to modify selected tiddlers to set a field to the text of a template tiddler that has been wikified with the selected tiddler as the current tiddler.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar widget = require(\"$:/core/modules/widgets/widget.js\");\n\nexports.info = {\n\tname: \"setfield\",\n\tsynchronous: true\n};\n\nvar Command = function(params,commander,callback) {\n\tthis.params = params;\n\tthis.commander = commander;\n\tthis.callback = callback;\n};\n\nCommand.prototype.execute = function() {\n\tif(this.params.length < 4) {\n\t\treturn \"Missing parameters\";\n\t}\n\tvar self = this,\n\t\twiki = this.commander.wiki,\n\t\tfilter = this.params[0],\n\t\tfieldname = this.params[1] || \"text\",\n\t\ttemplatetitle = this.params[2],\n\t\trendertype = this.params[3] || \"text/plain\",\n\t\ttiddlers = wiki.filterTiddlers(filter);\n\t$tw.utils.each(tiddlers,function(title) {\n\t\tvar parser = wiki.parseTiddler(templatetitle),\n\t\t\tnewFields = {},\n\t\t\ttiddler = wiki.getTiddler(title);\n\t\tif(parser) {\n\t\t\tvar widgetNode = wiki.makeWidget(parser,{variables: {currentTiddler: title}});\n\t\t\tvar container = $tw.fakeDocument.createElement(\"div\");\n\t\t\twidgetNode.render(container,null);\n\t\t\tnewFields[fieldname] = rendertype === \"text/html\" ? container.innerHTML : container.textContent;\n\t\t} else {\n\t\t\tnewFields[fieldname] = undefined;\n\t\t}\n\t\twiki.addTiddler(new $tw.Tiddler(tiddler,newFields));\n\t});\n\treturn null;\n};\n\nexports.Command = Command;\n\n})();\n",
"title": "$:/core/modules/commands/setfield.js",
"type": "application/javascript",
"module-type": "command"
},
"$:/core/modules/commands/unpackplugin.js": {
"text": "/*\\\ntitle: $:/core/modules/commands/unpackplugin.js\ntype: application/javascript\nmodule-type: command\n\nCommand to extract the shadow tiddlers from within a plugin\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.info = {\n\tname: \"unpackplugin\",\n\tsynchronous: true\n};\n\nvar Command = function(params,commander,callback) {\n\tthis.params = params;\n\tthis.commander = commander;\n\tthis.callback = callback;\n};\n\nCommand.prototype.execute = function() {\n\tif(this.params.length < 1) {\n\t\treturn \"Missing plugin name\";\n\t}\n\tvar self = this,\n\t\ttitle = this.params[0],\n\t\tpluginData = this.commander.wiki.getTiddlerDataCached(title);\n\tif(!pluginData) {\n\t\treturn \"Plugin '\" + title + \"' not found\";\n\t}\n\t$tw.utils.each(pluginData.tiddlers,function(tiddler) {\n\t\tself.commander.wiki.addTiddler(new $tw.Tiddler(tiddler));\n\t});\n\treturn null;\n};\n\nexports.Command = Command;\n\n})();\n",
"title": "$:/core/modules/commands/unpackplugin.js",
"type": "application/javascript",
"module-type": "command"
},
"$:/core/modules/commands/verbose.js": {
"text": "/*\\\ntitle: $:/core/modules/commands/verbose.js\ntype: application/javascript\nmodule-type: command\n\nVerbose command\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.info = {\n\tname: \"verbose\",\n\tsynchronous: true\n};\n\nvar Command = function(params,commander) {\n\tthis.params = params;\n\tthis.commander = commander;\n};\n\nCommand.prototype.execute = function() {\n\tthis.commander.verbose = true;\n\t// Output the boot message log\n\tthis.commander.streams.output.write(\"Boot log:\\n \" + $tw.boot.logMessages.join(\"\\n \") + \"\\n\");\n\treturn null; // No error\n};\n\nexports.Command = Command;\n\n})();\n",
"title": "$:/core/modules/commands/verbose.js",
"type": "application/javascript",
"module-type": "command"
},
"$:/core/modules/commands/version.js": {
"text": "/*\\\ntitle: $:/core/modules/commands/version.js\ntype: application/javascript\nmodule-type: command\n\nVersion command\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.info = {\n\tname: \"version\",\n\tsynchronous: true\n};\n\nvar Command = function(params,commander) {\n\tthis.params = params;\n\tthis.commander = commander;\n};\n\nCommand.prototype.execute = function() {\n\tthis.commander.streams.output.write($tw.version + \"\\n\");\n\treturn null; // No error\n};\n\nexports.Command = Command;\n\n})();\n",
"title": "$:/core/modules/commands/version.js",
"type": "application/javascript",
"module-type": "command"
},
"$:/core/modules/config.js": {
"text": "/*\\\ntitle: $:/core/modules/config.js\ntype: application/javascript\nmodule-type: config\n\nCore configuration constants\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.preferences = {};\n\nexports.preferences.notificationDuration = 3 * 1000;\nexports.preferences.jsonSpaces = 4;\n\nexports.textPrimitives = {\n\tupperLetter: \"[A-Z\\u00c0-\\u00d6\\u00d8-\\u00de\\u0150\\u0170]\",\n\tlowerLetter: \"[a-z\\u00df-\\u00f6\\u00f8-\\u00ff\\u0151\\u0171]\",\n\tanyLetter: \"[A-Za-z0-9\\u00c0-\\u00d6\\u00d8-\\u00de\\u00df-\\u00f6\\u00f8-\\u00ff\\u0150\\u0170\\u0151\\u0171]\",\n\tblockPrefixLetters:\t\"[A-Za-z0-9-_\\u00c0-\\u00d6\\u00d8-\\u00de\\u00df-\\u00f6\\u00f8-\\u00ff\\u0150\\u0170\\u0151\\u0171]\"\n};\n\nexports.textPrimitives.unWikiLink = \"~\";\nexports.textPrimitives.wikiLink = exports.textPrimitives.upperLetter + \"+\" +\n\texports.textPrimitives.lowerLetter + \"+\" +\n\texports.textPrimitives.upperLetter +\n\texports.textPrimitives.anyLetter + \"*\";\n\nexports.htmlEntities = {quot:34, amp:38, apos:39, lt:60, gt:62, nbsp:160, iexcl:161, cent:162, pound:163, curren:164, yen:165, brvbar:166, sect:167, uml:168, copy:169, ordf:170, laquo:171, not:172, shy:173, reg:174, macr:175, deg:176, plusmn:177, sup2:178, sup3:179, acute:180, micro:181, para:182, middot:183, cedil:184, sup1:185, ordm:186, raquo:187, frac14:188, frac12:189, frac34:190, iquest:191, Agrave:192, Aacute:193, Acirc:194, Atilde:195, Auml:196, Aring:197, AElig:198, Ccedil:199, Egrave:200, Eacute:201, Ecirc:202, Euml:203, Igrave:204, Iacute:205, Icirc:206, Iuml:207, ETH:208, Ntilde:209, Ograve:210, Oacute:211, Ocirc:212, Otilde:213, Ouml:214, times:215, Oslash:216, Ugrave:217, Uacute:218, Ucirc:219, Uuml:220, Yacute:221, THORN:222, szlig:223, agrave:224, aacute:225, acirc:226, atilde:227, auml:228, aring:229, aelig:230, ccedil:231, egrave:232, eacute:233, ecirc:234, euml:235, igrave:236, iacute:237, icirc:238, iuml:239, eth:240, ntilde:241, ograve:242, oacute:243, ocirc:244, otilde:245, ouml:246, divide:247, oslash:248, ugrave:249, uacute:250, ucirc:251, uuml:252, yacute:253, thorn:254, yuml:255, OElig:338, oelig:339, Scaron:352, scaron:353, Yuml:376, fnof:402, circ:710, tilde:732, Alpha:913, Beta:914, Gamma:915, Delta:916, Epsilon:917, Zeta:918, Eta:919, Theta:920, Iota:921, Kappa:922, Lambda:923, Mu:924, Nu:925, Xi:926, Omicron:927, Pi:928, Rho:929, Sigma:931, Tau:932, Upsilon:933, Phi:934, Chi:935, Psi:936, Omega:937, alpha:945, beta:946, gamma:947, delta:948, epsilon:949, zeta:950, eta:951, theta:952, iota:953, kappa:954, lambda:955, mu:956, nu:957, xi:958, omicron:959, pi:960, rho:961, sigmaf:962, sigma:963, tau:964, upsilon:965, phi:966, chi:967, psi:968, omega:969, thetasym:977, upsih:978, piv:982, ensp:8194, emsp:8195, thinsp:8201, zwnj:8204, zwj:8205, lrm:8206, rlm:8207, ndash:8211, mdash:8212, lsquo:8216, rsquo:8217, sbquo:8218, ldquo:8220, rdquo:8221, bdquo:8222, dagger:8224, Dagger:8225, bull:8226, hellip:8230, permil:8240, prime:8242, Prime:8243, lsaquo:8249, rsaquo:8250, oline:8254, frasl:8260, euro:8364, image:8465, weierp:8472, real:8476, trade:8482, alefsym:8501, larr:8592, uarr:8593, rarr:8594, darr:8595, harr:8596, crarr:8629, lArr:8656, uArr:8657, rArr:8658, dArr:8659, hArr:8660, forall:8704, part:8706, exist:8707, empty:8709, nabla:8711, isin:8712, notin:8713, ni:8715, prod:8719, sum:8721, minus:8722, lowast:8727, radic:8730, prop:8733, infin:8734, ang:8736, and:8743, or:8744, cap:8745, cup:8746, int:8747, there4:8756, sim:8764, 
cong:8773, asymp:8776, ne:8800, equiv:8801, le:8804, ge:8805, sub:8834, sup:8835, nsub:8836, sube:8838, supe:8839, oplus:8853, otimes:8855, perp:8869, sdot:8901, lceil:8968, rceil:8969, lfloor:8970, rfloor:8971, lang:9001, rang:9002, loz:9674, spades:9824, clubs:9827, hearts:9829, diams:9830 };\n\nexports.htmlVoidElements = \"area,base,br,col,command,embed,hr,img,input,keygen,link,meta,param,source,track,wbr\".split(\",\");\n\nexports.htmlBlockElements = \"address,article,aside,audio,blockquote,canvas,dd,div,dl,fieldset,figcaption,figure,footer,form,h1,h2,h3,h4,h5,h6,header,hgroup,hr,li,noscript,ol,output,p,pre,section,table,tfoot,ul,video\".split(\",\");\n\nexports.htmlUnsafeElements = \"script\".split(\",\");\n\n})();\n",
"title": "$:/core/modules/config.js",
"type": "application/javascript",
"module-type": "config"
},
"$:/core/modules/deserializers.js": {
"text": "/*\\\ntitle: $:/core/modules/deserializers.js\ntype: application/javascript\nmodule-type: tiddlerdeserializer\n\nFunctions to deserialise tiddlers from a block of text\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nUtility function to parse an old-style tiddler DIV in a *.tid file. It looks like this:\n\n<div title=\"Title\" creator=\"JoeBloggs\" modifier=\"JoeBloggs\" created=\"201102111106\" modified=\"201102111310\" tags=\"myTag [[my long tag]]\">\n<pre>The text of the tiddler (without the expected HTML encoding).\n</pre>\n</div>\n\nNote that the field attributes are HTML encoded, but that the body of the <PRE> tag is not encoded.\n\nWhen these tiddler DIVs are encountered within a TiddlyWiki HTML file then the body is encoded in the usual way.\n*/\nvar parseTiddlerDiv = function(text /* [,fields] */) {\n\t// Slot together the default results\n\tvar result = {};\n\tif(arguments.length > 1) {\n\t\tfor(var f=1; f<arguments.length; f++) {\n\t\t\tvar fields = arguments[f];\n\t\t\tfor(var t in fields) {\n\t\t\t\tresult[t] = fields[t];\t\t\n\t\t\t}\n\t\t}\n\t}\n\t// Parse the DIV body\n\tvar startRegExp = /^\\s*<div\\s+([^>]*)>(\\s*<pre>)?/gi,\n\t\tendRegExp,\n\t\tmatch = startRegExp.exec(text);\n\tif(match) {\n\t\t// Old-style DIVs don't have the <pre> tag\n\t\tif(match[2]) {\n\t\t\tendRegExp = /<\\/pre>\\s*<\\/div>\\s*$/gi;\n\t\t} else {\n\t\t\tendRegExp = /<\\/div>\\s*$/gi;\n\t\t}\n\t\tvar endMatch = endRegExp.exec(text);\n\t\tif(endMatch) {\n\t\t\t// Extract the text\n\t\t\tresult.text = text.substring(match.index + match[0].length,endMatch.index);\n\t\t\t// Process the attributes\n\t\t\tvar attrRegExp = /\\s*([^=\\s]+)\\s*=\\s*(?:\"([^\"]*)\"|'([^']*)')/gi,\n\t\t\t\tattrMatch;\n\t\t\tdo {\n\t\t\t\tattrMatch = attrRegExp.exec(match[1]);\n\t\t\t\tif(attrMatch) {\n\t\t\t\t\tvar name = attrMatch[1];\n\t\t\t\t\tvar value = attrMatch[2] !== undefined ? attrMatch[2] : attrMatch[3];\n\t\t\t\t\tresult[name] = value;\n\t\t\t\t}\n\t\t\t} while(attrMatch);\n\t\t\treturn result;\n\t\t}\n\t}\n\treturn undefined;\n};\n\nexports[\"application/x-tiddler-html-div\"] = function(text,fields) {\n\treturn [parseTiddlerDiv(text,fields)];\n};\n\nexports[\"application/json\"] = function(text,fields) {\n\tvar incoming = JSON.parse(text),\n\t\tresults = [];\n\tif($tw.utils.isArray(incoming)) {\n\t\tfor(var t=0; t<incoming.length; t++) {\n\t\t\tvar incomingFields = incoming[t],\n\t\t\t\tfields = {};\n\t\t\tfor(var f in incomingFields) {\n\t\t\t\tif(typeof incomingFields[f] === \"string\") {\n\t\t\t\t\tfields[f] = incomingFields[f];\n\t\t\t\t}\n\t\t\t}\n\t\t\tresults.push(fields);\n\t\t}\n\t}\n\treturn results;\n};\n\n/*\nParse an HTML file into tiddlers. 
There are three possibilities:\n# A TiddlyWiki classic HTML file containing `text/x-tiddlywiki` tiddlers\n# A TiddlyWiki5 HTML file containing `text/vnd.tiddlywiki` tiddlers\n# An ordinary HTML file\n*/\nexports[\"text/html\"] = function(text,fields) {\n\t// Check if we've got a store area\n\tvar storeAreaMarkerRegExp = /<div id=[\"']?storeArea['\"]?( style=[\"']?display:none;[\"']?)?>/gi,\n\t\tmatch = storeAreaMarkerRegExp.exec(text);\n\tif(match) {\n\t\t// If so, it's either a classic TiddlyWiki file or an unencrypted TW5 file\n\t\t// First read the normal tiddlers\n\t\tvar results = deserializeTiddlyWikiFile(text,storeAreaMarkerRegExp.lastIndex,!!match[1],fields);\n\t\t// Then any system tiddlers\n\t\tvar systemAreaMarkerRegExp = /<div id=[\"']?systemArea['\"]?( style=[\"']?display:none;[\"']?)?>/gi,\n\t\t\tsysMatch = systemAreaMarkerRegExp.exec(text);\n\t\tif(sysMatch) {\n\t\t\tresults.push.apply(results,deserializeTiddlyWikiFile(text,systemAreaMarkerRegExp.lastIndex,!!sysMatch[1],fields));\n\t\t}\n\t\treturn results;\n\t} else {\n\t\t// Check whether we've got an encrypted file\n\t\tvar encryptedStoreArea = $tw.utils.extractEncryptedStoreArea(text);\n\t\tif(encryptedStoreArea) {\n\t\t\t// If so, attempt to decrypt it using the current password\n\t\t\treturn $tw.utils.decryptStoreArea(encryptedStoreArea);\n\t\t} else {\n\t\t\t// It's not a TiddlyWiki so we'll return the entire HTML file as a tiddler\n\t\t\treturn deserializeHtmlFile(text,fields);\n\t\t}\n\t}\n};\n\nfunction deserializeHtmlFile(text,fields) {\n\tvar result = {};\n\t$tw.utils.each(fields,function(value,name) {\n\t\tresult[name] = value;\n\t});\n\tresult.text = text;\n\tresult.type = \"text/html\";\n\treturn [result];\n}\n\nfunction deserializeTiddlyWikiFile(text,storeAreaEnd,isTiddlyWiki5,fields) {\n\tvar results = [],\n\t\tendOfDivRegExp = /(<\\/div>\\s*)/gi,\n\t\tstartPos = storeAreaEnd,\n\t\tdefaultType = isTiddlyWiki5 ? undefined : \"text/x-tiddlywiki\";\n\tendOfDivRegExp.lastIndex = startPos;\n\tvar match = endOfDivRegExp.exec(text);\n\twhile(match) {\n\t\tvar endPos = endOfDivRegExp.lastIndex,\n\t\t\ttiddlerFields = parseTiddlerDiv(text.substring(startPos,endPos),fields,{type: defaultType});\n\t\tif(!tiddlerFields) {\n\t\t\tbreak;\n\t\t}\n\t\t$tw.utils.each(tiddlerFields,function(value,name) {\n\t\t\tif(typeof value === \"string\") {\n\t\t\t\ttiddlerFields[name] = $tw.utils.htmlDecode(value);\n\t\t\t}\n\t\t});\n\t\tif(tiddlerFields.text !== null) {\n\t\t\tresults.push(tiddlerFields);\n\t\t}\n\t\tstartPos = endPos;\n\t\tmatch = endOfDivRegExp.exec(text);\n\t}\n\treturn results;\n}\n\n})();\n",
"title": "$:/core/modules/deserializers.js",
"type": "application/javascript",
"module-type": "tiddlerdeserializer"
},
"$:/core/modules/editor/engines/framed.js": {
"text": "/*\\\ntitle: $:/core/modules/editor/engines/framed.js\ntype: application/javascript\nmodule-type: library\n\nText editor engine based on a simple input or textarea within an iframe. This is done so that the selection is preserved even when clicking away from the textarea\n\n\\*/\n(function(){\n\n/*jslint node: true,browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar HEIGHT_VALUE_TITLE = \"$:/config/TextEditor/EditorHeight/Height\";\n\nfunction FramedEngine(options) {\n\t// Save our options\n\toptions = options || {};\n\tthis.widget = options.widget;\n\tthis.value = options.value;\n\tthis.parentNode = options.parentNode;\n\tthis.nextSibling = options.nextSibling;\n\t// Create our hidden dummy text area for reading styles\n\tthis.dummyTextArea = this.widget.document.createElement(\"textarea\");\n\tif(this.widget.editClass) {\n\t\tthis.dummyTextArea.className = this.widget.editClass;\n\t}\n\tthis.dummyTextArea.setAttribute(\"hidden\",\"true\");\n\tthis.parentNode.insertBefore(this.dummyTextArea,this.nextSibling);\n\tthis.widget.domNodes.push(this.dummyTextArea);\n\t// Create the iframe\n\tthis.iframeNode = this.widget.document.createElement(\"iframe\");\n\tthis.parentNode.insertBefore(this.iframeNode,this.nextSibling);\n\tthis.iframeDoc = this.iframeNode.contentWindow.document;\n\t// (Firefox requires us to put some empty content in the iframe)\n\tthis.iframeDoc.open();\n\tthis.iframeDoc.write(\"\");\n\tthis.iframeDoc.close();\n\t// Style the iframe\n\tthis.iframeNode.className = this.dummyTextArea.className;\n\tthis.iframeNode.style.border = \"none\";\n\tthis.iframeNode.style.padding = \"0\";\n\tthis.iframeNode.style.resize = \"none\";\n\tthis.iframeDoc.body.style.margin = \"0\";\n\tthis.iframeDoc.body.style.padding = \"0\";\n\tthis.widget.domNodes.push(this.iframeNode);\n\t// Construct the textarea or input node\n\tvar tag = this.widget.editTag;\n\tif($tw.config.htmlUnsafeElements.indexOf(tag) !== -1) {\n\t\ttag = \"input\";\n\t}\n\tthis.domNode = this.iframeDoc.createElement(tag);\n\t// Set the text\n\tif(this.widget.editTag === \"textarea\") {\n\t\tthis.domNode.appendChild(this.iframeDoc.createTextNode(this.value));\n\t} else {\n\t\tthis.domNode.value = this.value;\n\t}\n\t// Set the attributes\n\tif(this.widget.editType) {\n\t\tthis.domNode.setAttribute(\"type\",this.widget.editType);\n\t}\n\tif(this.widget.editPlaceholder) {\n\t\tthis.domNode.setAttribute(\"placeholder\",this.widget.editPlaceholder);\n\t}\n\tif(this.widget.editSize) {\n\t\tthis.domNode.setAttribute(\"size\",this.widget.editSize);\n\t}\n\tif(this.widget.editRows) {\n\t\tthis.domNode.setAttribute(\"rows\",this.widget.editRows);\n\t}\n\t// Copy the styles from the dummy textarea\n\tthis.copyStyles();\n\t// Add event listeners\n\t$tw.utils.addEventListeners(this.domNode,[\n\t\t{name: \"input\",handlerObject: this,handlerMethod: \"handleInputEvent\"},\n\t\t{name: \"keydown\",handlerObject: this.widget,handlerMethod: \"handleKeydownEvent\"}\n\t]);\n\t// Insert the element into the DOM\n\tthis.iframeDoc.body.appendChild(this.domNode);\n}\n\n/*\nCopy styles from the dummy text area to the textarea in the iframe\n*/\nFramedEngine.prototype.copyStyles = function() {\n\t// Copy all styles\n\t$tw.utils.copyStyles(this.dummyTextArea,this.domNode);\n\t// Override the ones that should not be set the same as the dummy textarea\n\tthis.domNode.style.display = \"block\";\n\tthis.domNode.style.width = \"100%\";\n\tthis.domNode.style.margin = \"0\";\n\t// In Chrome setting -webkit-text-fill-color overrides the 
placeholder text colour\n\tthis.domNode.style[\"-webkit-text-fill-color\"] = \"currentcolor\";\n};\n\n/*\nSet the text of the engine if it doesn't currently have focus\n*/\nFramedEngine.prototype.setText = function(text,type) {\n\tif(!this.domNode.isTiddlyWikiFakeDom) {\n\t\tif(this.domNode.ownerDocument.activeElement !== this.domNode) {\n\t\t\tthis.domNode.value = text;\n\t\t}\n\t\t// Fix the height if needed\n\t\tthis.fixHeight();\n\t}\n};\n\n/*\nGet the text of the engine\n*/\nFramedEngine.prototype.getText = function() {\n\treturn this.domNode.value;\n};\n\n/*\nFix the height of textarea to fit content\n*/\nFramedEngine.prototype.fixHeight = function() {\n\t// Make sure styles are updated\n\tthis.copyStyles();\n\t// Adjust height\n\tif(this.widget.editTag === \"textarea\") {\n\t\tif(this.widget.editAutoHeight) {\n\t\t\tif(this.domNode && !this.domNode.isTiddlyWikiFakeDom) {\n\t\t\t\tvar newHeight = $tw.utils.resizeTextAreaToFit(this.domNode,this.widget.editMinHeight);\n\t\t\t\tthis.iframeNode.style.height = (newHeight + 14) + \"px\"; // +14 for the border on the textarea\n\t\t\t}\n\t\t} else {\n\t\t\tvar fixedHeight = parseInt(this.widget.wiki.getTiddlerText(HEIGHT_VALUE_TITLE,\"400px\"),10);\n\t\t\tfixedHeight = Math.max(fixedHeight,20);\n\t\t\tthis.domNode.style.height = fixedHeight + \"px\";\n\t\t\tthis.iframeNode.style.height = (fixedHeight + 14) + \"px\";\n\t\t}\n\t}\n};\n\n/*\nFocus the engine node\n*/\nFramedEngine.prototype.focus = function() {\n\tif(this.domNode.focus && this.domNode.select) {\n\t\tthis.domNode.focus();\n\t\tthis.domNode.select();\n\t}\n};\n\n/*\nHandle a dom \"input\" event which occurs when the text has changed\n*/\nFramedEngine.prototype.handleInputEvent = function(event) {\n\tthis.widget.saveChanges(this.getText());\n\tthis.fixHeight();\n\treturn true;\n};\n\n/*\nCreate a blank structure representing a text operation\n*/\nFramedEngine.prototype.createTextOperation = function() {\n\tvar operation = {\n\t\ttext: this.domNode.value,\n\t\tselStart: this.domNode.selectionStart,\n\t\tselEnd: this.domNode.selectionEnd,\n\t\tcutStart: null,\n\t\tcutEnd: null,\n\t\treplacement: null,\n\t\tnewSelStart: null,\n\t\tnewSelEnd: null\n\t};\n\toperation.selection = operation.text.substring(operation.selStart,operation.selEnd);\n\treturn operation;\n};\n\n/*\nExecute a text operation\n*/\nFramedEngine.prototype.executeTextOperation = function(operation) {\n\t// Perform the required changes to the text area and the underlying tiddler\n\tvar newText = operation.text;\n\tif(operation.replacement !== null) {\n\t\tnewText = operation.text.substring(0,operation.cutStart) + operation.replacement + operation.text.substring(operation.cutEnd);\n\t\t// Attempt to use a execCommand to modify the value of the control\n\t\tif(this.iframeDoc.queryCommandSupported(\"insertText\") && this.iframeDoc.queryCommandSupported(\"delete\") && !$tw.browser.isFirefox) {\n\t\t\tthis.domNode.focus();\n\t\t\tthis.domNode.setSelectionRange(operation.cutStart,operation.cutEnd);\n\t\t\tif(operation.replacement === \"\") {\n\t\t\t\tthis.iframeDoc.execCommand(\"delete\",false,\"\");\n\t\t\t} else {\n\t\t\t\tthis.iframeDoc.execCommand(\"insertText\",false,operation.replacement);\n\t\t\t}\n\t\t} else {\n\t\t\tthis.domNode.value = newText;\n\t\t}\n\t\tthis.domNode.focus();\n\t\tthis.domNode.setSelectionRange(operation.newSelStart,operation.newSelEnd);\n\t}\n\tthis.domNode.focus();\n\treturn newText;\n};\n\nexports.FramedEngine = FramedEngine;\n\n})();\n",
"title": "$:/core/modules/editor/engines/framed.js",
"type": "application/javascript",
"module-type": "library"
},
"$:/core/modules/editor/engines/simple.js": {
"text": "/*\\\ntitle: $:/core/modules/editor/engines/simple.js\ntype: application/javascript\nmodule-type: library\n\nText editor engine based on a simple input or textarea tag\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar HEIGHT_VALUE_TITLE = \"$:/config/TextEditor/EditorHeight/Height\";\n\nfunction SimpleEngine(options) {\n\t// Save our options\n\toptions = options || {};\n\tthis.widget = options.widget;\n\tthis.value = options.value;\n\tthis.parentNode = options.parentNode;\n\tthis.nextSibling = options.nextSibling;\n\t// Construct the textarea or input node\n\tvar tag = this.widget.editTag;\n\tif($tw.config.htmlUnsafeElements.indexOf(tag) !== -1) {\n\t\ttag = \"input\";\n\t}\n\tthis.domNode = this.widget.document.createElement(tag);\n\t// Set the text\n\tif(this.widget.editTag === \"textarea\") {\n\t\tthis.domNode.appendChild(this.widget.document.createTextNode(this.value));\n\t} else {\n\t\tthis.domNode.value = this.value;\n\t}\n\t// Set the attributes\n\tif(this.widget.editType) {\n\t\tthis.domNode.setAttribute(\"type\",this.widget.editType);\n\t}\n\tif(this.widget.editPlaceholder) {\n\t\tthis.domNode.setAttribute(\"placeholder\",this.widget.editPlaceholder);\n\t}\n\tif(this.widget.editSize) {\n\t\tthis.domNode.setAttribute(\"size\",this.widget.editSize);\n\t}\n\tif(this.widget.editRows) {\n\t\tthis.domNode.setAttribute(\"rows\",this.widget.editRows);\n\t}\n\tif(this.widget.editClass) {\n\t\tthis.domNode.className = this.widget.editClass;\n\t}\n\t// Add an input event handler\n\t$tw.utils.addEventListeners(this.domNode,[\n\t\t{name: \"focus\", handlerObject: this, handlerMethod: \"handleFocusEvent\"},\n\t\t{name: \"input\", handlerObject: this, handlerMethod: \"handleInputEvent\"}\n\t]);\n\t// Insert the element into the DOM\n\tthis.parentNode.insertBefore(this.domNode,this.nextSibling);\n\tthis.widget.domNodes.push(this.domNode);\n}\n\n/*\nSet the text of the engine if it doesn't currently have focus\n*/\nSimpleEngine.prototype.setText = function(text,type) {\n\tif(!this.domNode.isTiddlyWikiFakeDom) {\n\t\tif(this.domNode.ownerDocument.activeElement !== this.domNode) {\n\t\t\tthis.domNode.value = text;\n\t\t}\n\t\t// Fix the height if needed\n\t\tthis.fixHeight();\n\t}\n};\n\n/*\nGet the text of the engine\n*/\nSimpleEngine.prototype.getText = function() {\n\treturn this.domNode.value;\n};\n\n/*\nFix the height of textarea to fit content\n*/\nSimpleEngine.prototype.fixHeight = function() {\n\tif(this.widget.editTag === \"textarea\") {\n\t\tif(this.widget.editAutoHeight) {\n\t\t\tif(this.domNode && !this.domNode.isTiddlyWikiFakeDom) {\n\t\t\t\t$tw.utils.resizeTextAreaToFit(this.domNode,this.widget.editMinHeight);\n\t\t\t}\n\t\t} else {\n\t\t\tvar fixedHeight = parseInt(this.widget.wiki.getTiddlerText(HEIGHT_VALUE_TITLE,\"400px\"),10);\n\t\t\tfixedHeight = Math.max(fixedHeight,20);\n\t\t\tthis.domNode.style.height = fixedHeight + \"px\";\n\t\t}\n\t}\n};\n\n/*\nFocus the engine node\n*/\nSimpleEngine.prototype.focus = function() {\n\tif(this.domNode.focus && this.domNode.select) {\n\t\tthis.domNode.focus();\n\t\tthis.domNode.select();\n\t}\n};\n\n/*\nHandle a dom \"input\" event which occurs when the text has changed\n*/\nSimpleEngine.prototype.handleInputEvent = function(event) {\n\tthis.widget.saveChanges(this.getText());\n\tthis.fixHeight();\n\treturn true;\n};\n\n/*\nHandle a dom \"focus\" event\n*/\nSimpleEngine.prototype.handleFocusEvent = function(event) {\n\tif(this.widget.editFocusPopup) 
{\n\t\t$tw.popup.triggerPopup({\n\t\t\tdomNode: this.domNode,\n\t\t\ttitle: this.widget.editFocusPopup,\n\t\t\twiki: this.widget.wiki,\n\t\t\tforce: true\n\t\t});\n\t}\n\treturn true;\n};\n\n/*\nCreate a blank structure representing a text operation\n*/\nSimpleEngine.prototype.createTextOperation = function() {\n\treturn null;\n};\n\n/*\nExecute a text operation\n*/\nSimpleEngine.prototype.executeTextOperation = function(operation) {\n};\n\nexports.SimpleEngine = SimpleEngine;\n\n})();\n",
"title": "$:/core/modules/editor/engines/simple.js",
"type": "application/javascript",
"module-type": "library"
},
"$:/core/modules/editor/factory.js": {
"text": "/*\\\ntitle: $:/core/modules/editor/factory.js\ntype: application/javascript\nmodule-type: library\n\nFactory for constructing text editor widgets with specified engines for the toolbar and non-toolbar cases\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar DEFAULT_MIN_TEXT_AREA_HEIGHT = \"100px\"; // Minimum height of textareas in pixels\n\n// Configuration tiddlers\nvar HEIGHT_MODE_TITLE = \"$:/config/TextEditor/EditorHeight/Mode\";\nvar ENABLE_TOOLBAR_TITLE = \"$:/config/TextEditor/EnableToolbar\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nfunction editTextWidgetFactory(toolbarEngine,nonToolbarEngine) {\n\n\tvar EditTextWidget = function(parseTreeNode,options) {\n\t\t// Initialise the editor operations if they've not been done already\n\t\tif(!this.editorOperations) {\n\t\t\tEditTextWidget.prototype.editorOperations = {};\n\t\t\t$tw.modules.applyMethods(\"texteditoroperation\",this.editorOperations);\n\t\t}\n\t\tthis.initialise(parseTreeNode,options);\n\t};\n\n\t/*\n\tInherit from the base widget class\n\t*/\n\tEditTextWidget.prototype = new Widget();\n\n\t/*\n\tRender this widget into the DOM\n\t*/\n\tEditTextWidget.prototype.render = function(parent,nextSibling) {\n\t\t// Save the parent dom node\n\t\tthis.parentDomNode = parent;\n\t\t// Compute our attributes\n\t\tthis.computeAttributes();\n\t\t// Execute our logic\n\t\tthis.execute();\n\t\t// Create the wrapper for the toolbar and render its content\n\t\tif(this.editShowToolbar) {\n\t\t\tthis.toolbarNode = this.document.createElement(\"div\");\n\t\t\tthis.toolbarNode.className = \"tc-editor-toolbar\";\n\t\t\tparent.insertBefore(this.toolbarNode,nextSibling);\n\t\t\tthis.renderChildren(this.toolbarNode,null);\n\t\t\tthis.domNodes.push(this.toolbarNode);\n\t\t}\n\t\t// Create our element\n\t\tvar editInfo = this.getEditInfo(),\n\t\t\tEngine = this.editShowToolbar ? 
toolbarEngine : nonToolbarEngine;\n\t\tthis.engine = new Engine({\n\t\t\t\twidget: this,\n\t\t\t\tvalue: editInfo.value,\n\t\t\t\ttype: editInfo.type,\n\t\t\t\tparentNode: parent,\n\t\t\t\tnextSibling: nextSibling\n\t\t\t});\n\t\t// Call the postRender hook\n\t\tif(this.postRender) {\n\t\t\tthis.postRender();\n\t\t}\n\t\t// Fix height\n\t\tthis.engine.fixHeight();\n\t\t// Focus if required\n\t\tif(this.editFocus === \"true\" || this.editFocus === \"yes\") {\n\t\t\tthis.engine.focus();\n\t\t}\n\t\t// Add widget message listeners\n\t\tthis.addEventListeners([\n\t\t\t{type: \"tm-edit-text-operation\", handler: \"handleEditTextOperationMessage\"}\n\t\t]);\n\t};\n\n\t/*\n\tGet the tiddler being edited and current value\n\t*/\n\tEditTextWidget.prototype.getEditInfo = function() {\n\t\t// Get the edit value\n\t\tvar self = this,\n\t\t\tvalue,\n\t\t\ttype = \"text/plain\",\n\t\t\tupdate;\n\t\tif(this.editIndex) {\n\t\t\tvalue = this.wiki.extractTiddlerDataItem(this.editTitle,this.editIndex,this.editDefault);\n\t\t\tupdate = function(value) {\n\t\t\t\tvar data = self.wiki.getTiddlerData(self.editTitle,{});\n\t\t\t\tif(data[self.editIndex] !== value) {\n\t\t\t\t\tdata[self.editIndex] = value;\n\t\t\t\t\tself.wiki.setTiddlerData(self.editTitle,data);\n\t\t\t\t}\n\t\t\t};\n\t\t} else {\n\t\t\t// Get the current tiddler and the field name\n\t\t\tvar tiddler = this.wiki.getTiddler(this.editTitle);\n\t\t\tif(tiddler) {\n\t\t\t\t// If we've got a tiddler, the value to display is the field string value\n\t\t\t\tvalue = tiddler.getFieldString(this.editField);\n\t\t\t\tif(this.editField === \"text\") {\n\t\t\t\t\ttype = tiddler.fields.type || \"text/vnd.tiddlywiki\";\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\t// Otherwise, we need to construct a default value for the editor\n\t\t\t\tswitch(this.editField) {\n\t\t\t\t\tcase \"text\":\n\t\t\t\t\t\tvalue = \"Type the text for the tiddler '\" + this.editTitle + \"'\";\n\t\t\t\t\t\ttype = \"text/vnd.tiddlywiki\";\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase \"title\":\n\t\t\t\t\t\tvalue = this.editTitle;\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tdefault:\n\t\t\t\t\t\tvalue = \"\";\n\t\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t\tif(this.editDefault !== undefined) {\n\t\t\t\t\tvalue = this.editDefault;\n\t\t\t\t}\n\t\t\t}\n\t\t\tupdate = function(value) {\n\t\t\t\tvar tiddler = self.wiki.getTiddler(self.editTitle),\n\t\t\t\t\tupdateFields = {\n\t\t\t\t\t\ttitle: self.editTitle\n\t\t\t\t\t};\n\t\t\t\tupdateFields[self.editField] = value;\n\t\t\t\tself.wiki.addTiddler(new $tw.Tiddler(self.wiki.getCreationFields(),tiddler,updateFields,self.wiki.getModificationFields()));\n\t\t\t};\n\t\t}\n\t\tif(this.editType) {\n\t\t\ttype = this.editType;\n\t\t}\n\t\treturn {value: value || \"\", type: type, update: update};\n\t};\n\n\t/*\n\tHandle an edit text operation message from the toolbar\n\t*/\n\tEditTextWidget.prototype.handleEditTextOperationMessage = function(event) {\n\t\t// Prepare information about the operation\n\t\tvar operation = this.engine.createTextOperation();\n\t\t// Invoke the handler for the selected operation\n\t\tvar handler = this.editorOperations[event.param];\n\t\tif(handler) {\n\t\t\thandler.call(this,event,operation);\n\t\t}\n\t\t// Execute the operation via the engine\n\t\tvar newText = this.engine.executeTextOperation(operation);\n\t\t// Fix the tiddler height and save changes\n\t\tthis.engine.fixHeight();\n\t\tthis.saveChanges(newText);\n\t};\n\n\t/*\n\tCompute the internal state of the widget\n\t*/\n\tEditTextWidget.prototype.execute = function() {\n\t\t// Get our 
parameters\n\t\tthis.editTitle = this.getAttribute(\"tiddler\",this.getVariable(\"currentTiddler\"));\n\t\tthis.editField = this.getAttribute(\"field\",\"text\");\n\t\tthis.editIndex = this.getAttribute(\"index\");\n\t\tthis.editDefault = this.getAttribute(\"default\");\n\t\tthis.editClass = this.getAttribute(\"class\");\n\t\tthis.editPlaceholder = this.getAttribute(\"placeholder\");\n\t\tthis.editSize = this.getAttribute(\"size\");\n\t\tthis.editRows = this.getAttribute(\"rows\");\n\t\tthis.editAutoHeight = this.wiki.getTiddlerText(HEIGHT_MODE_TITLE,\"auto\");\n\t\tthis.editAutoHeight = this.getAttribute(\"autoHeight\",this.editAutoHeight === \"auto\" ? \"yes\" : \"no\") === \"yes\";\n\t\tthis.editMinHeight = this.getAttribute(\"minHeight\",DEFAULT_MIN_TEXT_AREA_HEIGHT);\n\t\tthis.editFocusPopup = this.getAttribute(\"focusPopup\");\n\t\tthis.editFocus = this.getAttribute(\"focus\");\n\t\t// Get the default editor element tag and type\n\t\tvar tag,type;\n\t\tif(this.editField === \"text\") {\n\t\t\ttag = \"textarea\";\n\t\t} else {\n\t\t\ttag = \"input\";\n\t\t\tvar fieldModule = $tw.Tiddler.fieldModules[this.editField];\n\t\t\tif(fieldModule && fieldModule.editTag) {\n\t\t\t\ttag = fieldModule.editTag;\n\t\t\t}\n\t\t\tif(fieldModule && fieldModule.editType) {\n\t\t\t\ttype = fieldModule.editType;\n\t\t\t}\n\t\t\ttype = type || \"text\";\n\t\t}\n\t\t// Get the rest of our parameters\n\t\tthis.editTag = this.getAttribute(\"tag\",tag);\n\t\tthis.editType = this.getAttribute(\"type\",type);\n\t\t// Make the child widgets\n\t\tthis.makeChildWidgets();\n\t\t// Determine whether to show the toolbar\n\t\tthis.editShowToolbar = this.wiki.getTiddlerText(ENABLE_TOOLBAR_TITLE,\"yes\");\n\t\tthis.editShowToolbar = (this.editShowToolbar === \"yes\") && !!(this.children && this.children.length > 0);\n\t};\n\n\t/*\n\tSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n\t*/\n\tEditTextWidget.prototype.refresh = function(changedTiddlers) {\n\t\tvar changedAttributes = this.computeAttributes();\n\t\t// Completely rerender if any of our attributes have changed\n\t\tif(changedAttributes.tiddler || changedAttributes.field || changedAttributes.index || changedAttributes[\"default\"] || changedAttributes[\"class\"] || changedAttributes.placeholder || changedAttributes.size || changedAttributes.autoHeight || changedAttributes.minHeight || changedAttributes.focusPopup || changedAttributes.rows || changedTiddlers[HEIGHT_MODE_TITLE] || changedTiddlers[ENABLE_TOOLBAR_TITLE]) {\n\t\t\tthis.refreshSelf();\n\t\t\treturn true;\n\t\t} else if(changedTiddlers[this.editTitle]) {\n\t\t\tvar editInfo = this.getEditInfo();\n\t\t\tthis.updateEditor(editInfo.value,editInfo.type);\n\t\t}\n\t\tthis.engine.fixHeight();\n\t\tif(this.editShowToolbar) {\n\t\t\treturn this.refreshChildren(changedTiddlers);\t\t\t\n\t\t} else {\n\t\t\treturn false;\n\t\t}\n\t};\n\n\t/*\n\tUpdate the editor with new text. 
This method is separate from updateEditorDomNode()\n\tso that subclasses can override updateEditor() and still use updateEditorDomNode()\n\t*/\n\tEditTextWidget.prototype.updateEditor = function(text,type) {\n\t\tthis.updateEditorDomNode(text,type);\n\t};\n\n\t/*\n\tUpdate the editor dom node with new text\n\t*/\n\tEditTextWidget.prototype.updateEditorDomNode = function(text,type) {\n\t\tthis.engine.setText(text,type);\n\t};\n\n\t/*\n\tSave changes back to the tiddler store\n\t*/\n\tEditTextWidget.prototype.saveChanges = function(text) {\n\t\tvar editInfo = this.getEditInfo();\n\t\tif(text !== editInfo.value) {\n\t\t\teditInfo.update(text);\n\t\t}\n\t};\n\n\t/*\n\tHandle a dom \"keydown\" event, which we'll bubble up to our container for the keyboard widgets benefit\n\t*/\n\tEditTextWidget.prototype.handleKeydownEvent = function(event) {\n\t\t// Check for a keyboard shortcut\n\t\tif(this.toolbarNode) {\n\t\t\tvar shortcutElements = this.toolbarNode.querySelectorAll(\"[data-tw-keyboard-shortcut]\");\n\t\t\tfor(var index=0; index<shortcutElements.length; index++) {\n\t\t\t\tvar el = shortcutElements[index],\n\t\t\t\t\tshortcutData = el.getAttribute(\"data-tw-keyboard-shortcut\"),\n\t\t\t\t\tkeyInfoArray = $tw.keyboardManager.parseKeyDescriptors(shortcutData,{\n\t\t\t\t\t\twiki: this.wiki\n\t\t\t\t\t});\n\t\t\t\tif($tw.keyboardManager.checkKeyDescriptors(event,keyInfoArray)) {\n\t\t\t\t\tvar clickEvent = this.document.createEvent(\"Events\");\n\t\t\t\t clickEvent.initEvent(\"click\",true,false);\n\t\t\t\t el.dispatchEvent(clickEvent);\n\t\t\t\t\tevent.preventDefault();\n\t\t\t\t\tevent.stopPropagation();\n\t\t\t\t\treturn true;\t\t\t\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\t// Propogate the event to the container\n\t\tif(this.propogateKeydownEvent(event)) {\n\t\t\t// Ignore the keydown if it was already handled\n\t\t\tevent.preventDefault();\n\t\t\tevent.stopPropagation();\n\t\t\treturn true;\n\t\t}\n\t\t// Otherwise, process the keydown normally\n\t\treturn false;\n\t};\n\n\t/*\n\tPropogate keydown events to our container for the keyboard widgets benefit\n\t*/\n\tEditTextWidget.prototype.propogateKeydownEvent = function(event) {\n\t\tvar newEvent = this.document.createEventObject ? this.document.createEventObject() : this.document.createEvent(\"Events\");\n\t\tif(newEvent.initEvent) {\n\t\t\tnewEvent.initEvent(\"keydown\", true, true);\n\t\t}\n\t\tnewEvent.keyCode = event.keyCode;\n\t\tnewEvent.which = event.which;\n\t\tnewEvent.metaKey = event.metaKey;\n\t\tnewEvent.ctrlKey = event.ctrlKey;\n\t\tnewEvent.altKey = event.altKey;\n\t\tnewEvent.shiftKey = event.shiftKey;\n\t\treturn !this.parentDomNode.dispatchEvent(newEvent);\n\t};\n\n\treturn EditTextWidget;\n\n}\n\nexports.editTextWidgetFactory = editTextWidgetFactory;\n\n})();\n",
"title": "$:/core/modules/editor/factory.js",
"type": "application/javascript",
"module-type": "library"
},
"$:/core/modules/editor/operations/bitmap/clear.js": {
"text": "/*\\\ntitle: $:/core/modules/editor/operations/bitmap/clear.js\ntype: application/javascript\nmodule-type: bitmapeditoroperation\n\nBitmap editor operation to clear the image\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports[\"clear\"] = function(event) {\n\tvar ctx = this.canvasDomNode.getContext(\"2d\");\n\tctx.globalAlpha = 1;\n\tctx.fillStyle = event.paramObject.colour || \"white\";\n\tctx.fillRect(0,0,this.canvasDomNode.width,this.canvasDomNode.height);\n\t// Save changes\n\tthis.strokeEnd();\n};\n\n})();\n",
"title": "$:/core/modules/editor/operations/bitmap/clear.js",
"type": "application/javascript",
"module-type": "bitmapeditoroperation"
},
"$:/core/modules/editor/operations/bitmap/resize.js": {
"text": "/*\\\ntitle: $:/core/modules/editor/operations/bitmap/resize.js\ntype: application/javascript\nmodule-type: bitmapeditoroperation\n\nBitmap editor operation to resize the image\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports[\"resize\"] = function(event) {\n\t// Get the new width\n\tvar newWidth = parseInt(event.paramObject.width || this.canvasDomNode.width,10),\n\t\tnewHeight = parseInt(event.paramObject.height || this.canvasDomNode.height,10);\n\t// Update if necessary\n\tif(newWidth > 0 && newHeight > 0 && !(newWidth === this.currCanvas.width && newHeight === this.currCanvas.height)) {\n\t\tthis.changeCanvasSize(newWidth,newHeight);\n\t}\n\t// Update the input controls\n\tthis.refreshToolbar();\n\t// Save the image into the tiddler\n\tthis.saveChanges();\n};\n\n})();\n",
"title": "$:/core/modules/editor/operations/bitmap/resize.js",
"type": "application/javascript",
"module-type": "bitmapeditoroperation"
},
"$:/core/modules/editor/operations/text/excise.js": {
"text": "/*\\\ntitle: $:/core/modules/editor/operations/text/excise.js\ntype: application/javascript\nmodule-type: texteditoroperation\n\nText editor operation to excise the selection to a new tiddler\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports[\"excise\"] = function(event,operation) {\n\tvar editTiddler = this.wiki.getTiddler(this.editTitle),\n\t\teditTiddlerTitle = this.editTitle;\n\tif(editTiddler && editTiddler.fields[\"draft.of\"]) {\n\t\teditTiddlerTitle = editTiddler.fields[\"draft.of\"];\n\t}\n\tvar excisionTitle = event.paramObject.title || this.wiki.generateNewTitle(\"New Excision\");\n\tthis.wiki.addTiddler(new $tw.Tiddler(\n\t\tthis.wiki.getCreationFields(),\n\t\tthis.wiki.getModificationFields(),\n\t\t{\n\t\t\ttitle: excisionTitle,\n\t\t\ttext: operation.selection,\n\t\t\ttags: event.paramObject.tagnew === \"yes\" ? [editTiddlerTitle] : []\n\t\t}\n\t));\n\toperation.replacement = excisionTitle;\n\tswitch(event.paramObject.type || \"transclude\") {\n\t\tcase \"transclude\":\n\t\t\toperation.replacement = \"{{\" + operation.replacement+ \"}}\";\n\t\t\tbreak;\n\t\tcase \"link\":\n\t\t\toperation.replacement = \"[[\" + operation.replacement+ \"]]\";\n\t\t\tbreak;\n\t\tcase \"macro\":\n\t\t\toperation.replacement = \"<<\" + (event.paramObject.macro || \"translink\") + \" \\\"\\\"\\\"\" + operation.replacement + \"\\\"\\\"\\\">>\";\n\t\t\tbreak;\n\t}\n\toperation.cutStart = operation.selStart;\n\toperation.cutEnd = operation.selEnd;\n\toperation.newSelStart = operation.selStart;\n\toperation.newSelEnd = operation.selStart + operation.replacement.length;\n};\n\n})();\n",
"title": "$:/core/modules/editor/operations/text/excise.js",
"type": "application/javascript",
"module-type": "texteditoroperation"
},
"$:/core/modules/editor/operations/text/make-link.js": {
"text": "/*\\\ntitle: $:/core/modules/editor/operations/text/make-link.js\ntype: application/javascript\nmodule-type: texteditoroperation\n\nText editor operation to make a link\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports[\"make-link\"] = function(event,operation) {\n\tif(operation.selection) {\n\t\toperation.replacement = \"[[\" + operation.selection + \"|\" + event.paramObject.text + \"]]\";\n\t\toperation.cutStart = operation.selStart;\n\t\toperation.cutEnd = operation.selEnd;\n\t} else {\n\t\toperation.replacement = \"[[\" + event.paramObject.text + \"]]\";\n\t\toperation.cutStart = operation.selStart;\n\t\toperation.cutEnd = operation.selEnd;\n\t}\n\toperation.newSelStart = operation.selStart + operation.replacement.length;\n\toperation.newSelEnd = operation.newSelStart;\n};\n\n})();\n",
"title": "$:/core/modules/editor/operations/text/make-link.js",
"type": "application/javascript",
"module-type": "texteditoroperation"
},
"$:/core/modules/editor/operations/text/prefix-lines.js": {
"text": "/*\\\ntitle: $:/core/modules/editor/operations/text/prefix-lines.js\ntype: application/javascript\nmodule-type: texteditoroperation\n\nText editor operation to add a prefix to the selected lines\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports[\"prefix-lines\"] = function(event,operation) {\n\t// Cut just past the preceding line break, or the start of the text\n\toperation.cutStart = $tw.utils.findPrecedingLineBreak(operation.text,operation.selStart);\n\t// Cut to just past the following line break, or to the end of the text\n\toperation.cutEnd = $tw.utils.findFollowingLineBreak(operation.text,operation.selEnd);\n\t// Compose the required prefix\n\tvar prefix = $tw.utils.repeat(event.paramObject.character,event.paramObject.count);\n\t// Process each line\n\tvar lines = operation.text.substring(operation.cutStart,operation.cutEnd).split(/\\r?\\n/mg);\n\t$tw.utils.each(lines,function(line,index) {\n\t\t// Remove and count any existing prefix characters\n\t\tvar count = 0;\n\t\twhile(line.charAt(0) === event.paramObject.character) {\n\t\t\tline = line.substring(1);\n\t\t\tcount++;\n\t\t}\n\t\t// Remove any whitespace\n\t\twhile(line.charAt(0) === \" \") {\n\t\t\tline = line.substring(1);\n\t\t}\n\t\t// We're done if we removed the exact required prefix, otherwise add it\n\t\tif(count !== event.paramObject.count) {\n\t\t\t// Apply the prefix\n\t\t\tline = prefix + \" \" + line;\n\t\t}\n\t\t// Save the modified line\n\t\tlines[index] = line;\n\t});\n\t// Stitch the replacement text together and set the selection\n\toperation.replacement = lines.join(\"\\n\");\n\tif(lines.length === 1) {\n\t\toperation.newSelStart = operation.cutStart + operation.replacement.length;\n\t\toperation.newSelEnd = operation.newSelStart;\n\t} else {\n\t\toperation.newSelStart = operation.cutStart;\n\t\toperation.newSelEnd = operation.newSelStart + operation.replacement.length;\n\t}\n};\n\n})();\n",
"title": "$:/core/modules/editor/operations/text/prefix-lines.js",
"type": "application/javascript",
"module-type": "texteditoroperation"
},
"$:/core/modules/editor/operations/text/replace-all.js": {
"text": "/*\\\ntitle: $:/core/modules/editor/operations/text/replace-all.js\ntype: application/javascript\nmodule-type: texteditoroperation\n\nText editor operation to replace the entire text\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports[\"replace-all\"] = function(event,operation) {\n\toperation.cutStart = 0;\n\toperation.cutEnd = operation.text.length;\n\toperation.replacement = event.paramObject.text;\n\toperation.newSelStart = 0;\n\toperation.newSelEnd = operation.replacement.length;\n};\n\n})();\n",
"title": "$:/core/modules/editor/operations/text/replace-all.js",
"type": "application/javascript",
"module-type": "texteditoroperation"
},
"$:/core/modules/editor/operations/text/replace-selection.js": {
"text": "/*\\\ntitle: $:/core/modules/editor/operations/text/replace-selection.js\ntype: application/javascript\nmodule-type: texteditoroperation\n\nText editor operation to replace the selection\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports[\"replace-selection\"] = function(event,operation) {\n\toperation.replacement = event.paramObject.text;\n\toperation.cutStart = operation.selStart;\n\toperation.cutEnd = operation.selEnd;\n\toperation.newSelStart = operation.selStart;\n\toperation.newSelEnd = operation.selStart + operation.replacement.length;\n};\n\n})();\n",
"title": "$:/core/modules/editor/operations/text/replace-selection.js",
"type": "application/javascript",
"module-type": "texteditoroperation"
},
"$:/core/modules/editor/operations/text/wrap-lines.js": {
"text": "/*\\\ntitle: $:/core/modules/editor/operations/text/wrap-lines.js\ntype: application/javascript\nmodule-type: texteditoroperation\n\nText editor operation to wrap the selected lines with a prefix and suffix\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports[\"wrap-lines\"] = function(event,operation) {\n\t// Cut just past the preceding line break, or the start of the text\n\toperation.cutStart = $tw.utils.findPrecedingLineBreak(operation.text,operation.selStart);\n\t// Cut to just past the following line break, or to the end of the text\n\toperation.cutEnd = $tw.utils.findFollowingLineBreak(operation.text,operation.selEnd);\n\t// Add the prefix and suffix\n\toperation.replacement = event.paramObject.prefix + \"\\n\" +\n\t\t\t\toperation.text.substring(operation.cutStart,operation.cutEnd) + \"\\n\" +\n\t\t\t\tevent.paramObject.suffix + \"\\n\";\n\toperation.newSelStart = operation.cutStart + event.paramObject.prefix.length + 1;\n\toperation.newSelEnd = operation.newSelStart + (operation.cutEnd - operation.cutStart);\n};\n\n})();\n",
"title": "$:/core/modules/editor/operations/text/wrap-lines.js",
"type": "application/javascript",
"module-type": "texteditoroperation"
},
"$:/core/modules/editor/operations/text/wrap-selection.js": {
"text": "/*\\\ntitle: $:/core/modules/editor/operations/text/wrap-selection.js\ntype: application/javascript\nmodule-type: texteditoroperation\n\nText editor operation to wrap the selection with the specified prefix and suffix\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports[\"wrap-selection\"] = function(event,operation) {\n\tif(operation.selStart === operation.selEnd) {\n\t\t// No selection; check if we're within the prefix/suffix\n\t\tif(operation.text.substring(operation.selStart - event.paramObject.prefix.length,operation.selStart + event.paramObject.suffix.length) === event.paramObject.prefix + event.paramObject.suffix) {\n\t\t\t// Remove the prefix and suffix unless they comprise the entire text\n\t\t\tif(operation.selStart > event.paramObject.prefix.length || (operation.selEnd + event.paramObject.suffix.length) < operation.text.length ) {\n\t\t\t\toperation.cutStart = operation.selStart - event.paramObject.prefix.length;\n\t\t\t\toperation.cutEnd = operation.selEnd + event.paramObject.suffix.length;\n\t\t\t\toperation.replacement = \"\";\n\t\t\t\toperation.newSelStart = operation.cutStart;\n\t\t\t\toperation.newSelEnd = operation.newSelStart;\n\t\t\t}\n\t\t} else {\n\t\t\t// Wrap the cursor instead\n\t\t\toperation.cutStart = operation.selStart;\n\t\t\toperation.cutEnd = operation.selEnd;\n\t\t\toperation.replacement = event.paramObject.prefix + event.paramObject.suffix;\n\t\t\toperation.newSelStart = operation.selStart + event.paramObject.prefix.length;\n\t\t\toperation.newSelEnd = operation.newSelStart;\n\t\t}\n\t} else if(operation.text.substring(operation.selStart,operation.selStart + event.paramObject.prefix.length) === event.paramObject.prefix && operation.text.substring(operation.selEnd - event.paramObject.suffix.length,operation.selEnd) === event.paramObject.suffix) {\n\t\t// Prefix and suffix are already present, so remove them\n\t\toperation.cutStart = operation.selStart;\n\t\toperation.cutEnd = operation.selEnd;\n\t\toperation.replacement = operation.selection.substring(event.paramObject.prefix.length,operation.selection.length - event.paramObject.suffix.length);\n\t\toperation.newSelStart = operation.selStart;\n\t\toperation.newSelEnd = operation.selStart + operation.replacement.length;\n\t} else {\n\t\t// Add the prefix and suffix\n\t\toperation.cutStart = operation.selStart;\n\t\toperation.cutEnd = operation.selEnd;\n\t\toperation.replacement = event.paramObject.prefix + operation.selection + event.paramObject.suffix;\n\t\toperation.newSelStart = operation.selStart;\n\t\toperation.newSelEnd = operation.selStart + operation.replacement.length;\n\t}\n};\n\n})();\n",
"title": "$:/core/modules/editor/operations/text/wrap-selection.js",
"type": "application/javascript",
"module-type": "texteditoroperation"
},
"$:/core/modules/filters/addprefix.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/addprefix.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for adding a prefix to each title in the list. This is\nespecially useful in contexts where only a filter expression is allowed\nand macro substitution isn't available.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.addprefix = function(source,operator,options) {\n\tvar results = [];\n\tsource(function(tiddler,title) {\n\t\tresults.push(operator.operand + title);\n\t});\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/addprefix.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/addsuffix.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/addsuffix.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for adding a suffix to each title in the list. This is\nespecially useful in contexts where only a filter expression is allowed\nand macro substitution isn't available.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.addsuffix = function(source,operator,options) {\n\tvar results = [];\n\tsource(function(tiddler,title) {\n\t\tresults.push(title + operator.operand);\n\t});\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/addsuffix.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/after.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/after.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator returning the tiddler from the current list that is after the tiddler named in the operand.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.after = function(source,operator,options) {\n\tvar results = [];\n\tsource(function(tiddler,title) {\n\t\tresults.push(title);\n\t});\n\tvar index = results.indexOf(operator.operand);\n\tif(index === -1 || index > (results.length - 2)) {\n\t\treturn [];\n\t} else {\n\t\treturn [results[index + 1]];\n\t}\n};\n\n})();\n",
"title": "$:/core/modules/filters/after.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/all/current.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/all/current.js\ntype: application/javascript\nmodule-type: allfilteroperator\n\nFilter function for [all[current]]\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.current = function(source,prefix,options) {\n\tvar currTiddlerTitle = options.widget && options.widget.getVariable(\"currentTiddler\");\n\tif(currTiddlerTitle) {\n\t\treturn [currTiddlerTitle];\n\t} else {\n\t\treturn [];\n\t}\n};\n\n})();\n",
"title": "$:/core/modules/filters/all/current.js",
"type": "application/javascript",
"module-type": "allfilteroperator"
},
"$:/core/modules/filters/all/missing.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/all/missing.js\ntype: application/javascript\nmodule-type: allfilteroperator\n\nFilter function for [all[missing]]\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.missing = function(source,prefix,options) {\n\treturn options.wiki.getMissingTitles();\n};\n\n})();\n",
"title": "$:/core/modules/filters/all/missing.js",
"type": "application/javascript",
"module-type": "allfilteroperator"
},
"$:/core/modules/filters/all/orphans.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/all/orphans.js\ntype: application/javascript\nmodule-type: allfilteroperator\n\nFilter function for [all[orphans]]\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.orphans = function(source,prefix,options) {\n\treturn options.wiki.getOrphanTitles();\n};\n\n})();\n",
"title": "$:/core/modules/filters/all/orphans.js",
"type": "application/javascript",
"module-type": "allfilteroperator"
},
"$:/core/modules/filters/all/shadows.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/all/shadows.js\ntype: application/javascript\nmodule-type: allfilteroperator\n\nFilter function for [all[shadows]]\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.shadows = function(source,prefix,options) {\n\treturn options.wiki.allShadowTitles();\n};\n\n})();\n",
"title": "$:/core/modules/filters/all/shadows.js",
"type": "application/javascript",
"module-type": "allfilteroperator"
},
"$:/core/modules/filters/all/tiddlers.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/all/tiddlers.js\ntype: application/javascript\nmodule-type: allfilteroperator\n\nFilter function for [all[tiddlers]]\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.tiddlers = function(source,prefix,options) {\n\treturn options.wiki.allTitles();\n};\n\n})();\n",
"title": "$:/core/modules/filters/all/tiddlers.js",
"type": "application/javascript",
"module-type": "allfilteroperator"
},
"$:/core/modules/filters/all.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/all.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for selecting tiddlers\n\n[all[shadows+tiddlers]]\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar allFilterOperators;\n\nfunction getAllFilterOperators() {\n\tif(!allFilterOperators) {\n\t\tallFilterOperators = {};\n\t\t$tw.modules.applyMethods(\"allfilteroperator\",allFilterOperators);\n\t}\n\treturn allFilterOperators;\n}\n\n/*\nExport our filter function\n*/\nexports.all = function(source,operator,options) {\n\t// Get our suboperators\n\tvar allFilterOperators = getAllFilterOperators();\n\t// Cycle through the suboperators accumulating their results\n\tvar results = [],\n\t\tsubops = operator.operand.split(\"+\");\n\t// Check for common optimisations\n\tif(subops.length === 1 && subops[0] === \"\") {\n\t\treturn source;\n\t} else if(subops.length === 1 && subops[0] === \"tiddlers\") {\n\t\treturn options.wiki.each;\n\t} else if(subops.length === 1 && subops[0] === \"shadows\") {\n\t\treturn options.wiki.eachShadow;\n\t} else if(subops.length === 2 && subops[0] === \"tiddlers\" && subops[1] === \"shadows\") {\n\t\treturn options.wiki.eachTiddlerPlusShadows;\n\t} else if(subops.length === 2 && subops[0] === \"shadows\" && subops[1] === \"tiddlers\") {\n\t\treturn options.wiki.eachShadowPlusTiddlers;\n\t}\n\t// Do it the hard way\n\tfor(var t=0; t<subops.length; t++) {\n\t\tvar subop = allFilterOperators[subops[t]];\n\t\tif(subop) {\n\t\t\t$tw.utils.pushTop(results,subop(source,operator.prefix,options));\n\t\t}\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/all.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/backlinks.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/backlinks.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for returning all the backlinks from a tiddler\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.backlinks = function(source,operator,options) {\n\tvar results = [];\n\tsource(function(tiddler,title) {\n\t\t$tw.utils.pushTop(results,options.wiki.getTiddlerBacklinks(title));\n\t});\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/backlinks.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/before.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/before.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator returning the tiddler from the current list that is before the tiddler named in the operand.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.before = function(source,operator,options) {\n\tvar results = [];\n\tsource(function(tiddler,title) {\n\t\tresults.push(title);\n\t});\n\tvar index = results.indexOf(operator.operand);\n\tif(index <= 0) {\n\t\treturn [];\n\t} else {\n\t\treturn [results[index - 1]];\n\t}\n};\n\n})();\n",
"title": "$:/core/modules/filters/before.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/commands.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/commands.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for returning the names of the commands available in this wiki\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.commands = function(source,operator,options) {\n\tvar results = [];\n\t$tw.utils.each($tw.commands,function(commandInfo,name) {\n\t\tresults.push(name);\n\t});\n\tresults.sort();\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/commands.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/days.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/days.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator that selects tiddlers with a specified date field within a specified date interval.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.days = function(source,operator,options) {\n\tvar results = [],\n\t\tfieldName = operator.suffix || \"modified\",\n\t\tdayInterval = (parseInt(operator.operand,10)||0),\n\t\tdayIntervalSign = $tw.utils.sign(dayInterval),\n\t\ttargetTimeStamp = (new Date()).setHours(0,0,0,0) + 1000*60*60*24*dayInterval,\n\t\tisWithinDays = function(dateField) {\n\t\t\tvar sign = $tw.utils.sign(targetTimeStamp - (new Date(dateField)).setHours(0,0,0,0));\n\t\t\treturn sign === 0 || sign === dayIntervalSign;\n\t\t};\n\n\tif(operator.prefix === \"!\") {\n\t\ttargetTimeStamp = targetTimeStamp - 1000*60*60*24*dayIntervalSign;\n\t\tsource(function(tiddler,title) {\n\t\t\tif(tiddler && tiddler.fields[fieldName]) {\n\t\t\t\tif(!isWithinDays($tw.utils.parseDate(tiddler.fields[fieldName]))) {\n\t\t\t\t\tresults.push(title);\n\t\t\t\t}\n\t\t\t}\n\t\t});\n\t} else {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(tiddler && tiddler.fields[fieldName]) {\n\t\t\t\tif(isWithinDays($tw.utils.parseDate(tiddler.fields[fieldName]))) {\n\t\t\t\t\tresults.push(title);\n\t\t\t\t}\n\t\t\t}\n\t\t});\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/days.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/each.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/each.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator that selects one tiddler for each unique value of the specified field.\nWith suffix \"list\", selects all tiddlers that are values in a specified list field.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.each = function(source,operator,options) {\n\tvar results =[] ,\n\t\tvalue,values = {},\n\t\tfield = operator.operand || \"title\";\n\tif(operator.suffix !== \"list-item\") {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(tiddler) {\n\t\t\t\tvalue = (field === \"title\") ? title : tiddler.getFieldString(field);\n\t\t\t\tif(!$tw.utils.hop(values,value)) {\n\t\t\t\t\tvalues[value] = true;\n\t\t\t\t\tresults.push(title);\n\t\t\t\t}\n\t\t\t}\n\t\t});\n\t} else {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(tiddler) {\n\t\t\t\t$tw.utils.each(\n\t\t\t\t\toptions.wiki.getTiddlerList(title,field),\n\t\t\t\t\tfunction(value) {\n\t\t\t\t\t\tif(!$tw.utils.hop(values,value)) {\n\t\t\t\t\t\t\tvalues[value] = true;\n\t\t\t\t\t\t\tresults.push(value);\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t);\n\t\t\t}\n\t\t});\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/each.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/eachday.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/eachday.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator that selects one tiddler for each unique day covered by the specified date field\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.eachday = function(source,operator,options) {\n\tvar results = [],\n\t\tvalues = [],\n\t\tfieldName = operator.operand || \"modified\";\n\t// Function to convert a date/time to a date integer\n\tvar toDate = function(value) {\n\t\tvalue = (new Date(value)).setHours(0,0,0,0);\n\t\treturn value+0;\n\t};\n\tsource(function(tiddler,title) {\n\t\tif(tiddler && tiddler.fields[fieldName]) {\n\t\t\tvar value = toDate($tw.utils.parseDate(tiddler.fields[fieldName]));\n\t\t\tif(values.indexOf(value) === -1) {\n\t\t\t\tvalues.push(value);\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t}\n\t});\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/eachday.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/editiondescription.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/editiondescription.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for returning the descriptions of the specified edition names\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.editiondescription = function(source,operator,options) {\n\tvar results = [],\n\t\teditionInfo = $tw.utils.getEditionInfo();\n\tif(editionInfo) {\n\t\tsource(function(tiddler,title) {\n\t\t\tif($tw.utils.hop(editionInfo,title)) {\n\t\t\t\tresults.push(editionInfo[title].description || \"\");\t\t\t\t\n\t\t\t}\n\t\t});\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/editiondescription.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/editions.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/editions.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for returning the names of the available editions in this wiki\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.editions = function(source,operator,options) {\n\tvar results = [],\n\t\teditionInfo = $tw.utils.getEditionInfo();\n\tif(editionInfo) {\n\t\t$tw.utils.each(editionInfo,function(info,name) {\n\t\t\tresults.push(name);\n\t\t});\n\t}\n\tresults.sort();\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/editions.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/field.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/field.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for comparing fields for equality\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.field = function(source,operator,options) {\n\tvar results = [],\n\t\tfieldname = (operator.suffix || operator.operator || \"title\").toLowerCase();\n\tif(operator.prefix === \"!\") {\n\t\tif(operator.regexp) {\n\t\t\tsource(function(tiddler,title) {\n\t\t\t\tif(tiddler) {\n\t\t\t\t\tvar text = tiddler.getFieldString(fieldname);\n\t\t\t\t\tif(text !== null && !operator.regexp.exec(text)) {\n\t\t\t\t\t\tresults.push(title);\n\t\t\t\t\t}\n\t\t\t\t} else {\n\t\t\t\t\tresults.push(title);\n\t\t\t\t}\n\t\t\t});\n\t\t} else {\n\t\t\tsource(function(tiddler,title) {\n\t\t\t\tif(tiddler) {\n\t\t\t\t\tvar text = tiddler.getFieldString(fieldname);\n\t\t\t\t\tif(text !== null && text !== operator.operand) {\n\t\t\t\t\t\tresults.push(title);\n\t\t\t\t\t}\n\t\t\t\t} else {\n\t\t\t\t\tresults.push(title);\n\t\t\t\t}\n\t\t\t});\n\t\t}\n\t} else {\n\t\tif(operator.regexp) {\n\t\t\tsource(function(tiddler,title) {\n\t\t\t\tif(tiddler) {\n\t\t\t\t\tvar text = tiddler.getFieldString(fieldname);\n\t\t\t\t\tif(text !== null && !!operator.regexp.exec(text)) {\n\t\t\t\t\t\tresults.push(title);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t});\n\t\t} else {\n\t\t\tsource(function(tiddler,title) {\n\t\t\t\tif(tiddler) {\n\t\t\t\t\tvar text = tiddler.getFieldString(fieldname);\n\t\t\t\t\tif(text !== null && text === operator.operand) {\n\t\t\t\t\t\tresults.push(title);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t});\n\t\t}\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/field.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/fields.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/fields.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for returning the names of the fields on the selected tiddlers\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.fields = function(source,operator,options) {\n\tvar results = [];\n\tsource(function(tiddler,title) {\n\t\tif(tiddler) {\n\t\t\tfor(var fieldName in tiddler.fields) {\n\t\t\t\t$tw.utils.pushTop(results,fieldName);\n\t\t\t}\n\t\t}\n\t});\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/fields.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/get.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/get.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for replacing tiddler titles by the value of the field specified in the operand.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.get = function(source,operator,options) {\n\tvar results = [];\n\tsource(function(tiddler,title) {\n\t\tif(tiddler) {\n\t\t\tvar value = tiddler.getFieldString(operator.operand);\n\t\t\tif(value) {\n\t\t\t\tresults.push(value);\n\t\t\t}\n\t\t}\n\t});\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/get.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/getindex.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/getindex.js\ntype: application/javascript\nmodule-type: filteroperator\n\nreturns the value at a given index of datatiddlers\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.getindex = function(source,operator,options) {\n\tvar data,title,results = [];\n\tif(operator.operand){\n\t\tsource(function(tiddler,title) {\n\t\t\ttitle = tiddler ? tiddler.fields.title : title;\n\t\t\tdata = options.wiki.extractTiddlerDataItem(tiddler,operator.operand);\n\t\t\tif(data) {\n\t\t\t\tresults.push(data);\n\t\t\t}\n\t\t});\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/getindex.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/has.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/has.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for checking if a tiddler has the specified field\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.has = function(source,operator,options) {\n\tvar results = [];\n\tif(operator.prefix === \"!\") {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(!tiddler || (tiddler && (!$tw.utils.hop(tiddler.fields,operator.operand) || tiddler.fields[operator.operand] === \"\"))) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t} else {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(tiddler && $tw.utils.hop(tiddler.fields,operator.operand) && !(tiddler.fields[operator.operand] === \"\" || tiddler.fields[operator.operand].length === 0)) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/has.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/haschanged.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/haschanged.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator returns tiddlers from the list that have a non-zero changecount.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.haschanged = function(source,operator,options) {\n\tvar results = [];\n\tif(operator.prefix === \"!\") {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(options.wiki.getChangeCount(title) === 0) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t} else {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(options.wiki.getChangeCount(title) > 0) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/haschanged.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/indexes.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/indexes.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for returning the indexes of a data tiddler\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.indexes = function(source,operator,options) {\n\tvar results = [];\n\tsource(function(tiddler,title) {\n\t\tvar data = options.wiki.getTiddlerDataCached(title);\n\t\tif(data) {\n\t\t\t$tw.utils.pushTop(results,Object.keys(data));\n\t\t}\n\t});\n\tresults.sort();\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/indexes.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/is/current.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/is/current.js\ntype: application/javascript\nmodule-type: isfilteroperator\n\nFilter function for [is[current]]\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.current = function(source,prefix,options) {\n\tvar results = [],\n\t\tcurrTiddlerTitle = options.widget && options.widget.getVariable(\"currentTiddler\");\n\tif(prefix === \"!\") {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(title !== currTiddlerTitle) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t} else {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(title === currTiddlerTitle) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/is/current.js",
"type": "application/javascript",
"module-type": "isfilteroperator"
},
"$:/core/modules/filters/is/image.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/is/image.js\ntype: application/javascript\nmodule-type: isfilteroperator\n\nFilter function for [is[image]]\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.image = function(source,prefix,options) {\n\tvar results = [];\n\tif(prefix === \"!\") {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(!options.wiki.isImageTiddler(title)) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t} else {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(options.wiki.isImageTiddler(title)) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/is/image.js",
"type": "application/javascript",
"module-type": "isfilteroperator"
},
"$:/core/modules/filters/is/missing.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/is/missing.js\ntype: application/javascript\nmodule-type: isfilteroperator\n\nFilter function for [is[missing]]\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.missing = function(source,prefix,options) {\n\tvar results = [];\n\tif(prefix === \"!\") {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(options.wiki.tiddlerExists(title)) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t} else {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(!options.wiki.tiddlerExists(title)) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/is/missing.js",
"type": "application/javascript",
"module-type": "isfilteroperator"
},
"$:/core/modules/filters/is/orphan.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/is/orphan.js\ntype: application/javascript\nmodule-type: isfilteroperator\n\nFilter function for [is[orphan]]\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.orphan = function(source,prefix,options) {\n\tvar results = [],\n\t\torphanTitles = options.wiki.getOrphanTitles();\n\tif(prefix === \"!\") {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(orphanTitles.indexOf(title) === -1) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t} else {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(orphanTitles.indexOf(title) !== -1) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/is/orphan.js",
"type": "application/javascript",
"module-type": "isfilteroperator"
},
"$:/core/modules/filters/is/shadow.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/is/shadow.js\ntype: application/javascript\nmodule-type: isfilteroperator\n\nFilter function for [is[shadow]]\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.shadow = function(source,prefix,options) {\n\tvar results = [];\n\tif(prefix === \"!\") {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(!options.wiki.isShadowTiddler(title)) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t} else {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(options.wiki.isShadowTiddler(title)) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/is/shadow.js",
"type": "application/javascript",
"module-type": "isfilteroperator"
},
"$:/core/modules/filters/is/system.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/is/system.js\ntype: application/javascript\nmodule-type: isfilteroperator\n\nFilter function for [is[system]]\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.system = function(source,prefix,options) {\n\tvar results = [];\n\tif(prefix === \"!\") {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(!options.wiki.isSystemTiddler(title)) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t} else {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(options.wiki.isSystemTiddler(title)) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/is/system.js",
"type": "application/javascript",
"module-type": "isfilteroperator"
},
"$:/core/modules/filters/is/tag.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/is/tag.js\ntype: application/javascript\nmodule-type: isfilteroperator\n\nFilter function for [is[tag]]\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.tag = function(source,prefix,options) {\n\tvar results = [],\n\t\ttagMap = options.wiki.getTagMap();\n\tif(prefix === \"!\") {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(!$tw.utils.hop(tagMap,title)) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t} else {\n\t\tsource(function(tiddler,title) {\n\t\t\tif($tw.utils.hop(tagMap,title)) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/is/tag.js",
"type": "application/javascript",
"module-type": "isfilteroperator"
},
"$:/core/modules/filters/is/tiddler.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/is/tiddler.js\ntype: application/javascript\nmodule-type: isfilteroperator\n\nFilter function for [is[tiddler]]\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.tiddler = function(source,prefix,options) {\n\tvar results = [];\n\tif(prefix === \"!\") {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(!options.wiki.tiddlerExists(title)) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t} else {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(options.wiki.tiddlerExists(title)) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/is/tiddler.js",
"type": "application/javascript",
"module-type": "isfilteroperator"
},
"$:/core/modules/filters/is.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/is.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for checking tiddler properties\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar isFilterOperators;\n\nfunction getIsFilterOperators() {\n\tif(!isFilterOperators) {\n\t\tisFilterOperators = {};\n\t\t$tw.modules.applyMethods(\"isfilteroperator\",isFilterOperators);\n\t}\n\treturn isFilterOperators;\n}\n\n/*\nExport our filter function\n*/\nexports.is = function(source,operator,options) {\n\t// Dispatch to the correct isfilteroperator\n\tvar isFilterOperators = getIsFilterOperators();\n\tvar isFilterOperator = isFilterOperators[operator.operand];\n\tif(isFilterOperator) {\n\t\treturn isFilterOperator(source,operator.prefix,options);\n\t} else {\n\t\treturn [$tw.language.getString(\"Error/IsFilterOperator\")];\n\t}\n};\n\n})();\n",
"title": "$:/core/modules/filters/is.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/limit.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/limit.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for chopping the results to a specified maximum number of entries\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.limit = function(source,operator,options) {\n\tvar results = [];\n\t// Convert to an array\n\tsource(function(tiddler,title) {\n\t\tresults.push(title);\n\t});\n\t// Slice the array if necessary\n\tvar limit = Math.min(results.length,parseInt(operator.operand,10));\n\tif(operator.prefix === \"!\") {\n\t\tresults = results.slice(-limit);\n\t} else {\n\t\tresults = results.slice(0,limit);\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/limit.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/links.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/links.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for returning all the links from a tiddler\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.links = function(source,operator,options) {\n\tvar results = [];\n\tsource(function(tiddler,title) {\n\t\t$tw.utils.pushTop(results,options.wiki.getTiddlerLinks(title));\n\t});\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/links.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/list.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/list.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator returning the tiddlers whose title is listed in the operand tiddler\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.list = function(source,operator,options) {\n\tvar results = [],\n\t\ttr = $tw.utils.parseTextReference(operator.operand),\n\t\tcurrTiddlerTitle = options.widget && options.widget.getVariable(\"currentTiddler\"),\n\t\tlist = options.wiki.getTiddlerList(tr.title || currTiddlerTitle,tr.field,tr.index);\n\tif(operator.prefix === \"!\") {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(list.indexOf(title) === -1) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t} else {\n\t\tresults = list;\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/list.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/listed.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/listed.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator returning all tiddlers that have the selected tiddlers in a list\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.listed = function(source,operator,options) {\n\tvar field = operator.operand || \"list\",\n\t\tresults = [];\n\tsource(function(tiddler,title) {\n\t\t$tw.utils.pushTop(results,options.wiki.findListingsOfTiddler(title,field));\n\t});\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/listed.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/listops.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/listops.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operators for manipulating the current selection list\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nReverse list\n*/\nexports.reverse = function(source,operator,options) {\n\tvar results = [];\n\tsource(function(tiddler,title) {\n\t\tresults.unshift(title);\n\t});\n\treturn results;\n};\n\n/*\nFirst entry/entries in list\n*/\nexports.first = function(source,operator,options) {\n\tvar count = parseInt(operator.operand) || 1,\n\t\tresults = [];\n\tsource(function(tiddler,title) {\n\t\tresults.push(title);\n\t});\n\treturn results.slice(0,count);\n};\n\n/*\nLast entry/entries in list\n*/\nexports.last = function(source,operator,options) {\n\tvar count = parseInt(operator.operand) || 1,\n\t\tresults = [];\n\tsource(function(tiddler,title) {\n\t\tresults.push(title);\n\t});\n\treturn results.slice(-count);\n};\n\n/*\nAll but the first entry/entries of the list\n*/\nexports.rest = function(source,operator,options) {\n\tvar count = parseInt(operator.operand) || 1,\n\t\tresults = [];\n\tsource(function(tiddler,title) {\n\t\tresults.push(title);\n\t});\n\treturn results.slice(count);\n};\nexports.butfirst = exports.rest;\nexports.bf = exports.rest;\n\n/*\nAll but the last entry/entries of the list\n*/\nexports.butlast = function(source,operator,options) {\n\tvar count = parseInt(operator.operand) || 1,\n\t\tresults = [];\n\tsource(function(tiddler,title) {\n\t\tresults.push(title);\n\t});\n\treturn results.slice(0,-count);\n};\nexports.bl = exports.butlast;\n\n/*\nThe nth member of the list\n*/\nexports.nth = function(source,operator,options) {\n\tvar count = parseInt(operator.operand) || 1,\n\t\tresults = [];\n\tsource(function(tiddler,title) {\n\t\tresults.push(title);\n\t});\n\treturn results.slice(count - 1,count);\n};\n\n})();\n",
"title": "$:/core/modules/filters/listops.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/modules.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/modules.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for returning the titles of the modules of a given type in this wiki\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.modules = function(source,operator,options) {\n\tvar results = [];\n\tsource(function(tiddler,title) {\n\t\t$tw.utils.each($tw.modules.types[title],function(moduleInfo,moduleName) {\n\t\t\tresults.push(moduleName);\n\t\t});\n\t});\n\tresults.sort();\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/modules.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/moduletypes.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/moduletypes.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for returning the names of the module types in this wiki\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.moduletypes = function(source,operator,options) {\n\tvar results = [];\n\t$tw.utils.each($tw.modules.types,function(moduleInfo,type) {\n\t\tresults.push(type);\n\t});\n\tresults.sort();\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/moduletypes.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/next.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/next.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator returning the tiddler whose title occurs next in the list supplied in the operand tiddler\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.next = function(source,operator,options) {\n\tvar results = [],\n\t\tlist = options.wiki.getTiddlerList(operator.operand);\n\tsource(function(tiddler,title) {\n\t\tvar match = list.indexOf(title);\n\t\t// increment match and then test if result is in range\n\t\tmatch++;\n\t\tif(match > 0 && match < list.length) {\n\t\t\tresults.push(list[match]);\n\t\t}\n\t});\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/next.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/plugintiddlers.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/plugintiddlers.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for returning the titles of the shadow tiddlers within a plugin\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.plugintiddlers = function(source,operator,options) {\n\tvar results = [];\n\tsource(function(tiddler,title) {\n\t\tvar pluginInfo = options.wiki.getPluginInfo(title) || options.wiki.getTiddlerDataCached(title,{tiddlers:[]});\n\t\tif(pluginInfo && pluginInfo.tiddlers) {\n\t\t\t$tw.utils.each(pluginInfo.tiddlers,function(fields,title) {\n\t\t\t\tresults.push(title);\n\t\t\t});\n\t\t}\n\t});\n\tresults.sort();\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/plugintiddlers.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/prefix.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/prefix.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for checking if a title starts with a prefix\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.prefix = function(source,operator,options) {\n\tvar results = [];\n\tif(operator.prefix === \"!\") {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(title.substr(0,operator.operand.length) !== operator.operand) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t} else {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(title.substr(0,operator.operand.length) === operator.operand) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/prefix.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/previous.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/previous.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator returning the tiddler whose title occurs immediately prior in the list supplied in the operand tiddler\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.previous = function(source,operator,options) {\n\tvar results = [],\n\t\tlist = options.wiki.getTiddlerList(operator.operand);\n\tsource(function(tiddler,title) {\n\t\tvar match = list.indexOf(title);\n\t\t// increment match and then test if result is in range\n\t\tmatch--;\n\t\tif(match >= 0) {\n\t\t\tresults.push(list[match]);\n\t\t}\n\t});\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/previous.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/regexp.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/regexp.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for regexp matching\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.regexp = function(source,operator,options) {\n\tvar results = [],\n\t\tfieldname = (operator.suffix || \"title\").toLowerCase(),\n\t\tregexpString, regexp, flags = \"\", match,\n\t\tgetFieldString = function(tiddler,title) {\n\t\t\tif(tiddler) {\n\t\t\t\treturn tiddler.getFieldString(fieldname);\n\t\t\t} else if(fieldname === \"title\") {\n\t\t\t\treturn title;\n\t\t\t} else {\n\t\t\t\treturn null;\n\t\t\t}\n\t\t};\n\t// Process flags and construct regexp\n\tregexpString = operator.operand;\n\tmatch = /^\\(\\?([gim]+)\\)/.exec(regexpString);\n\tif(match) {\n\t\tflags = match[1];\n\t\tregexpString = regexpString.substr(match[0].length);\n\t} else {\n\t\tmatch = /\\(\\?([gim]+)\\)$/.exec(regexpString);\n\t\tif(match) {\n\t\t\tflags = match[1];\n\t\t\tregexpString = regexpString.substr(0,regexpString.length - match[0].length);\n\t\t}\n\t}\n\ttry {\n\t\tregexp = new RegExp(regexpString,flags);\n\t} catch(e) {\n\t\treturn [\"\" + e];\n\t}\n\t// Process the incoming tiddlers\n\tif(operator.prefix === \"!\") {\n\t\tsource(function(tiddler,title) {\n\t\t\tvar text = getFieldString(tiddler,title);\n\t\t\tif(text !== null) {\n\t\t\t\tif(!regexp.exec(text)) {\n\t\t\t\t\tresults.push(title);\n\t\t\t\t}\n\t\t\t}\n\t\t});\n\t} else {\n\t\tsource(function(tiddler,title) {\n\t\t\tvar text = getFieldString(tiddler,title);\n\t\t\tif(text !== null) {\n\t\t\t\tif(!!regexp.exec(text)) {\n\t\t\t\t\tresults.push(title);\n\t\t\t\t}\n\t\t\t}\n\t\t});\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/regexp.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/removeprefix.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/removeprefix.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for removing a prefix from each title in the list. Titles that do not start with the prefix are removed.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.removeprefix = function(source,operator,options) {\n\tvar results = [];\n\tsource(function(tiddler,title) {\n\t\tif(title.substr(0,operator.operand.length) === operator.operand) {\n\t\t\tresults.push(title.substr(operator.operand.length));\n\t\t}\n\t});\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/removeprefix.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/removesuffix.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/removesuffix.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for removing a suffix from each title in the list. Titles that do not end with the suffix are removed.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.removesuffix = function(source,operator,options) {\n\tvar results = [];\n\tsource(function(tiddler,title) {\n\t\tif(title.substr(-operator.operand.length) === operator.operand) {\n\t\t\tresults.push(title.substr(0,title.length - operator.operand.length));\n\t\t}\n\t});\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/removesuffix.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/sameday.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/sameday.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator that selects tiddlers with a modified date field on the same day as the provided value.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.sameday = function(source,operator,options) {\n\tvar results = [],\n\t\tfieldName = operator.suffix || \"modified\",\n\t\ttargetDate = (new Date($tw.utils.parseDate(operator.operand))).setHours(0,0,0,0);\n\t// Function to convert a date/time to a date integer\n\tvar isSameDay = function(dateField) {\n\t\t\treturn (new Date(dateField)).setHours(0,0,0,0) === targetDate;\n\t\t};\n\tsource(function(tiddler,title) {\n\t\tif(tiddler && tiddler.fields[fieldName]) {\n\t\t\tif(isSameDay($tw.utils.parseDate(tiddler.fields[fieldName]))) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t}\n\t});\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/sameday.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/search.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/search.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for searching for the text in the operand tiddler\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.search = function(source,operator,options) {\n\tvar invert = operator.prefix === \"!\";\n\tif(operator.suffix) {\n\t\treturn options.wiki.search(operator.operand,{\n\t\t\tsource: source,\n\t\t\tinvert: invert,\n\t\t\tfield: operator.suffix\n\t\t});\n\t} else {\n\t\treturn options.wiki.search(operator.operand,{\n\t\t\tsource: source,\n\t\t\tinvert: invert\n\t\t});\n\t}\n};\n\n})();\n",
"title": "$:/core/modules/filters/search.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/shadowsource.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/shadowsource.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for returning the source plugins for shadow tiddlers\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.shadowsource = function(source,operator,options) {\n\tvar results = [];\n\tsource(function(tiddler,title) {\n\t\tvar source = options.wiki.getShadowSource(title);\n\t\tif(source) {\n\t\t\t$tw.utils.pushTop(results,source);\n\t\t}\n\t});\n\tresults.sort();\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/shadowsource.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/sort.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/sort.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for sorting\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.sort = function(source,operator,options) {\n\tvar results = prepare_results(source);\n\toptions.wiki.sortTiddlers(results,operator.operand || \"title\",operator.prefix === \"!\",false,false);\n\treturn results;\n};\n\nexports.nsort = function(source,operator,options) {\n\tvar results = prepare_results(source);\n\toptions.wiki.sortTiddlers(results,operator.operand || \"title\",operator.prefix === \"!\",false,true);\n\treturn results;\n};\n\nexports.sortcs = function(source,operator,options) {\n\tvar results = prepare_results(source);\n\toptions.wiki.sortTiddlers(results,operator.operand || \"title\",operator.prefix === \"!\",true,false);\n\treturn results;\n};\n\nexports.nsortcs = function(source,operator,options) {\n\tvar results = prepare_results(source);\n\toptions.wiki.sortTiddlers(results,operator.operand || \"title\",operator.prefix === \"!\",true,true);\n\treturn results;\n};\n\nvar prepare_results = function (source) {\n\tvar results = [];\n\tsource(function(tiddler,title) {\n\t\tresults.push(title);\n\t});\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/sort.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/splitbefore.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/splitbefore.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator that splits each result on the first occurance of the specified separator and returns the unique values.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.splitbefore = function(source,operator,options) {\n\tvar results = [];\n\tsource(function(tiddler,title) {\n\t\tvar parts = title.split(operator.operand);\n\t\tif(parts.length === 1) {\n\t\t\t$tw.utils.pushTop(results,parts[0]);\n\t\t} else {\n\t\t\t$tw.utils.pushTop(results,parts[0] + operator.operand);\n\t\t}\n\t});\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/splitbefore.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/storyviews.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/storyviews.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for returning the names of the story views in this wiki\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.storyviews = function(source,operator,options) {\n\tvar results = [],\n\t\tstoryviews = {};\n\t$tw.modules.applyMethods(\"storyview\",storyviews);\n\t$tw.utils.each(storyviews,function(info,name) {\n\t\tresults.push(name);\n\t});\n\tresults.sort();\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/storyviews.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/suffix.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/suffix.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for checking if a title ends with a suffix\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.suffix = function(source,operator,options) {\n\tvar results = [];\n\tif(operator.prefix === \"!\") {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(title.substr(-operator.operand.length) !== operator.operand) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t} else {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(title.substr(-operator.operand.length) === operator.operand) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/suffix.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/tag.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/tag.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for checking for the presence of a tag\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.tag = function(source,operator,options) {\n\tvar results = [];\n\tif(operator.prefix === \"!\") {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(tiddler && !tiddler.hasTag(operator.operand)) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t} else {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(tiddler && tiddler.hasTag(operator.operand)) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t\tresults = options.wiki.sortByList(results,operator.operand);\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/tag.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/tagging.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/tagging.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator returning all tiddlers that are tagged with the selected tiddlers\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.tagging = function(source,operator,options) {\n\tvar results = [];\n\tsource(function(tiddler,title) {\n\t\t$tw.utils.pushTop(results,options.wiki.getTiddlersWithTag(title));\n\t});\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/tagging.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/tags.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/tags.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator returning all the tags of the selected tiddlers\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.tags = function(source,operator,options) {\n\tvar tags = {};\n\tsource(function(tiddler,title) {\n\t\tvar t, length;\n\t\tif(tiddler && tiddler.fields.tags) {\n\t\t\tfor(t=0, length=tiddler.fields.tags.length; t<length; t++) {\n\t\t\t\ttags[tiddler.fields.tags[t]] = true;\n\t\t\t}\n\t\t}\n\t});\n\treturn Object.keys(tags);\n};\n\n})();\n",
"title": "$:/core/modules/filters/tags.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/title.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/title.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for comparing title fields for equality\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.title = function(source,operator,options) {\n\tvar results = [];\n\tif(operator.prefix === \"!\") {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(tiddler && tiddler.fields.title !== operator.operand) {\n\t\t\t\tresults.push(title);\n\t\t\t}\n\t\t});\n\t} else {\n\t\tresults.push(operator.operand);\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/title.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/untagged.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/untagged.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator returning all the selected tiddlers that are untagged\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.untagged = function(source,operator,options) {\n\tvar results = [];\n\tif(operator.prefix === \"!\") {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(tiddler && $tw.utils.isArray(tiddler.fields.tags) && tiddler.fields.tags.length > 0) {\n\t\t\t\t$tw.utils.pushTop(results,title);\n\t\t\t}\n\t\t});\n\t} else {\n\t\tsource(function(tiddler,title) {\n\t\t\tif(!tiddler || !tiddler.hasField(\"tags\") || ($tw.utils.isArray(tiddler.fields.tags) && tiddler.fields.tags.length === 0)) {\n\t\t\t\t$tw.utils.pushTop(results,title);\n\t\t\t}\n\t\t});\n\t}\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/untagged.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/wikiparserrules.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/wikiparserrules.js\ntype: application/javascript\nmodule-type: filteroperator\n\nFilter operator for returning the names of the wiki parser rules in this wiki\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nExport our filter function\n*/\nexports.wikiparserrules = function(source,operator,options) {\n\tvar results = [];\n\t$tw.utils.each($tw.modules.types.wikirule,function(mod) {\n\t\tvar exp = mod.exports;\n\t\tif(exp.types[operator.operand]) {\n\t\t\tresults.push(exp.name);\n\t\t}\n\t});\n\tresults.sort();\n\treturn results;\n};\n\n})();\n",
"title": "$:/core/modules/filters/wikiparserrules.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters/x-listops.js": {
"text": "/*\\\ntitle: $:/core/modules/filters/x-listops.js\ntype: application/javascript\nmodule-type: filteroperator\n\nExtended filter operators to manipulate the current list.\n\n\\*/\n(function () {\n\n /*jslint node: true, browser: true */\n /*global $tw: false */\n \"use strict\";\n\n /*\n Fetch titles from the current list\n */\n var prepare_results = function (source) {\n var results = [];\n source(function (tiddler, title) {\n results.push(title);\n });\n return results;\n };\n\n /*\n Moves a number of items from the tail of the current list before the item named in the operand\n */\n exports.putbefore = function (source, operator) {\n var results = prepare_results(source),\n index = results.indexOf(operator.operand),\n count = parseInt(operator.suffix) || 1;\n return (index === -1) ?\n results.slice(0, -1) :\n results.slice(0, index).concat(results.slice(-count)).concat(results.slice(index, -count));\n };\n\n /*\n Moves a number of items from the tail of the current list after the item named in the operand\n */\n exports.putafter = function (source, operator) {\n var results = prepare_results(source),\n index = results.indexOf(operator.operand),\n count = parseInt(operator.suffix) || 1;\n return (index === -1) ?\n results.slice(0, -1) :\n results.slice(0, index + 1).concat(results.slice(-count)).concat(results.slice(index + 1, -count));\n };\n\n /*\n Replaces the item named in the operand with a number of items from the tail of the current list\n */\n exports.replace = function (source, operator) {\n var results = prepare_results(source),\n index = results.indexOf(operator.operand),\n count = parseInt(operator.suffix) || 1;\n return (index === -1) ?\n results.slice(0, -count) :\n results.slice(0, index).concat(results.slice(-count)).concat(results.slice(index + 1, -count));\n };\n\n /*\n Moves a number of items from the tail of the current list to the head of the list\n */\n exports.putfirst = function (source, operator) {\n var results = prepare_results(source),\n count = parseInt(operator.suffix) || 1;\n return results.slice(-count).concat(results.slice(0, -count));\n };\n\n /*\n Moves a number of items from the head of the current list to the tail of the list\n */\n exports.putlast = function (source, operator) {\n var results = prepare_results(source),\n count = parseInt(operator.suffix) || 1;\n return results.slice(count).concat(results.slice(0, count));\n };\n\n /*\n Moves the item named in the operand a number of places forward or backward in the list\n */\n exports.move = function (source, operator) {\n var results = prepare_results(source),\n index = results.indexOf(operator.operand),\n count = parseInt(operator.suffix) || 1,\n marker = results.splice(index, 1);\n return results.slice(0, index + count).concat(marker).concat(results.slice(index + count));\n };\n\n /*\n Returns the items from the current list that are after the item named in the operand\n */\n exports.allafter = function (source, operator) {\n var results = prepare_results(source),\n index = results.indexOf(operator.operand);\n return (index === -1 || index > (results.length - 2)) ? [] :\n (operator.suffix) ? results.slice(index) :\n results.slice(index + 1);\n };\n\n /*\n Returns the items from the current list that are before the item named in the operand\n */\n exports.allbefore = function (source, operator) {\n var results = prepare_results(source),\n index = results.indexOf(operator.operand);\n return (index <= 0) ? [] :\n (operator.suffix) ? 
results.slice(0, index + 1) :\n results.slice(0, index);\n };\n\n /*\n Appends the items listed in the operand array to the tail of the current list\n */\n exports.append = function (source, operator) {\n var append = $tw.utils.parseStringArray(operator.operand, \"true\"),\n results = prepare_results(source),\n count = parseInt(operator.suffix) || append.length;\n return (append.length === 0) ? results :\n (operator.prefix) ? results.concat(append.slice(-count)) :\n results.concat(append.slice(0, count));\n };\n\n /*\n Prepends the items listed in the operand array to the head of the current list\n */\n exports.prepend = function (source, operator) {\n var prepend = $tw.utils.parseStringArray(operator.operand, \"true\"),\n results = prepare_results(source),\n count = parseInt(operator.suffix) || prepend.length;\n return (prepend.length === 0) ? results :\n (operator.prefix) ? prepend.slice(-count).concat(results) :\n prepend.slice(0, count).concat(results);\n };\n\n /*\n Returns all items from the current list except the items listed in the operand array\n */\n exports.remove = function (source, operator) {\n var array = $tw.utils.parseStringArray(operator.operand, \"true\"),\n results = prepare_results(source),\n count = parseInt(operator.suffix) || array.length,\n p,\n len,\n index;\n len = array.length - 1;\n for (p = 0; p < count; ++p) {\n if (operator.prefix) {\n index = results.indexOf(array[len - p]);\n } else {\n index = results.indexOf(array[p]);\n }\n if (index !== -1) {\n results.splice(index, 1);\n }\n }\n return results;\n };\n\n /*\n Returns all items from the current list sorted in the order of the items in the operand array\n */\n exports.sortby = function (source, operator) {\n var results = prepare_results(source);\n if (!results || results.length < 2) {\n return results;\n }\n var lookup = $tw.utils.parseStringArray(operator.operand, \"true\");\n results.sort(function (a, b) {\n return lookup.indexOf(a) - lookup.indexOf(b);\n });\n return results;\n };\n\n /*\n Removes all duplicate items from the current list\n */\n exports.unique = function (source, operator) {\n var results = prepare_results(source);\n var set = results.reduce(function (a, b) {\n if (a.indexOf(b) < 0) {\n a.push(b);\n }\n return a;\n }, []);\n return set;\n };\n})();\n",
"title": "$:/core/modules/filters/x-listops.js",
"type": "application/javascript",
"module-type": "filteroperator"
},
"$:/core/modules/filters.js": {
"text": "/*\\\ntitle: $:/core/modules/filters.js\ntype: application/javascript\nmodule-type: wikimethod\n\nAdds tiddler filtering methods to the $tw.Wiki object.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nParses an operation (i.e. a run) within a filter string\n\toperators: Array of array of operator nodes into which results should be inserted\n\tfilterString: filter string\n\tp: start position within the string\nReturns the new start position, after the parsed operation\n*/\nfunction parseFilterOperation(operators,filterString,p) {\n\tvar operator, operand, bracketPos, curlyBracketPos;\n\t// Skip the starting square bracket\n\tif(filterString.charAt(p++) !== \"[\") {\n\t\tthrow \"Missing [ in filter expression\";\n\t}\n\t// Process each operator in turn\n\tdo {\n\t\toperator = {};\n\t\t// Check for an operator prefix\n\t\tif(filterString.charAt(p) === \"!\") {\n\t\t\toperator.prefix = filterString.charAt(p++);\n\t\t}\n\t\t// Get the operator name\n\t\tvar nextBracketPos = filterString.substring(p).search(/[\\[\\{<\\/]/);\n\t\tif(nextBracketPos === -1) {\n\t\t\tthrow \"Missing [ in filter expression\";\n\t\t}\n\t\tnextBracketPos += p;\n\t\tvar bracket = filterString.charAt(nextBracketPos);\n\t\toperator.operator = filterString.substring(p,nextBracketPos);\n\t\t\n\t\t// Any suffix?\n\t\tvar colon = operator.operator.indexOf(':');\n\t\tif(colon > -1) {\n\t\t\toperator.suffix = operator.operator.substring(colon + 1);\n\t\t\toperator.operator = operator.operator.substring(0,colon) || \"field\";\n\t\t}\n\t\t// Empty operator means: title\n\t\telse if(operator.operator === \"\") {\n\t\t\toperator.operator = \"title\";\n\t\t}\n\n\t\tp = nextBracketPos + 1;\n\t\tswitch (bracket) {\n\t\t\tcase \"{\": // Curly brackets\n\t\t\t\toperator.indirect = true;\n\t\t\t\tnextBracketPos = filterString.indexOf(\"}\",p);\n\t\t\t\tbreak;\n\t\t\tcase \"[\": // Square brackets\n\t\t\t\tnextBracketPos = filterString.indexOf(\"]\",p);\n\t\t\t\tbreak;\n\t\t\tcase \"<\": // Angle brackets\n\t\t\t\toperator.variable = true;\n\t\t\t\tnextBracketPos = filterString.indexOf(\">\",p);\n\t\t\t\tbreak;\n\t\t\tcase \"/\": // regexp brackets\n\t\t\t\tvar rex = /^((?:[^\\\\\\/]*|\\\\.)*)\\/(?:\\(([mygi]+)\\))?/g,\n\t\t\t\t\trexMatch = rex.exec(filterString.substring(p));\n\t\t\t\tif(rexMatch) {\n\t\t\t\t\toperator.regexp = new RegExp(rexMatch[1], rexMatch[2]);\n// DEPRECATION WARNING\nconsole.log(\"WARNING: Filter\",operator.operator,\"has a deprecated regexp operand\",operator.regexp);\n\t\t\t\t\tnextBracketPos = p + rex.lastIndex - 1;\n\t\t\t\t}\n\t\t\t\telse {\n\t\t\t\t\tthrow \"Unterminated regular expression in filter expression\";\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t}\n\t\t\n\t\tif(nextBracketPos === -1) {\n\t\t\tthrow \"Missing closing bracket in filter expression\";\n\t\t}\n\t\tif(!operator.regexp) {\n\t\t\toperator.operand = filterString.substring(p,nextBracketPos);\n\t\t}\n\t\tp = nextBracketPos + 1;\n\t\t\t\n\t\t// Push this operator\n\t\toperators.push(operator);\n\t} while(filterString.charAt(p) !== \"]\");\n\t// Skip the ending square bracket\n\tif(filterString.charAt(p++) !== \"]\") {\n\t\tthrow \"Missing ] in filter expression\";\n\t}\n\t// Return the parsing position\n\treturn p;\n}\n\n/*\nParse a filter string\n*/\nexports.parseFilter = function(filterString) {\n\tfilterString = filterString || \"\";\n\tvar results = [], // Array of arrays of operator nodes {operator:,operand:}\n\t\tp = 0, // Current position in the filter string\n\t\tmatch;\n\tvar 
whitespaceRegExp = /(\\s+)/mg,\n\t\toperandRegExp = /((?:\\+|\\-)?)(?:(\\[)|(?:\"([^\"]*)\")|(?:'([^']*)')|([^\\s\\[\\]]+))/mg;\n\twhile(p < filterString.length) {\n\t\t// Skip any whitespace\n\t\twhitespaceRegExp.lastIndex = p;\n\t\tmatch = whitespaceRegExp.exec(filterString);\n\t\tif(match && match.index === p) {\n\t\t\tp = p + match[0].length;\n\t\t}\n\t\t// Match the start of the operation\n\t\tif(p < filterString.length) {\n\t\t\toperandRegExp.lastIndex = p;\n\t\t\tmatch = operandRegExp.exec(filterString);\n\t\t\tif(!match || match.index !== p) {\n\t\t\t\tthrow $tw.language.getString(\"Error/FilterSyntax\");\n\t\t\t}\n\t\t\tvar operation = {\n\t\t\t\tprefix: \"\",\n\t\t\t\toperators: []\n\t\t\t};\n\t\t\tif(match[1]) {\n\t\t\t\toperation.prefix = match[1];\n\t\t\t\tp++;\n\t\t\t}\n\t\t\tif(match[2]) { // Opening square bracket\n\t\t\t\tp = parseFilterOperation(operation.operators,filterString,p);\n\t\t\t} else {\n\t\t\t\tp = match.index + match[0].length;\n\t\t\t}\n\t\t\tif(match[3] || match[4] || match[5]) { // Double quoted string, single quoted string or unquoted title\n\t\t\t\toperation.operators.push(\n\t\t\t\t\t{operator: \"title\", operand: match[3] || match[4] || match[5]}\n\t\t\t\t);\n\t\t\t}\n\t\t\tresults.push(operation);\n\t\t}\n\t}\n\treturn results;\n};\n\nexports.getFilterOperators = function() {\n\tif(!this.filterOperators) {\n\t\t$tw.Wiki.prototype.filterOperators = {};\n\t\t$tw.modules.applyMethods(\"filteroperator\",this.filterOperators);\n\t}\n\treturn this.filterOperators;\n};\n\nexports.filterTiddlers = function(filterString,widget,source) {\n\tvar fn = this.compileFilter(filterString);\n\treturn fn.call(this,source,widget);\n};\n\n/*\nCompile a filter into a function with the signature fn(source,widget) where:\nsource: an iterator function for the source tiddlers, called source(iterator), where iterator is called as iterator(tiddler,title)\nwidget: an optional widget node for retrieving the current tiddler etc.\n*/\nexports.compileFilter = function(filterString) {\n\tvar filterParseTree;\n\ttry {\n\t\tfilterParseTree = this.parseFilter(filterString);\n\t} catch(e) {\n\t\treturn function(source,widget) {\n\t\t\treturn [$tw.language.getString(\"Error/Filter\") + \": \" + e];\n\t\t};\n\t}\n\t// Get the hashmap of filter operator functions\n\tvar filterOperators = this.getFilterOperators();\n\t// Assemble array of functions, one for each operation\n\tvar operationFunctions = [];\n\t// Step through the operations\n\tvar self = this;\n\t$tw.utils.each(filterParseTree,function(operation) {\n\t\t// Create a function for the chain of operators in the operation\n\t\tvar operationSubFunction = function(source,widget) {\n\t\t\tvar accumulator = source,\n\t\t\t\tresults = [],\n\t\t\t\tcurrTiddlerTitle = widget && widget.getVariable(\"currentTiddler\");\n\t\t\t$tw.utils.each(operation.operators,function(operator) {\n\t\t\t\tvar operand = operator.operand,\n\t\t\t\t\toperatorFunction;\n\t\t\t\tif(!operator.operator) {\n\t\t\t\t\toperatorFunction = filterOperators.title;\n\t\t\t\t} else if(!filterOperators[operator.operator]) {\n\t\t\t\t\toperatorFunction = filterOperators.field;\n\t\t\t\t} else {\n\t\t\t\t\toperatorFunction = filterOperators[operator.operator];\n\t\t\t\t}\n\t\t\t\tif(operator.indirect) {\n\t\t\t\t\toperand = self.getTextReference(operator.operand,\"\",currTiddlerTitle);\n\t\t\t\t}\n\t\t\t\tif(operator.variable) {\n\t\t\t\t\toperand = widget.getVariable(operator.operand,{defaultValue: \"\"});\n\t\t\t\t}\n\t\t\t\t// Invoke the appropriate filteroperator 
module\n\t\t\t\tresults = operatorFunction(accumulator,{\n\t\t\t\t\t\t\toperator: operator.operator,\n\t\t\t\t\t\t\toperand: operand,\n\t\t\t\t\t\t\tprefix: operator.prefix,\n\t\t\t\t\t\t\tsuffix: operator.suffix,\n\t\t\t\t\t\t\tregexp: operator.regexp\n\t\t\t\t\t\t},{\n\t\t\t\t\t\t\twiki: self,\n\t\t\t\t\t\t\twidget: widget\n\t\t\t\t\t\t});\n\t\t\t\tif($tw.utils.isArray(results)) {\n\t\t\t\t\taccumulator = self.makeTiddlerIterator(results);\n\t\t\t\t} else {\n\t\t\t\t\taccumulator = results;\n\t\t\t\t}\n\t\t\t});\n\t\t\tif($tw.utils.isArray(results)) {\n\t\t\t\treturn results;\n\t\t\t} else {\n\t\t\t\tvar resultArray = [];\n\t\t\t\tresults(function(tiddler,title) {\n\t\t\t\t\tresultArray.push(title);\n\t\t\t\t});\n\t\t\t\treturn resultArray;\n\t\t\t}\n\t\t};\n\t\t// Wrap the operator functions in a wrapper function that depends on the prefix\n\t\toperationFunctions.push((function() {\n\t\t\tswitch(operation.prefix || \"\") {\n\t\t\t\tcase \"\": // No prefix means that the operation is unioned into the result\n\t\t\t\t\treturn function(results,source,widget) {\n\t\t\t\t\t\t$tw.utils.pushTop(results,operationSubFunction(source,widget));\n\t\t\t\t\t};\n\t\t\t\tcase \"-\": // The results of this operation are removed from the main result\n\t\t\t\t\treturn function(results,source,widget) {\n\t\t\t\t\t\t$tw.utils.removeArrayEntries(results,operationSubFunction(source,widget));\n\t\t\t\t\t};\n\t\t\t\tcase \"+\": // This operation is applied to the main results so far\n\t\t\t\t\treturn function(results,source,widget) {\n\t\t\t\t\t\t// This replaces all the elements of the array, but keeps the actual array so that references to it are preserved\n\t\t\t\t\t\tsource = self.makeTiddlerIterator(results);\n\t\t\t\t\t\tresults.splice(0,results.length);\n\t\t\t\t\t\t$tw.utils.pushTop(results,operationSubFunction(source,widget));\n\t\t\t\t\t};\n\t\t\t}\n\t\t})());\n\t});\n\t// Return a function that applies the operations to a source iterator of tiddler titles\n\treturn $tw.perf.measure(\"filter\",function filterFunction(source,widget) {\n\t\tif(!source) {\n\t\t\tsource = self.each;\n\t\t} else if(typeof source === \"object\") { // Array or hashmap\n\t\t\tsource = self.makeTiddlerIterator(source);\n\t\t}\n\t\tvar results = [];\n\t\t$tw.utils.each(operationFunctions,function(operationFunction) {\n\t\t\toperationFunction(results,source,widget);\n\t\t});\n\t\treturn results;\n\t});\n};\n\n})();\n",
"title": "$:/core/modules/filters.js",
"type": "application/javascript",
"module-type": "wikimethod"
},
"$:/core/modules/info/platform.js": {
"text": "/*\\\ntitle: $:/core/modules/info/platform.js\ntype: application/javascript\nmodule-type: info\n\nInitialise basic platform $:/info/ tiddlers\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.getInfoTiddlerFields = function() {\n\tvar mapBoolean = function(value) {return value ? \"yes\" : \"no\";},\n\t\tinfoTiddlerFields = [];\n\t// Basics\n\tinfoTiddlerFields.push({title: \"$:/info/browser\", text: mapBoolean(!!$tw.browser)});\n\tinfoTiddlerFields.push({title: \"$:/info/node\", text: mapBoolean(!!$tw.node)});\n\treturn infoTiddlerFields;\n};\n\n})();\n",
"title": "$:/core/modules/info/platform.js",
"type": "application/javascript",
"module-type": "info"
},
"$:/core/modules/keyboard.js": {
"text": "/*\\\ntitle: $:/core/modules/keyboard.js\ntype: application/javascript\nmodule-type: global\n\nKeyboard handling utilities\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar namedKeys = {\n\t\"cancel\": 3,\n\t\"help\": 6,\n\t\"backspace\": 8,\n\t\"tab\": 9,\n\t\"clear\": 12,\n\t\"return\": 13,\n\t\"enter\": 13,\n\t\"pause\": 19,\n\t\"escape\": 27,\n\t\"space\": 32,\n\t\"page_up\": 33,\n\t\"page_down\": 34,\n\t\"end\": 35,\n\t\"home\": 36,\n\t\"left\": 37,\n\t\"up\": 38,\n\t\"right\": 39,\n\t\"down\": 40,\n\t\"printscreen\": 44,\n\t\"insert\": 45,\n\t\"delete\": 46,\n\t\"0\": 48,\n\t\"1\": 49,\n\t\"2\": 50,\n\t\"3\": 51,\n\t\"4\": 52,\n\t\"5\": 53,\n\t\"6\": 54,\n\t\"7\": 55,\n\t\"8\": 56,\n\t\"9\": 57,\n\t\"firefoxsemicolon\": 59,\n\t\"firefoxequals\": 61,\n\t\"a\": 65,\n\t\"b\": 66,\n\t\"c\": 67,\n\t\"d\": 68,\n\t\"e\": 69,\n\t\"f\": 70,\n\t\"g\": 71,\n\t\"h\": 72,\n\t\"i\": 73,\n\t\"j\": 74,\n\t\"k\": 75,\n\t\"l\": 76,\n\t\"m\": 77,\n\t\"n\": 78,\n\t\"o\": 79,\n\t\"p\": 80,\n\t\"q\": 81,\n\t\"r\": 82,\n\t\"s\": 83,\n\t\"t\": 84,\n\t\"u\": 85,\n\t\"v\": 86,\n\t\"w\": 87,\n\t\"x\": 88,\n\t\"y\": 89,\n\t\"z\": 90,\n\t\"numpad0\": 96,\n\t\"numpad1\": 97,\n\t\"numpad2\": 98,\n\t\"numpad3\": 99,\n\t\"numpad4\": 100,\n\t\"numpad5\": 101,\n\t\"numpad6\": 102,\n\t\"numpad7\": 103,\n\t\"numpad8\": 104,\n\t\"numpad9\": 105,\n\t\"multiply\": 106,\n\t\"add\": 107,\n\t\"separator\": 108,\n\t\"subtract\": 109,\n\t\"decimal\": 110,\n\t\"divide\": 111,\n\t\"f1\": 112,\n\t\"f2\": 113,\n\t\"f3\": 114,\n\t\"f4\": 115,\n\t\"f5\": 116,\n\t\"f6\": 117,\n\t\"f7\": 118,\n\t\"f8\": 119,\n\t\"f9\": 120,\n\t\"f10\": 121,\n\t\"f11\": 122,\n\t\"f12\": 123,\n\t\"f13\": 124,\n\t\"f14\": 125,\n\t\"f15\": 126,\n\t\"f16\": 127,\n\t\"f17\": 128,\n\t\"f18\": 129,\n\t\"f19\": 130,\n\t\"f20\": 131,\n\t\"f21\": 132,\n\t\"f22\": 133,\n\t\"f23\": 134,\n\t\"f24\": 135,\n\t\"firefoxminus\": 173,\n\t\"semicolon\": 186,\n\t\"equals\": 187,\n\t\"comma\": 188,\n\t\"dash\": 189,\n\t\"period\": 190,\n\t\"slash\": 191,\n\t\"backquote\": 192,\n\t\"openbracket\": 219,\n\t\"backslash\": 220,\n\t\"closebracket\": 221,\n\t\"quote\": 222\n};\n\nfunction KeyboardManager(options) {\n\tvar self = this;\n\toptions = options || \"\";\n\t// Save the named key hashmap\n\tthis.namedKeys = namedKeys;\n\t// Create a reverse mapping of code to keyname\n\tthis.keyNames = [];\n\t$tw.utils.each(namedKeys,function(keyCode,name) {\n\t\tself.keyNames[keyCode] = name.substr(0,1).toUpperCase() + name.substr(1);\n\t});\n\t// Save the platform-specific name of the \"meta\" key\n\tthis.metaKeyName = $tw.platform.isMac ? 
\"cmd-\" : \"win-\";\n}\n\n/*\nReturn an array of keycodes for the modifier keys ctrl, shift, alt, meta\n*/\nKeyboardManager.prototype.getModifierKeys = function() {\n\treturn [\n\t\t16, // Shift\n\t\t17, // Ctrl\n\t\t18, // Alt\n\t\t20, // CAPS LOCK\n\t\t91, // Meta (left)\n\t\t93, // Meta (right)\n\t\t224 // Meta (Firefox)\n\t]\n};\n\n/*\nParses a key descriptor into the structure:\n{\n\tkeyCode: numeric keycode\n\tshiftKey: boolean\n\taltKey: boolean\n\tctrlKey: boolean\n\tmetaKey: boolean\n}\nKey descriptors have the following format:\n\tctrl+enter\n\tctrl+shift+alt+A\n*/\nKeyboardManager.prototype.parseKeyDescriptor = function(keyDescriptor) {\n\tvar components = keyDescriptor.split(/\\+|\\-/),\n\t\tinfo = {\n\t\t\tkeyCode: 0,\n\t\t\tshiftKey: false,\n\t\t\taltKey: false,\n\t\t\tctrlKey: false,\n\t\t\tmetaKey: false\n\t\t};\n\tfor(var t=0; t<components.length; t++) {\n\t\tvar s = components[t].toLowerCase(),\n\t\t\tc = s.charCodeAt(0);\n\t\t// Look for modifier keys\n\t\tif(s === \"ctrl\") {\n\t\t\tinfo.ctrlKey = true;\n\t\t} else if(s === \"shift\") {\n\t\t\tinfo.shiftKey = true;\n\t\t} else if(s === \"alt\") {\n\t\t\tinfo.altKey = true;\n\t\t} else if(s === \"meta\" || s === \"cmd\" || s === \"win\") {\n\t\t\tinfo.metaKey = true;\n\t\t}\n\t\t// Replace named keys with their code\n\t\tif(this.namedKeys[s]) {\n\t\t\tinfo.keyCode = this.namedKeys[s];\n\t\t}\n\t}\n\tif(info.keyCode) {\n\t\treturn info;\n\t} else {\n\t\treturn null;\n\t}\n};\n\n/*\nParse a list of key descriptors into an array of keyInfo objects. The key descriptors can be passed as an array of strings or a space separated string\n*/\nKeyboardManager.prototype.parseKeyDescriptors = function(keyDescriptors,options) {\n\tvar self = this;\n\toptions = options || {};\n\toptions.stack = options.stack || [];\n\tvar wiki = options.wiki || $tw.wiki;\n\tif(typeof keyDescriptors === \"string\" && keyDescriptors === \"\") {\n\t\treturn [];\n\t}\n\tif(!$tw.utils.isArray(keyDescriptors)) {\n\t\tkeyDescriptors = keyDescriptors.split(\" \");\n\t}\n\tvar result = [];\n\t$tw.utils.each(keyDescriptors,function(keyDescriptor) {\n\t\t// Look for a named shortcut\n\t\tif(keyDescriptor.substr(0,2) === \"((\" && keyDescriptor.substr(-2,2) === \"))\") {\n\t\t\tif(options.stack.indexOf(keyDescriptor) === -1) {\n\t\t\t\toptions.stack.push(keyDescriptor);\n\t\t\t\tvar name = keyDescriptor.substring(2,keyDescriptor.length - 2),\n\t\t\t\t\tlookupName = function(configName) {\n\t\t\t\t\t\tvar keyDescriptors = wiki.getTiddlerText(\"$:/config/\" + configName + \"/\" + name);\n\t\t\t\t\t\tif(keyDescriptors) {\n\t\t\t\t\t\t\tresult.push.apply(result,self.parseKeyDescriptors(keyDescriptors,options));\n\t\t\t\t\t\t}\n\t\t\t\t\t};\n\t\t\t\tlookupName(\"shortcuts\");\n\t\t\t\tlookupName($tw.platform.isMac ? \"shortcuts-mac\" : \"shortcuts-not-mac\");\n\t\t\t\tlookupName($tw.platform.isWindows ? \"shortcuts-windows\" : \"shortcuts-not-windows\");\n\t\t\t\tlookupName($tw.platform.isLinux ? \"shortcuts-linux\" : \"shortcuts-not-linux\");\n\t\t\t}\n\t\t} else {\n\t\t\tresult.push(self.parseKeyDescriptor(keyDescriptor));\n\t\t}\n\t});\n\treturn result;\n};\n\nKeyboardManager.prototype.getPrintableShortcuts = function(keyInfoArray) {\n\tvar self = this,\n\t\tresult = [];\n\t$tw.utils.each(keyInfoArray,function(keyInfo) {\n\t\tif(keyInfo) {\n\t\t\tresult.push((keyInfo.ctrlKey ? \"ctrl-\" : \"\") + \n\t\t\t\t (keyInfo.shiftKey ? \"shift-\" : \"\") + \n\t\t\t\t (keyInfo.altKey ? \"alt-\" : \"\") + \n\t\t\t\t (keyInfo.metaKey ? 
self.metaKeyName : \"\") + \n\t\t\t\t (self.keyNames[keyInfo.keyCode]));\n\t\t}\n\t});\n\treturn result;\n}\n\nKeyboardManager.prototype.checkKeyDescriptor = function(event,keyInfo) {\n\treturn keyInfo &&\n\t\t\tevent.keyCode === keyInfo.keyCode && \n\t\t\tevent.shiftKey === keyInfo.shiftKey && \n\t\t\tevent.altKey === keyInfo.altKey && \n\t\t\tevent.ctrlKey === keyInfo.ctrlKey && \n\t\t\tevent.metaKey === keyInfo.metaKey;\n};\n\nKeyboardManager.prototype.checkKeyDescriptors = function(event,keyInfoArray) {\n\tfor(var t=0; t<keyInfoArray.length; t++) {\n\t\tif(this.checkKeyDescriptor(event,keyInfoArray[t])) {\n\t\t\treturn true;\n\t\t}\n\t}\n\treturn false;\n};\n\nexports.KeyboardManager = KeyboardManager;\n\n})();\n",
"title": "$:/core/modules/keyboard.js",
"type": "application/javascript",
"module-type": "global"
},
"$:/core/modules/language.js": {
"text": "/*\\\ntitle: $:/core/modules/language.js\ntype: application/javascript\nmodule-type: global\n\nThe $tw.Language() manages translateable strings\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nCreate an instance of the language manager. Options include:\nwiki: wiki from which to retrieve translation tiddlers\n*/\nfunction Language(options) {\n\toptions = options || \"\";\n\tthis.wiki = options.wiki || $tw.wiki;\n}\n\n/*\nReturn a wikified translateable string. The title is automatically prefixed with \"$:/language/\"\nOptions include:\nvariables: optional hashmap of variables to supply to the language wikification\n*/\nLanguage.prototype.getString = function(title,options) {\n\toptions = options || {};\n\ttitle = \"$:/language/\" + title;\n\treturn this.wiki.renderTiddler(\"text/plain\",title,{variables: options.variables});\n};\n\n/*\nReturn a raw, unwikified translateable string. The title is automatically prefixed with \"$:/language/\"\n*/\nLanguage.prototype.getRawString = function(title) {\n\ttitle = \"$:/language/\" + title;\n\treturn this.wiki.getTiddlerText(title);\n};\n\nexports.Language = Language;\n\n})();\n",
"title": "$:/core/modules/language.js",
"type": "application/javascript",
"module-type": "global"
},
"$:/core/modules/macros/changecount.js": {
"text": "/*\\\ntitle: $:/core/modules/macros/changecount.js\ntype: application/javascript\nmodule-type: macro\n\nMacro to return the changecount for the current tiddler\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nInformation about this macro\n*/\n\nexports.name = \"changecount\";\n\nexports.params = [];\n\n/*\nRun the macro\n*/\nexports.run = function() {\n\treturn this.wiki.getChangeCount(this.getVariable(\"currentTiddler\")) + \"\";\n};\n\n})();\n",
"title": "$:/core/modules/macros/changecount.js",
"type": "application/javascript",
"module-type": "macro"
},
"$:/core/modules/macros/contrastcolour.js": {
"text": "/*\\\ntitle: $:/core/modules/macros/contrastcolour.js\ntype: application/javascript\nmodule-type: macro\n\nMacro to choose which of two colours has the highest contrast with a base colour\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nInformation about this macro\n*/\n\nexports.name = \"contrastcolour\";\n\nexports.params = [\n\t{name: \"target\"},\n\t{name: \"fallbackTarget\"},\n\t{name: \"colourA\"},\n\t{name: \"colourB\"}\n];\n\n/*\nRun the macro\n*/\nexports.run = function(target,fallbackTarget,colourA,colourB) {\n\tvar rgbTarget = $tw.utils.parseCSSColor(target) || $tw.utils.parseCSSColor(fallbackTarget);\n\tif(!rgbTarget) {\n\t\treturn colourA;\n\t}\n\tvar rgbColourA = $tw.utils.parseCSSColor(colourA),\n\t\trgbColourB = $tw.utils.parseCSSColor(colourB);\n\tif(rgbColourA && !rgbColourB) {\n\t\treturn rgbColourA;\n\t}\n\tif(rgbColourB && !rgbColourA) {\n\t\treturn rgbColourB;\n\t}\n\tif(!rgbColourA && !rgbColourB) {\n\t\t// If neither colour is readable, return a crude inverse of the target\n\t\treturn [255 - rgbTarget[0],255 - rgbTarget[1],255 - rgbTarget[2],rgbTarget[3]];\n\t}\n\t// Colour brightness formula derived from http://www.w3.org/WAI/ER/WD-AERT/#color-contrast\n\tvar brightnessTarget = rgbTarget[0] * 0.299 + rgbTarget[1] * 0.587 + rgbTarget[2] * 0.114,\n\t\tbrightnessA = rgbColourA[0] * 0.299 + rgbColourA[1] * 0.587 + rgbColourA[2] * 0.114,\n\t\tbrightnessB = rgbColourB[0] * 0.299 + rgbColourB[1] * 0.587 + rgbColourB[2] * 0.114;\n\treturn Math.abs(brightnessTarget - brightnessA) > Math.abs(brightnessTarget - brightnessB) ? colourA : colourB;\n};\n\n})();\n",
"title": "$:/core/modules/macros/contrastcolour.js",
"type": "application/javascript",
"module-type": "macro"
},
"$:/core/modules/macros/csvtiddlers.js": {
"text": "/*\\\ntitle: $:/core/modules/macros/csvtiddlers.js\ntype: application/javascript\nmodule-type: macro\n\nMacro to output tiddlers matching a filter to CSV\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nInformation about this macro\n*/\n\nexports.name = \"csvtiddlers\";\n\nexports.params = [\n\t{name: \"filter\"},\n\t{name: \"format\"},\n];\n\n/*\nRun the macro\n*/\nexports.run = function(filter,format) {\n\tvar self = this,\n\t\ttiddlers = this.wiki.filterTiddlers(filter),\n\t\ttiddler,\n\t\tfields = [],\n\t\tt,f;\n\t// Collect all the fields\n\tfor(t=0;t<tiddlers.length; t++) {\n\t\ttiddler = this.wiki.getTiddler(tiddlers[t]);\n\t\tfor(f in tiddler.fields) {\n\t\t\tif(fields.indexOf(f) === -1) {\n\t\t\t\tfields.push(f);\n\t\t\t}\n\t\t}\n\t}\n\t// Sort the fields and bring the standard ones to the front\n\tfields.sort();\n\t\"title text modified modifier created creator\".split(\" \").reverse().forEach(function(value,index) {\n\t\tvar p = fields.indexOf(value);\n\t\tif(p !== -1) {\n\t\t\tfields.splice(p,1);\n\t\t\tfields.unshift(value)\n\t\t}\n\t});\n\t// Output the column headings\n\tvar output = [], row = [];\n\tfields.forEach(function(value) {\n\t\trow.push(quoteAndEscape(value))\n\t});\n\toutput.push(row.join(\",\"));\n\t// Output each tiddler\n\tfor(var t=0;t<tiddlers.length; t++) {\n\t\trow = [];\n\t\ttiddler = this.wiki.getTiddler(tiddlers[t]);\n\t\t\tfor(f=0; f<fields.length; f++) {\n\t\t\t\trow.push(quoteAndEscape(tiddler ? tiddler.getFieldString(fields[f]) || \"\" : \"\"));\n\t\t\t}\n\t\toutput.push(row.join(\",\"));\n\t}\n\treturn output.join(\"\\n\");\n};\n\nfunction quoteAndEscape(value) {\n\treturn \"\\\"\" + value.replace(/\"/mg,\"\\\"\\\"\") + \"\\\"\";\n}\n\n})();\n",
"title": "$:/core/modules/macros/csvtiddlers.js",
"type": "application/javascript",
"module-type": "macro"
},
"$:/core/modules/macros/displayshortcuts.js": {
"text": "/*\\\ntitle: $:/core/modules/macros/displayshortcuts.js\ntype: application/javascript\nmodule-type: macro\n\nMacro to display a list of keyboard shortcuts in human readable form. Notably, it resolves named shortcuts like `((bold))` to the underlying keystrokes.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nInformation about this macro\n*/\n\nexports.name = \"displayshortcuts\";\n\nexports.params = [\n\t{name: \"shortcuts\"},\n\t{name: \"prefix\"},\n\t{name: \"separator\"},\n\t{name: \"suffix\"}\n];\n\n/*\nRun the macro\n*/\nexports.run = function(shortcuts,prefix,separator,suffix) {\n\tvar shortcutArray = $tw.keyboardManager.getPrintableShortcuts($tw.keyboardManager.parseKeyDescriptors(shortcuts,{\n\t\twiki: this.wiki\n\t}));\n\tif(shortcutArray.length > 0) {\n\t\tshortcutArray.sort(function(a,b) {\n\t\t return a.toLowerCase().localeCompare(b.toLowerCase());\n\t\t})\n\t\treturn prefix + shortcutArray.join(separator) + suffix;\n\t} else {\n\t\treturn \"\";\n\t}\n};\n\n})();\n",
"title": "$:/core/modules/macros/displayshortcuts.js",
"type": "application/javascript",
"module-type": "macro"
},
"$:/core/modules/macros/dumpvariables.js": {
"text": "/*\\\ntitle: $:/core/modules/macros/dumpvariables.js\ntype: application/javascript\nmodule-type: macro\n\nMacro to dump all active variable values\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nInformation about this macro\n*/\n\nexports.name = \"dumpvariables\";\n\nexports.params = [\n];\n\n/*\nRun the macro\n*/\nexports.run = function() {\n\tvar output = [\"|!Variable |!Value |\"],\n\t\tvariables = [], variable;\n\tfor(variable in this.variables) {\n\t\tvariables.push(variable);\n\t}\n\tvariables.sort();\n\tfor(var index=0; index<variables.length; index++) {\n\t\tvar variable = variables[index];\n\t\toutput.push(\"|\" + variable + \" |<input size=50 value=<<\" + variable + \">>/> |\")\n\t}\n\treturn output.join(\"\\n\");\n};\n\n})();\n",
"title": "$:/core/modules/macros/dumpvariables.js",
"type": "application/javascript",
"module-type": "macro"
},
"$:/core/modules/macros/jsontiddlers.js": {
"text": "/*\\\ntitle: $:/core/modules/macros/jsontiddlers.js\ntype: application/javascript\nmodule-type: macro\n\nMacro to output tiddlers matching a filter to JSON\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nInformation about this macro\n*/\n\nexports.name = \"jsontiddlers\";\n\nexports.params = [\n\t{name: \"filter\"}\n];\n\n/*\nRun the macro\n*/\nexports.run = function(filter) {\n\tvar tiddlers = this.wiki.filterTiddlers(filter),\n\t\tdata = [];\n\tfor(var t=0;t<tiddlers.length; t++) {\n\t\tvar tiddler = this.wiki.getTiddler(tiddlers[t]);\n\t\tif(tiddler) {\n\t\t\tvar fields = new Object();\n\t\t\tfor(var field in tiddler.fields) {\n\t\t\t\tfields[field] = tiddler.getFieldString(field);\n\t\t\t}\n\t\t\tdata.push(fields);\n\t\t}\n\t}\n\treturn JSON.stringify(data,null,$tw.config.preferences.jsonSpaces);\n};\n\n})();\n",
"title": "$:/core/modules/macros/jsontiddlers.js",
"type": "application/javascript",
"module-type": "macro"
},
"$:/core/modules/macros/makedatauri.js": {
"text": "/*\\\ntitle: $:/core/modules/macros/makedatauri.js\ntype: application/javascript\nmodule-type: macro\n\nMacro to convert a string of text to a data URI\n\n<<makedatauri text:\"Text to be converted\" type:\"text/vnd.tiddlywiki\">>\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nInformation about this macro\n*/\n\nexports.name = \"makedatauri\";\n\nexports.params = [\n\t{name: \"text\"},\n\t{name: \"type\"}\n];\n\n/*\nRun the macro\n*/\nexports.run = function(text,type) {\n\treturn $tw.utils.makeDataUri(text,type);\n};\n\n})();\n",
"title": "$:/core/modules/macros/makedatauri.js",
"type": "application/javascript",
"module-type": "macro"
},
"$:/core/modules/macros/now.js": {
"text": "/*\\\ntitle: $:/core/modules/macros/now.js\ntype: application/javascript\nmodule-type: macro\n\nMacro to return a formatted version of the current time\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nInformation about this macro\n*/\n\nexports.name = \"now\";\n\nexports.params = [\n\t{name: \"format\"}\n];\n\n/*\nRun the macro\n*/\nexports.run = function(format) {\n\treturn $tw.utils.formatDateString(new Date(),format || \"0hh:0mm, DDth MMM YYYY\");\n};\n\n})();\n",
"title": "$:/core/modules/macros/now.js",
"type": "application/javascript",
"module-type": "macro"
},
"$:/core/modules/macros/qualify.js": {
"text": "/*\\\ntitle: $:/core/modules/macros/qualify.js\ntype: application/javascript\nmodule-type: macro\n\nMacro to qualify a state tiddler title according\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nInformation about this macro\n*/\n\nexports.name = \"qualify\";\n\nexports.params = [\n\t{name: \"title\"}\n];\n\n/*\nRun the macro\n*/\nexports.run = function(title) {\n\treturn title + \"-\" + this.getStateQualifier();\n};\n\n})();\n",
"title": "$:/core/modules/macros/qualify.js",
"type": "application/javascript",
"module-type": "macro"
},
"$:/core/modules/macros/resolvepath.js": {
"text": "/*\\\ntitle: $:/core/modules/macros/resolvepath.js\ntype: application/javascript\nmodule-type: macro\n\nResolves a relative path for an absolute rootpath.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"resolvepath\";\n\nexports.params = [\n\t{name: \"source\"},\n\t{name: \"root\"}\n];\n\n/*\nRun the macro\n*/\nexports.run = function(source, root) {\n\treturn $tw.utils.resolvePath(source, root);\n};\n\n})();\n",
"title": "$:/core/modules/macros/resolvepath.js",
"type": "application/javascript",
"module-type": "macro"
},
"$:/core/modules/macros/version.js": {
"text": "/*\\\ntitle: $:/core/modules/macros/version.js\ntype: application/javascript\nmodule-type: macro\n\nMacro to return the TiddlyWiki core version number\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nInformation about this macro\n*/\n\nexports.name = \"version\";\n\nexports.params = [];\n\n/*\nRun the macro\n*/\nexports.run = function() {\n\treturn $tw.version;\n};\n\n})();\n",
"title": "$:/core/modules/macros/version.js",
"type": "application/javascript",
"module-type": "macro"
},
"$:/core/modules/parsers/audioparser.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/audioparser.js\ntype: application/javascript\nmodule-type: parser\n\nThe audio parser parses an audio tiddler into an embeddable HTML element\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar AudioParser = function(type,text,options) {\n\tvar element = {\n\t\t\ttype: \"element\",\n\t\t\ttag: \"audio\",\n\t\t\tattributes: {\n\t\t\t\tcontrols: {type: \"string\", value: \"controls\"}\n\t\t\t}\n\t\t},\n\t\tsrc;\n\tif(options._canonical_uri) {\n\t\telement.attributes.src = {type: \"string\", value: options._canonical_uri};\n\t} else if(text) {\n\t\telement.attributes.src = {type: \"string\", value: \"data:\" + type + \";base64,\" + text};\n\t}\n\tthis.tree = [element];\n};\n\nexports[\"audio/ogg\"] = AudioParser;\nexports[\"audio/mpeg\"] = AudioParser;\nexports[\"audio/mp3\"] = AudioParser;\nexports[\"audio/mp4\"] = AudioParser;\n\n})();\n\n",
"title": "$:/core/modules/parsers/audioparser.js",
"type": "application/javascript",
"module-type": "parser"
},
"$:/core/modules/parsers/csvparser.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/csvparser.js\ntype: application/javascript\nmodule-type: parser\n\nThe CSV text parser processes CSV files into a table wrapped in a scrollable widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar CsvParser = function(type,text,options) {\n\t// Table framework\n\tthis.tree = [{\n\t\t\"type\": \"scrollable\", \"children\": [{\n\t\t\t\"type\": \"element\", \"tag\": \"table\", \"children\": [{\n\t\t\t\t\"type\": \"element\", \"tag\": \"tbody\", \"children\": []\n\t\t\t}], \"attributes\": {\n\t\t\t\t\"class\": {\"type\": \"string\", \"value\": \"tc-csv-table\"}\n\t\t\t}\n\t\t}]\n\t}];\n\t// Split the text into lines\n\tvar lines = text.split(/\\r?\\n/mg),\n\t\ttag = \"th\";\n\tfor(var line=0; line<lines.length; line++) {\n\t\tvar lineText = lines[line];\n\t\tif(lineText) {\n\t\t\tvar row = {\n\t\t\t\t\t\"type\": \"element\", \"tag\": \"tr\", \"children\": []\n\t\t\t\t};\n\t\t\tvar columns = lineText.split(\",\");\n\t\t\tfor(var column=0; column<columns.length; column++) {\n\t\t\t\trow.children.push({\n\t\t\t\t\t\t\"type\": \"element\", \"tag\": tag, \"children\": [{\n\t\t\t\t\t\t\t\"type\": \"text\",\n\t\t\t\t\t\t\t\"text\": columns[column]\n\t\t\t\t\t\t}]\n\t\t\t\t\t});\n\t\t\t}\n\t\t\ttag = \"td\";\n\t\t\tthis.tree[0].children[0].children[0].children.push(row);\n\t\t}\n\t}\n};\n\nexports[\"text/csv\"] = CsvParser;\n\n})();\n\n",
"title": "$:/core/modules/parsers/csvparser.js",
"type": "application/javascript",
"module-type": "parser"
},
"$:/core/modules/parsers/htmlparser.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/htmlparser.js\ntype: application/javascript\nmodule-type: parser\n\nThe HTML parser displays text as raw HTML\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar HtmlParser = function(type,text,options) {\n\tvar src;\n\tif(options._canonical_uri) {\n\t\tsrc = options._canonical_uri;\n\t} else if(text) {\n\t\tsrc = \"data:text/html;charset=utf-8,\" + encodeURIComponent(text);\n\t}\n\tthis.tree = [{\n\t\ttype: \"element\",\n\t\ttag: \"iframe\",\n\t\tattributes: {\n\t\t\tsrc: {type: \"string\", value: src},\n\t\t\tsandbox: {type: \"string\", value: \"\"}\n\t\t}\n\t}];\n};\n\nexports[\"text/html\"] = HtmlParser;\n\n})();\n\n",
"title": "$:/core/modules/parsers/htmlparser.js",
"type": "application/javascript",
"module-type": "parser"
},
"$:/core/modules/parsers/imageparser.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/imageparser.js\ntype: application/javascript\nmodule-type: parser\n\nThe image parser parses an image into an embeddable HTML element\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar ImageParser = function(type,text,options) {\n\tvar element = {\n\t\t\ttype: \"element\",\n\t\t\ttag: \"img\",\n\t\t\tattributes: {}\n\t\t},\n\t\tsrc;\n\tif(options._canonical_uri) {\n\t\telement.attributes.src = {type: \"string\", value: options._canonical_uri};\n\t\tif(type === \"application/pdf\" || type === \".pdf\") {\n\t\t\telement.tag = \"embed\";\n\t\t}\n\t} else if(text) {\n\t\tif(type === \"application/pdf\" || type === \".pdf\") {\n\t\t\telement.attributes.src = {type: \"string\", value: \"data:application/pdf;base64,\" + text};\n\t\t\telement.tag = \"embed\";\n\t\t} else if(type === \"image/svg+xml\" || type === \".svg\") {\n\t\t\telement.attributes.src = {type: \"string\", value: \"data:image/svg+xml,\" + encodeURIComponent(text)};\n\t\t} else {\n\t\t\telement.attributes.src = {type: \"string\", value: \"data:\" + type + \";base64,\" + text};\n\t\t}\n\t}\n\tthis.tree = [element];\n};\n\nexports[\"image/svg+xml\"] = ImageParser;\nexports[\"image/jpg\"] = ImageParser;\nexports[\"image/jpeg\"] = ImageParser;\nexports[\"image/png\"] = ImageParser;\nexports[\"image/gif\"] = ImageParser;\nexports[\"application/pdf\"] = ImageParser;\nexports[\"image/x-icon\"] = ImageParser;\n\n})();\n\n",
"title": "$:/core/modules/parsers/imageparser.js",
"type": "application/javascript",
"module-type": "parser"
},
"$:/core/modules/utils/parseutils.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/parseutils.js\ntype: application/javascript\nmodule-type: utils\n\nUtility functions concerned with parsing text into tokens.\n\nMost functions have the following pattern:\n\n* The parameters are:\n** `source`: the source string being parsed\n** `pos`: the current parse position within the string\n** Any further parameters are used to identify the token that is being parsed\n* The return value is:\n** null if the token was not found at the specified position\n** an object representing the token with the following standard fields:\n*** `type`: string indicating the type of the token\n*** `start`: start position of the token in the source string\n*** `end`: end position of the token in the source string\n*** Any further fields required to describe the token\n\nThe exception is `skipWhiteSpace`, which just returns the position after the whitespace.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nLook for a whitespace token. Returns null if not found, otherwise returns {type: \"whitespace\", start:, end:,}\n*/\nexports.parseWhiteSpace = function(source,pos) {\n\tvar p = pos,c;\n\twhile(true) {\n\t\tc = source.charAt(p);\n\t\tif((c === \" \") || (c === \"\\f\") || (c === \"\\n\") || (c === \"\\r\") || (c === \"\\t\") || (c === \"\\v\") || (c === \"\\u00a0\")) { // Ignores some obscure unicode spaces\n\t\t\tp++;\n\t\t} else {\n\t\t\tbreak;\n\t\t}\n\t}\n\tif(p === pos) {\n\t\treturn null;\n\t} else {\n\t\treturn {\n\t\t\ttype: \"whitespace\",\n\t\t\tstart: pos,\n\t\t\tend: p\n\t\t}\n\t}\n};\n\n/*\nConvenience wrapper for parseWhiteSpace. Returns the position after the whitespace\n*/\nexports.skipWhiteSpace = function(source,pos) {\n\tvar c;\n\twhile(true) {\n\t\tc = source.charAt(pos);\n\t\tif((c === \" \") || (c === \"\\f\") || (c === \"\\n\") || (c === \"\\r\") || (c === \"\\t\") || (c === \"\\v\") || (c === \"\\u00a0\")) { // Ignores some obscure unicode spaces\n\t\t\tpos++;\n\t\t} else {\n\t\t\treturn pos;\n\t\t}\n\t}\n};\n\n/*\nLook for a given string token. Returns null if not found, otherwise returns {type: \"token\", value:, start:, end:,}\n*/\nexports.parseTokenString = function(source,pos,token) {\n\tvar match = source.indexOf(token,pos) === pos;\n\tif(match) {\n\t\treturn {\n\t\t\ttype: \"token\",\n\t\t\tvalue: token,\n\t\t\tstart: pos,\n\t\t\tend: pos + token.length\n\t\t};\n\t}\n\treturn null;\n};\n\n/*\nLook for a token matching a regex. Returns null if not found, otherwise returns {type: \"regexp\", match:, start:, end:,}\n*/\nexports.parseTokenRegExp = function(source,pos,reToken) {\n\tvar node = {\n\t\ttype: \"regexp\",\n\t\tstart: pos\n\t};\n\treToken.lastIndex = pos;\n\tnode.match = reToken.exec(source);\n\tif(node.match && node.match.index === pos) {\n\t\tnode.end = pos + node.match[0].length;\n\t\treturn node;\n\t} else {\n\t\treturn null;\n\t}\n};\n\n/*\nLook for a string literal. Returns null if not found, otherwise returns {type: \"string\", value:, start:, end:,}\n*/\nexports.parseStringLiteral = function(source,pos) {\n\tvar node = {\n\t\ttype: \"string\",\n\t\tstart: pos\n\t};\n\tvar reString = /(?:\"\"\"([\\s\\S]*?)\"\"\"|\"([^\"]*)\")|(?:'([^']*)')/g;\n\treString.lastIndex = pos;\n\tvar match = reString.exec(source);\n\tif(match && match.index === pos) {\n\t\tnode.value = match[1] !== undefined ? match[1] :(\n\t\t\tmatch[2] !== undefined ? 
match[2] : match[3] \n\t\t\t\t\t);\n\t\tnode.end = pos + match[0].length;\n\t\treturn node;\n\t} else {\n\t\treturn null;\n\t}\n};\n\n/*\nLook for a macro invocation parameter. Returns null if not found, or {type: \"macro-parameter\", name:, value:, start:, end:}\n*/\nexports.parseMacroParameter = function(source,pos) {\n\tvar node = {\n\t\ttype: \"macro-parameter\",\n\t\tstart: pos\n\t};\n\t// Define our regexp\n\tvar reMacroParameter = /(?:([A-Za-z0-9\\-_]+)\\s*:)?(?:\\s*(?:\"\"\"([\\s\\S]*?)\"\"\"|\"([^\"]*)\"|'([^']*)'|\\[\\[([^\\]]*)\\]\\]|([^\\s>\"'=]+)))/g;\n\t// Skip whitespace\n\tpos = $tw.utils.skipWhiteSpace(source,pos);\n\t// Look for the parameter\n\tvar token = $tw.utils.parseTokenRegExp(source,pos,reMacroParameter);\n\tif(!token) {\n\t\treturn null;\n\t}\n\tpos = token.end;\n\t// Get the parameter details\n\tnode.value = token.match[2] !== undefined ? token.match[2] : (\n\t\t\t\t\ttoken.match[3] !== undefined ? token.match[3] : (\n\t\t\t\t\t\ttoken.match[4] !== undefined ? token.match[4] : (\n\t\t\t\t\t\t\ttoken.match[5] !== undefined ? token.match[5] : (\n\t\t\t\t\t\t\t\ttoken.match[6] !== undefined ? token.match[6] : (\n\t\t\t\t\t\t\t\t\t\"\"\n\t\t\t\t\t\t\t\t)\n\t\t\t\t\t\t\t)\n\t\t\t\t\t\t)\n\t\t\t\t\t)\n\t\t\t\t);\n\tif(token.match[1]) {\n\t\tnode.name = token.match[1];\n\t}\n\t// Update the end position\n\tnode.end = pos;\n\treturn node;\n};\n\n/*\nLook for a macro invocation. Returns null if not found, or {type: \"macrocall\", name:, parameters:, start:, end:}\n*/\nexports.parseMacroInvocation = function(source,pos) {\n\tvar node = {\n\t\ttype: \"macrocall\",\n\t\tstart: pos,\n\t\tparams: []\n\t};\n\t// Define our regexps\n\tvar reMacroName = /([^\\s>\"'=]+)/g;\n\t// Skip whitespace\n\tpos = $tw.utils.skipWhiteSpace(source,pos);\n\t// Look for a double less than sign\n\tvar token = $tw.utils.parseTokenString(source,pos,\"<<\");\n\tif(!token) {\n\t\treturn null;\n\t}\n\tpos = token.end;\n\t// Get the macro name\n\tvar name = $tw.utils.parseTokenRegExp(source,pos,reMacroName);\n\tif(!name) {\n\t\treturn null;\n\t}\n\tnode.name = name.match[1];\n\tpos = name.end;\n\t// Process parameters\n\tvar parameter = $tw.utils.parseMacroParameter(source,pos);\n\twhile(parameter) {\n\t\tnode.params.push(parameter);\n\t\tpos = parameter.end;\n\t\t// Get the next parameter\n\t\tparameter = $tw.utils.parseMacroParameter(source,pos);\n\t}\n\t// Skip whitespace\n\tpos = $tw.utils.skipWhiteSpace(source,pos);\n\t// Look for a double greater than sign\n\ttoken = $tw.utils.parseTokenString(source,pos,\">>\");\n\tif(!token) {\n\t\treturn null;\n\t}\n\tpos = token.end;\n\t// Update the end position\n\tnode.end = pos;\n\treturn node;\n};\n\n/*\nLook for an HTML attribute definition. 
Returns null if not found, otherwise returns {type: \"attribute\", name:, valueType: \"string|indirect|macro\", value:, start:, end:,}\n*/\nexports.parseAttribute = function(source,pos) {\n\tvar node = {\n\t\tstart: pos\n\t};\n\t// Define our regexps\n\tvar reAttributeName = /([^\\/\\s>\"'=]+)/g,\n\t\treUnquotedAttribute = /([^\\/\\s<>\"'=]+)/g,\n\t\treIndirectValue = /\\{\\{([^\\}]+)\\}\\}/g;\n\t// Skip whitespace\n\tpos = $tw.utils.skipWhiteSpace(source,pos);\n\t// Get the attribute name\n\tvar name = $tw.utils.parseTokenRegExp(source,pos,reAttributeName);\n\tif(!name) {\n\t\treturn null;\n\t}\n\tnode.name = name.match[1];\n\tpos = name.end;\n\t// Skip whitespace\n\tpos = $tw.utils.skipWhiteSpace(source,pos);\n\t// Look for an equals sign\n\tvar token = $tw.utils.parseTokenString(source,pos,\"=\");\n\tif(token) {\n\t\tpos = token.end;\n\t\t// Skip whitespace\n\t\tpos = $tw.utils.skipWhiteSpace(source,pos);\n\t\t// Look for a string literal\n\t\tvar stringLiteral = $tw.utils.parseStringLiteral(source,pos);\n\t\tif(stringLiteral) {\n\t\t\tpos = stringLiteral.end;\n\t\t\tnode.type = \"string\";\n\t\t\tnode.value = stringLiteral.value;\n\t\t} else {\n\t\t\t// Look for an indirect value\n\t\t\tvar indirectValue = $tw.utils.parseTokenRegExp(source,pos,reIndirectValue);\n\t\t\tif(indirectValue) {\n\t\t\t\tpos = indirectValue.end;\n\t\t\t\tnode.type = \"indirect\";\n\t\t\t\tnode.textReference = indirectValue.match[1];\n\t\t\t} else {\n\t\t\t\t// Look for a unquoted value\n\t\t\t\tvar unquotedValue = $tw.utils.parseTokenRegExp(source,pos,reUnquotedAttribute);\n\t\t\t\tif(unquotedValue) {\n\t\t\t\t\tpos = unquotedValue.end;\n\t\t\t\t\tnode.type = \"string\";\n\t\t\t\t\tnode.value = unquotedValue.match[1];\n\t\t\t\t} else {\n\t\t\t\t\t// Look for a macro invocation value\n\t\t\t\t\tvar macroInvocation = $tw.utils.parseMacroInvocation(source,pos);\n\t\t\t\t\tif(macroInvocation) {\n\t\t\t\t\t\tpos = macroInvocation.end;\n\t\t\t\t\t\tnode.type = \"macro\";\n\t\t\t\t\t\tnode.value = macroInvocation;\n\t\t\t\t\t} else {\n\t\t\t\t\t\tnode.type = \"string\";\n\t\t\t\t\t\tnode.value = \"true\";\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t} else {\n\t\tnode.type = \"string\";\n\t\tnode.value = \"true\";\n\t}\n\t// Update the end position\n\tnode.end = pos;\n\treturn node;\n};\n\n})();\n",
"title": "$:/core/modules/utils/parseutils.js",
"type": "application/javascript",
"module-type": "utils"
},
"$:/core/modules/parsers/textparser.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/textparser.js\ntype: application/javascript\nmodule-type: parser\n\nThe plain text parser processes blocks of source text into a degenerate parse tree consisting of a single text node\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar TextParser = function(type,text,options) {\n\tthis.tree = [{\n\t\ttype: \"codeblock\",\n\t\tattributes: {\n\t\t\tcode: {type: \"string\", value: text},\n\t\t\tlanguage: {type: \"string\", value: type}\n\t\t}\n\t}];\n};\n\nexports[\"text/plain\"] = TextParser;\nexports[\"text/x-tiddlywiki\"] = TextParser;\nexports[\"application/javascript\"] = TextParser;\nexports[\"application/json\"] = TextParser;\nexports[\"text/css\"] = TextParser;\nexports[\"application/x-tiddler-dictionary\"] = TextParser;\n\n})();\n\n",
"title": "$:/core/modules/parsers/textparser.js",
"type": "application/javascript",
"module-type": "parser"
},
"$:/core/modules/parsers/videoparser.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/videoparser.js\ntype: application/javascript\nmodule-type: parser\n\nThe video parser parses a video tiddler into an embeddable HTML element\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar AudioParser = function(type,text,options) {\n\tvar element = {\n\t\t\ttype: \"element\",\n\t\t\ttag: \"video\",\n\t\t\tattributes: {\n\t\t\t\tcontrols: {type: \"string\", value: \"controls\"}\n\t\t\t}\n\t\t},\n\t\tsrc;\n\tif(options._canonical_uri) {\n\t\telement.attributes.src = {type: \"string\", value: options._canonical_uri};\n\t} else if(text) {\n\t\telement.attributes.src = {type: \"string\", value: \"data:\" + type + \";base64,\" + text};\n\t}\n\tthis.tree = [element];\n};\n\nexports[\"video/mp4\"] = AudioParser;\n\n})();\n\n",
"title": "$:/core/modules/parsers/videoparser.js",
"type": "application/javascript",
"module-type": "parser"
},
"$:/core/modules/parsers/wikiparser/rules/codeblock.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/codeblock.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text rule for code blocks. For example:\n\n```\n\t```\n\tThis text will not be //wikified//\n\t```\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"codeblock\";\nexports.types = {block: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match and get language if defined\n\tthis.matchRegExp = /```([\\w-]*)\\r?\\n/mg;\n};\n\nexports.parse = function() {\n\tvar reEnd = /(\\r?\\n```$)/mg;\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\n\t// Look for the end of the block\n\treEnd.lastIndex = this.parser.pos;\n\tvar match = reEnd.exec(this.parser.source),\n\t\ttext;\n\t// Process the block\n\tif(match) {\n\t\ttext = this.parser.source.substring(this.parser.pos,match.index);\n\t\tthis.parser.pos = match.index + match[0].length;\n\t} else {\n\t\ttext = this.parser.source.substr(this.parser.pos);\n\t\tthis.parser.pos = this.parser.sourceLength;\n\t}\n\t// Return the $codeblock widget\n\treturn [{\n\t\t\ttype: \"codeblock\",\n\t\t\tattributes: {\n\t\t\t\t\tcode: {type: \"string\", value: text},\n\t\t\t\t\tlanguage: {type: \"string\", value: this.match[1]}\n\t\t\t}\n\t}];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/codeblock.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/codeinline.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/codeinline.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text inline rule for code runs. For example:\n\n```\n\tThis is a `code run`.\n\tThis is another ``code run``\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"codeinline\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /(``?)/mg;\n};\n\nexports.parse = function() {\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\tvar reEnd = new RegExp(this.match[1], \"mg\");\n\t// Look for the end marker\n\treEnd.lastIndex = this.parser.pos;\n\tvar match = reEnd.exec(this.parser.source),\n\t\ttext;\n\t// Process the text\n\tif(match) {\n\t\ttext = this.parser.source.substring(this.parser.pos,match.index);\n\t\tthis.parser.pos = match.index + match[0].length;\n\t} else {\n\t\ttext = this.parser.source.substr(this.parser.pos);\n\t\tthis.parser.pos = this.parser.sourceLength;\n\t}\n\treturn [{\n\t\ttype: \"element\",\n\t\ttag: \"code\",\n\t\tchildren: [{\n\t\t\ttype: \"text\",\n\t\t\ttext: text\n\t\t}]\n\t}];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/codeinline.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/commentblock.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/commentblock.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text block rule for HTML comments. For example:\n\n```\n<!-- This is a comment -->\n```\n\nNote that the syntax for comments is simplified to an opening \"<!--\" sequence and a closing \"-->\" sequence -- HTML itself implements a more complex format (see http://ostermiller.org/findhtmlcomment.html)\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"commentblock\";\nexports.types = {block: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\tthis.matchRegExp = /<!--/mg;\n\tthis.endMatchRegExp = /-->/mg;\n};\n\nexports.findNextMatch = function(startPos) {\n\tthis.matchRegExp.lastIndex = startPos;\n\tthis.match = this.matchRegExp.exec(this.parser.source);\n\tif(this.match) {\n\t\tthis.endMatchRegExp.lastIndex = startPos + this.match[0].length;\n\t\tthis.endMatch = this.endMatchRegExp.exec(this.parser.source);\n\t\tif(this.endMatch) {\n\t\t\treturn this.match.index;\n\t\t}\n\t}\n\treturn undefined;\n};\n\nexports.parse = function() {\n\t// Move past the match\n\tthis.parser.pos = this.endMatchRegExp.lastIndex;\n\t// Don't return any elements\n\treturn [];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/commentblock.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/commentinline.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/commentinline.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text inline rule for HTML comments. For example:\n\n```\n<!-- This is a comment -->\n```\n\nNote that the syntax for comments is simplified to an opening \"<!--\" sequence and a closing \"-->\" sequence -- HTML itself implements a more complex format (see http://ostermiller.org/findhtmlcomment.html)\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"commentinline\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\tthis.matchRegExp = /<!--/mg;\n\tthis.endMatchRegExp = /-->/mg;\n};\n\nexports.findNextMatch = function(startPos) {\n\tthis.matchRegExp.lastIndex = startPos;\n\tthis.match = this.matchRegExp.exec(this.parser.source);\n\tif(this.match) {\n\t\tthis.endMatchRegExp.lastIndex = startPos + this.match[0].length;\n\t\tthis.endMatch = this.endMatchRegExp.exec(this.parser.source);\n\t\tif(this.endMatch) {\n\t\t\treturn this.match.index;\n\t\t}\n\t}\n\treturn undefined;\n};\n\nexports.parse = function() {\n\t// Move past the match\n\tthis.parser.pos = this.endMatchRegExp.lastIndex;\n\t// Don't return any elements\n\treturn [];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/commentinline.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/dash.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/dash.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text inline rule for dashes. For example:\n\n```\nThis is an en-dash: --\n\nThis is an em-dash: ---\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"dash\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /-{2,3}(?!-)/mg;\n};\n\nexports.parse = function() {\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\tvar dash = this.match[0].length === 2 ? \"–\" : \"—\";\n\treturn [{\n\t\ttype: \"entity\",\n\t\tentity: dash\n\t}];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/dash.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/emphasis/bold.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/emphasis/bold.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text inline rule for emphasis - bold. For example:\n\n```\n\tThis is ''bold'' text\n```\n\nThis wikiparser can be modified using the rules eg:\n\n```\n\\rules except bold \n\\rules only bold \n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"bold\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /''/mg;\n};\n\nexports.parse = function() {\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\n\t// Parse the run including the terminator\n\tvar tree = this.parser.parseInlineRun(/''/mg,{eatTerminator: true});\n\n\t// Return the classed span\n\treturn [{\n\t\ttype: \"element\",\n\t\ttag: \"strong\",\n\t\tchildren: tree\n\t}];\n};\n\n})();",
"title": "$:/core/modules/parsers/wikiparser/rules/emphasis/bold.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/emphasis/italic.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/emphasis/italic.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text inline rule for emphasis - italic. For example:\n\n```\n\tThis is //italic// text\n```\n\nThis wikiparser can be modified using the rules eg:\n\n```\n\\rules except italic\n\\rules only italic\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"italic\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /\\/\\//mg;\n};\n\nexports.parse = function() {\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\n\t// Parse the run including the terminator\n\tvar tree = this.parser.parseInlineRun(/\\/\\//mg,{eatTerminator: true});\n\n\t// Return the classed span\n\treturn [{\n\t\ttype: \"element\",\n\t\ttag: \"em\",\n\t\tchildren: tree\n\t}];\n};\n\n})();",
"title": "$:/core/modules/parsers/wikiparser/rules/emphasis/italic.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/emphasis/strikethrough.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/emphasis/strikethrough.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text inline rule for emphasis - strikethrough. For example:\n\n```\n\tThis is ~~strikethrough~~ text\n```\n\nThis wikiparser can be modified using the rules eg:\n\n```\n\\rules except strikethrough \n\\rules only strikethrough \n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"strikethrough\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /~~/mg;\n};\n\nexports.parse = function() {\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\n\t// Parse the run including the terminator\n\tvar tree = this.parser.parseInlineRun(/~~/mg,{eatTerminator: true});\n\n\t// Return the classed span\n\treturn [{\n\t\ttype: \"element\",\n\t\ttag: \"strike\",\n\t\tchildren: tree\n\t}];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/emphasis/strikethrough.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/emphasis/subscript.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/emphasis/subscript.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text inline rule for emphasis - subscript. For example:\n\n```\n\tThis is ,,subscript,, text\n```\n\nThis wikiparser can be modified using the rules eg:\n\n```\n\\rules except subscript \n\\rules only subscript \n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"subscript\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /,,/mg;\n};\n\nexports.parse = function() {\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\n\t// Parse the run including the terminator\n\tvar tree = this.parser.parseInlineRun(/,,/mg,{eatTerminator: true});\n\n\t// Return the classed span\n\treturn [{\n\t\ttype: \"element\",\n\t\ttag: \"sub\",\n\t\tchildren: tree\n\t}];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/emphasis/subscript.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/emphasis/superscript.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/emphasis/superscript.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text inline rule for emphasis - superscript. For example:\n\n```\n\tThis is ^^superscript^^ text\n```\n\nThis wikiparser can be modified using the rules eg:\n\n```\n\\rules except superscript \n\\rules only superscript \n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"superscript\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /\\^\\^/mg;\n};\n\nexports.parse = function() {\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\n\t// Parse the run including the terminator\n\tvar tree = this.parser.parseInlineRun(/\\^\\^/mg,{eatTerminator: true});\n\n\t// Return the classed span\n\treturn [{\n\t\ttype: \"element\",\n\t\ttag: \"sup\",\n\t\tchildren: tree\n\t}];\n};\n\n})();",
"title": "$:/core/modules/parsers/wikiparser/rules/emphasis/superscript.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/emphasis/underscore.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/emphasis/underscore.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text inline rule for emphasis - underscore. For example:\n\n```\n\tThis is __underscore__ text\n```\n\nThis wikiparser can be modified using the rules eg:\n\n```\n\\rules except underscore \n\\rules only underscore\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"underscore\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /__/mg;\n};\n\nexports.parse = function() {\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\n\t// Parse the run including the terminator\n\tvar tree = this.parser.parseInlineRun(/__/mg,{eatTerminator: true});\n\n\t// Return the classed span\n\treturn [{\n\t\ttype: \"element\",\n\t\ttag: \"u\",\n\t\tchildren: tree\n\t}];\n};\n\n})();",
"title": "$:/core/modules/parsers/wikiparser/rules/emphasis/underscore.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/entity.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/entity.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text inline rule for HTML entities. For example:\n\n```\n\tThis is a copyright symbol: ©\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"entity\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /(&#?[a-zA-Z0-9]{2,8};)/mg;\n};\n\n/*\nParse the most recent match\n*/\nexports.parse = function() {\n\t// Get all the details of the match\n\tvar entityString = this.match[1];\n\t// Move past the macro call\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\t// Return the entity\n\treturn [{type: \"entity\", entity: this.match[0]}];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/entity.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/extlink.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/extlink.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text inline rule for external links. For example:\n\n```\nAn external link: http://www.tiddlywiki.com/\n\nA suppressed external link: ~http://www.tiddlyspace.com/\n```\n\nExternal links can be suppressed by preceding them with `~`.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"extlink\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /~?(?:file|http|https|mailto|ftp|irc|news|data|skype):[^\\s<>{}\\[\\]`|\"\\\\^]+(?:\\/|\\b)/mg;\n};\n\nexports.parse = function() {\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\t// Create the link unless it is suppressed\n\tif(this.match[0].substr(0,1) === \"~\") {\n\t\treturn [{type: \"text\", text: this.match[0].substr(1)}];\n\t} else {\n\t\treturn [{\n\t\t\ttype: \"element\",\n\t\t\ttag: \"a\",\n\t\t\tattributes: {\n\t\t\t\thref: {type: \"string\", value: this.match[0]},\n\t\t\t\t\"class\": {type: \"string\", value: \"tc-tiddlylink-external\"},\n\t\t\t\ttarget: {type: \"string\", value: \"_blank\"},\n\t\t\t\trel: {type: \"string\", value: \"noopener noreferrer\"}\n\t\t\t},\n\t\t\tchildren: [{\n\t\t\t\ttype: \"text\", text: this.match[0]\n\t\t\t}]\n\t\t}];\n\t}\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/extlink.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/filteredtranscludeblock.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/filteredtranscludeblock.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text rule for block-level filtered transclusion. For example:\n\n```\n{{{ [tag[docs]] }}}\n{{{ [tag[docs]] |tooltip}}}\n{{{ [tag[docs]] ||TemplateTitle}}}\n{{{ [tag[docs]] |tooltip||TemplateTitle}}}\n{{{ [tag[docs]] }}width:40;height:50;}.class.class\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"filteredtranscludeblock\";\nexports.types = {block: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /\\{\\{\\{([^\\|]+?)(?:\\|([^\\|\\{\\}]+))?(?:\\|\\|([^\\|\\{\\}]+))?\\}\\}([^\\}]*)\\}(?:\\.(\\S+))?(?:\\r?\\n|$)/mg;\n};\n\nexports.parse = function() {\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\t// Get the match details\n\tvar filter = this.match[1],\n\t\ttooltip = this.match[2],\n\t\ttemplate = $tw.utils.trim(this.match[3]),\n\t\tstyle = this.match[4],\n\t\tclasses = this.match[5];\n\t// Return the list widget\n\tvar node = {\n\t\ttype: \"list\",\n\t\tattributes: {\n\t\t\tfilter: {type: \"string\", value: filter}\n\t\t},\n\t\tisBlock: true\n\t};\n\tif(tooltip) {\n\t\tnode.attributes.tooltip = {type: \"string\", value: tooltip};\n\t}\n\tif(template) {\n\t\tnode.attributes.template = {type: \"string\", value: template};\n\t}\n\tif(style) {\n\t\tnode.attributes.style = {type: \"string\", value: style};\n\t}\n\tif(classes) {\n\t\tnode.attributes.itemClass = {type: \"string\", value: classes.split(\".\").join(\" \")};\n\t}\n\treturn [node];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/filteredtranscludeblock.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/filteredtranscludeinline.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/filteredtranscludeinline.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text rule for inline filtered transclusion. For example:\n\n```\n{{{ [tag[docs]] }}}\n{{{ [tag[docs]] |tooltip}}}\n{{{ [tag[docs]] ||TemplateTitle}}}\n{{{ [tag[docs]] |tooltip||TemplateTitle}}}\n{{{ [tag[docs]] }}width:40;height:50;}.class.class\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"filteredtranscludeinline\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /\\{\\{\\{([^\\|]+?)(?:\\|([^\\|\\{\\}]+))?(?:\\|\\|([^\\|\\{\\}]+))?\\}\\}([^\\}]*)\\}(?:\\.(\\S+))?/mg;\n};\n\nexports.parse = function() {\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\t// Get the match details\n\tvar filter = this.match[1],\n\t\ttooltip = this.match[2],\n\t\ttemplate = $tw.utils.trim(this.match[3]),\n\t\tstyle = this.match[4],\n\t\tclasses = this.match[5];\n\t// Return the list widget\n\tvar node = {\n\t\ttype: \"list\",\n\t\tattributes: {\n\t\t\tfilter: {type: \"string\", value: filter}\n\t\t}\n\t};\n\tif(tooltip) {\n\t\tnode.attributes.tooltip = {type: \"string\", value: tooltip};\n\t}\n\tif(template) {\n\t\tnode.attributes.template = {type: \"string\", value: template};\n\t}\n\tif(style) {\n\t\tnode.attributes.style = {type: \"string\", value: style};\n\t}\n\tif(classes) {\n\t\tnode.attributes.itemClass = {type: \"string\", value: classes.split(\".\").join(\" \")};\n\t}\n\treturn [node];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/filteredtranscludeinline.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/hardlinebreaks.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/hardlinebreaks.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text inline rule for marking areas with hard line breaks. For example:\n\n```\n\"\"\"\nThis is some text\nThat is set like\nIt is a Poem\nWhen it is\nClearly\nNot\n\"\"\"\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"hardlinebreaks\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /\"\"\"(?:\\r?\\n)?/mg;\n};\n\nexports.parse = function() {\n\tvar reEnd = /(\"\"\")|(\\r?\\n)/mg,\n\t\ttree = [],\n\t\tmatch;\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\tdo {\n\t\t// Parse the run up to the terminator\n\t\ttree.push.apply(tree,this.parser.parseInlineRun(reEnd,{eatTerminator: false}));\n\t\t// Redo the terminator match\n\t\treEnd.lastIndex = this.parser.pos;\n\t\tmatch = reEnd.exec(this.parser.source);\n\t\tif(match) {\n\t\t\tthis.parser.pos = reEnd.lastIndex;\n\t\t\t// Add a line break if the terminator was a line break\n\t\t\tif(match[2]) {\n\t\t\t\ttree.push({type: \"element\", tag: \"br\"});\n\t\t\t}\n\t\t}\n\t} while(match && !match[1]);\n\t// Return the nodes\n\treturn tree;\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/hardlinebreaks.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/heading.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/heading.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text block rule for headings\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"heading\";\nexports.types = {block: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /(!{1,6})/mg;\n};\n\n/*\nParse the most recent match\n*/\nexports.parse = function() {\n\t// Get all the details of the match\n\tvar headingLevel = this.match[1].length;\n\t// Move past the !s\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\t// Parse any classes, whitespace and then the heading itself\n\tvar classes = this.parser.parseClasses();\n\tthis.parser.skipWhitespace({treatNewlinesAsNonWhitespace: true});\n\tvar tree = this.parser.parseInlineRun(/(\\r?\\n)/mg);\n\t// Return the heading\n\treturn [{\n\t\ttype: \"element\",\n\t\ttag: \"h\" + headingLevel, \n\t\tattributes: {\n\t\t\t\"class\": {type: \"string\", value: classes.join(\" \")}\n\t\t},\n\t\tchildren: tree\n\t}];\n};\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/heading.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/horizrule.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/horizrule.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text block rule for rules. For example:\n\n```\n---\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"horizrule\";\nexports.types = {block: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /-{3,}\\r?(?:\\n|$)/mg;\n};\n\nexports.parse = function() {\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\treturn [{type: \"element\", tag: \"hr\"}];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/horizrule.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/html.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/html.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki rule for HTML elements and widgets. For example:\n\n{{{\n<aside>\nThis is an HTML5 aside element\n</aside>\n\n<$slider target=\"MyTiddler\">\nThis is a widget invocation\n</$slider>\n\n}}}\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"html\";\nexports.types = {inline: true, block: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n};\n\nexports.findNextMatch = function(startPos) {\n\t// Find the next tag\n\tthis.nextTag = this.findNextTag(this.parser.source,startPos,{\n\t\trequireLineBreak: this.is.block\n\t});\n\treturn this.nextTag ? this.nextTag.start : undefined;\n};\n\n/*\nParse the most recent match\n*/\nexports.parse = function() {\n\t// Retrieve the most recent match so that recursive calls don't overwrite it\n\tvar tag = this.nextTag;\n\tthis.nextTag = null;\n\t// Advance the parser position to past the tag\n\tthis.parser.pos = tag.end;\n\t// Check for an immediately following double linebreak\n\tvar hasLineBreak = !tag.isSelfClosing && !!$tw.utils.parseTokenRegExp(this.parser.source,this.parser.pos,/([^\\S\\n\\r]*\\r?\\n(?:[^\\S\\n\\r]*\\r?\\n|$))/g);\n\t// Set whether we're in block mode\n\ttag.isBlock = this.is.block || hasLineBreak;\n\t// Parse the body if we need to\n\tif(!tag.isSelfClosing && $tw.config.htmlVoidElements.indexOf(tag.tag) === -1) {\n\t\t\tvar reEndString = \"</\" + $tw.utils.escapeRegExp(tag.tag) + \">\",\n\t\t\t\treEnd = new RegExp(\"(\" + reEndString + \")\",\"mg\");\n\t\tif(hasLineBreak) {\n\t\t\ttag.children = this.parser.parseBlocks(reEndString);\n\t\t} else {\n\t\t\ttag.children = this.parser.parseInlineRun(reEnd);\n\t\t}\n\t\treEnd.lastIndex = this.parser.pos;\n\t\tvar endMatch = reEnd.exec(this.parser.source);\n\t\tif(endMatch && endMatch.index === this.parser.pos) {\n\t\t\tthis.parser.pos = endMatch.index + endMatch[0].length;\n\t\t}\n\t}\n\t// Return the tag\n\treturn [tag];\n};\n\n/*\nLook for an HTML tag. 
Returns null if not found, otherwise returns {type: \"element\", name:, attributes: [], isSelfClosing:, start:, end:,}\n*/\nexports.parseTag = function(source,pos,options) {\n\toptions = options || {};\n\tvar token,\n\t\tnode = {\n\t\t\ttype: \"element\",\n\t\t\tstart: pos,\n\t\t\tattributes: {}\n\t\t};\n\t// Define our regexps\n\tvar reTagName = /([a-zA-Z0-9\\-\\$]+)/g;\n\t// Skip whitespace\n\tpos = $tw.utils.skipWhiteSpace(source,pos);\n\t// Look for a less than sign\n\ttoken = $tw.utils.parseTokenString(source,pos,\"<\");\n\tif(!token) {\n\t\treturn null;\n\t}\n\tpos = token.end;\n\t// Get the tag name\n\ttoken = $tw.utils.parseTokenRegExp(source,pos,reTagName);\n\tif(!token) {\n\t\treturn null;\n\t}\n\tnode.tag = token.match[1];\n\tif(node.tag.charAt(0) === \"$\") {\n\t\tnode.type = node.tag.substr(1);\n\t}\n\tpos = token.end;\n\t// Process attributes\n\tvar attribute = $tw.utils.parseAttribute(source,pos);\n\twhile(attribute) {\n\t\tnode.attributes[attribute.name] = attribute;\n\t\tpos = attribute.end;\n\t\t// Get the next attribute\n\t\tattribute = $tw.utils.parseAttribute(source,pos);\n\t}\n\t// Skip whitespace\n\tpos = $tw.utils.skipWhiteSpace(source,pos);\n\t// Look for a closing slash\n\ttoken = $tw.utils.parseTokenString(source,pos,\"/\");\n\tif(token) {\n\t\tpos = token.end;\n\t\tnode.isSelfClosing = true;\n\t}\n\t// Look for a greater than sign\n\ttoken = $tw.utils.parseTokenString(source,pos,\">\");\n\tif(!token) {\n\t\treturn null;\n\t}\n\tpos = token.end;\n\t// Check for a required line break\n\tif(options.requireLineBreak) {\n\t\ttoken = $tw.utils.parseTokenRegExp(source,pos,/([^\\S\\n\\r]*\\r?\\n(?:[^\\S\\n\\r]*\\r?\\n|$))/g);\n\t\tif(!token) {\n\t\t\treturn null;\n\t\t}\n\t}\n\t// Update the end position\n\tnode.end = pos;\n\treturn node;\n};\n\nexports.findNextTag = function(source,pos,options) {\n\t// A regexp for finding candidate HTML tags\n\tvar reLookahead = /<([a-zA-Z\\-\\$]+)/g;\n\t// Find the next candidate\n\treLookahead.lastIndex = pos;\n\tvar match = reLookahead.exec(source);\n\twhile(match) {\n\t\t// Try to parse the candidate as a tag\n\t\tvar tag = this.parseTag(source,match.index,options);\n\t\t// Return success\n\t\tif(tag && this.isLegalTag(tag)) {\n\t\t\treturn tag;\n\t\t}\n\t\t// Look for the next match\n\t\treLookahead.lastIndex = match.index + 1;\n\t\tmatch = reLookahead.exec(source);\n\t}\n\t// Failed\n\treturn null;\n};\n\nexports.isLegalTag = function(tag) {\n\t// Widgets are always OK\n\tif(tag.type !== \"element\") {\n\t\treturn true;\n\t// If it's an HTML tag that starts with a dash then it's not legal\n\t} else if(tag.tag.charAt(0) === \"-\") {\n\t\treturn false;\n\t} else {\n\t\t// Otherwise it's OK\n\t\treturn true;\n\t}\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/html.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/image.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/image.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text inline rule for embedding images. For example:\n\n```\n[img[http://tiddlywiki.com/fractalveg.jpg]]\n[img width=23 height=24 [http://tiddlywiki.com/fractalveg.jpg]]\n[img width={{!!width}} height={{!!height}} [http://tiddlywiki.com/fractalveg.jpg]]\n[img[Description of image|http://tiddlywiki.com/fractalveg.jpg]]\n[img[TiddlerTitle]]\n[img[Description of image|TiddlerTitle]]\n```\n\nGenerates the `<$image>` widget.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"image\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n};\n\nexports.findNextMatch = function(startPos) {\n\t// Find the next tag\n\tthis.nextImage = this.findNextImage(this.parser.source,startPos);\n\treturn this.nextImage ? this.nextImage.start : undefined;\n};\n\nexports.parse = function() {\n\t// Move past the match\n\tthis.parser.pos = this.nextImage.end;\n\tvar node = {\n\t\ttype: \"image\",\n\t\tattributes: this.nextImage.attributes\n\t};\n\treturn [node];\n};\n\n/*\nFind the next image from the current position\n*/\nexports.findNextImage = function(source,pos) {\n\t// A regexp for finding candidate HTML tags\n\tvar reLookahead = /(\\[img)/g;\n\t// Find the next candidate\n\treLookahead.lastIndex = pos;\n\tvar match = reLookahead.exec(source);\n\twhile(match) {\n\t\t// Try to parse the candidate as a tag\n\t\tvar tag = this.parseImage(source,match.index);\n\t\t// Return success\n\t\tif(tag) {\n\t\t\treturn tag;\n\t\t}\n\t\t// Look for the next match\n\t\treLookahead.lastIndex = match.index + 1;\n\t\tmatch = reLookahead.exec(source);\n\t}\n\t// Failed\n\treturn null;\n};\n\n/*\nLook for an image at the specified position. 
Returns null if not found, otherwise returns {type: \"image\", attributes: [], isSelfClosing:, start:, end:,}\n*/\nexports.parseImage = function(source,pos) {\n\tvar token,\n\t\tnode = {\n\t\t\ttype: \"image\",\n\t\t\tstart: pos,\n\t\t\tattributes: {}\n\t\t};\n\t// Skip whitespace\n\tpos = $tw.utils.skipWhiteSpace(source,pos);\n\t// Look for the `[img`\n\ttoken = $tw.utils.parseTokenString(source,pos,\"[img\");\n\tif(!token) {\n\t\treturn null;\n\t}\n\tpos = token.end;\n\t// Skip whitespace\n\tpos = $tw.utils.skipWhiteSpace(source,pos);\n\t// Process attributes\n\tif(source.charAt(pos) !== \"[\") {\n\t\tvar attribute = $tw.utils.parseAttribute(source,pos);\n\t\twhile(attribute) {\n\t\t\tnode.attributes[attribute.name] = attribute;\n\t\t\tpos = attribute.end;\n\t\t\tpos = $tw.utils.skipWhiteSpace(source,pos);\n\t\t\tif(source.charAt(pos) !== \"[\") {\n\t\t\t\t// Get the next attribute\n\t\t\t\tattribute = $tw.utils.parseAttribute(source,pos);\n\t\t\t} else {\n\t\t\t\tattribute = null;\n\t\t\t}\n\t\t}\n\t}\n\t// Skip whitespace\n\tpos = $tw.utils.skipWhiteSpace(source,pos);\n\t// Look for the `[` after the attributes\n\ttoken = $tw.utils.parseTokenString(source,pos,\"[\");\n\tif(!token) {\n\t\treturn null;\n\t}\n\tpos = token.end;\n\t// Skip whitespace\n\tpos = $tw.utils.skipWhiteSpace(source,pos);\n\t// Get the source up to the terminating `]]`\n\ttoken = $tw.utils.parseTokenRegExp(source,pos,/(?:([^|\\]]*?)\\|)?([^\\]]+?)\\]\\]/g);\n\tif(!token) {\n\t\treturn null;\n\t}\n\tpos = token.end;\n\tif(token.match[1]) {\n\t\tnode.attributes.tooltip = {type: \"string\", value: token.match[1].trim()};\n\t}\n\tnode.attributes.source = {type: \"string\", value: (token.match[2] || \"\").trim()};\n\t// Update the end position\n\tnode.end = pos;\n\treturn node;\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/image.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/list.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/list.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text block rule for lists. For example:\n\n```\n* This is an unordered list\n* It has two items\n\n# This is a numbered list\n## With a subitem\n# And a third item\n\n; This is a term that is being defined\n: This is the definition of that term\n```\n\nNote that lists can be nested arbitrarily:\n\n```\n#** One\n#* Two\n#** Three\n#**** Four\n#**# Five\n#**## Six\n## Seven\n### Eight\n## Nine\n```\n\nA CSS class can be applied to a list item as follows:\n\n```\n* List item one\n*.active List item two has the class `active`\n* List item three\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"list\";\nexports.types = {block: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /([\\*#;:>]+)/mg;\n};\n\nvar listTypes = {\n\t\"*\": {listTag: \"ul\", itemTag: \"li\"},\n\t\"#\": {listTag: \"ol\", itemTag: \"li\"},\n\t\";\": {listTag: \"dl\", itemTag: \"dt\"},\n\t\":\": {listTag: \"dl\", itemTag: \"dd\"},\n\t\">\": {listTag: \"blockquote\", itemTag: \"p\"}\n};\n\n/*\nParse the most recent match\n*/\nexports.parse = function() {\n\t// Array of parse tree nodes for the previous row of the list\n\tvar listStack = [];\n\t// Cycle through the items in the list\n\twhile(true) {\n\t\t// Match the list marker\n\t\tvar reMatch = /([\\*#;:>]+)/mg;\n\t\treMatch.lastIndex = this.parser.pos;\n\t\tvar match = reMatch.exec(this.parser.source);\n\t\tif(!match || match.index !== this.parser.pos) {\n\t\t\tbreak;\n\t\t}\n\t\t// Check whether the list type of the top level matches\n\t\tvar listInfo = listTypes[match[0].charAt(0)];\n\t\tif(listStack.length > 0 && listStack[0].tag !== listInfo.listTag) {\n\t\t\tbreak;\n\t\t}\n\t\t// Move past the list marker\n\t\tthis.parser.pos = match.index + match[0].length;\n\t\t// Walk through the list markers for the current row\n\t\tfor(var t=0; t<match[0].length; t++) {\n\t\t\tlistInfo = listTypes[match[0].charAt(t)];\n\t\t\t// Remove any stacked up element if we can't re-use it because the list type doesn't match\n\t\t\tif(listStack.length > t && listStack[t].tag !== listInfo.listTag) {\n\t\t\t\tlistStack.splice(t,listStack.length - t);\n\t\t\t}\n\t\t\t// Construct the list element or reuse the previous one at this level\n\t\t\tif(listStack.length <= t) {\n\t\t\t\tvar listElement = {type: \"element\", tag: listInfo.listTag, children: [\n\t\t\t\t\t{type: \"element\", tag: listInfo.itemTag, children: []}\n\t\t\t\t]};\n\t\t\t\t// Link this list element into the last child item of the parent list item\n\t\t\t\tif(t) {\n\t\t\t\t\tvar prevListItem = listStack[t-1].children[listStack[t-1].children.length-1];\n\t\t\t\t\tprevListItem.children.push(listElement);\n\t\t\t\t}\n\t\t\t\t// Save this element in the stack\n\t\t\t\tlistStack[t] = listElement;\n\t\t\t} else if(t === (match[0].length - 1)) {\n\t\t\t\tlistStack[t].children.push({type: \"element\", tag: listInfo.itemTag, children: []});\n\t\t\t}\n\t\t}\n\t\tif(listStack.length > match[0].length) {\n\t\t\tlistStack.splice(match[0].length,listStack.length - match[0].length);\n\t\t}\n\t\t// Process the body of the list item into the last list item\n\t\tvar lastListChildren = listStack[listStack.length-1].children,\n\t\t\tlastListItem = lastListChildren[lastListChildren.length-1],\n\t\t\tclasses = 
this.parser.parseClasses();\n\t\tthis.parser.skipWhitespace({treatNewlinesAsNonWhitespace: true});\n\t\tvar tree = this.parser.parseInlineRun(/(\\r?\\n)/mg);\n\t\tlastListItem.children.push.apply(lastListItem.children,tree);\n\t\tif(classes.length > 0) {\n\t\t\t$tw.utils.addClassToParseTreeNode(lastListItem,classes.join(\" \"));\n\t\t}\n\t\t// Consume any whitespace following the list item\n\t\tthis.parser.skipWhitespace();\n\t}\n\t// Return the root element of the list\n\treturn [listStack[0]];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/list.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/macrocallblock.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/macrocallblock.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki rule for block macro calls\n\n```\n<<name value value2>>\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"macrocallblock\";\nexports.types = {block: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /<<([^>\\s]+)(?:\\s*)((?:[^>]|(?:>(?!>)))*?)>>(?:\\r?\\n|$)/mg;\n};\n\n/*\nParse the most recent match\n*/\nexports.parse = function() {\n\t// Get all the details of the match\n\tvar macroName = this.match[1],\n\t\tparamString = this.match[2];\n\t// Move past the macro call\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\tvar params = [],\n\t\treParam = /\\s*(?:([A-Za-z0-9\\-_]+)\\s*:)?(?:\\s*(?:\"\"\"([\\s\\S]*?)\"\"\"|\"([^\"]*)\"|'([^']*)'|\\[\\[([^\\]]*)\\]\\]|([^\"'\\s]+)))/mg,\n\t\tparamMatch = reParam.exec(paramString);\n\twhile(paramMatch) {\n\t\t// Process this parameter\n\t\tvar paramInfo = {\n\t\t\tvalue: paramMatch[2] || paramMatch[3] || paramMatch[4] || paramMatch[5] || paramMatch[6]\n\t\t};\n\t\tif(paramMatch[1]) {\n\t\t\tparamInfo.name = paramMatch[1];\n\t\t}\n\t\tparams.push(paramInfo);\n\t\t// Find the next match\n\t\tparamMatch = reParam.exec(paramString);\n\t}\n\treturn [{\n\t\ttype: \"macrocall\",\n\t\tname: macroName,\n\t\tparams: params,\n\t\tisBlock: true\n\t}];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/macrocallblock.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/macrocallinline.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/macrocallinline.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki rule for macro calls\n\n```\n<<name value value2>>\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"macrocallinline\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /<<([^\\s>]+)\\s*([\\s\\S]*?)>>/mg;\n};\n\n/*\nParse the most recent match\n*/\nexports.parse = function() {\n\t// Get all the details of the match\n\tvar macroName = this.match[1],\n\t\tparamString = this.match[2];\n\t// Move past the macro call\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\tvar params = [],\n\t\treParam = /\\s*(?:([A-Za-z0-9\\-_]+)\\s*:)?(?:\\s*(?:\"\"\"([\\s\\S]*?)\"\"\"|\"([^\"]*)\"|'([^']*)'|\\[\\[([^\\]]*)\\]\\]|([^\"'\\s]+)))/mg,\n\t\tparamMatch = reParam.exec(paramString);\n\twhile(paramMatch) {\n\t\t// Process this parameter\n\t\tvar paramInfo = {\n\t\t\tvalue: paramMatch[2] || paramMatch[3] || paramMatch[4] || paramMatch[5]|| paramMatch[6]\n\t\t};\n\t\tif(paramMatch[1]) {\n\t\t\tparamInfo.name = paramMatch[1];\n\t\t}\n\t\tparams.push(paramInfo);\n\t\t// Find the next match\n\t\tparamMatch = reParam.exec(paramString);\n\t}\n\treturn [{\n\t\ttype: \"macrocall\",\n\t\tname: macroName,\n\t\tparams: params\n\t}];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/macrocallinline.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/macrodef.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/macrodef.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki pragma rule for macro definitions\n\n```\n\\define name(param:defaultvalue,param2:defaultvalue)\ndefinition text, including $param$ markers\n\\end\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"macrodef\";\nexports.types = {pragma: true};\n\n/*\nInstantiate parse rule\n*/\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /^\\\\define\\s+([^(\\s]+)\\(\\s*([^)]*)\\)(\\s*\\r?\\n)?/mg;\n};\n\n/*\nParse the most recent match\n*/\nexports.parse = function() {\n\t// Move past the macro name and parameters\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\t// Parse the parameters\n\tvar paramString = this.match[2],\n\t\tparams = [];\n\tif(paramString !== \"\") {\n\t\tvar reParam = /\\s*([A-Za-z0-9\\-_]+)(?:\\s*:\\s*(?:\"\"\"([\\s\\S]*?)\"\"\"|\"([^\"]*)\"|'([^']*)'|\\[\\[([^\\]]*)\\]\\]|([^\"'\\s]+)))?/mg,\n\t\t\tparamMatch = reParam.exec(paramString);\n\t\twhile(paramMatch) {\n\t\t\t// Save the parameter details\n\t\t\tvar paramInfo = {name: paramMatch[1]},\n\t\t\t\tdefaultValue = paramMatch[2] || paramMatch[3] || paramMatch[4] || paramMatch[5] || paramMatch[6];\n\t\t\tif(defaultValue) {\n\t\t\t\tparamInfo[\"default\"] = defaultValue;\n\t\t\t}\n\t\t\tparams.push(paramInfo);\n\t\t\t// Look for the next parameter\n\t\t\tparamMatch = reParam.exec(paramString);\n\t\t}\n\t}\n\t// Is this a multiline definition?\n\tvar reEnd;\n\tif(this.match[3]) {\n\t\t// If so, the end of the body is marked with \\end\n\t\treEnd = /(\\r?\\n\\\\end[^\\S\\n\\r]*(?:$|\\r?\\n))/mg;\n\t} else {\n\t\t// Otherwise, the end of the definition is marked by the end of the line\n\t\treEnd = /(\\r?\\n)/mg;\n\t\t// Move past any whitespace\n\t\tthis.parser.pos = $tw.utils.skipWhiteSpace(this.parser.source,this.parser.pos);\n\t}\n\t// Find the end of the definition\n\treEnd.lastIndex = this.parser.pos;\n\tvar text,\n\t\tendMatch = reEnd.exec(this.parser.source);\n\tif(endMatch) {\n\t\ttext = this.parser.source.substring(this.parser.pos,endMatch.index);\n\t\tthis.parser.pos = endMatch.index + endMatch[0].length;\n\t} else {\n\t\t// We didn't find the end of the definition, so we'll make it blank\n\t\ttext = \"\";\n\t}\n\t// Save the macro definition\n\treturn [{\n\t\ttype: \"set\",\n\t\tattributes: {\n\t\t\tname: {type: \"string\", value: this.match[1]},\n\t\t\tvalue: {type: \"string\", value: text}\n\t\t},\n\t\tchildren: [],\n\t\tparams: params\n\t}];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/macrodef.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/prettyextlink.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/prettyextlink.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text inline rule for external links. For example:\n\n```\n[ext[http://tiddlywiki.com/fractalveg.jpg]]\n[ext[Tooltip|http://tiddlywiki.com/fractalveg.jpg]]\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"prettyextlink\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n};\n\nexports.findNextMatch = function(startPos) {\n\t// Find the next tag\n\tthis.nextLink = this.findNextLink(this.parser.source,startPos);\n\treturn this.nextLink ? this.nextLink.start : undefined;\n};\n\nexports.parse = function() {\n\t// Move past the match\n\tthis.parser.pos = this.nextLink.end;\n\treturn [this.nextLink];\n};\n\n/*\nFind the next link from the current position\n*/\nexports.findNextLink = function(source,pos) {\n\t// A regexp for finding candidate links\n\tvar reLookahead = /(\\[ext\\[)/g;\n\t// Find the next candidate\n\treLookahead.lastIndex = pos;\n\tvar match = reLookahead.exec(source);\n\twhile(match) {\n\t\t// Try to parse the candidate as a link\n\t\tvar link = this.parseLink(source,match.index);\n\t\t// Return success\n\t\tif(link) {\n\t\t\treturn link;\n\t\t}\n\t\t// Look for the next match\n\t\treLookahead.lastIndex = match.index + 1;\n\t\tmatch = reLookahead.exec(source);\n\t}\n\t// Failed\n\treturn null;\n};\n\n/*\nLook for an link at the specified position. Returns null if not found, otherwise returns {type: \"element\", tag: \"a\", attributes: [], isSelfClosing:, start:, end:,}\n*/\nexports.parseLink = function(source,pos) {\n\tvar token,\n\t\ttextNode = {\n\t\t\ttype: \"text\"\n\t\t},\n\t\tnode = {\n\t\t\ttype: \"element\",\n\t\t\ttag: \"a\",\n\t\t\tstart: pos,\n\t\t\tattributes: {\n\t\t\t\t\"class\": {type: \"string\", value: \"tc-tiddlylink-external\"},\n\t\t\t},\n\t\t\tchildren: [textNode]\n\t\t};\n\t// Skip whitespace\n\tpos = $tw.utils.skipWhiteSpace(source,pos);\n\t// Look for the `[ext[`\n\ttoken = $tw.utils.parseTokenString(source,pos,\"[ext[\");\n\tif(!token) {\n\t\treturn null;\n\t}\n\tpos = token.end;\n\t// Look ahead for the terminating `]]`\n\tvar closePos = source.indexOf(\"]]\",pos);\n\tif(closePos === -1) {\n\t\treturn null;\n\t}\n\t// Look for a `|` separating the tooltip\n\tvar splitPos = source.indexOf(\"|\",pos);\n\tif(splitPos === -1 || splitPos > closePos) {\n\t\tsplitPos = null;\n\t}\n\t// Pull out the tooltip and URL\n\tvar tooltip, URL;\n\tif(splitPos) {\n\t\tURL = source.substring(splitPos + 1,closePos).trim();\n\t\ttextNode.text = source.substring(pos,splitPos).trim();\n\t} else {\n\t\tURL = source.substring(pos,closePos).trim();\n\t\ttextNode.text = URL;\n\t}\n\tnode.attributes.href = {type: \"string\", value: URL};\n\tnode.attributes.target = {type: \"string\", value: \"_blank\"};\n\tnode.attributes.rel = {type: \"string\", value: \"noopener noreferrer\"};\n\t// Update the end position\n\tnode.end = closePos + 2;\n\treturn node;\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/prettyextlink.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/prettylink.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/prettylink.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text inline rule for pretty links. For example:\n\n```\n[[Introduction]]\n\n[[Link description|TiddlerTitle]]\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"prettylink\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /\\[\\[(.*?)(?:\\|(.*?))?\\]\\]/mg;\n};\n\nexports.parse = function() {\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\t// Process the link\n\tvar text = this.match[1],\n\t\tlink = this.match[2] || text;\n\tif($tw.utils.isLinkExternal(link)) {\n\t\treturn [{\n\t\t\ttype: \"element\",\n\t\t\ttag: \"a\",\n\t\t\tattributes: {\n\t\t\t\thref: {type: \"string\", value: link},\n\t\t\t\t\"class\": {type: \"string\", value: \"tc-tiddlylink-external\"},\n\t\t\t\ttarget: {type: \"string\", value: \"_blank\"},\n\t\t\t\trel: {type: \"string\", value: \"noopener noreferrer\"}\n\t\t\t},\n\t\t\tchildren: [{\n\t\t\t\ttype: \"text\", text: text\n\t\t\t}]\n\t\t}];\n\t} else {\n\t\treturn [{\n\t\t\ttype: \"link\",\n\t\t\tattributes: {\n\t\t\t\tto: {type: \"string\", value: link}\n\t\t\t},\n\t\t\tchildren: [{\n\t\t\t\ttype: \"text\", text: text\n\t\t\t}]\n\t\t}];\n\t}\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/prettylink.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/quoteblock.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/quoteblock.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text rule for quote blocks. For example:\n\n```\n\t<<<.optionalClass(es) optional cited from\n\ta quote\n\t<<<\n\t\n\t<<<.optionalClass(es)\n\ta quote\n\t<<< optional cited from\n```\n\nQuotes can be quoted by putting more <s\n\n```\n\t<<<\n\tQuote Level 1\n\t\n\t<<<<\n\tQuoteLevel 2\n\t<<<<\n\t\n\t<<<\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"quoteblock\";\nexports.types = {block: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /(<<<+)/mg;\n};\n\nexports.parse = function() {\n\tvar classes = [\"tc-quote\"];\n\t// Get all the details of the match\n\tvar reEndString = \"^\" + this.match[1] + \"(?!<)\";\n\t// Move past the <s\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\t\n\t// Parse any classes, whitespace and then the optional cite itself\n\tclasses.push.apply(classes, this.parser.parseClasses());\n\tthis.parser.skipWhitespace({treatNewlinesAsNonWhitespace: true});\n\tvar cite = this.parser.parseInlineRun(/(\\r?\\n)/mg);\n\t// before handling the cite, parse the body of the quote\n\tvar tree= this.parser.parseBlocks(reEndString);\n\t// If we got a cite, put it before the text\n\tif(cite.length > 0) {\n\t\ttree.unshift({\n\t\t\ttype: \"element\",\n\t\t\ttag: \"cite\",\n\t\t\tchildren: cite\n\t\t});\n\t}\n\t// Parse any optional cite\n\tthis.parser.skipWhitespace({treatNewlinesAsNonWhitespace: true});\n\tcite = this.parser.parseInlineRun(/(\\r?\\n)/mg);\n\t// If we got a cite, push it\n\tif(cite.length > 0) {\n\t\ttree.push({\n\t\t\ttype: \"element\",\n\t\t\ttag: \"cite\",\n\t\t\tchildren: cite\n\t\t});\n\t}\n\t// Return the blockquote element\n\treturn [{\n\t\ttype: \"element\",\n\t\ttag: \"blockquote\",\n\t\tattributes: {\n\t\t\tclass: { type: \"string\", value: classes.join(\" \") },\n\t\t},\n\t\tchildren: tree\n\t}];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/quoteblock.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/rules.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/rules.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki pragma rule for rules specifications\n\n```\n\\rules except ruleone ruletwo rulethree\n\\rules only ruleone ruletwo rulethree\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"rules\";\nexports.types = {pragma: true};\n\n/*\nInstantiate parse rule\n*/\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /^\\\\rules[^\\S\\n]/mg;\n};\n\n/*\nParse the most recent match\n*/\nexports.parse = function() {\n\t// Move past the pragma invocation\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\t// Parse whitespace delimited tokens terminated by a line break\n\tvar reMatch = /[^\\S\\n]*(\\S+)|(\\r?\\n)/mg,\n\t\ttokens = [];\n\treMatch.lastIndex = this.parser.pos;\n\tvar match = reMatch.exec(this.parser.source);\n\twhile(match && match.index === this.parser.pos) {\n\t\tthis.parser.pos = reMatch.lastIndex;\n\t\t// Exit if we've got the line break\n\t\tif(match[2]) {\n\t\t\tbreak;\n\t\t}\n\t\t// Process the token\n\t\tif(match[1]) {\n\t\t\ttokens.push(match[1]);\n\t\t}\n\t\t// Match the next token\n\t\tmatch = reMatch.exec(this.parser.source);\n\t}\n\t// Process the tokens\n\tif(tokens.length > 0) {\n\t\tthis.parser.amendRules(tokens[0],tokens.slice(1));\n\t}\n\t// No parse tree nodes to return\n\treturn [];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/rules.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/styleblock.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/styleblock.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text block rule for assigning styles and classes to paragraphs and other blocks. For example:\n\n```\n@@.myClass\n@@background-color:red;\nThis paragraph will have the CSS class `myClass`.\n\n* The `<ul>` around this list will also have the class `myClass`\n* List item 2\n\n@@\n```\n\nNote that classes and styles can be mixed subject to the rule that styles must precede classes. For example\n\n```\n@@.myFirstClass.mySecondClass\n@@width:100px;.myThirdClass\nThis is a paragraph\n@@\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"styleblock\";\nexports.types = {block: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /@@((?:[^\\.\\r\\n\\s:]+:[^\\r\\n;]+;)+)?(?:\\.([^\\r\\n\\s]+))?\\r?\\n/mg;\n};\n\nexports.parse = function() {\n\tvar reEndString = \"^@@(?:\\\\r?\\\\n)?\";\n\tvar classes = [], styles = [];\n\tdo {\n\t\t// Get the class and style\n\t\tif(this.match[1]) {\n\t\t\tstyles.push(this.match[1]);\n\t\t}\n\t\tif(this.match[2]) {\n\t\t\tclasses.push(this.match[2].split(\".\").join(\" \"));\n\t\t}\n\t\t// Move past the match\n\t\tthis.parser.pos = this.matchRegExp.lastIndex;\n\t\t// Look for another line of classes and styles\n\t\tthis.match = this.matchRegExp.exec(this.parser.source);\n\t} while(this.match && this.match.index === this.parser.pos);\n\t// Parse the body\n\tvar tree = this.parser.parseBlocks(reEndString);\n\tfor(var t=0; t<tree.length; t++) {\n\t\tif(classes.length > 0) {\n\t\t\t$tw.utils.addClassToParseTreeNode(tree[t],classes.join(\" \"));\n\t\t}\n\t\tif(styles.length > 0) {\n\t\t\t$tw.utils.addAttributeToParseTreeNode(tree[t],\"style\",styles.join(\"\"));\n\t\t}\n\t}\n\treturn tree;\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/styleblock.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/styleinline.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/styleinline.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text inline rule for assigning styles and classes to inline runs. For example:\n\n```\n@@.myClass This is some text with a class@@\n@@background-color:red;This is some text with a background colour@@\n@@width:100px;.myClass This is some text with a class and a width@@\n```\n\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"styleinline\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /@@((?:[^\\.\\r\\n\\s:]+:[^\\r\\n;]+;)+)?(\\.(?:[^\\r\\n\\s]+)\\s+)?/mg;\n};\n\nexports.parse = function() {\n\tvar reEnd = /@@/g;\n\t// Get the styles and class\n\tvar stylesString = this.match[1],\n\t\tclassString = this.match[2] ? this.match[2].split(\".\").join(\" \") : undefined;\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\t// Parse the run up to the terminator\n\tvar tree = this.parser.parseInlineRun(reEnd,{eatTerminator: true});\n\t// Return the classed span\n\tvar node = {\n\t\ttype: \"element\",\n\t\ttag: \"span\",\n\t\tattributes: {\n\t\t\t\"class\": {type: \"string\", value: \"tc-inline-style\"}\n\t\t},\n\t\tchildren: tree\n\t};\n\tif(classString) {\n\t\t$tw.utils.addClassToParseTreeNode(node,classString);\n\t}\n\tif(stylesString) {\n\t\t$tw.utils.addAttributeToParseTreeNode(node,\"style\",stylesString);\n\t}\n\treturn [node];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/styleinline.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/syslink.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/syslink.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text inline rule for system tiddler links.\nCan be suppressed preceding them with `~`.\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"syslink\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /~?\\$:\\/[a-zA-Z0-9/.\\-_]+/mg;\n};\n\nexports.parse = function() {\n\tvar match = this.match[0];\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\t// Create the link unless it is suppressed\n\tif(match.substr(0,1) === \"~\") {\n\t\treturn [{type: \"text\", text: match.substr(1)}];\n\t} else {\n\t\treturn [{\n\t\t\ttype: \"link\",\n\t\t\tattributes: {\n\t\t\t\tto: {type: \"string\", value: match}\n\t\t\t},\n\t\t\tchildren: [{\n\t\t\t\ttype: \"text\",\n\t\t\t\ttext: match\n\t\t\t}]\n\t\t}];\n\t}\n};\n\n})();",
"title": "$:/core/modules/parsers/wikiparser/rules/syslink.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/table.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/table.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text block rule for tables.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"table\";\nexports.types = {block: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /^\\|(?:[^\\n]*)\\|(?:[fhck]?)\\r?(?:\\n|$)/mg;\n};\n\nvar processRow = function(prevColumns) {\n\tvar cellRegExp = /(?:\\|([^\\n\\|]*)\\|)|(\\|[fhck]?\\r?(?:\\n|$))/mg,\n\t\tcellTermRegExp = /((?:\\x20*)\\|)/mg,\n\t\ttree = [],\n\t\tcol = 0,\n\t\tcolSpanCount = 1,\n\t\tprevCell,\n\t\tvAlign;\n\t// Match a single cell\n\tcellRegExp.lastIndex = this.parser.pos;\n\tvar cellMatch = cellRegExp.exec(this.parser.source);\n\twhile(cellMatch && cellMatch.index === this.parser.pos) {\n\t\tif(cellMatch[1] === \"~\") {\n\t\t\t// Rowspan\n\t\t\tvar last = prevColumns[col];\n\t\t\tif(last) {\n\t\t\t\tlast.rowSpanCount++;\n\t\t\t\t$tw.utils.addAttributeToParseTreeNode(last.element,\"rowspan\",last.rowSpanCount);\n\t\t\t\tvAlign = $tw.utils.getAttributeValueFromParseTreeNode(last.element,\"valign\",\"center\");\n\t\t\t\t$tw.utils.addAttributeToParseTreeNode(last.element,\"valign\",vAlign);\n\t\t\t\tif(colSpanCount > 1) {\n\t\t\t\t\t$tw.utils.addAttributeToParseTreeNode(last.element,\"colspan\",colSpanCount);\n\t\t\t\t\tcolSpanCount = 1;\n\t\t\t\t}\n\t\t\t}\n\t\t\t// Move to just before the `|` terminating the cell\n\t\t\tthis.parser.pos = cellRegExp.lastIndex - 1;\n\t\t} else if(cellMatch[1] === \">\") {\n\t\t\t// Colspan\n\t\t\tcolSpanCount++;\n\t\t\t// Move to just before the `|` terminating the cell\n\t\t\tthis.parser.pos = cellRegExp.lastIndex - 1;\n\t\t} else if(cellMatch[1] === \"<\" && prevCell) {\n\t\t\tcolSpanCount = 1 + $tw.utils.getAttributeValueFromParseTreeNode(prevCell,\"colspan\",1);\n\t\t\t$tw.utils.addAttributeToParseTreeNode(prevCell,\"colspan\",colSpanCount);\n\t\t\tcolSpanCount = 1;\n\t\t\t// Move to just before the `|` terminating the cell\n\t\t\tthis.parser.pos = cellRegExp.lastIndex - 1;\n\t\t} else if(cellMatch[2]) {\n\t\t\t// End of row\n\t\t\tif(prevCell && colSpanCount > 1) {\n\t\t\t\tif(prevCell.attributes && prevCell.attributes && prevCell.attributes.colspan) {\n\t\t\t\t\t\tcolSpanCount += prevCell.attributes.colspan.value;\n\t\t\t\t} else {\n\t\t\t\t\tcolSpanCount -= 1;\n\t\t\t\t}\n\t\t\t\t$tw.utils.addAttributeToParseTreeNode(prevCell,\"colspan\",colSpanCount);\n\t\t\t}\n\t\t\tthis.parser.pos = cellRegExp.lastIndex - 1;\n\t\t\tbreak;\n\t\t} else {\n\t\t\t// For ordinary cells, step beyond the opening `|`\n\t\t\tthis.parser.pos++;\n\t\t\t// Look for a space at the start of the cell\n\t\t\tvar spaceLeft = false;\n\t\t\tvAlign = null;\n\t\t\tif(this.parser.source.substr(this.parser.pos).search(/^\\^([^\\^]|\\^\\^)/) === 0) {\n\t\t\t\tvAlign = \"top\";\n\t\t\t} else if(this.parser.source.substr(this.parser.pos).search(/^,([^,]|,,)/) === 0) {\n\t\t\t\tvAlign = \"bottom\";\n\t\t\t}\n\t\t\tif(vAlign) {\n\t\t\t\tthis.parser.pos++;\n\t\t\t}\n\t\t\tvar chr = this.parser.source.substr(this.parser.pos,1);\n\t\t\twhile(chr === \" \") {\n\t\t\t\tspaceLeft = true;\n\t\t\t\tthis.parser.pos++;\n\t\t\t\tchr = this.parser.source.substr(this.parser.pos,1);\n\t\t\t}\n\t\t\t// Check whether this is a heading cell\n\t\t\tvar cell;\n\t\t\tif(chr === \"!\") {\n\t\t\t\tthis.parser.pos++;\n\t\t\t\tcell = {type: \"element\", tag: \"th\", children: []};\n\t\t\t} else 
{\n\t\t\t\tcell = {type: \"element\", tag: \"td\", children: []};\n\t\t\t}\n\t\t\ttree.push(cell);\n\t\t\t// Record information about this cell\n\t\t\tprevCell = cell;\n\t\t\tprevColumns[col] = {rowSpanCount:1,element:cell};\n\t\t\t// Check for a colspan\n\t\t\tif(colSpanCount > 1) {\n\t\t\t\t$tw.utils.addAttributeToParseTreeNode(cell,\"colspan\",colSpanCount);\n\t\t\t\tcolSpanCount = 1;\n\t\t\t}\n\t\t\t// Parse the cell\n\t\t\tcell.children = this.parser.parseInlineRun(cellTermRegExp,{eatTerminator: true});\n\t\t\t// Set the alignment for the cell\n\t\t\tif(vAlign) {\n\t\t\t\t$tw.utils.addAttributeToParseTreeNode(cell,\"valign\",vAlign);\n\t\t\t}\n\t\t\tif(this.parser.source.substr(this.parser.pos - 2,1) === \" \") { // spaceRight\n\t\t\t\t$tw.utils.addAttributeToParseTreeNode(cell,\"align\",spaceLeft ? \"center\" : \"left\");\n\t\t\t} else if(spaceLeft) {\n\t\t\t\t$tw.utils.addAttributeToParseTreeNode(cell,\"align\",\"right\");\n\t\t\t}\n\t\t\t// Move back to the closing `|`\n\t\t\tthis.parser.pos--;\n\t\t}\n\t\tcol++;\n\t\tcellRegExp.lastIndex = this.parser.pos;\n\t\tcellMatch = cellRegExp.exec(this.parser.source);\n\t}\n\treturn tree;\n};\n\nexports.parse = function() {\n\tvar rowContainerTypes = {\"c\":\"caption\", \"h\":\"thead\", \"\":\"tbody\", \"f\":\"tfoot\"},\n\t\ttable = {type: \"element\", tag: \"table\", children: []},\n\t\trowRegExp = /^\\|([^\\n]*)\\|([fhck]?)\\r?(?:\\n|$)/mg,\n\t\trowTermRegExp = /(\\|(?:[fhck]?)\\r?(?:\\n|$))/mg,\n\t\tprevColumns = [],\n\t\tcurrRowType,\n\t\trowContainer,\n\t\trowCount = 0;\n\t// Match the row\n\trowRegExp.lastIndex = this.parser.pos;\n\tvar rowMatch = rowRegExp.exec(this.parser.source);\n\twhile(rowMatch && rowMatch.index === this.parser.pos) {\n\t\tvar rowType = rowMatch[2];\n\t\t// Check if it is a class assignment\n\t\tif(rowType === \"k\") {\n\t\t\t$tw.utils.addClassToParseTreeNode(table,rowMatch[1]);\n\t\t\tthis.parser.pos = rowMatch.index + rowMatch[0].length;\n\t\t} else {\n\t\t\t// Otherwise, create a new row if this one is of a different type\n\t\t\tif(rowType !== currRowType) {\n\t\t\t\trowContainer = {type: \"element\", tag: rowContainerTypes[rowType], children: []};\n\t\t\t\ttable.children.push(rowContainer);\n\t\t\t\tcurrRowType = rowType;\n\t\t\t}\n\t\t\t// Is this a caption row?\n\t\t\tif(currRowType === \"c\") {\n\t\t\t\t// If so, move past the opening `|` of the row\n\t\t\t\tthis.parser.pos++;\n\t\t\t\t// Move the caption to the first row if it isn't already\n\t\t\t\tif(table.children.length !== 1) {\n\t\t\t\t\ttable.children.pop(); // Take rowContainer out of the children array\n\t\t\t\t\ttable.children.splice(0,0,rowContainer); // Insert it at the bottom\t\t\t\t\t\t\n\t\t\t\t}\n\t\t\t\t// Set the alignment - TODO: figure out why TW did this\n//\t\t\t\trowContainer.attributes.align = rowCount === 0 ? \"top\" : \"bottom\";\n\t\t\t\t// Parse the caption\n\t\t\t\trowContainer.children = this.parser.parseInlineRun(rowTermRegExp,{eatTerminator: true});\n\t\t\t} else {\n\t\t\t\t// Create the row\n\t\t\t\tvar theRow = {type: \"element\", tag: \"tr\", children: []};\n\t\t\t\t$tw.utils.addClassToParseTreeNode(theRow,rowCount%2 ? \"oddRow\" : \"evenRow\");\n\t\t\t\trowContainer.children.push(theRow);\n\t\t\t\t// Process the row\n\t\t\t\ttheRow.children = processRow.call(this,prevColumns);\n\t\t\t\tthis.parser.pos = rowMatch.index + rowMatch[0].length;\n\t\t\t\t// Increment the row count\n\t\t\t\trowCount++;\n\t\t\t}\n\t\t}\n\t\trowMatch = rowRegExp.exec(this.parser.source);\n\t}\n\treturn [table];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/table.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/transcludeblock.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/transcludeblock.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text rule for block-level transclusion. For example:\n\n```\n{{MyTiddler}}\n{{MyTiddler||TemplateTitle}}\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"transcludeblock\";\nexports.types = {block: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /\\{\\{([^\\{\\}\\|]*)(?:\\|\\|([^\\|\\{\\}]+))?\\}\\}(?:\\r?\\n|$)/mg;\n};\n\nexports.parse = function() {\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\t// Get the match details\n\tvar template = $tw.utils.trim(this.match[2]),\n\t\ttextRef = $tw.utils.trim(this.match[1]);\n\t// Prepare the transclude widget\n\tvar transcludeNode = {\n\t\t\ttype: \"transclude\",\n\t\t\tattributes: {},\n\t\t\tisBlock: true\n\t\t};\n\t// Prepare the tiddler widget\n\tvar tr, targetTitle, targetField, targetIndex, tiddlerNode;\n\tif(textRef) {\n\t\ttr = $tw.utils.parseTextReference(textRef);\n\t\ttargetTitle = tr.title;\n\t\ttargetField = tr.field;\n\t\ttargetIndex = tr.index;\n\t\ttiddlerNode = {\n\t\t\ttype: \"tiddler\",\n\t\t\tattributes: {\n\t\t\t\ttiddler: {type: \"string\", value: targetTitle}\n\t\t\t},\n\t\t\tisBlock: true,\n\t\t\tchildren: [transcludeNode]\n\t\t};\n\t}\n\tif(template) {\n\t\ttranscludeNode.attributes.tiddler = {type: \"string\", value: template};\n\t\tif(textRef) {\n\t\t\treturn [tiddlerNode];\n\t\t} else {\n\t\t\treturn [transcludeNode];\n\t\t}\n\t} else {\n\t\tif(textRef) {\n\t\t\ttranscludeNode.attributes.tiddler = {type: \"string\", value: targetTitle};\n\t\t\tif(targetField) {\n\t\t\t\ttranscludeNode.attributes.field = {type: \"string\", value: targetField};\n\t\t\t}\n\t\t\tif(targetIndex) {\n\t\t\t\ttranscludeNode.attributes.index = {type: \"string\", value: targetIndex};\n\t\t\t}\n\t\t\treturn [tiddlerNode];\n\t\t} else {\n\t\t\treturn [transcludeNode];\n\t\t}\n\t}\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/transcludeblock.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/transcludeinline.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/transcludeinline.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text rule for inline-level transclusion. For example:\n\n```\n{{MyTiddler}}\n{{MyTiddler||TemplateTitle}}\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"transcludeinline\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /\\{\\{([^\\{\\}\\|]*)(?:\\|\\|([^\\|\\{\\}]+))?\\}\\}/mg;\n};\n\nexports.parse = function() {\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\t// Get the match details\n\tvar template = $tw.utils.trim(this.match[2]),\n\t\ttextRef = $tw.utils.trim(this.match[1]);\n\t// Prepare the transclude widget\n\tvar transcludeNode = {\n\t\t\ttype: \"transclude\",\n\t\t\tattributes: {}\n\t\t};\n\t// Prepare the tiddler widget\n\tvar tr, targetTitle, targetField, targetIndex, tiddlerNode;\n\tif(textRef) {\n\t\ttr = $tw.utils.parseTextReference(textRef);\n\t\ttargetTitle = tr.title;\n\t\ttargetField = tr.field;\n\t\ttargetIndex = tr.index;\n\t\ttiddlerNode = {\n\t\t\ttype: \"tiddler\",\n\t\t\tattributes: {\n\t\t\t\ttiddler: {type: \"string\", value: targetTitle}\n\t\t\t},\n\t\t\tchildren: [transcludeNode]\n\t\t};\n\t}\n\tif(template) {\n\t\ttranscludeNode.attributes.tiddler = {type: \"string\", value: template};\n\t\tif(textRef) {\n\t\t\treturn [tiddlerNode];\n\t\t} else {\n\t\t\treturn [transcludeNode];\n\t\t}\n\t} else {\n\t\tif(textRef) {\n\t\t\ttranscludeNode.attributes.tiddler = {type: \"string\", value: targetTitle};\n\t\t\tif(targetField) {\n\t\t\t\ttranscludeNode.attributes.field = {type: \"string\", value: targetField};\n\t\t\t}\n\t\t\tif(targetIndex) {\n\t\t\t\ttranscludeNode.attributes.index = {type: \"string\", value: targetIndex};\n\t\t\t}\n\t\t\treturn [tiddlerNode];\n\t\t} else {\n\t\t\treturn [transcludeNode];\n\t\t}\n\t}\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/transcludeinline.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/typedblock.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/typedblock.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text rule for typed blocks. For example:\n\n```\n$$$.js\nThis will be rendered as JavaScript\n$$$\n\n$$$.svg\n<svg xmlns=\"http://www.w3.org/2000/svg\" width=\"150\" height=\"100\">\n <circle cx=\"100\" cy=\"50\" r=\"40\" stroke=\"black\" stroke-width=\"2\" fill=\"red\" />\n</svg>\n$$$\n\n$$$text/vnd.tiddlywiki>text/html\nThis will be rendered as an //HTML representation// of WikiText\n$$$\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar widget = require(\"$:/core/modules/widgets/widget.js\");\n\nexports.name = \"typedblock\";\nexports.types = {block: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = /\\$\\$\\$([^ >\\r\\n]*)(?: *> *([^ \\r\\n]+))?\\r?\\n/mg;\n};\n\nexports.parse = function() {\n\tvar reEnd = /\\r?\\n\\$\\$\\$\\r?(?:\\n|$)/mg;\n\t// Save the type\n\tvar parseType = this.match[1],\n\t\trenderType = this.match[2];\n\t// Move past the match\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\t// Look for the end of the block\n\treEnd.lastIndex = this.parser.pos;\n\tvar match = reEnd.exec(this.parser.source),\n\t\ttext;\n\t// Process the block\n\tif(match) {\n\t\ttext = this.parser.source.substring(this.parser.pos,match.index);\n\t\tthis.parser.pos = match.index + match[0].length;\n\t} else {\n\t\ttext = this.parser.source.substr(this.parser.pos);\n\t\tthis.parser.pos = this.parser.sourceLength;\n\t}\n\t// Parse the block according to the specified type\n\tvar parser = this.parser.wiki.parseText(parseType,text,{defaultType: \"text/plain\"});\n\t// If there's no render type, just return the parse tree\n\tif(!renderType) {\n\t\treturn parser.tree;\n\t} else {\n\t\t// Otherwise, render to the rendertype and return in a <PRE> tag\n\t\tvar widgetNode = this.parser.wiki.makeWidget(parser),\n\t\t\tcontainer = $tw.fakeDocument.createElement(\"div\");\n\t\twidgetNode.render(container,null);\n\t\ttext = renderType === \"text/html\" ? container.innerHTML : container.textContent;\n\t\treturn [{\n\t\t\ttype: \"element\",\n\t\t\ttag: \"pre\",\n\t\t\tchildren: [{\n\t\t\t\ttype: \"text\",\n\t\t\t\ttext: text\n\t\t\t}]\n\t\t}];\n\t}\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/typedblock.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/rules/wikilink.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/wikilink.js\ntype: application/javascript\nmodule-type: wikirule\n\nWiki text inline rule for wiki links. For example:\n\n```\nAWikiLink\nAnotherLink\n~SuppressedLink\n```\n\nPrecede a camel case word with `~` to prevent it from being recognised as a link.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.name = \"wikilink\";\nexports.types = {inline: true};\n\nexports.init = function(parser) {\n\tthis.parser = parser;\n\t// Regexp to match\n\tthis.matchRegExp = new RegExp($tw.config.textPrimitives.unWikiLink + \"?\" + $tw.config.textPrimitives.wikiLink,\"mg\");\n};\n\n/*\nParse the most recent match\n*/\nexports.parse = function() {\n\t// Get the details of the match\n\tvar linkText = this.match[0];\n\t// Move past the macro call\n\tthis.parser.pos = this.matchRegExp.lastIndex;\n\t// If the link starts with the unwikilink character then just output it as plain text\n\tif(linkText.substr(0,1) === $tw.config.textPrimitives.unWikiLink) {\n\t\treturn [{type: \"text\", text: linkText.substr(1)}];\n\t}\n\t// If the link has been preceded with a blocked letter then don't treat it as a link\n\tif(this.match.index > 0) {\n\t\tvar preRegExp = new RegExp($tw.config.textPrimitives.blockPrefixLetters,\"mg\");\n\t\tpreRegExp.lastIndex = this.match.index-1;\n\t\tvar preMatch = preRegExp.exec(this.parser.source);\n\t\tif(preMatch && preMatch.index === this.match.index-1) {\n\t\t\treturn [{type: \"text\", text: linkText}];\n\t\t}\n\t}\n\treturn [{\n\t\ttype: \"link\",\n\t\tattributes: {\n\t\t\tto: {type: \"string\", value: linkText}\n\t\t},\n\t\tchildren: [{\n\t\t\ttype: \"text\",\n\t\t\ttext: linkText\n\t\t}]\n\t}];\n};\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/wikilink.js",
"type": "application/javascript",
"module-type": "wikirule"
},
"$:/core/modules/parsers/wikiparser/wikiparser.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/wikiparser.js\ntype: application/javascript\nmodule-type: parser\n\nThe wiki text parser processes blocks of source text into a parse tree.\n\nThe parse tree is made up of nested arrays of these JavaScript objects:\n\n\t{type: \"element\", tag: <string>, attributes: {}, children: []} - an HTML element\n\t{type: \"text\", text: <string>} - a text node\n\t{type: \"entity\", value: <string>} - an entity\n\t{type: \"raw\", html: <string>} - raw HTML\n\nAttributes are stored as hashmaps of the following objects:\n\n\t{type: \"string\", value: <string>} - literal string\n\t{type: \"indirect\", textReference: <textReference>} - indirect through a text reference\n\t{type: \"macro\", macro: <TBD>} - indirect through a macro invocation\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar WikiParser = function(type,text,options) {\n\tthis.wiki = options.wiki;\n\tvar self = this;\n\t// Check for an externally linked tiddler\n\tif($tw.browser && (text || \"\") === \"\" && options._canonical_uri) {\n\t\tthis.loadRemoteTiddler(options._canonical_uri);\n\t\ttext = $tw.language.getRawString(\"LazyLoadingWarning\");\n\t}\n\t// Initialise the classes if we don't have them already\n\tif(!this.pragmaRuleClasses) {\n\t\tWikiParser.prototype.pragmaRuleClasses = $tw.modules.createClassesFromModules(\"wikirule\",\"pragma\",$tw.WikiRuleBase);\n\t\tthis.setupRules(WikiParser.prototype.pragmaRuleClasses,\"$:/config/WikiParserRules/Pragmas/\");\n\t}\n\tif(!this.blockRuleClasses) {\n\t\tWikiParser.prototype.blockRuleClasses = $tw.modules.createClassesFromModules(\"wikirule\",\"block\",$tw.WikiRuleBase);\n\t\tthis.setupRules(WikiParser.prototype.blockRuleClasses,\"$:/config/WikiParserRules/Block/\");\n\t}\n\tif(!this.inlineRuleClasses) {\n\t\tWikiParser.prototype.inlineRuleClasses = $tw.modules.createClassesFromModules(\"wikirule\",\"inline\",$tw.WikiRuleBase);\n\t\tthis.setupRules(WikiParser.prototype.inlineRuleClasses,\"$:/config/WikiParserRules/Inline/\");\n\t}\n\t// Save the parse text\n\tthis.type = type || \"text/vnd.tiddlywiki\";\n\tthis.source = text || \"\";\n\tthis.sourceLength = this.source.length;\n\t// Set current parse position\n\tthis.pos = 0;\n\t// Instantiate the pragma parse rules\n\tthis.pragmaRules = this.instantiateRules(this.pragmaRuleClasses,\"pragma\",0);\n\t// Instantiate the parser block and inline rules\n\tthis.blockRules = this.instantiateRules(this.blockRuleClasses,\"block\",0);\n\tthis.inlineRules = this.instantiateRules(this.inlineRuleClasses,\"inline\",0);\n\t// Parse any pragmas\n\tthis.tree = [];\n\tvar topBranch = this.parsePragmas();\n\t// Parse the text into inline runs or blocks\n\tif(options.parseAsInline) {\n\t\ttopBranch.push.apply(topBranch,this.parseInlineRun());\n\t} else {\n\t\ttopBranch.push.apply(topBranch,this.parseBlocks());\n\t}\n\t// Return the parse tree\n};\n\n/*\n*/\nWikiParser.prototype.loadRemoteTiddler = function(url) {\n\tvar self = this;\n\t$tw.utils.httpRequest({\n\t\turl: url,\n\t\ttype: \"GET\",\n\t\tcallback: function(err,data) {\n\t\t\tif(!err) {\n\t\t\t\tvar tiddlers = self.wiki.deserializeTiddlers(\".tid\",data,self.wiki.getCreationFields());\n\t\t\t\t$tw.utils.each(tiddlers,function(tiddler) {\n\t\t\t\t\ttiddler[\"_canonical_uri\"] = url;\n\t\t\t\t});\n\t\t\t\tif(tiddlers) {\n\t\t\t\t\tself.wiki.addTiddlers(tiddlers);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t});\n};\n\n/*\n*/\nWikiParser.prototype.setupRules = function(proto,configPrefix) {\n\tvar self 
= this;\n\tif(!$tw.safemode) {\n\t\t$tw.utils.each(proto,function(object,name) {\n\t\t\tif(self.wiki.getTiddlerText(configPrefix + name,\"enable\") !== \"enable\") {\n\t\t\t\tdelete proto[name];\n\t\t\t}\n\t\t});\n\t}\n};\n\n/*\nInstantiate an array of parse rules\n*/\nWikiParser.prototype.instantiateRules = function(classes,type,startPos) {\n\tvar rulesInfo = [],\n\t\tself = this;\n\t$tw.utils.each(classes,function(RuleClass) {\n\t\t// Instantiate the rule\n\t\tvar rule = new RuleClass(self);\n\t\trule.is = {};\n\t\trule.is[type] = true;\n\t\trule.init(self);\n\t\tvar matchIndex = rule.findNextMatch(startPos);\n\t\tif(matchIndex !== undefined) {\n\t\t\trulesInfo.push({\n\t\t\t\trule: rule,\n\t\t\t\tmatchIndex: matchIndex\n\t\t\t});\n\t\t}\n\t});\n\treturn rulesInfo;\n};\n\n/*\nSkip any whitespace at the current position. Options are:\n\ttreatNewlinesAsNonWhitespace: true if newlines are NOT to be treated as whitespace\n*/\nWikiParser.prototype.skipWhitespace = function(options) {\n\toptions = options || {};\n\tvar whitespaceRegExp = options.treatNewlinesAsNonWhitespace ? /([^\\S\\n]+)/mg : /(\\s+)/mg;\n\twhitespaceRegExp.lastIndex = this.pos;\n\tvar whitespaceMatch = whitespaceRegExp.exec(this.source);\n\tif(whitespaceMatch && whitespaceMatch.index === this.pos) {\n\t\tthis.pos = whitespaceRegExp.lastIndex;\n\t}\n};\n\n/*\nGet the next match out of an array of parse rule instances\n*/\nWikiParser.prototype.findNextMatch = function(rules,startPos) {\n\t// Find the best matching rule by finding the closest match position\n\tvar matchingRule,\n\t\tmatchingRulePos = this.sourceLength;\n\t// Step through each rule\n\tfor(var t=0; t<rules.length; t++) {\n\t\tvar ruleInfo = rules[t];\n\t\t// Ask the rule to get the next match if we've moved past the current one\n\t\tif(ruleInfo.matchIndex !== undefined && ruleInfo.matchIndex < startPos) {\n\t\t\truleInfo.matchIndex = ruleInfo.rule.findNextMatch(startPos);\n\t\t}\n\t\t// Adopt this match if it's closer than the current best match\n\t\tif(ruleInfo.matchIndex !== undefined && ruleInfo.matchIndex <= matchingRulePos) {\n\t\t\tmatchingRule = ruleInfo;\n\t\t\tmatchingRulePos = ruleInfo.matchIndex;\n\t\t}\n\t}\n\treturn matchingRule;\n};\n\n/*\nParse any pragmas at the beginning of a block of parse text\n*/\nWikiParser.prototype.parsePragmas = function() {\n\tvar currentTreeBranch = this.tree;\n\twhile(true) {\n\t\t// Skip whitespace\n\t\tthis.skipWhitespace();\n\t\t// Check for the end of the text\n\t\tif(this.pos >= this.sourceLength) {\n\t\t\tbreak;\n\t\t}\n\t\t// Check if we've arrived at a pragma rule match\n\t\tvar nextMatch = this.findNextMatch(this.pragmaRules,this.pos);\n\t\t// If not, just exit\n\t\tif(!nextMatch || nextMatch.matchIndex !== this.pos) {\n\t\t\tbreak;\n\t\t}\n\t\t// Process the pragma rule\n\t\tvar subTree = nextMatch.rule.parse();\n\t\tif(subTree.length > 0) {\n\t\t\t// Quick hack; we only cope with a single parse tree node being returned, which is true at the moment\n\t\t\tcurrentTreeBranch.push.apply(currentTreeBranch,subTree);\n\t\t\tsubTree[0].children = [];\n\t\t\tcurrentTreeBranch = subTree[0].children;\n\t\t}\n\t}\n\treturn currentTreeBranch;\n};\n\n/*\nParse a block from the current position\n\tterminatorRegExpString: optional regular expression string that identifies the end of plain paragraphs. Must not include capturing parenthesis\n*/\nWikiParser.prototype.parseBlock = function(terminatorRegExpString) {\n\tvar terminatorRegExp = terminatorRegExpString ? 
new RegExp(\"(\" + terminatorRegExpString + \"|\\\\r?\\\\n\\\\r?\\\\n)\",\"mg\") : /(\\r?\\n\\r?\\n)/mg;\n\tthis.skipWhitespace();\n\tif(this.pos >= this.sourceLength) {\n\t\treturn [];\n\t}\n\t// Look for a block rule that applies at the current position\n\tvar nextMatch = this.findNextMatch(this.blockRules,this.pos);\n\tif(nextMatch && nextMatch.matchIndex === this.pos) {\n\t\treturn nextMatch.rule.parse();\n\t}\n\t// Treat it as a paragraph if we didn't find a block rule\n\treturn [{type: \"element\", tag: \"p\", children: this.parseInlineRun(terminatorRegExp)}];\n};\n\n/*\nParse a series of blocks of text until a terminating regexp is encountered or the end of the text\n\tterminatorRegExpString: terminating regular expression\n*/\nWikiParser.prototype.parseBlocks = function(terminatorRegExpString) {\n\tif(terminatorRegExpString) {\n\t\treturn this.parseBlocksTerminated(terminatorRegExpString);\n\t} else {\n\t\treturn this.parseBlocksUnterminated();\n\t}\n};\n\n/*\nParse a block from the current position to the end of the text\n*/\nWikiParser.prototype.parseBlocksUnterminated = function() {\n\tvar tree = [];\n\twhile(this.pos < this.sourceLength) {\n\t\ttree.push.apply(tree,this.parseBlock());\n\t}\n\treturn tree;\n};\n\n/*\nParse blocks of text until a terminating regexp is encountered\n*/\nWikiParser.prototype.parseBlocksTerminated = function(terminatorRegExpString) {\n\tvar terminatorRegExp = new RegExp(\"(\" + terminatorRegExpString + \")\",\"mg\"),\n\t\ttree = [];\n\t// Skip any whitespace\n\tthis.skipWhitespace();\n\t// Check if we've got the end marker\n\tterminatorRegExp.lastIndex = this.pos;\n\tvar match = terminatorRegExp.exec(this.source);\n\t// Parse the text into blocks\n\twhile(this.pos < this.sourceLength && !(match && match.index === this.pos)) {\n\t\tvar blocks = this.parseBlock(terminatorRegExpString);\n\t\ttree.push.apply(tree,blocks);\n\t\t// Skip any whitespace\n\t\tthis.skipWhitespace();\n\t\t// Check if we've got the end marker\n\t\tterminatorRegExp.lastIndex = this.pos;\n\t\tmatch = terminatorRegExp.exec(this.source);\n\t}\n\tif(match && match.index === this.pos) {\n\t\tthis.pos = match.index + match[0].length;\n\t}\n\treturn tree;\n};\n\n/*\nParse a run of text at the current position\n\tterminatorRegExp: a regexp at which to stop the run\n\toptions: see below\nOptions available:\n\teatTerminator: move the parse position past any encountered terminator (default false)\n*/\nWikiParser.prototype.parseInlineRun = function(terminatorRegExp,options) {\n\tif(terminatorRegExp) {\n\t\treturn this.parseInlineRunTerminated(terminatorRegExp,options);\n\t} else {\n\t\treturn this.parseInlineRunUnterminated(options);\n\t}\n};\n\nWikiParser.prototype.parseInlineRunUnterminated = function(options) {\n\tvar tree = [];\n\t// Find the next occurrence of an inline rule\n\tvar nextMatch = this.findNextMatch(this.inlineRules,this.pos);\n\t// Loop around the matches until we've reached the end of the text\n\twhile(this.pos < this.sourceLength && nextMatch) {\n\t\t// Process the text preceding the run rule\n\t\tif(nextMatch.matchIndex > this.pos) {\n\t\t\ttree.push({type: \"text\", text: this.source.substring(this.pos,nextMatch.matchIndex)});\n\t\t\tthis.pos = nextMatch.matchIndex;\n\t\t}\n\t\t// Process the run rule\n\t\ttree.push.apply(tree,nextMatch.rule.parse());\n\t\t// Look for the next run rule\n\t\tnextMatch = this.findNextMatch(this.inlineRules,this.pos);\n\t}\n\t// Process the remaining text\n\tif(this.pos < this.sourceLength) {\n\t\ttree.push({type: \"text\", text: 
this.source.substr(this.pos)});\n\t}\n\tthis.pos = this.sourceLength;\n\treturn tree;\n};\n\nWikiParser.prototype.parseInlineRunTerminated = function(terminatorRegExp,options) {\n\toptions = options || {};\n\tvar tree = [];\n\t// Find the next occurrence of the terminator\n\tterminatorRegExp.lastIndex = this.pos;\n\tvar terminatorMatch = terminatorRegExp.exec(this.source);\n\t// Find the next occurrence of a inlinerule\n\tvar inlineRuleMatch = this.findNextMatch(this.inlineRules,this.pos);\n\t// Loop around until we've reached the end of the text\n\twhile(this.pos < this.sourceLength && (terminatorMatch || inlineRuleMatch)) {\n\t\t// Return if we've found the terminator, and it precedes any inline rule match\n\t\tif(terminatorMatch) {\n\t\t\tif(!inlineRuleMatch || inlineRuleMatch.matchIndex >= terminatorMatch.index) {\n\t\t\t\tif(terminatorMatch.index > this.pos) {\n\t\t\t\t\ttree.push({type: \"text\", text: this.source.substring(this.pos,terminatorMatch.index)});\n\t\t\t\t}\n\t\t\t\tthis.pos = terminatorMatch.index;\n\t\t\t\tif(options.eatTerminator) {\n\t\t\t\t\tthis.pos += terminatorMatch[0].length;\n\t\t\t\t}\n\t\t\t\treturn tree;\n\t\t\t}\n\t\t}\n\t\t// Process any inline rule, along with the text preceding it\n\t\tif(inlineRuleMatch) {\n\t\t\t// Preceding text\n\t\t\tif(inlineRuleMatch.matchIndex > this.pos) {\n\t\t\t\ttree.push({type: \"text\", text: this.source.substring(this.pos,inlineRuleMatch.matchIndex)});\n\t\t\t\tthis.pos = inlineRuleMatch.matchIndex;\n\t\t\t}\n\t\t\t// Process the inline rule\n\t\t\ttree.push.apply(tree,inlineRuleMatch.rule.parse());\n\t\t\t// Look for the next inline rule\n\t\t\tinlineRuleMatch = this.findNextMatch(this.inlineRules,this.pos);\n\t\t\t// Look for the next terminator match\n\t\t\tterminatorRegExp.lastIndex = this.pos;\n\t\t\tterminatorMatch = terminatorRegExp.exec(this.source);\n\t\t}\n\t}\n\t// Process the remaining text\n\tif(this.pos < this.sourceLength) {\n\t\ttree.push({type: \"text\", text: this.source.substr(this.pos)});\n\t}\n\tthis.pos = this.sourceLength;\n\treturn tree;\n};\n\n/*\nParse zero or more class specifiers `.classname`\n*/\nWikiParser.prototype.parseClasses = function() {\n\tvar classRegExp = /\\.([^\\s\\.]+)/mg,\n\t\tclassNames = [];\n\tclassRegExp.lastIndex = this.pos;\n\tvar match = classRegExp.exec(this.source);\n\twhile(match && match.index === this.pos) {\n\t\tthis.pos = match.index + match[0].length;\n\t\tclassNames.push(match[1]);\n\t\tmatch = classRegExp.exec(this.source);\n\t}\n\treturn classNames;\n};\n\n/*\nAmend the rules used by this instance of the parser\n\ttype: `only` keeps just the named rules, `except` keeps all but the named rules\n\tnames: array of rule names\n*/\nWikiParser.prototype.amendRules = function(type,names) {\n\tnames = names || [];\n\t// Define the filter function\n\tvar keepFilter;\n\tif(type === \"only\") {\n\t\tkeepFilter = function(name) {\n\t\t\treturn names.indexOf(name) !== -1;\n\t\t};\n\t} else if(type === \"except\") {\n\t\tkeepFilter = function(name) {\n\t\t\treturn names.indexOf(name) === -1;\n\t\t};\n\t} else {\n\t\treturn;\n\t}\n\t// Define a function to process each of our rule arrays\n\tvar processRuleArray = function(ruleArray) {\n\t\tfor(var t=ruleArray.length-1; t>=0; t--) {\n\t\t\tif(!keepFilter(ruleArray[t].rule.name)) {\n\t\t\t\truleArray.splice(t,1);\n\t\t\t}\n\t\t}\n\t};\n\t// Process each rule array\n\tprocessRuleArray(this.pragmaRules);\n\tprocessRuleArray(this.blockRules);\n\tprocessRuleArray(this.inlineRules);\n};\n\nexports[\"text/vnd.tiddlywiki\"] = 
WikiParser;\n\n})();\n\n",
"title": "$:/core/modules/parsers/wikiparser/wikiparser.js",
"type": "application/javascript",
"module-type": "parser"
},
"$:/core/modules/parsers/wikiparser/rules/wikirulebase.js": {
"text": "/*\\\ntitle: $:/core/modules/parsers/wikiparser/rules/wikirulebase.js\ntype: application/javascript\nmodule-type: global\n\nBase class for wiki parser rules\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nThis constructor is always overridden with a blank constructor, and so shouldn't be used\n*/\nvar WikiRuleBase = function() {\n};\n\n/*\nTo be overridden by individual rules\n*/\nWikiRuleBase.prototype.init = function(parser) {\n\tthis.parser = parser;\n};\n\n/*\nDefault implementation of findNextMatch uses RegExp matching\n*/\nWikiRuleBase.prototype.findNextMatch = function(startPos) {\n\tthis.matchRegExp.lastIndex = startPos;\n\tthis.match = this.matchRegExp.exec(this.parser.source);\n\treturn this.match ? this.match.index : undefined;\n};\n\nexports.WikiRuleBase = WikiRuleBase;\n\n})();\n",
"title": "$:/core/modules/parsers/wikiparser/rules/wikirulebase.js",
"type": "application/javascript",
"module-type": "global"
},
"$:/core/modules/pluginswitcher.js": {
"text": "/*\\\ntitle: $:/core/modules/pluginswitcher.js\ntype: application/javascript\nmodule-type: global\n\nManages switching plugins for themes and languages.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\noptions:\nwiki: wiki store to be used\npluginType: type of plugin to be switched\ncontrollerTitle: title of tiddler used to control switching of this resource\ndefaultPlugins: array of default plugins to be used if nominated plugin isn't found\n*/\nfunction PluginSwitcher(options) {\n\tthis.wiki = options.wiki;\n\tthis.pluginType = options.pluginType;\n\tthis.controllerTitle = options.controllerTitle;\n\tthis.defaultPlugins = options.defaultPlugins || [];\n\t// Switch to the current plugin\n\tthis.switchPlugins();\n\t// Listen for changes to the selected plugin\n\tvar self = this;\n\tthis.wiki.addEventListener(\"change\",function(changes) {\n\t\tif($tw.utils.hop(changes,self.controllerTitle)) {\n\t\t\tself.switchPlugins();\n\t\t}\n\t});\n}\n\nPluginSwitcher.prototype.switchPlugins = function() {\n\t// Get the name of the current theme\n\tvar selectedPluginTitle = this.wiki.getTiddlerText(this.controllerTitle);\n\t// If it doesn't exist, then fallback to one of the default themes\n\tvar index = 0;\n\twhile(!this.wiki.getTiddler(selectedPluginTitle) && index < this.defaultPlugins.length) {\n\t\tselectedPluginTitle = this.defaultPlugins[index++];\n\t}\n\t// Accumulate the titles of the plugins that we need to load\n\tvar plugins = [],\n\t\tself = this,\n\t\taccumulatePlugin = function(title) {\n\t\t\tvar tiddler = self.wiki.getTiddler(title);\n\t\t\tif(tiddler && tiddler.isPlugin() && plugins.indexOf(title) === -1) {\n\t\t\t\tplugins.push(title);\n\t\t\t\tvar pluginInfo = JSON.parse(self.wiki.getTiddlerText(title)),\n\t\t\t\t\tdependents = $tw.utils.parseStringArray(tiddler.fields.dependents || \"\");\n\t\t\t\t$tw.utils.each(dependents,function(title) {\n\t\t\t\t\taccumulatePlugin(title);\n\t\t\t\t});\n\t\t\t}\n\t\t};\n\taccumulatePlugin(selectedPluginTitle);\n\t// Unregister any existing theme tiddlers\n\tvar unregisteredTiddlers = $tw.wiki.unregisterPluginTiddlers(this.pluginType);\n\t// Register any new theme tiddlers\n\tvar registeredTiddlers = $tw.wiki.registerPluginTiddlers(this.pluginType,plugins);\n\t// Unpack the current theme tiddlers\n\t$tw.wiki.unpackPluginTiddlers();\n};\n\nexports.PluginSwitcher = PluginSwitcher;\n\n})();\n",
"title": "$:/core/modules/pluginswitcher.js",
"type": "application/javascript",
"module-type": "global"
},
"$:/core/modules/saver-handler.js": {
"text": "/*\\\ntitle: $:/core/modules/saver-handler.js\ntype: application/javascript\nmodule-type: global\n\nThe saver handler tracks changes to the store and handles saving the entire wiki via saver modules.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nInstantiate the saver handler with the following options:\nwiki: wiki to be synced\ndirtyTracking: true if dirty tracking should be performed\n*/\nfunction SaverHandler(options) {\n\tvar self = this;\n\tthis.wiki = options.wiki;\n\tthis.dirtyTracking = options.dirtyTracking;\n\tthis.pendingAutoSave = false;\n\t// Make a logger\n\tthis.logger = new $tw.utils.Logger(\"saver-handler\");\n\t// Initialise our savers\n\tif($tw.browser) {\n\t\tthis.initSavers();\n\t}\n\t// Only do dirty tracking if required\n\tif($tw.browser && this.dirtyTracking) {\n\t\t// Compile the dirty tiddler filter\n\t\tthis.filterFn = this.wiki.compileFilter(this.wiki.getTiddlerText(this.titleSyncFilter));\n\t\t// Count of changes that have not yet been saved\n\t\tthis.numChanges = 0;\n\t\t// Listen out for changes to tiddlers\n\t\tthis.wiki.addEventListener(\"change\",function(changes) {\n\t\t\t// Filter the changes so that we only count changes to tiddlers that we care about\n\t\t\tvar filteredChanges = self.filterFn.call(self.wiki,function(callback) {\n\t\t\t\t$tw.utils.each(changes,function(change,title) {\n\t\t\t\t\tvar tiddler = self.wiki.getTiddler(title);\n\t\t\t\t\tcallback(tiddler,title);\n\t\t\t\t});\n\t\t\t});\n\t\t\t// Adjust the number of changes\n\t\t\tself.numChanges += filteredChanges.length;\n\t\t\tself.updateDirtyStatus();\n\t\t\t// Do any autosave if one is pending and there's no more change events\n\t\t\tif(self.pendingAutoSave && self.wiki.getSizeOfTiddlerEventQueue() === 0) {\n\t\t\t\t// Check if we're dirty\n\t\t\t\tif(self.numChanges > 0) {\n\t\t\t\t\tself.saveWiki({\n\t\t\t\t\t\tmethod: \"autosave\",\n\t\t\t\t\t\tdownloadType: \"text/plain\"\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t\tself.pendingAutoSave = false;\n\t\t\t}\n\t\t});\n\t\t// Listen for the autosave event\n\t\t$tw.rootWidget.addEventListener(\"tm-auto-save-wiki\",function(event) {\n\t\t\t// Do the autosave unless there are outstanding tiddler change events\n\t\t\tif(self.wiki.getSizeOfTiddlerEventQueue() === 0) {\n\t\t\t\t// Check if we're dirty\n\t\t\t\tif(self.numChanges > 0) {\n\t\t\t\t\tself.saveWiki({\n\t\t\t\t\t\tmethod: \"autosave\",\n\t\t\t\t\t\tdownloadType: \"text/plain\"\n\t\t\t\t\t});\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\t// Otherwise put ourselves in the \"pending autosave\" state and wait for the change event before we do the autosave\n\t\t\t\tself.pendingAutoSave = true;\n\t\t\t}\n\t\t});\n\t\t// Set up our beforeunload handler\n\t\t$tw.addUnloadTask(function(event) {\n\t\t\tvar confirmationMessage;\n\t\t\tif(self.isDirty()) {\n\t\t\t\tconfirmationMessage = $tw.language.getString(\"UnsavedChangesWarning\");\n\t\t\t\tevent.returnValue = confirmationMessage; // Gecko\n\t\t\t}\n\t\t\treturn confirmationMessage;\n\t\t});\n\t}\n\t// Install the save action handlers\n\tif($tw.browser) {\n\t\t$tw.rootWidget.addEventListener(\"tm-save-wiki\",function(event) {\n\t\t\tself.saveWiki({\n\t\t\t\ttemplate: event.param,\n\t\t\t\tdownloadType: \"text/plain\",\n\t\t\t\tvariables: event.paramObject\n\t\t\t});\n\t\t});\n\t\t$tw.rootWidget.addEventListener(\"tm-download-file\",function(event) {\n\t\t\tself.saveWiki({\n\t\t\t\tmethod: \"download\",\n\t\t\t\ttemplate: event.param,\n\t\t\t\tdownloadType: 
\"text/plain\",\n\t\t\t\tvariables: event.paramObject\n\t\t\t});\n\t\t});\n\t}\n}\n\nSaverHandler.prototype.titleSyncFilter = \"$:/config/SaverFilter\";\nSaverHandler.prototype.titleAutoSave = \"$:/config/AutoSave\";\nSaverHandler.prototype.titleSavedNotification = \"$:/language/Notifications/Save/Done\";\n\n/*\nSelect the appropriate saver modules and set them up\n*/\nSaverHandler.prototype.initSavers = function(moduleType) {\n\tmoduleType = moduleType || \"saver\";\n\t// Instantiate the available savers\n\tthis.savers = [];\n\tvar self = this;\n\t$tw.modules.forEachModuleOfType(moduleType,function(title,module) {\n\t\tif(module.canSave(self)) {\n\t\t\tself.savers.push(module.create(self.wiki));\n\t\t}\n\t});\n\t// Sort the savers into priority order\n\tthis.savers.sort(function(a,b) {\n\t\tif(a.info.priority < b.info.priority) {\n\t\t\treturn -1;\n\t\t} else {\n\t\t\tif(a.info.priority > b.info.priority) {\n\t\t\t\treturn +1;\n\t\t\t} else {\n\t\t\t\treturn 0;\n\t\t\t}\n\t\t}\n\t});\n};\n\n/*\nSave the wiki contents. Options are:\n\tmethod: \"save\", \"autosave\" or \"download\"\n\ttemplate: the tiddler containing the template to save\n\tdownloadType: the content type for the saved file\n*/\nSaverHandler.prototype.saveWiki = function(options) {\n\toptions = options || {};\n\tvar self = this,\n\t\tmethod = options.method || \"save\",\n\t\tvariables = options.variables || {},\n\t\ttemplate = options.template || \"$:/core/save/all\",\n\t\tdownloadType = options.downloadType || \"text/plain\",\n\t\ttext = this.wiki.renderTiddler(downloadType,template,options),\n\t\tcallback = function(err) {\n\t\t\tif(err) {\n\t\t\t\talert($tw.language.getString(\"Error/WhileSaving\") + \":\\n\\n\" + err);\n\t\t\t} else {\n\t\t\t\t// Clear the task queue if we're saving (rather than downloading)\n\t\t\t\tif(method !== \"download\") {\n\t\t\t\t\tself.numChanges = 0;\n\t\t\t\t\tself.updateDirtyStatus();\n\t\t\t\t}\n\t\t\t\t$tw.notifier.display(self.titleSavedNotification);\n\t\t\t\tif(options.callback) {\n\t\t\t\t\toptions.callback();\n\t\t\t\t}\n\t\t\t}\n\t\t};\n\t// Ignore autosave if disabled\n\tif(method === \"autosave\" && this.wiki.getTiddlerText(this.titleAutoSave,\"yes\") !== \"yes\") {\n\t\treturn false;\n\t}\n\t// Call the highest priority saver that supports this method\n\tfor(var t=this.savers.length-1; t>=0; t--) {\n\t\tvar saver = this.savers[t];\n\t\tif(saver.info.capabilities.indexOf(method) !== -1 && saver.save(text,method,callback,{variables: {filename: variables.filename}})) {\n\t\t\tthis.logger.log(\"Saving wiki with method\",method,\"through saver\",saver.info.name);\n\t\t\treturn true;\n\t\t}\n\t}\n\treturn false;\n};\n\n/*\nChecks whether the wiki is dirty (ie the window shouldn't be closed)\n*/\nSaverHandler.prototype.isDirty = function() {\n\treturn this.numChanges > 0;\n};\n\n/*\nUpdate the document body with the class \"tc-dirty\" if the wiki has unsaved/unsynced changes\n*/\nSaverHandler.prototype.updateDirtyStatus = function() {\n\tif($tw.browser) {\n\t\t$tw.utils.toggleClass(document.body,\"tc-dirty\",this.isDirty());\n\t}\n};\n\nexports.SaverHandler = SaverHandler;\n\n})();\n",
"title": "$:/core/modules/saver-handler.js",
"type": "application/javascript",
"module-type": "global"
},
"$:/core/modules/savers/andtidwiki.js": {
"text": "/*\\\ntitle: $:/core/modules/savers/andtidwiki.js\ntype: application/javascript\nmodule-type: saver\n\nHandles saving changes via the AndTidWiki Android app\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false, netscape: false, Components: false */\n\"use strict\";\n\nvar AndTidWiki = function(wiki) {\n};\n\nAndTidWiki.prototype.save = function(text,method,callback) {\n\t// Get the pathname of this document\n\tvar pathname = decodeURIComponent(document.location.toString().split(\"#\")[0]);\n\t// Strip the file://\n\tif(pathname.indexOf(\"file://\") === 0) {\n\t\tpathname = pathname.substr(7);\n\t}\n\t// Strip any query or location part\n\tvar p = pathname.indexOf(\"?\");\n\tif(p !== -1) {\n\t\tpathname = pathname.substr(0,p);\n\t}\n\tp = pathname.indexOf(\"#\");\n\tif(p !== -1) {\n\t\tpathname = pathname.substr(0,p);\n\t}\n\t// Save the file\n\twindow.twi.saveFile(pathname,text);\n\t// Call the callback\n\tcallback(null);\n\treturn true;\n};\n\n/*\nInformation about this saver\n*/\nAndTidWiki.prototype.info = {\n\tname: \"andtidwiki\",\n\tpriority: 1600,\n\tcapabilities: [\"save\", \"autosave\"]\n};\n\n/*\nStatic method that returns true if this saver is capable of working\n*/\nexports.canSave = function(wiki) {\n\treturn !!window.twi && !!window.twi.saveFile;\n};\n\n/*\nCreate an instance of this saver\n*/\nexports.create = function(wiki) {\n\treturn new AndTidWiki(wiki);\n};\n\n})();\n",
"title": "$:/core/modules/savers/andtidwiki.js",
"type": "application/javascript",
"module-type": "saver"
},
"$:/core/modules/savers/download.js": {
"text": "/*\\\ntitle: $:/core/modules/savers/download.js\ntype: application/javascript\nmodule-type: saver\n\nHandles saving changes via HTML5's download APIs\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nSelect the appropriate saver module and set it up\n*/\nvar DownloadSaver = function(wiki) {\n};\n\nDownloadSaver.prototype.save = function(text,method,callback,options) {\n\toptions = options || {};\n\t// Get the current filename\n\tvar filename = options.variables.filename;\n\tif(!filename) {\n\t\tvar p = document.location.pathname.lastIndexOf(\"/\");\n\t\tif(p !== -1) {\n\t\t\tfilename = document.location.pathname.substr(p+1);\n\t\t}\n\t}\n\tif(!filename) {\n\t\tfilename = \"tiddlywiki.html\";\n\t}\n\t// Set up the link\n\tvar link = document.createElement(\"a\");\n\tlink.setAttribute(\"target\",\"_blank\");\n\tlink.setAttribute(\"rel\",\"noopener noreferrer\");\n\tif(Blob !== undefined) {\n\t\tvar blob = new Blob([text], {type: \"text/html\"});\n\t\tlink.setAttribute(\"href\", URL.createObjectURL(blob));\n\t} else {\n\t\tlink.setAttribute(\"href\",\"data:text/html,\" + encodeURIComponent(text));\n\t}\n\tlink.setAttribute(\"download\",filename);\n\tdocument.body.appendChild(link);\n\tlink.click();\n\tdocument.body.removeChild(link);\n\t// Callback that we succeeded\n\tcallback(null);\n\treturn true;\n};\n\n/*\nInformation about this saver\n*/\nDownloadSaver.prototype.info = {\n\tname: \"download\",\n\tpriority: 100,\n\tcapabilities: [\"save\", \"download\"]\n};\n\n/*\nStatic method that returns true if this saver is capable of working\n*/\nexports.canSave = function(wiki) {\n\treturn document.createElement(\"a\").download !== undefined;\n};\n\n/*\nCreate an instance of this saver\n*/\nexports.create = function(wiki) {\n\treturn new DownloadSaver(wiki);\n};\n\n})();\n",
"title": "$:/core/modules/savers/download.js",
"type": "application/javascript",
"module-type": "saver"
},
"$:/core/modules/savers/fsosaver.js": {
"text": "/*\\\ntitle: $:/core/modules/savers/fsosaver.js\ntype: application/javascript\nmodule-type: saver\n\nHandles saving changes via MS FileSystemObject ActiveXObject\n\nNote: Since TiddlyWiki's markup contains the MOTW, the FileSystemObject normally won't be available. \nHowever, if the wiki is loaded as an .HTA file (Windows HTML Applications) then the FSO can be used.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nSelect the appropriate saver module and set it up\n*/\nvar FSOSaver = function(wiki) {\n};\n\nFSOSaver.prototype.save = function(text,method,callback) {\n\t// Get the pathname of this document\n\tvar pathname = unescape(document.location.pathname);\n\t// Test for a Windows path of the form /x:\\blah...\n\tif(/^\\/[A-Z]\\:\\\\[^\\\\]+/i.test(pathname)) {\t// ie: ^/[a-z]:/[^/]+\n\t\t// Remove the leading slash\n\t\tpathname = pathname.substr(1);\n\t} else if(document.location.hostname !== \"\" && /^\\/\\\\[^\\\\]+\\\\[^\\\\]+/i.test(pathname)) {\t// test for \\\\server\\share\\blah... - ^/[^/]+/[^/]+\n\t\t// Remove the leading slash\n\t\tpathname = pathname.substr(1);\n\t\t// reconstruct UNC path\n\t\tpathname = \"\\\\\\\\\" + document.location.hostname + pathname;\n\t} else {\n\t\treturn false;\n\t}\n\t// Save the file (as UTF-16)\n\tvar fso = new ActiveXObject(\"Scripting.FileSystemObject\");\n\tvar file = fso.OpenTextFile(pathname,2,-1,-1);\n\tfile.Write(text);\n\tfile.Close();\n\t// Callback that we succeeded\n\tcallback(null);\n\treturn true;\n};\n\n/*\nInformation about this saver\n*/\nFSOSaver.prototype.info = {\n\tname: \"FSOSaver\",\n\tpriority: 120,\n\tcapabilities: [\"save\", \"autosave\"]\n};\n\n/*\nStatic method that returns true if this saver is capable of working\n*/\nexports.canSave = function(wiki) {\n\ttry {\n\t\treturn (window.location.protocol === \"file:\") && !!(new ActiveXObject(\"Scripting.FileSystemObject\"));\n\t} catch(e) { return false; }\n};\n\n/*\nCreate an instance of this saver\n*/\nexports.create = function(wiki) {\n\treturn new FSOSaver(wiki);\n};\n\n})();\n",
"title": "$:/core/modules/savers/fsosaver.js",
"type": "application/javascript",
"module-type": "saver"
},
"$:/core/modules/savers/manualdownload.js": {
"text": "/*\\\ntitle: $:/core/modules/savers/manualdownload.js\ntype: application/javascript\nmodule-type: saver\n\nHandles saving changes via HTML5's download APIs\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n// Title of the tiddler containing the download message\nvar downloadInstructionsTitle = \"$:/language/Modals/Download\";\n\n/*\nSelect the appropriate saver module and set it up\n*/\nvar ManualDownloadSaver = function(wiki) {\n};\n\nManualDownloadSaver.prototype.save = function(text,method,callback) {\n\t$tw.modal.display(downloadInstructionsTitle,{\n\t\tdownloadLink: \"data:text/html,\" + encodeURIComponent(text)\n\t});\n\t// Callback that we succeeded\n\tcallback(null);\n\treturn true;\n};\n\n/*\nInformation about this saver\n*/\nManualDownloadSaver.prototype.info = {\n\tname: \"manualdownload\",\n\tpriority: 0,\n\tcapabilities: [\"save\", \"download\"]\n};\n\n/*\nStatic method that returns true if this saver is capable of working\n*/\nexports.canSave = function(wiki) {\n\treturn true;\n};\n\n/*\nCreate an instance of this saver\n*/\nexports.create = function(wiki) {\n\treturn new ManualDownloadSaver(wiki);\n};\n\n})();\n",
"title": "$:/core/modules/savers/manualdownload.js",
"type": "application/javascript",
"module-type": "saver"
},
"$:/core/modules/savers/msdownload.js": {
"text": "/*\\\ntitle: $:/core/modules/savers/msdownload.js\ntype: application/javascript\nmodule-type: saver\n\nHandles saving changes via window.navigator.msSaveBlob()\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nSelect the appropriate saver module and set it up\n*/\nvar MsDownloadSaver = function(wiki) {\n};\n\nMsDownloadSaver.prototype.save = function(text,method,callback) {\n\t// Get the current filename\n\tvar filename = \"tiddlywiki.html\",\n\t\tp = document.location.pathname.lastIndexOf(\"/\");\n\tif(p !== -1) {\n\t\tfilename = document.location.pathname.substr(p+1);\n\t}\n\t// Set up the link\n\tvar blob = new Blob([text], {type: \"text/html\"});\n\twindow.navigator.msSaveBlob(blob,filename);\n\t// Callback that we succeeded\n\tcallback(null);\n\treturn true;\n};\n\n/*\nInformation about this saver\n*/\nMsDownloadSaver.prototype.info = {\n\tname: \"msdownload\",\n\tpriority: 110,\n\tcapabilities: [\"save\", \"download\"]\n};\n\n/*\nStatic method that returns true if this saver is capable of working\n*/\nexports.canSave = function(wiki) {\n\treturn !!window.navigator.msSaveBlob;\n};\n\n/*\nCreate an instance of this saver\n*/\nexports.create = function(wiki) {\n\treturn new MsDownloadSaver(wiki);\n};\n\n})();\n",
"title": "$:/core/modules/savers/msdownload.js",
"type": "application/javascript",
"module-type": "saver"
},
"$:/core/modules/savers/put.js": {
"text": "/*\\\ntitle: $:/core/modules/savers/put.js\ntype: application/javascript\nmodule-type: saver\n\nSaves wiki by performing a PUT request to the server\n\nWorks with any server which accepts a PUT request\nto the current URL, such as a WebDAV server.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nSelect the appropriate saver module and set it up\n*/\nvar PutSaver = function(wiki) {\n\tthis.wiki = wiki;\n\tvar self = this;\n\t// Async server probe. Until probe finishes, save will fail fast\n\t// See also https://github.com/Jermolene/TiddlyWiki5/issues/2276\n\tvar req = new XMLHttpRequest();\n\treq.open(\"OPTIONS\",encodeURI(document.location.protocol + \"//\" + document.location.hostname + \":\" + document.location.port + document.location.pathname));\n\treq.onload = function() {\n\t\t// Check DAV header http://www.webdav.org/specs/rfc2518.html#rfc.section.9.1\n\t\tself.serverAcceptsPuts = (this.status === 200 && !!this.getResponseHeader('dav'));\n\t};\n\treq.send();\n};\n\nPutSaver.prototype.save = function(text,method,callback) {\n\tif (!this.serverAcceptsPuts) {\n\t\treturn false;\n\t}\n\tvar req = new XMLHttpRequest();\n\t// TODO: store/check ETags if supported by server, to protect against overwrites\n\t// Prompt: Do you want to save over this? Y/N\n\t// Merging would be ideal, and may be possible using future generic merge flow\n\treq.onload = function() {\n\t\tif (this.status === 200 || this.status === 201) {\n\t\t\tcallback(null); // success\n\t\t}\n\t\telse {\n\t\t\tcallback(this.responseText); // fail\n\t\t}\n\t};\n\treq.open(\"PUT\", encodeURI(window.location.href));\n\treq.setRequestHeader(\"Content-Type\", \"text/html;charset=UTF-8\");\n\treq.send(text);\n\treturn true;\n};\n\n/*\nInformation about this saver\n*/\nPutSaver.prototype.info = {\n\tname: \"put\",\n\tpriority: 2000,\n\tcapabilities: [\"save\", \"autosave\"]\n};\n\n/*\nStatic method that returns true if this saver is capable of working\n*/\nexports.canSave = function(wiki) {\n\treturn /^https?:/.test(location.protocol);\n};\n\n/*\nCreate an instance of this saver\n*/\nexports.create = function(wiki) {\n\treturn new PutSaver(wiki);\n};\n\n})();\n",
"title": "$:/core/modules/savers/put.js",
"type": "application/javascript",
"module-type": "saver"
},
"$:/core/modules/savers/tiddlyfox.js": {
"text": "/*\\\ntitle: $:/core/modules/savers/tiddlyfox.js\ntype: application/javascript\nmodule-type: saver\n\nHandles saving changes via the TiddlyFox file extension\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false, netscape: false, Components: false */\n\"use strict\";\n\nvar TiddlyFoxSaver = function(wiki) {\n};\n\nTiddlyFoxSaver.prototype.save = function(text,method,callback) {\n\tvar messageBox = document.getElementById(\"tiddlyfox-message-box\");\n\tif(messageBox) {\n\t\t// Get the pathname of this document\n\t\tvar pathname = document.location.toString().split(\"#\")[0];\n\t\t// Replace file://localhost/ with file:///\n\t\tif(pathname.indexOf(\"file://localhost/\") === 0) {\n\t\t\tpathname = \"file://\" + pathname.substr(16);\n\t\t}\n\t\t// Windows path file:///x:/blah/blah --> x:\\blah\\blah\n\t\tif(/^file\\:\\/\\/\\/[A-Z]\\:\\//i.test(pathname)) {\n\t\t\t// Remove the leading slash and convert slashes to backslashes\n\t\t\tpathname = pathname.substr(8).replace(/\\//g,\"\\\\\");\n\t\t// Firefox Windows network path file://///server/share/blah/blah --> //server/share/blah/blah\n\t\t} else if(pathname.indexOf(\"file://///\") === 0) {\n\t\t\tpathname = \"\\\\\\\\\" + unescape(pathname.substr(10)).replace(/\\//g,\"\\\\\");\n\t\t// Mac/Unix local path file:///path/path --> /path/path\n\t\t} else if(pathname.indexOf(\"file:///\") === 0) {\n\t\t\tpathname = unescape(pathname.substr(7));\n\t\t// Mac/Unix local path file:/path/path --> /path/path\n\t\t} else if(pathname.indexOf(\"file:/\") === 0) {\n\t\t\tpathname = unescape(pathname.substr(5));\n\t\t// Otherwise Windows networth path file://server/share/path/path --> \\\\server\\share\\path\\path\n\t\t} else {\n\t\t\tpathname = \"\\\\\\\\\" + unescape(pathname.substr(7)).replace(new RegExp(\"/\",\"g\"),\"\\\\\");\n\t\t}\n\t\t// Create the message element and put it in the message box\n\t\tvar message = document.createElement(\"div\");\n\t\tmessage.setAttribute(\"data-tiddlyfox-path\",decodeURIComponent(pathname));\n\t\tmessage.setAttribute(\"data-tiddlyfox-content\",text);\n\t\tmessageBox.appendChild(message);\n\t\t// Add an event handler for when the file has been saved\n\t\tmessage.addEventListener(\"tiddlyfox-have-saved-file\",function(event) {\n\t\t\tcallback(null);\n\t\t}, false);\n\t\t// Create and dispatch the custom event to the extension\n\t\tvar event = document.createEvent(\"Events\");\n\t\tevent.initEvent(\"tiddlyfox-save-file\",true,false);\n\t\tmessage.dispatchEvent(event);\n\t\treturn true;\n\t} else {\n\t\treturn false;\n\t}\n};\n\n/*\nInformation about this saver\n*/\nTiddlyFoxSaver.prototype.info = {\n\tname: \"tiddlyfox\",\n\tpriority: 1500,\n\tcapabilities: [\"save\", \"autosave\"]\n};\n\n/*\nStatic method that returns true if this saver is capable of working\n*/\nexports.canSave = function(wiki) {\n\treturn (window.location.protocol === \"file:\");\n};\n\n/*\nCreate an instance of this saver\n*/\nexports.create = function(wiki) {\n\treturn new TiddlyFoxSaver(wiki);\n};\n\n})();\n",
"title": "$:/core/modules/savers/tiddlyfox.js",
"type": "application/javascript",
"module-type": "saver"
},
"$:/core/modules/savers/tiddlyie.js": {
"text": "/*\\\ntitle: $:/core/modules/savers/tiddlyie.js\ntype: application/javascript\nmodule-type: saver\n\nHandles saving changes via Internet Explorer BHO extenion (TiddlyIE)\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nSelect the appropriate saver module and set it up\n*/\nvar TiddlyIESaver = function(wiki) {\n};\n\nTiddlyIESaver.prototype.save = function(text,method,callback) {\n\t// Check existence of TiddlyIE BHO extension (note: only works after document is complete)\n\tif(typeof(window.TiddlyIE) != \"undefined\") {\n\t\t// Get the pathname of this document\n\t\tvar pathname = unescape(document.location.pathname);\n\t\t// Test for a Windows path of the form /x:/blah...\n\t\tif(/^\\/[A-Z]\\:\\/[^\\/]+/i.test(pathname)) {\t// ie: ^/[a-z]:/[^/]+ (is this better?: ^/[a-z]:/[^/]+(/[^/]+)*\\.[^/]+ )\n\t\t\t// Remove the leading slash\n\t\t\tpathname = pathname.substr(1);\n\t\t\t// Convert slashes to backslashes\n\t\t\tpathname = pathname.replace(/\\//g,\"\\\\\");\n\t\t} else if(document.hostname !== \"\" && /^\\/[^\\/]+\\/[^\\/]+/i.test(pathname)) {\t// test for \\\\server\\share\\blah... - ^/[^/]+/[^/]+\n\t\t\t// Convert slashes to backslashes\n\t\t\tpathname = pathname.replace(/\\//g,\"\\\\\");\n\t\t\t// reconstruct UNC path\n\t\t\tpathname = \"\\\\\\\\\" + document.location.hostname + pathname;\n\t\t} else return false;\n\t\t// Prompt the user to save the file\n\t\twindow.TiddlyIE.save(pathname, text);\n\t\t// Callback that we succeeded\n\t\tcallback(null);\n\t\treturn true;\n\t} else {\n\t\treturn false;\n\t}\n};\n\n/*\nInformation about this saver\n*/\nTiddlyIESaver.prototype.info = {\n\tname: \"tiddlyiesaver\",\n\tpriority: 1500,\n\tcapabilities: [\"save\"]\n};\n\n/*\nStatic method that returns true if this saver is capable of working\n*/\nexports.canSave = function(wiki) {\n\treturn (window.location.protocol === \"file:\");\n};\n\n/*\nCreate an instance of this saver\n*/\nexports.create = function(wiki) {\n\treturn new TiddlyIESaver(wiki);\n};\n\n})();\n",
"title": "$:/core/modules/savers/tiddlyie.js",
"type": "application/javascript",
"module-type": "saver"
},
"$:/core/modules/savers/twedit.js": {
"text": "/*\\\ntitle: $:/core/modules/savers/twedit.js\ntype: application/javascript\nmodule-type: saver\n\nHandles saving changes via the TWEdit iOS app\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false, netscape: false, Components: false */\n\"use strict\";\n\nvar TWEditSaver = function(wiki) {\n};\n\nTWEditSaver.prototype.save = function(text,method,callback) {\n\t// Bail if we're not running under TWEdit\n\tif(typeof DeviceInfo !== \"object\") {\n\t\treturn false;\n\t}\n\t// Get the pathname of this document\n\tvar pathname = decodeURIComponent(document.location.pathname);\n\t// Strip any query or location part\n\tvar p = pathname.indexOf(\"?\");\n\tif(p !== -1) {\n\t\tpathname = pathname.substr(0,p);\n\t}\n\tp = pathname.indexOf(\"#\");\n\tif(p !== -1) {\n\t\tpathname = pathname.substr(0,p);\n\t}\n\t// Remove the leading \"/Documents\" from path\n\tvar prefix = \"/Documents\";\n\tif(pathname.indexOf(prefix) === 0) {\n\t\tpathname = pathname.substr(prefix.length);\n\t}\n\t// Error handler\n\tvar errorHandler = function(event) {\n\t\t// Error\n\t\tcallback($tw.language.getString(\"Error/SavingToTWEdit\") + \": \" + event.target.error.code);\n\t};\n\t// Get the file system\n\twindow.requestFileSystem(LocalFileSystem.PERSISTENT,0,function(fileSystem) {\n\t\t// Now we've got the filesystem, get the fileEntry\n\t\tfileSystem.root.getFile(pathname, {create: true}, function(fileEntry) {\n\t\t\t// Now we've got the fileEntry, create the writer\n\t\t\tfileEntry.createWriter(function(writer) {\n\t\t\t\twriter.onerror = errorHandler;\n\t\t\t\twriter.onwrite = function() {\n\t\t\t\t\tcallback(null);\n\t\t\t\t};\n\t\t\t\twriter.position = 0;\n\t\t\t\twriter.write(text);\n\t\t\t},errorHandler);\n\t\t}, errorHandler);\n\t}, errorHandler);\n\treturn true;\n};\n\n/*\nInformation about this saver\n*/\nTWEditSaver.prototype.info = {\n\tname: \"twedit\",\n\tpriority: 1600,\n\tcapabilities: [\"save\", \"autosave\"]\n};\n\n/*\nStatic method that returns true if this saver is capable of working\n*/\nexports.canSave = function(wiki) {\n\treturn true;\n};\n\n/*\nCreate an instance of this saver\n*/\nexports.create = function(wiki) {\n\treturn new TWEditSaver(wiki);\n};\n\n/////////////////////////// Hack\n// HACK: This ensures that TWEdit recognises us as a TiddlyWiki document\nif($tw.browser) {\n\twindow.version = {title: \"TiddlyWiki\"};\n}\n\n})();\n",
"title": "$:/core/modules/savers/twedit.js",
"type": "application/javascript",
"module-type": "saver"
},
"$:/core/modules/savers/upload.js": {
"text": "/*\\\ntitle: $:/core/modules/savers/upload.js\ntype: application/javascript\nmodule-type: saver\n\nHandles saving changes via upload to a server.\n\nDesigned to be compatible with BidiX's UploadPlugin at http://tiddlywiki.bidix.info/#UploadPlugin\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nSelect the appropriate saver module and set it up\n*/\nvar UploadSaver = function(wiki) {\n\tthis.wiki = wiki;\n};\n\nUploadSaver.prototype.save = function(text,method,callback) {\n\t// Get the various parameters we need\n\tvar backupDir = this.wiki.getTextReference(\"$:/UploadBackupDir\") || \".\",\n\t\tusername = this.wiki.getTextReference(\"$:/UploadName\"),\n\t\tpassword = $tw.utils.getPassword(\"upload\"),\n\t\tuploadDir = this.wiki.getTextReference(\"$:/UploadDir\") || \".\",\n\t\tuploadFilename = this.wiki.getTextReference(\"$:/UploadFilename\") || \"index.html\",\n\t\turl = this.wiki.getTextReference(\"$:/UploadURL\");\n\t// Bail out if we don't have the bits we need\n\tif(!username || username.toString().trim() === \"\" || !password || password.toString().trim() === \"\") {\n\t\treturn false;\n\t}\n\t// Construct the url if not provided\n\tif(!url) {\n\t\turl = \"http://\" + username + \".tiddlyspot.com/store.cgi\";\n\t}\n\t// Assemble the header\n\tvar boundary = \"---------------------------\" + \"AaB03x\";\t\n\tvar uploadFormName = \"UploadPlugin\";\n\tvar head = [];\n\thead.push(\"--\" + boundary + \"\\r\\nContent-disposition: form-data; name=\\\"UploadPlugin\\\"\\r\\n\");\n\thead.push(\"backupDir=\" + backupDir + \";user=\" + username + \";password=\" + password + \";uploaddir=\" + uploadDir + \";;\"); \n\thead.push(\"\\r\\n\" + \"--\" + boundary);\n\thead.push(\"Content-disposition: form-data; name=\\\"userfile\\\"; filename=\\\"\" + uploadFilename + \"\\\"\");\n\thead.push(\"Content-Type: text/html;charset=UTF-8\");\n\thead.push(\"Content-Length: \" + text.length + \"\\r\\n\");\n\thead.push(\"\");\n\t// Assemble the tail and the data itself\n\tvar tail = \"\\r\\n--\" + boundary + \"--\\r\\n\",\n\t\tdata = head.join(\"\\r\\n\") + text + tail;\n\t// Do the HTTP post\n\tvar http = new XMLHttpRequest();\n\thttp.open(\"POST\",url,true,username,password);\n\thttp.setRequestHeader(\"Content-Type\",\"multipart/form-data; charset=UTF-8; boundary=\" + boundary);\n\thttp.onreadystatechange = function() {\n\t\tif(http.readyState == 4 && http.status == 200) {\n\t\t\tif(http.responseText.substr(0,4) === \"0 - \") {\n\t\t\t\tcallback(null);\n\t\t\t} else {\n\t\t\t\tcallback(http.responseText);\n\t\t\t}\n\t\t}\n\t};\n\ttry {\n\t\thttp.send(data);\n\t} catch(ex) {\n\t\treturn callback($tw.language.getString(\"Error/Caption\") + \":\" + ex);\n\t}\n\t$tw.notifier.display(\"$:/language/Notifications/Save/Starting\");\n\treturn true;\n};\n\n/*\nInformation about this saver\n*/\nUploadSaver.prototype.info = {\n\tname: \"upload\",\n\tpriority: 2000,\n\tcapabilities: [\"save\", \"autosave\"]\n};\n\n/*\nStatic method that returns true if this saver is capable of working\n*/\nexports.canSave = function(wiki) {\n\treturn true;\n};\n\n/*\nCreate an instance of this saver\n*/\nexports.create = function(wiki) {\n\treturn new UploadSaver(wiki);\n};\n\n})();\n",
"title": "$:/core/modules/savers/upload.js",
"type": "application/javascript",
"module-type": "saver"
},
"$:/core/modules/browser-messaging.js": {
"text": "/*\\\ntitle: $:/core/modules/browser-messaging.js\ntype: application/javascript\nmodule-type: startup\n\nBrowser message handling\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n// Export name and synchronous status\nexports.name = \"browser-messaging\";\nexports.platforms = [\"browser\"];\nexports.after = [\"startup\"];\nexports.synchronous = true;\n\n/*\nLoad a specified url as an iframe and call the callback when it is loaded. If the url is already loaded then the existing iframe instance is used\n*/\nfunction loadIFrame(url,callback) {\n\t// Check if iframe already exists\n\tvar iframeInfo = $tw.browserMessaging.iframeInfoMap[url];\n\tif(iframeInfo) {\n\t\t// We've already got the iframe\n\t\tcallback(null,iframeInfo);\n\t} else {\n\t\t// Create the iframe and save it in the list\n\t\tvar iframe = document.createElement(\"iframe\"),\n\t\t\tiframeInfo = {\n\t\t\t\turl: url,\n\t\t\t\tstatus: \"loading\",\n\t\t\t\tdomNode: iframe\n\t\t\t};\n\t\t$tw.browserMessaging.iframeInfoMap[url] = iframeInfo;\n\t\tsaveIFrameInfoTiddler(iframeInfo);\n\t\t// Add the iframe to the DOM and hide it\n\t\tiframe.style.display = \"none\";\n\t\tdocument.body.appendChild(iframe);\n\t\t// Set up onload\n\t\tiframe.onload = function() {\n\t\t\tiframeInfo.status = \"loaded\";\n\t\t\tsaveIFrameInfoTiddler(iframeInfo);\n\t\t\tcallback(null,iframeInfo);\n\t\t};\n\t\tiframe.onerror = function() {\n\t\t\tcallback(\"Cannot load iframe\");\n\t\t};\n\t\ttry {\n\t\t\tiframe.src = url;\n\t\t} catch(ex) {\n\t\t\tcallback(ex);\n\t\t}\n\t}\n}\n\nfunction saveIFrameInfoTiddler(iframeInfo) {\n\t$tw.wiki.addTiddler(new $tw.Tiddler($tw.wiki.getCreationFields(),{\n\t\ttitle: \"$:/temp/ServerConnection/\" + iframeInfo.url,\n\t\ttext: iframeInfo.status,\n\t\ttags: [\"$:/tags/ServerConnection\"],\n\t\turl: iframeInfo.url\n\t},$tw.wiki.getModificationFields()));\n}\n\nexports.startup = function() {\n\t// Initialise the store of iframes we've created\n\t$tw.browserMessaging = {\n\t\tiframeInfoMap: {} // Hashmap by URL of {url:,status:\"loading/loaded\",domNode:}\n\t};\n\t// Listen for widget messages to control loading the plugin library\n\t$tw.rootWidget.addEventListener(\"tm-load-plugin-library\",function(event) {\n\t\tvar paramObject = event.paramObject || {},\n\t\t\turl = paramObject.url;\n\t\tif(url) {\n\t\t\tloadIFrame(url,function(err,iframeInfo) {\n\t\t\t\tif(err) {\n\t\t\t\t\talert($tw.language.getString(\"Error/LoadingPluginLibrary\") + \": \" + url);\n\t\t\t\t} else {\n\t\t\t\t\tiframeInfo.domNode.contentWindow.postMessage({\n\t\t\t\t\t\tverb: \"GET\",\n\t\t\t\t\t\turl: \"recipes/library/tiddlers.json\",\n\t\t\t\t\t\tcookies: {\n\t\t\t\t\t\t\ttype: \"save-info\",\n\t\t\t\t\t\t\tinfoTitlePrefix: paramObject.infoTitlePrefix || \"$:/temp/RemoteAssetInfo/\",\n\t\t\t\t\t\t\turl: url\n\t\t\t\t\t\t}\n\t\t\t\t\t},\"*\");\n\t\t\t\t}\n\t\t\t});\n\t\t}\n\t});\n\t$tw.rootWidget.addEventListener(\"tm-load-plugin-from-library\",function(event) {\n\t\tvar paramObject = event.paramObject || {},\n\t\t\turl = paramObject.url,\n\t\t\ttitle = paramObject.title;\n\t\tif(url && title) {\n\t\t\tloadIFrame(url,function(err,iframeInfo) {\n\t\t\t\tif(err) {\n\t\t\t\t\talert($tw.language.getString(\"Error/LoadingPluginLibrary\") + \": \" + url);\n\t\t\t\t} else {\n\t\t\t\t\tiframeInfo.domNode.contentWindow.postMessage({\n\t\t\t\t\t\tverb: \"GET\",\n\t\t\t\t\t\turl: \"recipes/library/tiddlers/\" + encodeURIComponent(title) + \".json\",\n\t\t\t\t\t\tcookies: {\n\t\t\t\t\t\t\ttype: 
\"save-tiddler\",\n\t\t\t\t\t\t\turl: url\n\t\t\t\t\t\t}\n\t\t\t\t\t},\"*\");\n\t\t\t\t}\n\t\t\t});\n\t\t}\n\t});\n\t// Listen for window messages from other windows\n\twindow.addEventListener(\"message\",function listener(event){\n\t\tconsole.log(\"browser-messaging: \",document.location.toString())\n\t\tconsole.log(\"browser-messaging: Received message from\",event.origin);\n\t\tconsole.log(\"browser-messaging: Message content\",event.data);\n\t\tswitch(event.data.verb) {\n\t\t\tcase \"GET-RESPONSE\":\n\t\t\t\tif(event.data.status.charAt(0) === \"2\") {\n\t\t\t\t\tif(event.data.cookies) {\n\t\t\t\t\t\tif(event.data.cookies.type === \"save-info\") {\n\t\t\t\t\t\t\tvar tiddlers = JSON.parse(event.data.body);\n\t\t\t\t\t\t\t$tw.utils.each(tiddlers,function(tiddler) {\n\t\t\t\t\t\t\t\t$tw.wiki.addTiddler(new $tw.Tiddler($tw.wiki.getCreationFields(),tiddler,{\n\t\t\t\t\t\t\t\t\ttitle: event.data.cookies.infoTitlePrefix + event.data.cookies.url + \"/\" + tiddler.title,\n\t\t\t\t\t\t\t\t\t\"original-title\": tiddler.title,\n\t\t\t\t\t\t\t\t\ttext: \"\",\n\t\t\t\t\t\t\t\t\ttype: \"text/vnd.tiddlywiki\",\n\t\t\t\t\t\t\t\t\t\"original-type\": tiddler.type,\n\t\t\t\t\t\t\t\t\t\"plugin-type\": undefined,\n\t\t\t\t\t\t\t\t\t\"original-plugin-type\": tiddler[\"plugin-type\"],\n\t\t\t\t\t\t\t\t\t\"module-type\": undefined,\n\t\t\t\t\t\t\t\t\t\"original-module-type\": tiddler[\"module-type\"],\n\t\t\t\t\t\t\t\t\ttags: [\"$:/tags/RemoteAssetInfo\"],\n\t\t\t\t\t\t\t\t\t\"original-tags\": $tw.utils.stringifyList(tiddler.tags || []),\n\t\t\t\t\t\t\t\t\t\"server-url\": event.data.cookies.url\n\t\t\t\t\t\t\t\t},$tw.wiki.getModificationFields()));\n\t\t\t\t\t\t\t});\n\t\t\t\t\t\t} else if(event.data.cookies.type === \"save-tiddler\") {\n\t\t\t\t\t\t\tvar tiddler = JSON.parse(event.data.body);\n\t\t\t\t\t\t\t$tw.wiki.addTiddler(new $tw.Tiddler(tiddler));\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tbreak;\n\t\t}\n\t},false);\n};\n\n})();\n",
"title": "$:/core/modules/browser-messaging.js",
"type": "application/javascript",
"module-type": "startup"
},
"$:/core/modules/startup/commands.js": {
"text": "/*\\\ntitle: $:/core/modules/startup/commands.js\ntype: application/javascript\nmodule-type: startup\n\nCommand processing\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n// Export name and synchronous status\nexports.name = \"commands\";\nexports.platforms = [\"node\"];\nexports.after = [\"story\"];\nexports.synchronous = false;\n\nexports.startup = function(callback) {\n\t// On the server, start a commander with the command line arguments\n\tvar commander = new $tw.Commander(\n\t\t$tw.boot.argv,\n\t\tfunction(err) {\n\t\t\tif(err) {\n\t\t\t\treturn $tw.utils.error(\"Error: \" + err);\n\t\t\t}\n\t\t\tcallback();\n\t\t},\n\t\t$tw.wiki,\n\t\t{output: process.stdout, error: process.stderr}\n\t);\n\tcommander.execute();\n};\n\n})();\n",
"title": "$:/core/modules/startup/commands.js",
"type": "application/javascript",
"module-type": "startup"
},
"$:/core/modules/startup/favicon.js": {
"text": "/*\\\ntitle: $:/core/modules/startup/favicon.js\ntype: application/javascript\nmodule-type: startup\n\nFavicon handling\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n// Export name and synchronous status\nexports.name = \"favicon\";\nexports.platforms = [\"browser\"];\nexports.after = [\"startup\"];\nexports.synchronous = true;\n\t\t\n// Favicon tiddler\nvar FAVICON_TITLE = \"$:/favicon.ico\";\n\nexports.startup = function() {\n\t// Set up the favicon\n\tsetFavicon();\n\t// Reset the favicon when the tiddler changes\n\t$tw.wiki.addEventListener(\"change\",function(changes) {\n\t\tif($tw.utils.hop(changes,FAVICON_TITLE)) {\n\t\t\tsetFavicon();\n\t\t}\n\t});\n};\n\nfunction setFavicon() {\n\tvar tiddler = $tw.wiki.getTiddler(FAVICON_TITLE);\n\tif(tiddler) {\n\t\tvar faviconLink = document.getElementById(\"faviconLink\");\n\t\tfaviconLink.setAttribute(\"href\",\"data:\" + tiddler.fields.type + \";base64,\" + tiddler.fields.text);\n\t}\n}\n\n})();\n",
"title": "$:/core/modules/startup/favicon.js",
"type": "application/javascript",
"module-type": "startup"
},
"$:/core/modules/startup/info.js": {
"text": "/*\\\ntitle: $:/core/modules/startup/info.js\ntype: application/javascript\nmodule-type: startup\n\nInitialise $:/info tiddlers via $:/temp/info-plugin pseudo-plugin\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n// Export name and synchronous status\nexports.name = \"info\";\nexports.before = [\"startup\"];\nexports.after = [\"load-modules\"];\nexports.synchronous = true;\n\nexports.startup = function() {\n\t// Collect up the info tiddlers\n\tvar infoTiddlerFields = {};\n\t// Give each info module a chance to fill in as many info tiddlers as they want\n\t$tw.modules.forEachModuleOfType(\"info\",function(title,moduleExports) {\n\t\tif(moduleExports && moduleExports.getInfoTiddlerFields) {\n\t\t\tvar tiddlerFieldsArray = moduleExports.getInfoTiddlerFields(infoTiddlerFields);\n\t\t\t$tw.utils.each(tiddlerFieldsArray,function(fields) {\n\t\t\t\tif(fields) {\n\t\t\t\t\tinfoTiddlerFields[fields.title] = fields;\n\t\t\t\t}\n\t\t\t});\n\t\t}\n\t});\n\t// Bake the info tiddlers into a plugin\n\tvar fields = {\n\t\ttitle: \"$:/temp/info-plugin\",\n\t\ttype: \"application/json\",\n\t\t\"plugin-type\": \"info\",\n\t\ttext: JSON.stringify({tiddlers: infoTiddlerFields},null,$tw.config.preferences.jsonSpaces)\n\t};\n\t$tw.wiki.addTiddler(new $tw.Tiddler(fields));\n\t$tw.wiki.readPluginInfo();\n\t$tw.wiki.registerPluginTiddlers(\"info\");\n\t$tw.wiki.unpackPluginTiddlers();\n};\n\n})();\n",
"title": "$:/core/modules/startup/info.js",
"type": "application/javascript",
"module-type": "startup"
},
"$:/core/modules/startup/load-modules.js": {
"text": "/*\\\ntitle: $:/core/modules/startup/load-modules.js\ntype: application/javascript\nmodule-type: startup\n\nLoad core modules\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n// Export name and synchronous status\nexports.name = \"load-modules\";\nexports.synchronous = true;\n\nexports.startup = function() {\n\t// Load modules\n\t$tw.modules.applyMethods(\"utils\",$tw.utils);\n\tif($tw.node) {\n\t\t$tw.modules.applyMethods(\"utils-node\",$tw.utils);\n\t}\n\t$tw.modules.applyMethods(\"global\",$tw);\n\t$tw.modules.applyMethods(\"config\",$tw.config);\n\t$tw.Tiddler.fieldModules = $tw.modules.getModulesByTypeAsHashmap(\"tiddlerfield\");\n\t$tw.modules.applyMethods(\"tiddlermethod\",$tw.Tiddler.prototype);\n\t$tw.modules.applyMethods(\"wikimethod\",$tw.Wiki.prototype);\n\t$tw.modules.applyMethods(\"tiddlerdeserializer\",$tw.Wiki.tiddlerDeserializerModules);\n\t$tw.macros = $tw.modules.getModulesByTypeAsHashmap(\"macro\");\n\t$tw.wiki.initParsers();\n\t$tw.Commander.initCommands();\n};\n\n})();\n",
"title": "$:/core/modules/startup/load-modules.js",
"type": "application/javascript",
"module-type": "startup"
},
"$:/core/modules/startup/password.js": {
"text": "/*\\\ntitle: $:/core/modules/startup/password.js\ntype: application/javascript\nmodule-type: startup\n\nPassword handling\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n// Export name and synchronous status\nexports.name = \"password\";\nexports.platforms = [\"browser\"];\nexports.after = [\"startup\"];\nexports.synchronous = true;\n\nexports.startup = function() {\n\t$tw.rootWidget.addEventListener(\"tm-set-password\",function(event) {\n\t\t$tw.passwordPrompt.createPrompt({\n\t\t\tserviceName: $tw.language.getString(\"Encryption/PromptSetPassword\"),\n\t\t\tnoUserName: true,\n\t\t\tsubmitText: $tw.language.getString(\"Encryption/SetPassword\"),\n\t\t\tcanCancel: true,\n\t\t\trepeatPassword: true,\n\t\t\tcallback: function(data) {\n\t\t\t\tif(data) {\n\t\t\t\t\t$tw.crypto.setPassword(data.password);\n\t\t\t\t}\n\t\t\t\treturn true; // Get rid of the password prompt\n\t\t\t}\n\t\t});\n\t});\n\t$tw.rootWidget.addEventListener(\"tm-clear-password\",function(event) {\n\t\tif($tw.browser) {\n\t\t\tif(!confirm($tw.language.getString(\"Encryption/ConfirmClearPassword\"))) {\n\t\t\t\treturn;\n\t\t\t}\n\t\t}\n\t\t$tw.crypto.setPassword(null);\n\t});\n\t// Ensure that $:/isEncrypted is maintained properly\n\t$tw.wiki.addEventListener(\"change\",function(changes) {\n\t\tif($tw.utils.hop(changes,\"$:/isEncrypted\")) {\n\t\t\t$tw.crypto.updateCryptoStateTiddler();\n\t\t}\n\t});\n};\n\n})();\n",
"title": "$:/core/modules/startup/password.js",
"type": "application/javascript",
"module-type": "startup"
},
"$:/core/modules/startup/render.js": {
"text": "/*\\\ntitle: $:/core/modules/startup/render.js\ntype: application/javascript\nmodule-type: startup\n\nTitle, stylesheet and page rendering\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n// Export name and synchronous status\nexports.name = \"render\";\nexports.platforms = [\"browser\"];\nexports.after = [\"story\"];\nexports.synchronous = true;\n\n// Default story and history lists\nvar PAGE_TITLE_TITLE = \"$:/core/wiki/title\";\nvar PAGE_STYLESHEET_TITLE = \"$:/core/ui/PageStylesheet\";\nvar PAGE_TEMPLATE_TITLE = \"$:/core/ui/PageTemplate\";\n\n// Time (in ms) that we defer refreshing changes to draft tiddlers\nvar DRAFT_TIDDLER_TIMEOUT_TITLE = \"$:/config/Drafts/TypingTimeout\";\nvar DRAFT_TIDDLER_TIMEOUT = 400;\n\nexports.startup = function() {\n\t// Set up the title\n\t$tw.titleWidgetNode = $tw.wiki.makeTranscludeWidget(PAGE_TITLE_TITLE,{document: $tw.fakeDocument, parseAsInline: true});\n\t$tw.titleContainer = $tw.fakeDocument.createElement(\"div\");\n\t$tw.titleWidgetNode.render($tw.titleContainer,null);\n\tdocument.title = $tw.titleContainer.textContent;\n\t$tw.wiki.addEventListener(\"change\",function(changes) {\n\t\tif($tw.titleWidgetNode.refresh(changes,$tw.titleContainer,null)) {\n\t\t\tdocument.title = $tw.titleContainer.textContent;\n\t\t}\n\t});\n\t// Set up the styles\n\t$tw.styleWidgetNode = $tw.wiki.makeTranscludeWidget(PAGE_STYLESHEET_TITLE,{document: $tw.fakeDocument});\n\t$tw.styleContainer = $tw.fakeDocument.createElement(\"style\");\n\t$tw.styleWidgetNode.render($tw.styleContainer,null);\n\t$tw.styleElement = document.createElement(\"style\");\n\t$tw.styleElement.innerHTML = $tw.styleContainer.textContent;\n\tdocument.head.insertBefore($tw.styleElement,document.head.firstChild);\n\t$tw.wiki.addEventListener(\"change\",$tw.perf.report(\"styleRefresh\",function(changes) {\n\t\tif($tw.styleWidgetNode.refresh(changes,$tw.styleContainer,null)) {\n\t\t\t$tw.styleElement.innerHTML = $tw.styleContainer.textContent;\n\t\t}\n\t}));\n\t// Display the $:/core/ui/PageTemplate tiddler to kick off the display\n\t$tw.perf.report(\"mainRender\",function() {\n\t\t$tw.pageWidgetNode = $tw.wiki.makeTranscludeWidget(PAGE_TEMPLATE_TITLE,{document: document, parentWidget: $tw.rootWidget});\n\t\t$tw.pageContainer = document.createElement(\"div\");\n\t\t$tw.utils.addClass($tw.pageContainer,\"tc-page-container-wrapper\");\n\t\tdocument.body.insertBefore($tw.pageContainer,document.body.firstChild);\n\t\t$tw.pageWidgetNode.render($tw.pageContainer,null);\n\t})();\n\t// Prepare refresh mechanism\n\tvar deferredChanges = Object.create(null),\n\t\ttimerId;\n\tfunction refresh() {\n\t\t// Process the refresh\n\t\t$tw.pageWidgetNode.refresh(deferredChanges);\n\t\tdeferredChanges = Object.create(null);\n\t}\n\t// Add the change event handler\n\t$tw.wiki.addEventListener(\"change\",$tw.perf.report(\"mainRefresh\",function(changes) {\n\t\t// Check if only drafts have changed\n\t\tvar onlyDraftsHaveChanged = true;\n\t\tfor(var title in changes) {\n\t\t\tvar tiddler = $tw.wiki.getTiddler(title);\n\t\t\tif(!tiddler || !tiddler.hasField(\"draft.of\")) {\n\t\t\t\tonlyDraftsHaveChanged = false;\n\t\t\t}\n\t\t}\n\t\t// Defer the change if only drafts have changed\n\t\tif(timerId) {\n\t\t\tclearTimeout(timerId);\n\t\t}\n\t\ttimerId = null;\n\t\tif(onlyDraftsHaveChanged) {\n\t\t\tvar timeout = parseInt($tw.wiki.getTiddlerText(DRAFT_TIDDLER_TIMEOUT_TITLE,\"\"),10);\n\t\t\tif(isNaN(timeout)) {\n\t\t\t\ttimeout = 
DRAFT_TIDDLER_TIMEOUT;\n\t\t\t}\n\t\t\ttimerId = setTimeout(refresh,timeout);\n\t\t\t$tw.utils.extend(deferredChanges,changes);\n\t\t} else {\n\t\t\t$tw.utils.extend(deferredChanges,changes);\n\t\t\trefresh();\n\t\t}\n\t}));\n\t// Fix up the link between the root widget and the page container\n\t$tw.rootWidget.domNodes = [$tw.pageContainer];\n\t$tw.rootWidget.children = [$tw.pageWidgetNode];\n};\n\n})();\n",
"title": "$:/core/modules/startup/render.js",
"type": "application/javascript",
"module-type": "startup"
},
"$:/core/modules/startup/rootwidget.js": {
"text": "/*\\\ntitle: $:/core/modules/startup/rootwidget.js\ntype: application/javascript\nmodule-type: startup\n\nSetup the root widget and the core root widget handlers\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n// Export name and synchronous status\nexports.name = \"rootwidget\";\nexports.platforms = [\"browser\"];\nexports.after = [\"startup\"];\nexports.before = [\"story\"];\nexports.synchronous = true;\n\nexports.startup = function() {\n\t// Install the modal message mechanism\n\t$tw.modal = new $tw.utils.Modal($tw.wiki);\n\t$tw.rootWidget.addEventListener(\"tm-modal\",function(event) {\n\t\t$tw.modal.display(event.param,{variables: event.paramObject});\n\t});\n\t// Install the notification mechanism\n\t$tw.notifier = new $tw.utils.Notifier($tw.wiki);\n\t$tw.rootWidget.addEventListener(\"tm-notify\",function(event) {\n\t\t$tw.notifier.display(event.param,{variables: event.paramObject});\n\t});\n\t// Install the scroller\n\t$tw.pageScroller = new $tw.utils.PageScroller();\n\t$tw.rootWidget.addEventListener(\"tm-scroll\",function(event) {\n\t\t$tw.pageScroller.handleEvent(event);\n\t});\n\tvar fullscreen = $tw.utils.getFullScreenApis();\n\tif(fullscreen) {\n\t\t$tw.rootWidget.addEventListener(\"tm-full-screen\",function(event) {\n\t\t\tif(document[fullscreen._fullscreenElement]) {\n\t\t\t\tdocument[fullscreen._exitFullscreen]();\n\t\t\t} else {\n\t\t\t\tdocument.documentElement[fullscreen._requestFullscreen](Element.ALLOW_KEYBOARD_INPUT);\n\t\t\t}\n\t\t});\n\t}\n\t// If we're being viewed on a data: URI then give instructions for how to save\n\tif(document.location.protocol === \"data:\") {\n\t\t$tw.rootWidget.dispatchEvent({\n\t\t\ttype: \"tm-modal\",\n\t\t\tparam: \"$:/language/Modals/SaveInstructions\"\n\t\t});\n\t}\n};\n\n})();\n",
"title": "$:/core/modules/startup/rootwidget.js",
"type": "application/javascript",
"module-type": "startup"
},
"$:/core/modules/startup.js": {
"text": "/*\\\ntitle: $:/core/modules/startup.js\ntype: application/javascript\nmodule-type: startup\n\nMiscellaneous startup logic for both the client and server.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n// Export name and synchronous status\nexports.name = \"startup\";\nexports.after = [\"load-modules\"];\nexports.synchronous = true;\n\n// Set to `true` to enable performance instrumentation\nvar PERFORMANCE_INSTRUMENTATION_CONFIG_TITLE = \"$:/config/Performance/Instrumentation\";\n\nvar widget = require(\"$:/core/modules/widgets/widget.js\");\n\nexports.startup = function() {\n\tvar modules,n,m,f;\n\t// Minimal browser detection\n\tif($tw.browser) {\n\t\t$tw.browser.isIE = (/msie|trident/i.test(navigator.userAgent));\n\t\t$tw.browser.isFirefox = !!document.mozFullScreenEnabled;\n\t}\n\t// Platform detection\n\t$tw.platform = {};\n\tif($tw.browser) {\n\t\t$tw.platform.isMac = /Mac/.test(navigator.platform);\n\t\t$tw.platform.isWindows = /win/i.test(navigator.platform);\n\t\t$tw.platform.isLinux = /Linux/i.test(navigator.appVersion);\n\t} else {\n\t\tswitch(require(\"os\").platform()) {\n\t\t\tcase \"darwin\":\n\t\t\t\t$tw.platform.isMac = true;\n\t\t\t\tbreak;\n\t\t\tcase \"win32\":\n\t\t\t\t$tw.platform.isWindows = true;\n\t\t\t\tbreak;\n\t\t\tcase \"freebsd\":\n\t\t\t\t$tw.platform.isLinux = true;\n\t\t\t\tbreak;\n\t\t\tcase \"linux\":\n\t\t\t\t$tw.platform.isLinux = true;\n\t\t\t\tbreak;\n\t\t}\n\t}\n\t// Initialise version\n\t$tw.version = $tw.utils.extractVersionInfo();\n\t// Set up the performance framework\n\t$tw.perf = new $tw.Performance($tw.wiki.getTiddlerText(PERFORMANCE_INSTRUMENTATION_CONFIG_TITLE,\"no\") === \"yes\");\n\t// Kick off the language manager and switcher\n\t$tw.language = new $tw.Language();\n\t$tw.languageSwitcher = new $tw.PluginSwitcher({\n\t\twiki: $tw.wiki,\n\t\tpluginType: \"language\",\n\t\tcontrollerTitle: \"$:/language\",\n\t\tdefaultPlugins: [\n\t\t\t\"$:/languages/en-US\"\n\t\t]\n\t});\n\t// Kick off the theme manager\n\t$tw.themeManager = new $tw.PluginSwitcher({\n\t\twiki: $tw.wiki,\n\t\tpluginType: \"theme\",\n\t\tcontrollerTitle: \"$:/theme\",\n\t\tdefaultPlugins: [\n\t\t\t\"$:/themes/tiddlywiki/snowwhite\",\n\t\t\t\"$:/themes/tiddlywiki/vanilla\"\n\t\t]\n\t});\n\t// Kick off the keyboard manager\n\t$tw.keyboardManager = new $tw.KeyboardManager();\n\t// Clear outstanding tiddler store change events to avoid an unnecessary refresh cycle at startup\n\t$tw.wiki.clearTiddlerEventQueue();\n\t// Create a root widget for attaching event handlers. 
By using it as the parentWidget for another widget tree, one can reuse the event handlers\n\tif($tw.browser) {\n\t\t$tw.rootWidget = new widget.widget({\n\t\t\ttype: \"widget\",\n\t\t\tchildren: []\n\t\t},{\n\t\t\twiki: $tw.wiki,\n\t\t\tdocument: document\n\t\t});\n\t}\n\t// Find a working syncadaptor\n\t$tw.syncadaptor = undefined;\n\t$tw.modules.forEachModuleOfType(\"syncadaptor\",function(title,module) {\n\t\tif(!$tw.syncadaptor && module.adaptorClass) {\n\t\t\t$tw.syncadaptor = new module.adaptorClass({wiki: $tw.wiki});\n\t\t}\n\t});\n\t// Set up the syncer object if we've got a syncadaptor\n\tif($tw.syncadaptor) {\n\t\t$tw.syncer = new $tw.Syncer({wiki: $tw.wiki, syncadaptor: $tw.syncadaptor});\n\t} \n\t// Setup the saver handler\n\t$tw.saverHandler = new $tw.SaverHandler({wiki: $tw.wiki, dirtyTracking: !$tw.syncadaptor});\n\t// Host-specific startup\n\tif($tw.browser) {\n\t\t// Install the popup manager\n\t\t$tw.popup = new $tw.utils.Popup();\n\t\t// Install the animator\n\t\t$tw.anim = new $tw.utils.Animator();\n\t}\n};\n\n})();\n",
"title": "$:/core/modules/startup.js",
"type": "application/javascript",
"module-type": "startup"
},
"$:/core/modules/startup/story.js": {
"text": "/*\\\ntitle: $:/core/modules/startup/story.js\ntype: application/javascript\nmodule-type: startup\n\nLoad core modules\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n// Export name and synchronous status\nexports.name = \"story\";\nexports.after = [\"startup\"];\nexports.synchronous = true;\n\n// Default story and history lists\nvar DEFAULT_STORY_TITLE = \"$:/StoryList\";\nvar DEFAULT_HISTORY_TITLE = \"$:/HistoryList\";\n\n// Default tiddlers\nvar DEFAULT_TIDDLERS_TITLE = \"$:/DefaultTiddlers\";\n\n// Config\nvar CONFIG_UPDATE_ADDRESS_BAR = \"$:/config/Navigation/UpdateAddressBar\"; // Can be \"no\", \"permalink\", \"permaview\"\nvar CONFIG_UPDATE_HISTORY = \"$:/config/Navigation/UpdateHistory\"; // Can be \"yes\" or \"no\"\n\nexports.startup = function() {\n\t// Open startup tiddlers\n\topenStartupTiddlers();\n\tif($tw.browser) {\n\t\t// Set up location hash update\n\t\t$tw.wiki.addEventListener(\"change\",function(changes) {\n\t\t\tif($tw.utils.hop(changes,DEFAULT_STORY_TITLE) || $tw.utils.hop(changes,DEFAULT_HISTORY_TITLE)) {\n\t\t\t\tupdateLocationHash({\n\t\t\t\t\tupdateAddressBar: $tw.wiki.getTiddlerText(CONFIG_UPDATE_ADDRESS_BAR,\"permaview\").trim(),\n\t\t\t\t\tupdateHistory: $tw.wiki.getTiddlerText(CONFIG_UPDATE_HISTORY,\"no\").trim()\n\t\t\t\t});\n\t\t\t}\n\t\t});\n\t\t// Listen for changes to the browser location hash\n\t\twindow.addEventListener(\"hashchange\",function() {\n\t\t\tvar hash = $tw.utils.getLocationHash();\n\t\t\tif(hash !== $tw.locationHash) {\n\t\t\t\t$tw.locationHash = hash;\n\t\t\t\topenStartupTiddlers({defaultToCurrentStory: true});\n\t\t\t}\n\t\t},false);\n\t\t// Listen for the tm-browser-refresh message\n\t\t$tw.rootWidget.addEventListener(\"tm-browser-refresh\",function(event) {\n\t\t\twindow.location.reload(true);\n\t\t});\n\t\t// Listen for the tm-home message\n\t\t$tw.rootWidget.addEventListener(\"tm-home\",function(event) {\n\t\t\twindow.location.hash = \"\";\n\t\t\tvar storyFilter = $tw.wiki.getTiddlerText(DEFAULT_TIDDLERS_TITLE),\n\t\t\t\tstoryList = $tw.wiki.filterTiddlers(storyFilter);\n\t\t\t//invoke any hooks that might change the default story list\n\t\t\tstoryList = $tw.hooks.invokeHook(\"th-opening-default-tiddlers-list\",storyList);\n\t\t\t$tw.wiki.addTiddler({title: DEFAULT_STORY_TITLE, text: \"\", list: storyList},$tw.wiki.getModificationFields());\n\t\t\tif(storyList[0]) {\n\t\t\t\t$tw.wiki.addToHistory(storyList[0]);\t\t\t\t\n\t\t\t}\n\t\t});\n\t\t// Listen for the tm-permalink message\n\t\t$tw.rootWidget.addEventListener(\"tm-permalink\",function(event) {\n\t\t\tupdateLocationHash({\n\t\t\t\tupdateAddressBar: \"permalink\",\n\t\t\t\tupdateHistory: $tw.wiki.getTiddlerText(CONFIG_UPDATE_HISTORY,\"no\").trim(),\n\t\t\t\ttargetTiddler: event.param || event.tiddlerTitle\n\t\t\t});\n\t\t});\n\t\t// Listen for the tm-permaview message\n\t\t$tw.rootWidget.addEventListener(\"tm-permaview\",function(event) {\n\t\t\tupdateLocationHash({\n\t\t\t\tupdateAddressBar: \"permaview\",\n\t\t\t\tupdateHistory: $tw.wiki.getTiddlerText(CONFIG_UPDATE_HISTORY,\"no\").trim(),\n\t\t\t\ttargetTiddler: event.param || event.tiddlerTitle\n\t\t\t});\n\t\t});\n\t}\n};\n\n/*\nProcess the location hash to open the specified tiddlers. Options:\ndefaultToCurrentStory: If true, the current story is retained as the default, instead of opening the default tiddlers\n*/\nfunction openStartupTiddlers(options) {\n\toptions = options || {};\n\t// Work out the target tiddler and the story filter. 
\"null\" means \"unspecified\"\n\tvar target = null,\n\t\tstoryFilter = null;\n\tif($tw.locationHash.length > 1) {\n\t\tvar hash = $tw.locationHash.substr(1),\n\t\t\tsplit = hash.indexOf(\":\");\n\t\tif(split === -1) {\n\t\t\ttarget = decodeURIComponent(hash.trim());\n\t\t} else {\n\t\t\ttarget = decodeURIComponent(hash.substr(0,split).trim());\n\t\t\tstoryFilter = decodeURIComponent(hash.substr(split + 1).trim());\n\t\t}\n\t}\n\t// If the story wasn't specified use the current tiddlers or a blank story\n\tif(storyFilter === null) {\n\t\tif(options.defaultToCurrentStory) {\n\t\t\tvar currStoryList = $tw.wiki.getTiddlerList(DEFAULT_STORY_TITLE);\n\t\t\tstoryFilter = $tw.utils.stringifyList(currStoryList);\n\t\t} else {\n\t\t\tif(target && target !== \"\") {\n\t\t\t\tstoryFilter = \"\";\n\t\t\t} else {\n\t\t\t\tstoryFilter = $tw.wiki.getTiddlerText(DEFAULT_TIDDLERS_TITLE);\n\t\t\t}\n\t\t}\n\t}\n\t// Process the story filter to get the story list\n\tvar storyList = $tw.wiki.filterTiddlers(storyFilter);\n\t// Invoke any hooks that want to change the default story list\n\tstoryList = $tw.hooks.invokeHook(\"th-opening-default-tiddlers-list\",storyList);\n\t// If the target tiddler isn't included then splice it in at the top\n\tif(target && storyList.indexOf(target) === -1) {\n\t\tstoryList.unshift(target);\n\t}\n\t// Save the story list\n\t$tw.wiki.addTiddler({title: DEFAULT_STORY_TITLE, text: \"\", list: storyList},$tw.wiki.getModificationFields());\n\t// If a target tiddler was specified add it to the history stack\n\tif(target && target !== \"\") {\n\t\t// The target tiddler doesn't need double square brackets, but we'll silently remove them if they're present\n\t\tif(target.indexOf(\"[[\") === 0 && target.substr(-2) === \"]]\") {\n\t\t\ttarget = target.substr(2,target.length - 4);\n\t\t}\n\t\t$tw.wiki.addToHistory(target);\n\t} else if(storyList.length > 0) {\n\t\t$tw.wiki.addToHistory(storyList[0]);\n\t}\n}\n\n/*\noptions: See below\noptions.updateAddressBar: \"permalink\", \"permaview\" or \"no\" (defaults to \"permaview\")\noptions.updateHistory: \"yes\" or \"no\" (defaults to \"no\")\noptions.targetTiddler: optional title of target tiddler for permalink\n*/\nfunction updateLocationHash(options) {\n\tif(options.updateAddressBar !== \"no\") {\n\t\t// Get the story and the history stack\n\t\tvar storyList = $tw.wiki.getTiddlerList(DEFAULT_STORY_TITLE),\n\t\t\thistoryList = $tw.wiki.getTiddlerData(DEFAULT_HISTORY_TITLE,[]),\n\t\t\ttargetTiddler = \"\";\n\t\tif(options.targetTiddler) {\n\t\t\ttargetTiddler = options.targetTiddler;\n\t\t} else {\n\t\t\t// The target tiddler is the one at the top of the stack\n\t\t\tif(historyList.length > 0) {\n\t\t\t\ttargetTiddler = historyList[historyList.length-1].title;\n\t\t\t}\n\t\t\t// Blank the target tiddler if it isn't present in the story\n\t\t\tif(storyList.indexOf(targetTiddler) === -1) {\n\t\t\t\ttargetTiddler = \"\";\n\t\t\t}\n\t\t}\n\t\t// Assemble the location hash\n\t\tif(options.updateAddressBar === \"permalink\") {\n\t\t\t$tw.locationHash = \"#\" + encodeURIComponent(targetTiddler);\n\t\t} else {\n\t\t\t$tw.locationHash = \"#\" + encodeURIComponent(targetTiddler) + \":\" + encodeURIComponent($tw.utils.stringifyList(storyList));\n\t\t}\n\t\t// Only change the location hash if we must, thus avoiding unnecessary onhashchange events\n\t\tif($tw.utils.getLocationHash() !== $tw.locationHash) {\n\t\t\tif(options.updateHistory === \"yes\") {\n\t\t\t\t// Assign the location hash so that history is updated\n\t\t\t\twindow.location.hash = 
$tw.locationHash;\n\t\t\t} else {\n\t\t\t\t// We use replace so that browser history isn't affected\n\t\t\t\twindow.location.replace(window.location.toString().split(\"#\")[0] + $tw.locationHash);\n\t\t\t}\n\t\t}\n\t}\n}\n\n})();\n",
"title": "$:/core/modules/startup/story.js",
"type": "application/javascript",
"module-type": "startup"
},
"$:/core/modules/startup/windows.js": {
"text": "/*\\\ntitle: $:/core/modules/startup/windows.js\ntype: application/javascript\nmodule-type: startup\n\nSetup root widget handlers for the messages concerned with opening external browser windows\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n// Export name and synchronous status\nexports.name = \"windows\";\nexports.platforms = [\"browser\"];\nexports.after = [\"startup\"];\nexports.synchronous = true;\n\n// Global to keep track of open windows (hashmap by title)\nvar windows = {};\n\nexports.startup = function() {\n\t// Handle open window message\n\t$tw.rootWidget.addEventListener(\"tm-open-window\",function(event) {\n\t\t// Get the parameters\n\t\tvar refreshHandler,\n\t\t\ttitle = event.param || event.tiddlerTitle,\n\t\t\tparamObject = event.paramObject || {},\n\t\t\ttemplate = paramObject.template || \"$:/core/templates/single.tiddler.window\",\n\t\t\twidth = paramObject.width || \"700\",\n\t\t\theight = paramObject.height || \"600\",\n\t\t\tvariables = $tw.utils.extend({},paramObject,{currentTiddler: title});\n\t\t// Open the window\n\t\tvar srcWindow = window.open(\"\",\"external-\" + title,\"scrollbars,width=\" + width + \",height=\" + height),\n\t\t\tsrcDocument = srcWindow.document;\n\t\twindows[title] = srcWindow;\n\t\t// Check for reopening the same window\n\t\tif(srcWindow.haveInitialisedWindow) {\n\t\t\treturn;\n\t\t}\n\t\t// Initialise the document\n\t\tsrcDocument.write(\"<html><head></head><body class='tc-body tc-single-tiddler-window'></body></html>\");\n\t\tsrcDocument.close();\n\t\tsrcDocument.title = title;\n\t\tsrcWindow.addEventListener(\"beforeunload\",function(event) {\n\t\t\tdelete windows[title];\n\t\t\t$tw.wiki.removeEventListener(\"change\",refreshHandler);\n\t\t},false);\n\t\t// Set up the styles\n\t\tvar styleWidgetNode = $tw.wiki.makeTranscludeWidget(\"$:/core/ui/PageStylesheet\",{document: $tw.fakeDocument, variables: variables}),\n\t\t\tstyleContainer = $tw.fakeDocument.createElement(\"style\");\n\t\tstyleWidgetNode.render(styleContainer,null);\n\t\tvar styleElement = srcDocument.createElement(\"style\");\n\t\tstyleElement.innerHTML = styleContainer.textContent;\n\t\tsrcDocument.head.insertBefore(styleElement,srcDocument.head.firstChild);\n\t\t// Render the text of the tiddler\n\t\tvar parser = $tw.wiki.parseTiddler(template),\n\t\t\twidgetNode = $tw.wiki.makeWidget(parser,{document: srcDocument, parentWidget: $tw.rootWidget, variables: variables});\n\t\twidgetNode.render(srcDocument.body,srcDocument.body.firstChild);\n\t\t// Function to handle refreshes\n\t\trefreshHandler = function(changes) {\n\t\t\tif(styleWidgetNode.refresh(changes,styleContainer,null)) {\n\t\t\t\tstyleElement.innerHTML = styleContainer.textContent;\n\t\t\t}\n\t\t\twidgetNode.refresh(changes);\n\t\t};\n\t\t$tw.wiki.addEventListener(\"change\",refreshHandler);\n\t\tsrcWindow.haveInitialisedWindow = true;\n\t});\n\t// Close open windows when unloading main window\n\t$tw.addUnloadTask(function() {\n\t\t$tw.utils.each(windows,function(win) {\n\t\t\twin.close();\n\t\t});\n\t});\n\n};\n\n})();\n",
"title": "$:/core/modules/startup/windows.js",
"type": "application/javascript",
"module-type": "startup"
},
"$:/core/modules/story.js": {
"text": "/*\\\ntitle: $:/core/modules/story.js\ntype: application/javascript\nmodule-type: global\n\nLightweight object for managing interactions with the story and history lists.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nConstruct Story object with options:\nwiki: reference to wiki object to use to resolve tiddler titles\nstoryTitle: title of story list tiddler\nhistoryTitle: title of history list tiddler\n*/\nfunction Story(options) {\n\toptions = options || {};\n\tthis.wiki = options.wiki || $tw.wiki;\n\tthis.storyTitle = options.storyTitle || \"$:/StoryList\";\n\tthis.historyTitle = options.historyTitle || \"$:/HistoryList\";\n};\n\nStory.prototype.navigateTiddler = function(navigateTo,navigateFromTitle,navigateFromClientRect) {\n\tthis.addToStory(navigateTo,navigateFromTitle);\n\tthis.addToHistory(navigateTo,navigateFromClientRect);\n};\n\nStory.prototype.getStoryList = function() {\n\treturn this.wiki.getTiddlerList(this.storyTitle) || [];\n};\n\nStory.prototype.addToStory = function(navigateTo,navigateFromTitle,options) {\n\toptions = options || {};\n\tvar storyList = this.getStoryList();\n\t// See if the tiddler is already there\n\tvar slot = storyList.indexOf(navigateTo);\n\t// Quit if it already exists in the story river\n\tif(slot >= 0) {\n\t\treturn;\n\t}\n\t// First we try to find the position of the story element we navigated from\n\tvar fromIndex = storyList.indexOf(navigateFromTitle);\n\tif(fromIndex >= 0) {\n\t\t// The tiddler is added from inside the river\n\t\t// Determine where to insert the tiddler; Fallback is \"below\"\n\t\tswitch(options.openLinkFromInsideRiver) {\n\t\t\tcase \"top\":\n\t\t\t\tslot = 0;\n\t\t\t\tbreak;\n\t\t\tcase \"bottom\":\n\t\t\t\tslot = storyList.length;\n\t\t\t\tbreak;\n\t\t\tcase \"above\":\n\t\t\t\tslot = fromIndex;\n\t\t\t\tbreak;\n\t\t\tcase \"below\": // Intentional fall-through\n\t\t\tdefault:\n\t\t\t\tslot = fromIndex + 1;\n\t\t\t\tbreak;\n\t\t}\n\t} else {\n\t\t// The tiddler is opened from outside the river. Determine where to insert the tiddler; default is \"top\"\n\t\tif(options.openLinkFromOutsideRiver === \"bottom\") {\n\t\t\t// Insert at bottom\n\t\t\tslot = storyList.length;\n\t\t} else {\n\t\t\t// Insert at top\n\t\t\tslot = 0;\n\t\t}\n\t}\n\t// Add the tiddler\n\tstoryList.splice(slot,0,navigateTo);\n\t// Save the story\n\tthis.saveStoryList(storyList);\n};\n\nStory.prototype.saveStoryList = function(storyList) {\n\tvar storyTiddler = this.wiki.getTiddler(this.storyTitle);\n\tthis.wiki.addTiddler(new $tw.Tiddler(\n\t\tthis.wiki.getCreationFields(),\n\t\t{title: this.storyTitle},\n\t\tstoryTiddler,\n\t\t{list: storyList},\n\t\tthis.wiki.getModificationFields()\n\t));\n};\n\nStory.prototype.addToHistory = function(navigateTo,navigateFromClientRect) {\n\tvar titles = $tw.utils.isArray(navigateTo) ? 
navigateTo : [navigateTo];\n\t// Add a new record to the top of the history stack\n\tvar historyList = this.wiki.getTiddlerData(this.historyTitle,[]);\n\t$tw.utils.each(titles,function(title) {\n\t\thistoryList.push({title: title, fromPageRect: navigateFromClientRect});\n\t});\n\tthis.wiki.setTiddlerData(this.historyTitle,historyList,{\"current-tiddler\": titles[titles.length-1]});\n};\n\nStory.prototype.storyCloseTiddler = function(targetTitle) {\n// TBD\n};\n\nStory.prototype.storyCloseAllTiddlers = function() {\n// TBD\n};\n\nStory.prototype.storyCloseOtherTiddlers = function(targetTitle) {\n// TBD\n};\n\nStory.prototype.storyEditTiddler = function(targetTitle) {\n// TBD\n};\n\nStory.prototype.storyDeleteTiddler = function(targetTitle) {\n// TBD\n};\n\nStory.prototype.storySaveTiddler = function(targetTitle) {\n// TBD\n};\n\nStory.prototype.storyCancelTiddler = function(targetTitle) {\n// TBD\n};\n\nStory.prototype.storyNewTiddler = function(targetTitle) {\n// TBD\n};\n\nexports.Story = Story;\n\n\n})();\n",
"title": "$:/core/modules/story.js",
"type": "application/javascript",
"module-type": "global"
},
"$:/core/modules/storyviews/classic.js": {
"text": "/*\\\ntitle: $:/core/modules/storyviews/classic.js\ntype: application/javascript\nmodule-type: storyview\n\nViews the story as a linear sequence\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar easing = \"cubic-bezier(0.645, 0.045, 0.355, 1)\"; // From http://easings.net/#easeInOutCubic\n\nvar ClassicStoryView = function(listWidget) {\n\tthis.listWidget = listWidget;\n};\n\nClassicStoryView.prototype.navigateTo = function(historyInfo) {\n\tvar listElementIndex = this.listWidget.findListItem(0,historyInfo.title);\n\tif(listElementIndex === undefined) {\n\t\treturn;\n\t}\n\tvar listItemWidget = this.listWidget.children[listElementIndex],\n\t\ttargetElement = listItemWidget.findFirstDomNode();\n\t// Abandon if the list entry isn't a DOM element (it might be a text node)\n\tif(!(targetElement instanceof Element)) {\n\t\treturn;\n\t}\n\t// Scroll the node into view\n\tthis.listWidget.dispatchEvent({type: \"tm-scroll\", target: targetElement});\n};\n\nClassicStoryView.prototype.insert = function(widget) {\n\tvar targetElement = widget.findFirstDomNode(),\n\t\tduration = $tw.utils.getAnimationDuration();\n\t// Abandon if the list entry isn't a DOM element (it might be a text node)\n\tif(!(targetElement instanceof Element)) {\n\t\treturn;\n\t}\n\t// Get the current height of the tiddler\n\tvar computedStyle = window.getComputedStyle(targetElement),\n\t\tcurrMarginBottom = parseInt(computedStyle.marginBottom,10),\n\t\tcurrMarginTop = parseInt(computedStyle.marginTop,10),\n\t\tcurrHeight = targetElement.offsetHeight + currMarginTop;\n\t// Reset the margin once the transition is over\n\tsetTimeout(function() {\n\t\t$tw.utils.setStyle(targetElement,[\n\t\t\t{transition: \"none\"},\n\t\t\t{marginBottom: \"\"}\n\t\t]);\n\t},duration);\n\t// Set up the initial position of the element\n\t$tw.utils.setStyle(targetElement,[\n\t\t{transition: \"none\"},\n\t\t{marginBottom: (-currHeight) + \"px\"},\n\t\t{opacity: \"0.0\"}\n\t]);\n\t$tw.utils.forceLayout(targetElement);\n\t// Transition to the final position\n\t$tw.utils.setStyle(targetElement,[\n\t\t{transition: \"opacity \" + duration + \"ms \" + easing + \", \" +\n\t\t\t\t\t\"margin-bottom \" + duration + \"ms \" + easing},\n\t\t{marginBottom: currMarginBottom + \"px\"},\n\t\t{opacity: \"1.0\"}\n\t]);\n};\n\nClassicStoryView.prototype.remove = function(widget) {\n\tvar targetElement = widget.findFirstDomNode(),\n\t\tduration = $tw.utils.getAnimationDuration(),\n\t\tremoveElement = function() {\n\t\t\twidget.removeChildDomNodes();\n\t\t};\n\t// Abandon if the list entry isn't a DOM element (it might be a text node)\n\tif(!(targetElement instanceof Element)) {\n\t\tremoveElement();\n\t\treturn;\n\t}\n\t// Get the current height of the tiddler\n\tvar currWidth = targetElement.offsetWidth,\n\t\tcomputedStyle = window.getComputedStyle(targetElement),\n\t\tcurrMarginBottom = parseInt(computedStyle.marginBottom,10),\n\t\tcurrMarginTop = parseInt(computedStyle.marginTop,10),\n\t\tcurrHeight = targetElement.offsetHeight + currMarginTop;\n\t// Remove the dom nodes of the widget at the end of the transition\n\tsetTimeout(removeElement,duration);\n\t// Animate the closure\n\t$tw.utils.setStyle(targetElement,[\n\t\t{transition: \"none\"},\n\t\t{transform: \"translateX(0px)\"},\n\t\t{marginBottom: currMarginBottom + \"px\"},\n\t\t{opacity: \"1.0\"}\n\t]);\n\t$tw.utils.forceLayout(targetElement);\n\t$tw.utils.setStyle(targetElement,[\n\t\t{transition: $tw.utils.roundTripPropertyName(\"transform\") + \" 
\" + duration + \"ms \" + easing + \", \" +\n\t\t\t\t\t\"opacity \" + duration + \"ms \" + easing + \", \" +\n\t\t\t\t\t\"margin-bottom \" + duration + \"ms \" + easing},\n\t\t{transform: \"translateX(-\" + currWidth + \"px)\"},\n\t\t{marginBottom: (-currHeight) + \"px\"},\n\t\t{opacity: \"0.0\"}\n\t]);\n};\n\nexports.classic = ClassicStoryView;\n\n})();",
"title": "$:/core/modules/storyviews/classic.js",
"type": "application/javascript",
"module-type": "storyview"
},
"$:/core/modules/storyviews/pop.js": {
"text": "/*\\\ntitle: $:/core/modules/storyviews/pop.js\ntype: application/javascript\nmodule-type: storyview\n\nAnimates list insertions and removals\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar PopStoryView = function(listWidget) {\n\tthis.listWidget = listWidget;\n};\n\nPopStoryView.prototype.navigateTo = function(historyInfo) {\n\tvar listElementIndex = this.listWidget.findListItem(0,historyInfo.title);\n\tif(listElementIndex === undefined) {\n\t\treturn;\n\t}\n\tvar listItemWidget = this.listWidget.children[listElementIndex],\n\t\ttargetElement = listItemWidget.findFirstDomNode();\n\t// Abandon if the list entry isn't a DOM element (it might be a text node)\n\tif(!(targetElement instanceof Element)) {\n\t\treturn;\n\t}\n\t// Scroll the node into view\n\tthis.listWidget.dispatchEvent({type: \"tm-scroll\", target: targetElement});\n};\n\nPopStoryView.prototype.insert = function(widget) {\n\tvar targetElement = widget.findFirstDomNode(),\n\t\tduration = $tw.utils.getAnimationDuration();\n\t// Abandon if the list entry isn't a DOM element (it might be a text node)\n\tif(!(targetElement instanceof Element)) {\n\t\treturn;\n\t}\n\t// Reset once the transition is over\n\tsetTimeout(function() {\n\t\t$tw.utils.setStyle(targetElement,[\n\t\t\t{transition: \"none\"},\n\t\t\t{transform: \"none\"}\n\t\t]);\n\t},duration);\n\t// Set up the initial position of the element\n\t$tw.utils.setStyle(targetElement,[\n\t\t{transition: \"none\"},\n\t\t{transform: \"scale(2)\"},\n\t\t{opacity: \"0.0\"}\n\t]);\n\t$tw.utils.forceLayout(targetElement);\n\t// Transition to the final position\n\t$tw.utils.setStyle(targetElement,[\n\t\t{transition: $tw.utils.roundTripPropertyName(\"transform\") + \" \" + duration + \"ms ease-in-out, \" +\n\t\t\t\t\t\"opacity \" + duration + \"ms ease-in-out\"},\n\t\t{transform: \"scale(1)\"},\n\t\t{opacity: \"1.0\"}\n\t]);\n};\n\nPopStoryView.prototype.remove = function(widget) {\n\tvar targetElement = widget.findFirstDomNode(),\n\t\tduration = $tw.utils.getAnimationDuration(),\n\t\tremoveElement = function() {\n\t\t\tif(targetElement.parentNode) {\n\t\t\t\twidget.removeChildDomNodes();\n\t\t\t}\n\t\t};\n\t// Abandon if the list entry isn't a DOM element (it might be a text node)\n\tif(!(targetElement instanceof Element)) {\n\t\tremoveElement();\n\t\treturn;\n\t}\n\t// Remove the element at the end of the transition\n\tsetTimeout(removeElement,duration);\n\t// Animate the closure\n\t$tw.utils.setStyle(targetElement,[\n\t\t{transition: \"none\"},\n\t\t{transform: \"scale(1)\"},\n\t\t{opacity: \"1.0\"}\n\t]);\n\t$tw.utils.forceLayout(targetElement);\n\t$tw.utils.setStyle(targetElement,[\n\t\t{transition: $tw.utils.roundTripPropertyName(\"transform\") + \" \" + duration + \"ms ease-in-out, \" +\n\t\t\t\t\t\"opacity \" + duration + \"ms ease-in-out\"},\n\t\t{transform: \"scale(0.1)\"},\n\t\t{opacity: \"0.0\"}\n\t]);\n};\n\nexports.pop = PopStoryView;\n\n})();\n",
"title": "$:/core/modules/storyviews/pop.js",
"type": "application/javascript",
"module-type": "storyview"
},
"$:/core/modules/storyviews/zoomin.js": {
"text": "/*\\\ntitle: $:/core/modules/storyviews/zoomin.js\ntype: application/javascript\nmodule-type: storyview\n\nZooms between individual tiddlers\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar easing = \"cubic-bezier(0.645, 0.045, 0.355, 1)\"; // From http://easings.net/#easeInOutCubic\n\nvar ZoominListView = function(listWidget) {\n\tvar self = this;\n\tthis.listWidget = listWidget;\n\t// Get the index of the tiddler that is at the top of the history\n\tvar history = this.listWidget.wiki.getTiddlerDataCached(this.listWidget.historyTitle,[]),\n\t\ttargetTiddler;\n\tif(history.length > 0) {\n\t\ttargetTiddler = history[history.length-1].title;\n\t}\n\t// Make all the tiddlers position absolute, and hide all but the top (or first) one\n\t$tw.utils.each(this.listWidget.children,function(itemWidget,index) {\n\t\tvar domNode = itemWidget.findFirstDomNode();\n\t\t// Abandon if the list entry isn't a DOM element (it might be a text node)\n\t\tif(!(domNode instanceof Element)) {\n\t\t\treturn;\n\t\t}\n\t\tif((targetTiddler && targetTiddler !== itemWidget.parseTreeNode.itemTitle) || (!targetTiddler && index)) {\n\t\t\tdomNode.style.display = \"none\";\n\t\t} else {\n\t\t\tself.currentTiddlerDomNode = domNode;\n\t\t}\n\t\t$tw.utils.addClass(domNode,\"tc-storyview-zoomin-tiddler\");\n\t});\n};\n\nZoominListView.prototype.navigateTo = function(historyInfo) {\n\tvar duration = $tw.utils.getAnimationDuration(),\n\t\tlistElementIndex = this.listWidget.findListItem(0,historyInfo.title);\n\tif(listElementIndex === undefined) {\n\t\treturn;\n\t}\n\tvar listItemWidget = this.listWidget.children[listElementIndex],\n\t\ttargetElement = listItemWidget.findFirstDomNode();\n\t// Abandon if the list entry isn't a DOM element (it might be a text node)\n\tif(!(targetElement instanceof Element)) {\n\t\treturn;\n\t}\n\t// Make the new tiddler be position absolute and visible so that we can measure it\n\t$tw.utils.addClass(targetElement,\"tc-storyview-zoomin-tiddler\");\n\t$tw.utils.setStyle(targetElement,[\n\t\t{display: \"block\"},\n\t\t{transformOrigin: \"0 0\"},\n\t\t{transform: \"translateX(0px) translateY(0px) scale(1)\"},\n\t\t{transition: \"none\"},\n\t\t{opacity: \"0.0\"}\n\t]);\n\t// Get the position of the source node, or use the centre of the window as the source position\n\tvar sourceBounds = historyInfo.fromPageRect || {\n\t\t\tleft: window.innerWidth/2 - 2,\n\t\t\ttop: window.innerHeight/2 - 2,\n\t\t\twidth: window.innerWidth/8,\n\t\t\theight: window.innerHeight/8\n\t\t};\n\t// Try to find the title node in the target tiddler\n\tvar titleDomNode = findTitleDomNode(listItemWidget) || listItemWidget.findFirstDomNode(),\n\t\tzoomBounds = titleDomNode.getBoundingClientRect();\n\t// Compute the transform for the target tiddler to make the title lie over the source rectange\n\tvar targetBounds = targetElement.getBoundingClientRect(),\n\t\tscale = sourceBounds.width / zoomBounds.width,\n\t\tx = sourceBounds.left - targetBounds.left - (zoomBounds.left - targetBounds.left) * scale,\n\t\ty = sourceBounds.top - targetBounds.top - (zoomBounds.top - targetBounds.top) * scale;\n\t// Transform the target tiddler to its starting position\n\t$tw.utils.setStyle(targetElement,[\n\t\t{transform: \"translateX(\" + x + \"px) translateY(\" + y + \"px) scale(\" + scale + \")\"}\n\t]);\n\t// Force layout\n\t$tw.utils.forceLayout(targetElement);\n\t// Apply the ending transitions with a timeout to ensure that the previously applied transformations are applied 
first\n\tvar self = this,\n\t\tprevCurrentTiddler = this.currentTiddlerDomNode;\n\tthis.currentTiddlerDomNode = targetElement;\n\t// Transform the target tiddler to its natural size\n\t$tw.utils.setStyle(targetElement,[\n\t\t{transition: $tw.utils.roundTripPropertyName(\"transform\") + \" \" + duration + \"ms \" + easing + \", opacity \" + duration + \"ms \" + easing},\n\t\t{opacity: \"1.0\"},\n\t\t{transform: \"translateX(0px) translateY(0px) scale(1)\"},\n\t\t{zIndex: \"500\"},\n\t]);\n\t// Transform the previous tiddler out of the way and then hide it\n\tif(prevCurrentTiddler && prevCurrentTiddler !== targetElement) {\n\t\tscale = zoomBounds.width / sourceBounds.width;\n\t\tx = zoomBounds.left - targetBounds.left - (sourceBounds.left - targetBounds.left) * scale;\n\t\ty = zoomBounds.top - targetBounds.top - (sourceBounds.top - targetBounds.top) * scale;\n\t\t$tw.utils.setStyle(prevCurrentTiddler,[\n\t\t\t{transition: $tw.utils.roundTripPropertyName(\"transform\") + \" \" + duration + \"ms \" + easing + \", opacity \" + duration + \"ms \" + easing},\n\t\t\t{opacity: \"0.0\"},\n\t\t\t{transformOrigin: \"0 0\"},\n\t\t\t{transform: \"translateX(\" + x + \"px) translateY(\" + y + \"px) scale(\" + scale + \")\"},\n\t\t\t{zIndex: \"0\"}\n\t\t]);\n\t\t// Hide the tiddler when the transition has finished\n\t\tsetTimeout(function() {\n\t\t\tif(self.currentTiddlerDomNode !== prevCurrentTiddler) {\n\t\t\t\tprevCurrentTiddler.style.display = \"none\";\n\t\t\t}\n\t\t},duration);\n\t}\n\t// Scroll the target into view\n//\t$tw.pageScroller.scrollIntoView(targetElement);\n};\n\n/*\nFind the first child DOM node of a widget that has the class \"tc-title\"\n*/\nfunction findTitleDomNode(widget,targetClass) {\n\ttargetClass = targetClass || \"tc-title\";\n\tvar domNode = widget.findFirstDomNode();\n\tif(domNode && domNode.querySelector) {\n\t\treturn domNode.querySelector(\".\" + targetClass);\n\t}\n\treturn null;\n}\n\nZoominListView.prototype.insert = function(widget) {\n\tvar targetElement = widget.findFirstDomNode();\n\t// Abandon if the list entry isn't a DOM element (it might be a text node)\n\tif(!(targetElement instanceof Element)) {\n\t\treturn;\n\t}\n\t// Make the newly inserted node position absolute and hidden\n\t$tw.utils.addClass(targetElement,\"tc-storyview-zoomin-tiddler\");\n\t$tw.utils.setStyle(targetElement,[\n\t\t{display: \"none\"}\n\t]);\n};\n\nZoominListView.prototype.remove = function(widget) {\n\tvar targetElement = widget.findFirstDomNode(),\n\t\tduration = $tw.utils.getAnimationDuration(),\n\t\tremoveElement = function() {\n\t\t\twidget.removeChildDomNodes();\n\t\t};\n\t// Abandon if the list entry isn't a DOM element (it might be a text node)\n\tif(!(targetElement instanceof Element)) {\n\t\tremoveElement();\n\t\treturn;\n\t}\n\t// Abandon if hidden\n\tif(targetElement.style.display != \"block\" ) {\n\t\tremoveElement();\n\t\treturn;\n\t}\n\t// Set up the tiddler that is being closed\n\t$tw.utils.addClass(targetElement,\"tc-storyview-zoomin-tiddler\");\n\t$tw.utils.setStyle(targetElement,[\n\t\t{display: \"block\"},\n\t\t{transformOrigin: \"50% 50%\"},\n\t\t{transform: \"translateX(0px) translateY(0px) scale(1)\"},\n\t\t{transition: \"none\"},\n\t\t{zIndex: \"0\"}\n\t]);\n\t// We'll move back to the previous or next element in the story\n\tvar toWidget = widget.previousSibling();\n\tif(!toWidget) {\n\t\ttoWidget = widget.nextSibling();\n\t}\n\tvar toWidgetDomNode = toWidget && toWidget.findFirstDomNode();\n\t// Set up the tiddler we're moving back in\n\tif(toWidgetDomNode) 
{\n\t\t$tw.utils.addClass(toWidgetDomNode,\"tc-storyview-zoomin-tiddler\");\n\t\t$tw.utils.setStyle(toWidgetDomNode,[\n\t\t\t{display: \"block\"},\n\t\t\t{transformOrigin: \"50% 50%\"},\n\t\t\t{transform: \"translateX(0px) translateY(0px) scale(10)\"},\n\t\t\t{transition: $tw.utils.roundTripPropertyName(\"transform\") + \" \" + duration + \"ms \" + easing + \", opacity \" + duration + \"ms \" + easing},\n\t\t\t{opacity: \"0\"},\n\t\t\t{zIndex: \"500\"}\n\t\t]);\n\t\tthis.currentTiddlerDomNode = toWidgetDomNode;\n\t}\n\t// Animate them both\n\t// Force layout\n\t$tw.utils.forceLayout(this.listWidget.parentDomNode);\n\t// First, the tiddler we're closing\n\t$tw.utils.setStyle(targetElement,[\n\t\t{transformOrigin: \"50% 50%\"},\n\t\t{transform: \"translateX(0px) translateY(0px) scale(0.1)\"},\n\t\t{transition: $tw.utils.roundTripPropertyName(\"transform\") + \" \" + duration + \"ms \" + easing + \", opacity \" + duration + \"ms \" + easing},\n\t\t{opacity: \"0\"},\n\t\t{zIndex: \"0\"}\n\t]);\n\tsetTimeout(removeElement,duration);\n\t// Now the tiddler we're going back to\n\tif(toWidgetDomNode) {\n\t\t$tw.utils.setStyle(toWidgetDomNode,[\n\t\t\t{transform: \"translateX(0px) translateY(0px) scale(1)\"},\n\t\t\t{opacity: \"1\"}\n\t\t]);\n\t}\n\treturn true; // Indicate that we'll delete the DOM node\n};\n\nexports.zoomin = ZoominListView;\n\n})();\n",
"title": "$:/core/modules/storyviews/zoomin.js",
"type": "application/javascript",
"module-type": "storyview"
},
"$:/core/modules/syncer.js": {
"text": "/*\\\ntitle: $:/core/modules/syncer.js\ntype: application/javascript\nmodule-type: global\n\nThe syncer tracks changes to the store. If a syncadaptor is used then individual tiddlers are synchronised through it. If there is no syncadaptor then the entire wiki is saved via saver modules.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nInstantiate the syncer with the following options:\nsyncadaptor: reference to syncadaptor to be used\nwiki: wiki to be synced\n*/\nfunction Syncer(options) {\n\tvar self = this;\n\tthis.wiki = options.wiki;\n\tthis.syncadaptor = options.syncadaptor;\n\t// Make a logger\n\tthis.logger = new $tw.utils.Logger(\"syncer\" + ($tw.browser ? \"-browser\" : \"\") + ($tw.node ? \"-server\" : \"\"));\n\t// Compile the dirty tiddler filter\n\tthis.filterFn = this.wiki.compileFilter(this.wiki.getTiddlerText(this.titleSyncFilter));\n\t// Record information for known tiddlers\n\tthis.readTiddlerInfo();\n\t// Tasks are {type: \"load\"/\"save\"/\"delete\", title:, queueTime:, lastModificationTime:}\n\tthis.taskQueue = {}; // Hashmap of tasks yet to be performed\n\tthis.taskInProgress = {}; // Hash of tasks in progress\n\tthis.taskTimerId = null; // Timer for task dispatch\n\tthis.pollTimerId = null; // Timer for polling server\n\t// Listen out for changes to tiddlers\n\tthis.wiki.addEventListener(\"change\",function(changes) {\n\t\tself.syncToServer(changes);\n\t});\n\t// Browser event handlers\n\tif($tw.browser) {\n\t\t// Set up our beforeunload handler\n\t\t$tw.addUnloadTask(function(event) {\n\t\t\tvar confirmationMessage;\n\t\t\tif(self.isDirty()) {\n\t\t\t\tconfirmationMessage = $tw.language.getString(\"UnsavedChangesWarning\");\n\t\t\t\tevent.returnValue = confirmationMessage; // Gecko\n\t\t\t}\n\t\t\treturn confirmationMessage;\n\t\t});\n\t\t// Listen out for login/logout/refresh events in the browser\n\t\t$tw.rootWidget.addEventListener(\"tm-login\",function() {\n\t\t\tself.handleLoginEvent();\n\t\t});\n\t\t$tw.rootWidget.addEventListener(\"tm-logout\",function() {\n\t\t\tself.handleLogoutEvent();\n\t\t});\n\t\t$tw.rootWidget.addEventListener(\"tm-server-refresh\",function() {\n\t\t\tself.handleRefreshEvent();\n\t\t});\n\t}\n\t// Listen out for lazyLoad events\n\tthis.wiki.addEventListener(\"lazyLoad\",function(title) {\n\t\tself.handleLazyLoadEvent(title);\n\t});\n\t// Get the login status\n\tthis.getStatus(function(err,isLoggedIn) {\n\t\t// Do a sync from the server\n\t\tself.syncFromServer();\n\t});\n}\n\n/*\nConstants\n*/\nSyncer.prototype.titleIsLoggedIn = \"$:/status/IsLoggedIn\";\nSyncer.prototype.titleUserName = \"$:/status/UserName\";\nSyncer.prototype.titleSyncFilter = \"$:/config/SyncFilter\";\nSyncer.prototype.titleSavedNotification = \"$:/language/Notifications/Save/Done\";\nSyncer.prototype.taskTimerInterval = 1 * 1000; // Interval for sync timer\nSyncer.prototype.throttleInterval = 1 * 1000; // Defer saving tiddlers if they've changed in the last 1s...\nSyncer.prototype.fallbackInterval = 10 * 1000; // Unless the task is older than 10s\nSyncer.prototype.pollTimerInterval = 60 * 1000; // Interval for polling for changes from the adaptor\n\n\n/*\nRead (or re-read) the latest tiddler info from the store\n*/\nSyncer.prototype.readTiddlerInfo = function() {\n\t// Hashmap by title of {revision:,changeCount:,adaptorInfo:}\n\tthis.tiddlerInfo = {};\n\t// Record information for known tiddlers\n\tvar self = this,\n\t\ttiddlers = 
this.filterFn.call(this.wiki);\n\t$tw.utils.each(tiddlers,function(title) {\n\t\tvar tiddler = self.wiki.getTiddler(title);\n\t\tself.tiddlerInfo[title] = {\n\t\t\trevision: tiddler.fields.revision,\n\t\t\tadaptorInfo: self.syncadaptor && self.syncadaptor.getTiddlerInfo(tiddler),\n\t\t\tchangeCount: self.wiki.getChangeCount(title),\n\t\t\thasBeenLazyLoaded: false\n\t\t};\n\t});\n};\n\n/*\nCreate an tiddlerInfo structure if it doesn't already exist\n*/\nSyncer.prototype.createTiddlerInfo = function(title) {\n\tif(!$tw.utils.hop(this.tiddlerInfo,title)) {\n\t\tthis.tiddlerInfo[title] = {\n\t\t\trevision: null,\n\t\t\tadaptorInfo: {},\n\t\t\tchangeCount: -1,\n\t\t\thasBeenLazyLoaded: false\n\t\t};\n\t}\n};\n\n/*\nChecks whether the wiki is dirty (ie the window shouldn't be closed)\n*/\nSyncer.prototype.isDirty = function() {\n\treturn (this.numTasksInQueue() > 0) || (this.numTasksInProgress() > 0);\n};\n\n/*\nUpdate the document body with the class \"tc-dirty\" if the wiki has unsaved/unsynced changes\n*/\nSyncer.prototype.updateDirtyStatus = function() {\n\tif($tw.browser) {\n\t\t$tw.utils.toggleClass(document.body,\"tc-dirty\",this.isDirty());\n\t}\n};\n\n/*\nSave an incoming tiddler in the store, and updates the associated tiddlerInfo\n*/\nSyncer.prototype.storeTiddler = function(tiddlerFields) {\n\t// Save the tiddler\n\tvar tiddler = new $tw.Tiddler(this.wiki.getTiddler(tiddlerFields.title),tiddlerFields);\n\tthis.wiki.addTiddler(tiddler);\n\t// Save the tiddler revision and changeCount details\n\tthis.tiddlerInfo[tiddlerFields.title] = {\n\t\trevision: tiddlerFields.revision,\n\t\tadaptorInfo: this.syncadaptor.getTiddlerInfo(tiddler),\n\t\tchangeCount: this.wiki.getChangeCount(tiddlerFields.title),\n\t\thasBeenLazyLoaded: true\n\t};\n};\n\nSyncer.prototype.getStatus = function(callback) {\n\tvar self = this;\n\t// Check if the adaptor supports getStatus()\n\tif(this.syncadaptor && this.syncadaptor.getStatus) {\n\t\t// Mark us as not logged in\n\t\tthis.wiki.addTiddler({title: this.titleIsLoggedIn,text: \"no\"});\n\t\t// Get login status\n\t\tthis.syncadaptor.getStatus(function(err,isLoggedIn,username) {\n\t\t\tif(err) {\n\t\t\t\tself.logger.alert(err);\n\t\t\t\treturn;\n\t\t\t}\n\t\t\t// Set the various status tiddlers\n\t\t\tself.wiki.addTiddler({title: self.titleIsLoggedIn,text: isLoggedIn ? 
\"yes\" : \"no\"});\n\t\t\tif(isLoggedIn) {\n\t\t\t\tself.wiki.addTiddler({title: self.titleUserName,text: username || \"\"});\n\t\t\t} else {\n\t\t\t\tself.wiki.deleteTiddler(self.titleUserName);\n\t\t\t}\n\t\t\t// Invoke the callback\n\t\t\tif(callback) {\n\t\t\t\tcallback(err,isLoggedIn,username);\n\t\t\t}\n\t\t});\n\t} else {\n\t\tcallback(null,true,\"UNAUTHENTICATED\");\n\t}\n};\n\n/*\nSynchronise from the server by reading the skinny tiddler list and queuing up loads for any tiddlers that we don't already have up to date\n*/\nSyncer.prototype.syncFromServer = function() {\n\tif(this.syncadaptor && this.syncadaptor.getSkinnyTiddlers) {\n\t\tthis.logger.log(\"Retrieving skinny tiddler list\");\n\t\tvar self = this;\n\t\tif(this.pollTimerId) {\n\t\t\tclearTimeout(this.pollTimerId);\n\t\t\tthis.pollTimerId = null;\n\t\t}\n\t\tthis.syncadaptor.getSkinnyTiddlers(function(err,tiddlers) {\n\t\t\t// Trigger the next sync\n\t\t\tself.pollTimerId = setTimeout(function() {\n\t\t\t\tself.pollTimerId = null;\n\t\t\t\tself.syncFromServer.call(self);\n\t\t\t},self.pollTimerInterval);\n\t\t\t// Check for errors\n\t\t\tif(err) {\n\t\t\t\tself.logger.alert($tw.language.getString(\"Error/RetrievingSkinny\") + \":\",err);\n\t\t\t\treturn;\n\t\t\t}\n\t\t\t// Process each incoming tiddler\n\t\t\tfor(var t=0; t<tiddlers.length; t++) {\n\t\t\t\t// Get the incoming tiddler fields, and the existing tiddler\n\t\t\t\tvar tiddlerFields = tiddlers[t],\n\t\t\t\t\tincomingRevision = tiddlerFields.revision + \"\",\n\t\t\t\t\ttiddler = self.wiki.getTiddler(tiddlerFields.title),\n\t\t\t\t\ttiddlerInfo = self.tiddlerInfo[tiddlerFields.title],\n\t\t\t\t\tcurrRevision = tiddlerInfo ? tiddlerInfo.revision : null;\n\t\t\t\t// Ignore the incoming tiddler if it's the same as the revision we've already got\n\t\t\t\tif(currRevision !== incomingRevision) {\n\t\t\t\t\t// Do a full load if we've already got a fat version of the tiddler\n\t\t\t\t\tif(tiddler && tiddler.fields.text !== undefined) {\n\t\t\t\t\t\t// Do a full load of this tiddler\n\t\t\t\t\t\tself.enqueueSyncTask({\n\t\t\t\t\t\t\ttype: \"load\",\n\t\t\t\t\t\t\ttitle: tiddlerFields.title\n\t\t\t\t\t\t});\n\t\t\t\t\t} else {\n\t\t\t\t\t\t// Load the skinny version of the tiddler\n\t\t\t\t\t\tself.storeTiddler(tiddlerFields);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t});\n\t}\n};\n\n/*\nSynchronise a set of changes to the server\n*/\nSyncer.prototype.syncToServer = function(changes) {\n\tvar self = this,\n\t\tnow = Date.now(),\n\t\tfilteredChanges = this.filterFn.call(this.wiki,function(callback) {\n\t\t\t$tw.utils.each(changes,function(change,title) {\n\t\t\t\tvar tiddler = self.wiki.getTiddler(title);\n\t\t\t\tcallback(tiddler,title);\n\t\t\t});\n\t\t});\n\t$tw.utils.each(changes,function(change,title,object) {\n\t\t// Process the change if it is a deletion of a tiddler we're already syncing, or is on the filtered change list\n\t\tif((change.deleted && $tw.utils.hop(self.tiddlerInfo,title)) || filteredChanges.indexOf(title) !== -1) {\n\t\t\t// Queue a task to sync this tiddler\n\t\t\tself.enqueueSyncTask({\n\t\t\t\ttype: change.deleted ? 
\"delete\" : \"save\",\n\t\t\t\ttitle: title\n\t\t\t});\n\t\t}\n\t});\n};\n\n/*\nLazily load a skinny tiddler if we can\n*/\nSyncer.prototype.handleLazyLoadEvent = function(title) {\n\t// Don't lazy load the same tiddler twice\n\tvar info = this.tiddlerInfo[title];\n\tif(!info || !info.hasBeenLazyLoaded) {\n\t\tthis.createTiddlerInfo(title);\n\t\tthis.tiddlerInfo[title].hasBeenLazyLoaded = true;\n\t\t// Queue up a sync task to load this tiddler\n\t\tthis.enqueueSyncTask({\n\t\t\ttype: \"load\",\n\t\t\ttitle: title\n\t\t});\t\t\n\t}\n};\n\n/*\nDispay a password prompt and allow the user to login\n*/\nSyncer.prototype.handleLoginEvent = function() {\n\tvar self = this;\n\tthis.getStatus(function(err,isLoggedIn,username) {\n\t\tif(!isLoggedIn) {\n\t\t\t$tw.passwordPrompt.createPrompt({\n\t\t\t\tserviceName: $tw.language.getString(\"LoginToTiddlySpace\"),\n\t\t\t\tcallback: function(data) {\n\t\t\t\t\tself.login(data.username,data.password,function(err,isLoggedIn) {\n\t\t\t\t\t\tself.syncFromServer();\n\t\t\t\t\t});\n\t\t\t\t\treturn true; // Get rid of the password prompt\n\t\t\t\t}\n\t\t\t});\n\t\t}\n\t});\n};\n\n/*\nAttempt to login to TiddlyWeb.\n\tusername: username\n\tpassword: password\n\tcallback: invoked with arguments (err,isLoggedIn)\n*/\nSyncer.prototype.login = function(username,password,callback) {\n\tthis.logger.log(\"Attempting to login as\",username);\n\tvar self = this;\n\tif(this.syncadaptor.login) {\n\t\tthis.syncadaptor.login(username,password,function(err) {\n\t\t\tif(err) {\n\t\t\t\treturn callback(err);\n\t\t\t}\n\t\t\tself.getStatus(function(err,isLoggedIn,username) {\n\t\t\t\tif(callback) {\n\t\t\t\t\tcallback(null,isLoggedIn);\n\t\t\t\t}\n\t\t\t});\n\t\t});\n\t} else {\n\t\tcallback(null,true);\n\t}\n};\n\n/*\nAttempt to log out of TiddlyWeb\n*/\nSyncer.prototype.handleLogoutEvent = function() {\n\tthis.logger.log(\"Attempting to logout\");\n\tvar self = this;\n\tif(this.syncadaptor.logout) {\n\t\tthis.syncadaptor.logout(function(err) {\n\t\t\tif(err) {\n\t\t\t\tself.logger.alert(err);\n\t\t\t} else {\n\t\t\t\tself.getStatus();\n\t\t\t}\n\t\t});\n\t}\n};\n\n/*\nImmediately refresh from the server\n*/\nSyncer.prototype.handleRefreshEvent = function() {\n\tthis.syncFromServer();\n};\n\n/*\nQueue up a sync task. If there is already a pending task for the tiddler, just update the last modification time\n*/\nSyncer.prototype.enqueueSyncTask = function(task) {\n\tvar self = this,\n\t\tnow = Date.now();\n\t// Set the timestamps on this task\n\ttask.queueTime = now;\n\ttask.lastModificationTime = now;\n\t// Fill in some tiddlerInfo if the tiddler is one we haven't seen before\n\tthis.createTiddlerInfo(task.title);\n\t// Bail if this is a save and the tiddler is already at the changeCount that the server has\n\tif(task.type === \"save\" && this.wiki.getChangeCount(task.title) <= this.tiddlerInfo[task.title].changeCount) {\n\t\treturn;\n\t}\n\t// Check if this tiddler is already in the queue\n\tif($tw.utils.hop(this.taskQueue,task.title)) {\n\t\t// this.logger.log(\"Re-queueing up sync task with type:\",task.type,\"title:\",task.title);\n\t\tvar existingTask = this.taskQueue[task.title];\n\t\t// If so, just update the last modification time\n\t\texistingTask.lastModificationTime = task.lastModificationTime;\n\t\t// If the new task is a save then we upgrade the existing task to a save. Thus a pending load is turned into a save if the tiddler changes locally in the meantime. 
But a pending save is not modified to become a load\n\t\tif(task.type === \"save\" || task.type === \"delete\") {\n\t\t\texistingTask.type = task.type;\n\t\t}\n\t} else {\n\t\t// this.logger.log(\"Queuing up sync task with type:\",task.type,\"title:\",task.title);\n\t\t// If it is not in the queue, insert it\n\t\tthis.taskQueue[task.title] = task;\n\t\tthis.updateDirtyStatus();\n\t}\n\t// Process the queue\n\t$tw.utils.nextTick(function() {self.processTaskQueue.call(self);});\n};\n\n/*\nReturn the number of tasks in progress\n*/\nSyncer.prototype.numTasksInProgress = function() {\n\treturn $tw.utils.count(this.taskInProgress);\n};\n\n/*\nReturn the number of tasks in the queue\n*/\nSyncer.prototype.numTasksInQueue = function() {\n\treturn $tw.utils.count(this.taskQueue);\n};\n\n/*\nTrigger a timeout if one isn't already outstanding\n*/\nSyncer.prototype.triggerTimeout = function() {\n\tvar self = this;\n\tif(!this.taskTimerId) {\n\t\tthis.taskTimerId = setTimeout(function() {\n\t\t\tself.taskTimerId = null;\n\t\t\tself.processTaskQueue.call(self);\n\t\t},self.taskTimerInterval);\n\t}\n};\n\n/*\nProcess the task queue, performing the next task if appropriate\n*/\nSyncer.prototype.processTaskQueue = function() {\n\tvar self = this;\n\t// Only process a task if the sync adaptor is fully initialised and we're not already performing a task. If we are already performing a task then we'll dispatch the next one when it completes\n\tif(this.syncadaptor.isReady() && this.numTasksInProgress() === 0) {\n\t\t// Choose the next task to perform\n\t\tvar task = this.chooseNextTask();\n\t\t// Perform the task if we had one\n\t\tif(task) {\n\t\t\t// Remove the task from the queue and add it to the in progress list\n\t\t\tdelete this.taskQueue[task.title];\n\t\t\tthis.taskInProgress[task.title] = task;\n\t\t\tthis.updateDirtyStatus();\n\t\t\t// Dispatch the task\n\t\t\tthis.dispatchTask(task,function(err) {\n\t\t\t\tif(err) {\n\t\t\t\t\tself.logger.alert(\"Sync error while processing '\" + task.title + \"':\\n\" + err);\n\t\t\t\t}\n\t\t\t\t// Mark that this task is no longer in progress\n\t\t\t\tdelete self.taskInProgress[task.title];\n\t\t\t\tself.updateDirtyStatus();\n\t\t\t\t// Process the next task\n\t\t\t\tself.processTaskQueue.call(self);\n\t\t\t});\n\t\t} else {\n\t\t\t// Make sure we've set a time if there wasn't a task to perform, but we've still got tasks in the queue\n\t\t\tif(this.numTasksInQueue() > 0) {\n\t\t\t\tthis.triggerTimeout();\n\t\t\t}\n\t\t}\n\t}\n};\n\n/*\nChoose the next applicable task\n*/\nSyncer.prototype.chooseNextTask = function() {\n\tvar self = this,\n\t\tcandidateTask = null,\n\t\tnow = Date.now();\n\t// Select the best candidate task\n\t$tw.utils.each(this.taskQueue,function(task,title) {\n\t\t// Exclude the task if there's one of the same name in progress\n\t\tif($tw.utils.hop(self.taskInProgress,title)) {\n\t\t\treturn;\n\t\t}\n\t\t// Exclude the task if it is a save and the tiddler has been modified recently, but not hit the fallback time\n\t\tif(task.type === \"save\" && (now - task.lastModificationTime) < self.throttleInterval &&\n\t\t\t(now - task.queueTime) < self.fallbackInterval) {\n\t\t\treturn;\n\t\t}\n\t\t// Exclude the task if it is newer than the current best candidate\n\t\tif(candidateTask && candidateTask.queueTime < task.queueTime) {\n\t\t\treturn;\n\t\t}\n\t\t// Now this is our best candidate\n\t\tcandidateTask = task;\n\t});\n\treturn candidateTask;\n};\n\n/*\nDispatch a task and invoke the callback\n*/\nSyncer.prototype.dispatchTask = 
function(task,callback) {\n\tvar self = this;\n\tif(task.type === \"save\") {\n\t\tvar changeCount = this.wiki.getChangeCount(task.title),\n\t\t\ttiddler = this.wiki.getTiddler(task.title);\n\t\tthis.logger.log(\"Dispatching 'save' task:\",task.title);\n\t\tif(tiddler) {\n\t\t\tthis.syncadaptor.saveTiddler(tiddler,function(err,adaptorInfo,revision) {\n\t\t\t\tif(err) {\n\t\t\t\t\treturn callback(err);\n\t\t\t\t}\n\t\t\t\t// Adjust the info stored about this tiddler\n\t\t\t\tself.tiddlerInfo[task.title] = {\n\t\t\t\t\tchangeCount: changeCount,\n\t\t\t\t\tadaptorInfo: adaptorInfo,\n\t\t\t\t\trevision: revision\n\t\t\t\t};\n\t\t\t\t// Invoke the callback\n\t\t\t\tcallback(null);\n\t\t\t},{\n\t\t\t\ttiddlerInfo: self.tiddlerInfo[task.title]\n\t\t\t});\n\t\t} else {\n\t\t\tthis.logger.log(\" Not Dispatching 'save' task:\",task.title,\"tiddler does not exist\");\n\t\t\treturn callback(null);\n\t\t}\n\t} else if(task.type === \"load\") {\n\t\t// Load the tiddler\n\t\tthis.logger.log(\"Dispatching 'load' task:\",task.title);\n\t\tthis.syncadaptor.loadTiddler(task.title,function(err,tiddlerFields) {\n\t\t\tif(err) {\n\t\t\t\treturn callback(err);\n\t\t\t}\n\t\t\t// Store the tiddler\n\t\t\tif(tiddlerFields) {\n\t\t\t\tself.storeTiddler(tiddlerFields);\n\t\t\t}\n\t\t\t// Invoke the callback\n\t\t\tcallback(null);\n\t\t});\n\t} else if(task.type === \"delete\") {\n\t\t// Delete the tiddler\n\t\tthis.logger.log(\"Dispatching 'delete' task:\",task.title);\n\t\tthis.syncadaptor.deleteTiddler(task.title,function(err) {\n\t\t\tif(err) {\n\t\t\t\treturn callback(err);\n\t\t\t}\n\t\t\tdelete self.tiddlerInfo[task.title];\n\t\t\t// Invoke the callback\n\t\t\tcallback(null);\n\t\t},{\n\t\t\ttiddlerInfo: self.tiddlerInfo[task.title]\n\t\t});\n\t}\n};\n\nexports.Syncer = Syncer;\n\n})();\n",
"title": "$:/core/modules/syncer.js",
"type": "application/javascript",
"module-type": "global"
},
"$:/core/modules/tiddler.js": {
"text": "/*\\\ntitle: $:/core/modules/tiddler.js\ntype: application/javascript\nmodule-type: tiddlermethod\n\nExtension methods for the $tw.Tiddler object (constructor and methods required at boot time are in boot/boot.js)\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.hasTag = function(tag) {\n\treturn this.fields.tags && this.fields.tags.indexOf(tag) !== -1;\n};\n\nexports.isPlugin = function() {\n\treturn this.fields.type === \"application/json\" && this.hasField(\"plugin-type\");\n};\n\nexports.isDraft = function() {\n\treturn this.hasField(\"draft.of\");\n};\n\nexports.getFieldString = function(field) {\n\tvar value = this.fields[field];\n\t// Check for a missing field\n\tif(value === undefined || value === null) {\n\t\treturn \"\";\n\t}\n\t// Parse the field with the associated module (if any)\n\tvar fieldModule = $tw.Tiddler.fieldModules[field];\n\tif(fieldModule && fieldModule.stringify) {\n\t\treturn fieldModule.stringify.call(this,value);\n\t} else {\n\t\treturn value.toString();\n\t}\n};\n\n/*\nGet all the fields as a name:value block. Options:\n\texclude: an array of field names to exclude\n*/\nexports.getFieldStringBlock = function(options) {\n\toptions = options || {};\n\tvar exclude = options.exclude || [];\n\tvar fields = [];\n\tfor(var field in this.fields) {\n\t\tif($tw.utils.hop(this.fields,field)) {\n\t\t\tif(exclude.indexOf(field) === -1) {\n\t\t\t\tfields.push(field + \": \" + this.getFieldString(field));\n\t\t\t}\n\t\t}\n\t}\n\treturn fields.join(\"\\n\");\n};\n\n/*\nCompare two tiddlers for equality\ntiddler: the tiddler to compare\nexcludeFields: array of field names to exclude from the comparison\n*/\nexports.isEqual = function(tiddler,excludeFields) {\n\tif(!(tiddler instanceof $tw.Tiddler)) {\n\t\treturn false;\n\t}\n\texcludeFields = excludeFields || [];\n\tvar self = this,\n\t\tdifferences = []; // Fields that have differences\n\t// Add to the differences array\n\tfunction addDifference(fieldName) {\n\t\t// Check for this field being excluded\n\t\tif(excludeFields.indexOf(fieldName) === -1) {\n\t\t\t// Save the field as a difference\n\t\t\t$tw.utils.pushTop(differences,fieldName);\n\t\t}\n\t}\n\t// Returns true if the two values of this field are equal\n\tfunction isFieldValueEqual(fieldName) {\n\t\tvar valueA = self.fields[fieldName],\n\t\t\tvalueB = tiddler.fields[fieldName];\n\t\t// Check for identical string values\n\t\tif(typeof(valueA) === \"string\" && typeof(valueB) === \"string\" && valueA === valueB) {\n\t\t\treturn true;\n\t\t}\n\t\t// Check for identical array values\n\t\tif($tw.utils.isArray(valueA) && $tw.utils.isArray(valueB) && $tw.utils.isArrayEqual(valueA,valueB)) {\n\t\t\treturn true;\n\t\t}\n\t\t// Otherwise the fields must be different\n\t\treturn false;\n\t}\n\t// Compare our fields\n\tfor(var fieldName in this.fields) {\n\t\tif(!isFieldValueEqual(fieldName)) {\n\t\t\taddDifference(fieldName);\n\t\t}\n\t}\n\t// There's a difference for every field in the other tiddler that we don't have\n\tfor(fieldName in tiddler.fields) {\n\t\tif(!(fieldName in this.fields)) {\n\t\t\taddDifference(fieldName);\n\t\t}\n\t}\n\t// Return whether there were any differences\n\treturn differences.length === 0;\n};\n\n})();\n",
"title": "$:/core/modules/tiddler.js",
"type": "application/javascript",
"module-type": "tiddlermethod"
},
"$:/core/modules/upgraders/plugins.js": {
"text": "/*\\\ntitle: $:/core/modules/upgraders/plugins.js\ntype: application/javascript\nmodule-type: upgrader\n\nUpgrader module that checks that plugins are newer than any already installed version\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar UPGRADE_LIBRARY_TITLE = \"$:/UpgradeLibrary\";\n\nvar BLOCKED_PLUGINS = {\n\t\"$:/themes/tiddlywiki/stickytitles\": {\n\t\tversions: [\"*\"]\n\t},\n\t\"$:/plugins/tiddlywiki/fullscreen\": {\n\t\tversions: [\"*\"]\n\t}\n};\n\nexports.upgrade = function(wiki,titles,tiddlers) {\n\tvar self = this,\n\t\tmessages = {},\n\t\tupgradeLibrary,\n\t\tgetLibraryTiddler = function(title) {\n\t\t\tif(!upgradeLibrary) {\n\t\t\t\tupgradeLibrary = wiki.getTiddlerData(UPGRADE_LIBRARY_TITLE,{});\n\t\t\t\tupgradeLibrary.tiddlers = upgradeLibrary.tiddlers || {};\n\t\t\t}\n\t\t\treturn upgradeLibrary.tiddlers[title];\n\t\t};\n\n\t// Go through all the incoming tiddlers\n\t$tw.utils.each(titles,function(title) {\n\t\tvar incomingTiddler = tiddlers[title];\n\t\t// Check if we're dealing with a plugin\n\t\tif(incomingTiddler && incomingTiddler[\"plugin-type\"] && incomingTiddler.version) {\n\t\t\t// Upgrade the incoming plugin if it is in the upgrade library\n\t\t\tvar libraryTiddler = getLibraryTiddler(title);\n\t\t\tif(libraryTiddler && libraryTiddler[\"plugin-type\"] && libraryTiddler.version) {\n\t\t\t\ttiddlers[title] = libraryTiddler;\n\t\t\t\tmessages[title] = $tw.language.getString(\"Import/Upgrader/Plugins/Upgraded\",{variables: {incoming: incomingTiddler.version, upgraded: libraryTiddler.version}});\n\t\t\t\treturn;\n\t\t\t}\n\t\t\t// Suppress the incoming plugin if it is older than the currently installed one\n\t\t\tvar existingTiddler = wiki.getTiddler(title);\n\t\t\tif(existingTiddler && existingTiddler.hasField(\"plugin-type\") && existingTiddler.hasField(\"version\")) {\n\t\t\t\t// Reject the incoming plugin by blanking all its fields\n\t\t\t\tif($tw.utils.checkVersions(existingTiddler.fields.version,incomingTiddler.version)) {\n\t\t\t\t\ttiddlers[title] = Object.create(null);\n\t\t\t\t\tmessages[title] = $tw.language.getString(\"Import/Upgrader/Plugins/Suppressed/Version\",{variables: {incoming: incomingTiddler.version, existing: existingTiddler.fields.version}});\n\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif(incomingTiddler && incomingTiddler[\"plugin-type\"]) {\n\t\t\t// Check whether the plugin is on the blocked list\n\t\t\tvar blockInfo = BLOCKED_PLUGINS[title];\n\t\t\tif(blockInfo) {\n\t\t\t\tif(blockInfo.versions.indexOf(\"*\") !== -1 || (incomingTiddler.version && blockInfo.versions.indexOf(incomingTiddler.version) !== -1)) {\n\t\t\t\t\ttiddlers[title] = Object.create(null);\n\t\t\t\t\tmessages[title] = $tw.language.getString(\"Import/Upgrader/Plugins/Suppressed/Incompatible\");\n\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t});\n\treturn messages;\n};\n\n})();\n",
"title": "$:/core/modules/upgraders/plugins.js",
"type": "application/javascript",
"module-type": "upgrader"
},
"$:/core/modules/upgraders/system.js": {
"text": "/*\\\ntitle: $:/core/modules/upgraders/system.js\ntype: application/javascript\nmodule-type: upgrader\n\nUpgrader module that suppresses certain system tiddlers that shouldn't be imported\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar DONT_IMPORT_LIST = [\"$:/StoryList\",\"$:/HistoryList\"],\n\tDONT_IMPORT_PREFIX_LIST = [\"$:/temp/\",\"$:/state/\"];\n\nexports.upgrade = function(wiki,titles,tiddlers) {\n\tvar self = this,\n\t\tmessages = {};\n\t// Check for tiddlers on our list\n\t$tw.utils.each(titles,function(title) {\n\t\tif(DONT_IMPORT_LIST.indexOf(title) !== -1) {\n\t\t\ttiddlers[title] = Object.create(null);\n\t\t\tmessages[title] = $tw.language.getString(\"Import/Upgrader/System/Suppressed\");\n\t\t} else {\n\t\t\tfor(var t=0; t<DONT_IMPORT_PREFIX_LIST.length; t++) {\n\t\t\t\tvar prefix = DONT_IMPORT_PREFIX_LIST[t];\n\t\t\t\tif(title.substr(0,prefix.length) === prefix) {\n\t\t\t\t\ttiddlers[title] = Object.create(null);\n\t\t\t\t\tmessages[title] = $tw.language.getString(\"Import/Upgrader/State/Suppressed\");\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t});\n\treturn messages;\n};\n\n})();\n",
"title": "$:/core/modules/upgraders/system.js",
"type": "application/javascript",
"module-type": "upgrader"
},
"$:/core/modules/upgraders/themetweaks.js": {
"text": "/*\\\ntitle: $:/core/modules/upgraders/themetweaks.js\ntype: application/javascript\nmodule-type: upgrader\n\nUpgrader module that handles the change in theme tweak storage introduced in 5.0.14-beta.\n\nPreviously, theme tweaks were stored in two data tiddlers:\n\n* $:/themes/tiddlywiki/vanilla/metrics\n* $:/themes/tiddlywiki/vanilla/settings\n\nNow, each tweak is stored in its own separate tiddler.\n\nThis upgrader copies any values from the old format to the new. The old data tiddlers are not deleted in case they have been used to store additional indexes.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar MAPPINGS = {\n\t\"$:/themes/tiddlywiki/vanilla/metrics\": {\n\t\t\"fontsize\": \"$:/themes/tiddlywiki/vanilla/metrics/fontsize\",\n\t\t\"lineheight\": \"$:/themes/tiddlywiki/vanilla/metrics/lineheight\",\n\t\t\"storyleft\": \"$:/themes/tiddlywiki/vanilla/metrics/storyleft\",\n\t\t\"storytop\": \"$:/themes/tiddlywiki/vanilla/metrics/storytop\",\n\t\t\"storyright\": \"$:/themes/tiddlywiki/vanilla/metrics/storyright\",\n\t\t\"storywidth\": \"$:/themes/tiddlywiki/vanilla/metrics/storywidth\",\n\t\t\"tiddlerwidth\": \"$:/themes/tiddlywiki/vanilla/metrics/tiddlerwidth\"\n\t},\n\t\"$:/themes/tiddlywiki/vanilla/settings\": {\n\t\t\"fontfamily\": \"$:/themes/tiddlywiki/vanilla/settings/fontfamily\"\n\t}\n};\n\nexports.upgrade = function(wiki,titles,tiddlers) {\n\tvar self = this,\n\t\tmessages = {};\n\t// Check for tiddlers on our list\n\t$tw.utils.each(titles,function(title) {\n\t\tvar mapping = MAPPINGS[title];\n\t\tif(mapping) {\n\t\t\tvar tiddler = new $tw.Tiddler(tiddlers[title]),\n\t\t\t\ttiddlerData = wiki.getTiddlerDataCached(tiddler,{});\n\t\t\tfor(var index in mapping) {\n\t\t\t\tvar mappedTitle = mapping[index];\n\t\t\t\tif(!tiddlers[mappedTitle] || tiddlers[mappedTitle].title !== mappedTitle) {\n\t\t\t\t\ttiddlers[mappedTitle] = {\n\t\t\t\t\t\ttitle: mappedTitle,\n\t\t\t\t\t\ttext: tiddlerData[index]\n\t\t\t\t\t};\n\t\t\t\t\tmessages[mappedTitle] = $tw.language.getString(\"Import/Upgrader/ThemeTweaks/Created\",{variables: {\n\t\t\t\t\t\tfrom: title + \"##\" + index\n\t\t\t\t\t}});\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t});\n\treturn messages;\n};\n\n})();\n",
"title": "$:/core/modules/upgraders/themetweaks.js",
"type": "application/javascript",
"module-type": "upgrader"
},
"$:/core/modules/utils/crypto.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/crypto.js\ntype: application/javascript\nmodule-type: utils\n\nUtility functions related to crypto.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nLook for an encrypted store area in the text of a TiddlyWiki file\n*/\nexports.extractEncryptedStoreArea = function(text) {\n\tvar encryptedStoreAreaStartMarker = \"<pre id=\\\"encryptedStoreArea\\\" type=\\\"text/plain\\\" style=\\\"display:none;\\\">\",\n\t\tencryptedStoreAreaStart = text.indexOf(encryptedStoreAreaStartMarker);\n\tif(encryptedStoreAreaStart !== -1) {\n\t\tvar encryptedStoreAreaEnd = text.indexOf(\"</pre>\",encryptedStoreAreaStart);\n\t\tif(encryptedStoreAreaEnd !== -1) {\n\t\t\treturn $tw.utils.htmlDecode(text.substring(encryptedStoreAreaStart + encryptedStoreAreaStartMarker.length,encryptedStoreAreaEnd-1));\n\t\t}\n\t}\n\treturn null;\n};\n\n/*\nAttempt to extract the tiddlers from an encrypted store area using the current password. If the password is not provided then the password in the password store will be used\n*/\nexports.decryptStoreArea = function(encryptedStoreArea,password) {\n\tvar decryptedText = $tw.crypto.decrypt(encryptedStoreArea,password);\n\tif(decryptedText) {\n\t\tvar json = JSON.parse(decryptedText),\n\t\t\ttiddlers = [];\n\t\tfor(var title in json) {\n\t\t\tif(title !== \"$:/isEncrypted\") {\n\t\t\t\ttiddlers.push(json[title]);\n\t\t\t}\n\t\t}\n\t\treturn tiddlers;\n\t} else {\n\t\treturn null;\n\t}\n};\n\n\n/*\nAttempt to extract the tiddlers from an encrypted store area using the current password. If that fails, the user is prompted for a password.\nencryptedStoreArea: text of the TiddlyWiki encrypted store area\ncallback: function(tiddlers) called with the array of decrypted tiddlers\n\nThe following configuration settings are supported:\n\n$tw.config.usePasswordVault: causes any password entered by the user to also be put into the system password vault\n*/\nexports.decryptStoreAreaInteractive = function(encryptedStoreArea,callback,options) {\n\t// Try to decrypt with the current password\n\tvar tiddlers = $tw.utils.decryptStoreArea(encryptedStoreArea);\n\tif(tiddlers) {\n\t\tcallback(tiddlers);\n\t} else {\n\t\t// Prompt for a new password and keep trying\n\t\t$tw.passwordPrompt.createPrompt({\n\t\t\tserviceName: \"Enter a password to decrypt the imported TiddlyWiki\",\n\t\t\tnoUserName: true,\n\t\t\tcanCancel: true,\n\t\t\tsubmitText: \"Decrypt\",\n\t\t\tcallback: function(data) {\n\t\t\t\t// Exit if the user cancelled\n\t\t\t\tif(!data) {\n\t\t\t\t\treturn false;\n\t\t\t\t}\n\t\t\t\t// Attempt to decrypt the tiddlers\n\t\t\t\tvar tiddlers = $tw.utils.decryptStoreArea(encryptedStoreArea,data.password);\n\t\t\t\tif(tiddlers) {\n\t\t\t\t\tif($tw.config.usePasswordVault) {\n\t\t\t\t\t\t$tw.crypto.setPassword(data.password);\n\t\t\t\t\t}\n\t\t\t\t\tcallback(tiddlers);\n\t\t\t\t\t// Exit and remove the password prompt\n\t\t\t\t\treturn true;\n\t\t\t\t} else {\n\t\t\t\t\t// We didn't decrypt everything, so continue to prompt for password\n\t\t\t\t\treturn false;\n\t\t\t\t}\n\t\t\t}\n\t\t});\n\t}\n};\n\n})();\n",
"title": "$:/core/modules/utils/crypto.js",
"type": "application/javascript",
"module-type": "utils"
},
"$:/core/modules/utils/dom/animations/slide.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/dom/animations/slide.js\ntype: application/javascript\nmodule-type: animation\n\nA simple slide animation that varies the height of the element\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nfunction slideOpen(domNode,options) {\n\toptions = options || {};\n\tvar duration = options.duration || $tw.utils.getAnimationDuration();\n\t// Get the current height of the domNode\n\tvar computedStyle = window.getComputedStyle(domNode),\n\t\tcurrMarginBottom = parseInt(computedStyle.marginBottom,10),\n\t\tcurrMarginTop = parseInt(computedStyle.marginTop,10),\n\t\tcurrPaddingBottom = parseInt(computedStyle.paddingBottom,10),\n\t\tcurrPaddingTop = parseInt(computedStyle.paddingTop,10),\n\t\tcurrHeight = domNode.offsetHeight;\n\t// Reset the margin once the transition is over\n\tsetTimeout(function() {\n\t\t$tw.utils.setStyle(domNode,[\n\t\t\t{transition: \"none\"},\n\t\t\t{marginBottom: \"\"},\n\t\t\t{marginTop: \"\"},\n\t\t\t{paddingBottom: \"\"},\n\t\t\t{paddingTop: \"\"},\n\t\t\t{height: \"auto\"},\n\t\t\t{opacity: \"\"}\n\t\t]);\n\t\tif(options.callback) {\n\t\t\toptions.callback();\n\t\t}\n\t},duration);\n\t// Set up the initial position of the element\n\t$tw.utils.setStyle(domNode,[\n\t\t{transition: \"none\"},\n\t\t{marginTop: \"0px\"},\n\t\t{marginBottom: \"0px\"},\n\t\t{paddingTop: \"0px\"},\n\t\t{paddingBottom: \"0px\"},\n\t\t{height: \"0px\"},\n\t\t{opacity: \"0\"}\n\t]);\n\t$tw.utils.forceLayout(domNode);\n\t// Transition to the final position\n\t$tw.utils.setStyle(domNode,[\n\t\t{transition: \"margin-top \" + duration + \"ms ease-in-out, \" +\n\t\t\t\t\t\"margin-bottom \" + duration + \"ms ease-in-out, \" +\n\t\t\t\t\t\"padding-top \" + duration + \"ms ease-in-out, \" +\n\t\t\t\t\t\"padding-bottom \" + duration + \"ms ease-in-out, \" +\n\t\t\t\t\t\"height \" + duration + \"ms ease-in-out, \" +\n\t\t\t\t\t\"opacity \" + duration + \"ms ease-in-out\"},\n\t\t{marginBottom: currMarginBottom + \"px\"},\n\t\t{marginTop: currMarginTop + \"px\"},\n\t\t{paddingBottom: currPaddingBottom + \"px\"},\n\t\t{paddingTop: currPaddingTop + \"px\"},\n\t\t{height: currHeight + \"px\"},\n\t\t{opacity: \"1\"}\n\t]);\n}\n\nfunction slideClosed(domNode,options) {\n\toptions = options || {};\n\tvar duration = options.duration || $tw.utils.getAnimationDuration(),\n\t\tcurrHeight = domNode.offsetHeight;\n\t// Clear the properties we've set when the animation is over\n\tsetTimeout(function() {\n\t\t$tw.utils.setStyle(domNode,[\n\t\t\t{transition: \"none\"},\n\t\t\t{marginBottom: \"\"},\n\t\t\t{marginTop: \"\"},\n\t\t\t{paddingBottom: \"\"},\n\t\t\t{paddingTop: \"\"},\n\t\t\t{height: \"auto\"},\n\t\t\t{opacity: \"\"}\n\t\t]);\n\t\tif(options.callback) {\n\t\t\toptions.callback();\n\t\t}\n\t},duration);\n\t// Set up the initial position of the element\n\t$tw.utils.setStyle(domNode,[\n\t\t{height: currHeight + \"px\"},\n\t\t{opacity: \"1\"}\n\t]);\n\t$tw.utils.forceLayout(domNode);\n\t// Transition to the final position\n\t$tw.utils.setStyle(domNode,[\n\t\t{transition: \"margin-top \" + duration + \"ms ease-in-out, \" +\n\t\t\t\t\t\"margin-bottom \" + duration + \"ms ease-in-out, \" +\n\t\t\t\t\t\"padding-top \" + duration + \"ms ease-in-out, \" +\n\t\t\t\t\t\"padding-bottom \" + duration + \"ms ease-in-out, \" +\n\t\t\t\t\t\"height \" + duration + \"ms ease-in-out, \" +\n\t\t\t\t\t\"opacity \" + duration + \"ms ease-in-out\"},\n\t\t{marginTop: \"0px\"},\n\t\t{marginBottom: \"0px\"},\n\t\t{paddingTop: 
\"0px\"},\n\t\t{paddingBottom: \"0px\"},\n\t\t{height: \"0px\"},\n\t\t{opacity: \"0\"}\n\t]);\n}\n\nexports.slide = {\n\topen: slideOpen,\n\tclose: slideClosed\n};\n\n})();\n",
"title": "$:/core/modules/utils/dom/animations/slide.js",
"type": "application/javascript",
"module-type": "animation"
},
"$:/core/modules/utils/dom/animator.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/dom/animator.js\ntype: application/javascript\nmodule-type: utils\n\nOrchestrates animations and transitions\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nfunction Animator() {\n\t// Get the registered animation modules\n\tthis.animations = {};\n\t$tw.modules.applyMethods(\"animation\",this.animations);\n}\n\nAnimator.prototype.perform = function(type,domNode,options) {\n\toptions = options || {};\n\t// Find an animation that can handle this type\n\tvar chosenAnimation;\n\t$tw.utils.each(this.animations,function(animation,name) {\n\t\tif($tw.utils.hop(animation,type)) {\n\t\t\tchosenAnimation = animation[type];\n\t\t}\n\t});\n\tif(!chosenAnimation) {\n\t\tchosenAnimation = function(domNode,options) {\n\t\t\tif(options.callback) {\n\t\t\t\toptions.callback();\n\t\t\t}\n\t\t};\n\t}\n\t// Call the animation\n\tchosenAnimation(domNode,options);\n};\n\nexports.Animator = Animator;\n\n})();\n",
"title": "$:/core/modules/utils/dom/animator.js",
"type": "application/javascript",
"module-type": "utils"
},
"$:/core/modules/utils/dom/browser.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/dom/browser.js\ntype: application/javascript\nmodule-type: utils\n\nBrowser feature detection\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nSet style properties of an element\n\telement: dom node\n\tstyles: ordered array of {name: value} pairs\n*/\nexports.setStyle = function(element,styles) {\n\tif(element.nodeType === 1) { // Element.ELEMENT_NODE\n\t\tfor(var t=0; t<styles.length; t++) {\n\t\t\tfor(var styleName in styles[t]) {\n\t\t\t\telement.style[$tw.utils.convertStyleNameToPropertyName(styleName)] = styles[t][styleName];\n\t\t\t}\n\t\t}\n\t}\n};\n\n/*\nConverts a standard CSS property name into the local browser-specific equivalent. For example:\n\t\"background-color\" --> \"backgroundColor\"\n\t\"transition\" --> \"webkitTransition\"\n*/\n\nvar styleNameCache = {}; // We'll cache the style name conversions\n\nexports.convertStyleNameToPropertyName = function(styleName) {\n\t// Return from the cache if we can\n\tif(styleNameCache[styleName]) {\n\t\treturn styleNameCache[styleName];\n\t}\n\t// Convert it by first removing any hyphens\n\tvar propertyName = $tw.utils.unHyphenateCss(styleName);\n\t// Then check if it needs a prefix\n\tif($tw.browser && document.body.style[propertyName] === undefined) {\n\t\tvar prefixes = [\"O\",\"MS\",\"Moz\",\"webkit\"];\n\t\tfor(var t=0; t<prefixes.length; t++) {\n\t\t\tvar prefixedName = prefixes[t] + propertyName.substr(0,1).toUpperCase() + propertyName.substr(1);\n\t\t\tif(document.body.style[prefixedName] !== undefined) {\n\t\t\t\tpropertyName = prefixedName;\n\t\t\t\tbreak;\n\t\t\t}\n\t\t}\n\t}\n\t// Put it in the cache too\n\tstyleNameCache[styleName] = propertyName;\n\treturn propertyName;\n};\n\n/*\nConverts a JS format CSS property name back into the dashed form used in CSS declarations. For example:\n\t\"backgroundColor\" --> \"background-color\"\n\t\"webkitTransform\" --> \"-webkit-transform\"\n*/\nexports.convertPropertyNameToStyleName = function(propertyName) {\n\t// Rehyphenate the name\n\tvar styleName = $tw.utils.hyphenateCss(propertyName);\n\t// If there's a webkit prefix, add a dash (other browsers have uppercase prefixes, and so get the dash automatically)\n\tif(styleName.indexOf(\"webkit\") === 0) {\n\t\tstyleName = \"-\" + styleName;\n\t} else if(styleName.indexOf(\"-m-s\") === 0) {\n\t\tstyleName = \"-ms\" + styleName.substr(4);\n\t}\n\treturn styleName;\n};\n\n/*\nRound trip a stylename to a property name and back again. For example:\n\t\"transform\" --> \"webkitTransform\" --> \"-webkit-transform\"\n*/\nexports.roundTripPropertyName = function(propertyName) {\n\treturn $tw.utils.convertPropertyNameToStyleName($tw.utils.convertStyleNameToPropertyName(propertyName));\n};\n\n/*\nConverts a standard event name into the local browser specific equivalent. 
For example:\n\t\"animationEnd\" --> \"webkitAnimationEnd\"\n*/\n\nvar eventNameCache = {}; // We'll cache the conversions\n\nvar eventNameMappings = {\n\t\"transitionEnd\": {\n\t\tcorrespondingCssProperty: \"transition\",\n\t\tmappings: {\n\t\t\ttransition: \"transitionend\",\n\t\t\tOTransition: \"oTransitionEnd\",\n\t\t\tMSTransition: \"msTransitionEnd\",\n\t\t\tMozTransition: \"transitionend\",\n\t\t\twebkitTransition: \"webkitTransitionEnd\"\n\t\t}\n\t},\n\t\"animationEnd\": {\n\t\tcorrespondingCssProperty: \"animation\",\n\t\tmappings: {\n\t\t\tanimation: \"animationend\",\n\t\t\tOAnimation: \"oAnimationEnd\",\n\t\t\tMSAnimation: \"msAnimationEnd\",\n\t\t\tMozAnimation: \"animationend\",\n\t\t\twebkitAnimation: \"webkitAnimationEnd\"\n\t\t}\n\t}\n};\n\nexports.convertEventName = function(eventName) {\n\tif(eventNameCache[eventName]) {\n\t\treturn eventNameCache[eventName];\n\t}\n\tvar newEventName = eventName,\n\t\tmappings = eventNameMappings[eventName];\n\tif(mappings) {\n\t\tvar convertedProperty = $tw.utils.convertStyleNameToPropertyName(mappings.correspondingCssProperty);\n\t\tif(mappings.mappings[convertedProperty]) {\n\t\t\tnewEventName = mappings.mappings[convertedProperty];\n\t\t}\n\t}\n\t// Put it in the cache too\n\teventNameCache[eventName] = newEventName;\n\treturn newEventName;\n};\n\n/*\nReturn the names of the fullscreen APIs\n*/\nexports.getFullScreenApis = function() {\n\tvar d = document,\n\t\tdb = d.body,\n\t\tresult = {\n\t\t\"_requestFullscreen\": db.webkitRequestFullscreen !== undefined ? \"webkitRequestFullscreen\" :\n\t\t\t\t\t\t\tdb.mozRequestFullScreen !== undefined ? \"mozRequestFullScreen\" :\n\t\t\t\t\t\t\tdb.msRequestFullscreen !== undefined ? \"msRequestFullscreen\" :\n\t\t\t\t\t\t\tdb.requestFullscreen !== undefined ? \"requestFullscreen\" : \"\",\n\t\t\"_exitFullscreen\": d.webkitExitFullscreen !== undefined ? \"webkitExitFullscreen\" :\n\t\t\t\t\t\t\td.mozCancelFullScreen !== undefined ? \"mozCancelFullScreen\" :\n\t\t\t\t\t\t\td.msExitFullscreen !== undefined ? \"msExitFullscreen\" :\n\t\t\t\t\t\t\td.exitFullscreen !== undefined ? \"exitFullscreen\" : \"\",\n\t\t\"_fullscreenElement\": d.webkitFullscreenElement !== undefined ? \"webkitFullscreenElement\" :\n\t\t\t\t\t\t\td.mozFullScreenElement !== undefined ? \"mozFullScreenElement\" :\n\t\t\t\t\t\t\td.msFullscreenElement !== undefined ? \"msFullscreenElement\" :\n\t\t\t\t\t\t\td.fullscreenElement !== undefined ? \"fullscreenElement\" : \"\",\n\t\t\"_fullscreenChange\": d.webkitFullscreenElement !== undefined ? \"webkitfullscreenchange\" :\n\t\t\t\t\t\t\td.mozFullScreenElement !== undefined ? \"mozfullscreenchange\" :\n\t\t\t\t\t\t\td.msFullscreenElement !== undefined ? \"MSFullscreenChange\" :\n\t\t\t\t\t\t\td.fullscreenElement !== undefined ? \"fullscreenchange\" : \"\"\n\t};\n\tif(!result._requestFullscreen || !result._exitFullscreen || !result._fullscreenElement || !result._fullscreenChange) {\n\t\treturn null;\n\t} else {\n\t\treturn result;\n\t}\n};\n\n})();\n",
"title": "$:/core/modules/utils/dom/browser.js",
"type": "application/javascript",
"module-type": "utils"
},
"$:/core/modules/utils/dom/csscolorparser.js": {
"text": "// (c) Dean McNamee <dean@gmail.com>, 2012.\n//\n// https://github.com/deanm/css-color-parser-js\n//\n// Permission is hereby granted, free of charge, to any person obtaining a copy\n// of this software and associated documentation files (the \"Software\"), to\n// deal in the Software without restriction, including without limitation the\n// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or\n// sell copies of the Software, and to permit persons to whom the Software is\n// furnished to do so, subject to the following conditions:\n//\n// The above copyright notice and this permission notice shall be included in\n// all copies or substantial portions of the Software.\n//\n// THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS\n// IN THE SOFTWARE.\n\n// http://www.w3.org/TR/css3-color/\nvar kCSSColorTable = {\n \"transparent\": [0,0,0,0], \"aliceblue\": [240,248,255,1],\n \"antiquewhite\": [250,235,215,1], \"aqua\": [0,255,255,1],\n \"aquamarine\": [127,255,212,1], \"azure\": [240,255,255,1],\n \"beige\": [245,245,220,1], \"bisque\": [255,228,196,1],\n \"black\": [0,0,0,1], \"blanchedalmond\": [255,235,205,1],\n \"blue\": [0,0,255,1], \"blueviolet\": [138,43,226,1],\n \"brown\": [165,42,42,1], \"burlywood\": [222,184,135,1],\n \"cadetblue\": [95,158,160,1], \"chartreuse\": [127,255,0,1],\n \"chocolate\": [210,105,30,1], \"coral\": [255,127,80,1],\n \"cornflowerblue\": [100,149,237,1], \"cornsilk\": [255,248,220,1],\n \"crimson\": [220,20,60,1], \"cyan\": [0,255,255,1],\n \"darkblue\": [0,0,139,1], \"darkcyan\": [0,139,139,1],\n \"darkgoldenrod\": [184,134,11,1], \"darkgray\": [169,169,169,1],\n \"darkgreen\": [0,100,0,1], \"darkgrey\": [169,169,169,1],\n \"darkkhaki\": [189,183,107,1], \"darkmagenta\": [139,0,139,1],\n \"darkolivegreen\": [85,107,47,1], \"darkorange\": [255,140,0,1],\n \"darkorchid\": [153,50,204,1], \"darkred\": [139,0,0,1],\n \"darksalmon\": [233,150,122,1], \"darkseagreen\": [143,188,143,1],\n \"darkslateblue\": [72,61,139,1], \"darkslategray\": [47,79,79,1],\n \"darkslategrey\": [47,79,79,1], \"darkturquoise\": [0,206,209,1],\n \"darkviolet\": [148,0,211,1], \"deeppink\": [255,20,147,1],\n \"deepskyblue\": [0,191,255,1], \"dimgray\": [105,105,105,1],\n \"dimgrey\": [105,105,105,1], \"dodgerblue\": [30,144,255,1],\n \"firebrick\": [178,34,34,1], \"floralwhite\": [255,250,240,1],\n \"forestgreen\": [34,139,34,1], \"fuchsia\": [255,0,255,1],\n \"gainsboro\": [220,220,220,1], \"ghostwhite\": [248,248,255,1],\n \"gold\": [255,215,0,1], \"goldenrod\": [218,165,32,1],\n \"gray\": [128,128,128,1], \"green\": [0,128,0,1],\n \"greenyellow\": [173,255,47,1], \"grey\": [128,128,128,1],\n \"honeydew\": [240,255,240,1], \"hotpink\": [255,105,180,1],\n \"indianred\": [205,92,92,1], \"indigo\": [75,0,130,1],\n \"ivory\": [255,255,240,1], \"khaki\": [240,230,140,1],\n \"lavender\": [230,230,250,1], \"lavenderblush\": [255,240,245,1],\n \"lawngreen\": [124,252,0,1], \"lemonchiffon\": [255,250,205,1],\n \"lightblue\": [173,216,230,1], \"lightcoral\": [240,128,128,1],\n \"lightcyan\": [224,255,255,1], \"lightgoldenrodyellow\": [250,250,210,1],\n 
\"lightgray\": [211,211,211,1], \"lightgreen\": [144,238,144,1],\n \"lightgrey\": [211,211,211,1], \"lightpink\": [255,182,193,1],\n \"lightsalmon\": [255,160,122,1], \"lightseagreen\": [32,178,170,1],\n \"lightskyblue\": [135,206,250,1], \"lightslategray\": [119,136,153,1],\n \"lightslategrey\": [119,136,153,1], \"lightsteelblue\": [176,196,222,1],\n \"lightyellow\": [255,255,224,1], \"lime\": [0,255,0,1],\n \"limegreen\": [50,205,50,1], \"linen\": [250,240,230,1],\n \"magenta\": [255,0,255,1], \"maroon\": [128,0,0,1],\n \"mediumaquamarine\": [102,205,170,1], \"mediumblue\": [0,0,205,1],\n \"mediumorchid\": [186,85,211,1], \"mediumpurple\": [147,112,219,1],\n \"mediumseagreen\": [60,179,113,1], \"mediumslateblue\": [123,104,238,1],\n \"mediumspringgreen\": [0,250,154,1], \"mediumturquoise\": [72,209,204,1],\n \"mediumvioletred\": [199,21,133,1], \"midnightblue\": [25,25,112,1],\n \"mintcream\": [245,255,250,1], \"mistyrose\": [255,228,225,1],\n \"moccasin\": [255,228,181,1], \"navajowhite\": [255,222,173,1],\n \"navy\": [0,0,128,1], \"oldlace\": [253,245,230,1],\n \"olive\": [128,128,0,1], \"olivedrab\": [107,142,35,1],\n \"orange\": [255,165,0,1], \"orangered\": [255,69,0,1],\n \"orchid\": [218,112,214,1], \"palegoldenrod\": [238,232,170,1],\n \"palegreen\": [152,251,152,1], \"paleturquoise\": [175,238,238,1],\n \"palevioletred\": [219,112,147,1], \"papayawhip\": [255,239,213,1],\n \"peachpuff\": [255,218,185,1], \"peru\": [205,133,63,1],\n \"pink\": [255,192,203,1], \"plum\": [221,160,221,1],\n \"powderblue\": [176,224,230,1], \"purple\": [128,0,128,1],\n \"red\": [255,0,0,1], \"rosybrown\": [188,143,143,1],\n \"royalblue\": [65,105,225,1], \"saddlebrown\": [139,69,19,1],\n \"salmon\": [250,128,114,1], \"sandybrown\": [244,164,96,1],\n \"seagreen\": [46,139,87,1], \"seashell\": [255,245,238,1],\n \"sienna\": [160,82,45,1], \"silver\": [192,192,192,1],\n \"skyblue\": [135,206,235,1], \"slateblue\": [106,90,205,1],\n \"slategray\": [112,128,144,1], \"slategrey\": [112,128,144,1],\n \"snow\": [255,250,250,1], \"springgreen\": [0,255,127,1],\n \"steelblue\": [70,130,180,1], \"tan\": [210,180,140,1],\n \"teal\": [0,128,128,1], \"thistle\": [216,191,216,1],\n \"tomato\": [255,99,71,1], \"turquoise\": [64,224,208,1],\n \"violet\": [238,130,238,1], \"wheat\": [245,222,179,1],\n \"white\": [255,255,255,1], \"whitesmoke\": [245,245,245,1],\n \"yellow\": [255,255,0,1], \"yellowgreen\": [154,205,50,1]}\n\nfunction clamp_css_byte(i) { // Clamp to integer 0 .. 255.\n i = Math.round(i); // Seems to be what Chrome does (vs truncation).\n return i < 0 ? 0 : i > 255 ? 255 : i;\n}\n\nfunction clamp_css_float(f) { // Clamp to float 0.0 .. 1.0.\n return f < 0 ? 0 : f > 1 ? 
1 : f;\n}\n\nfunction parse_css_int(str) { // int or percentage.\n if (str[str.length - 1] === '%')\n return clamp_css_byte(parseFloat(str) / 100 * 255);\n return clamp_css_byte(parseInt(str));\n}\n\nfunction parse_css_float(str) { // float or percentage.\n if (str[str.length - 1] === '%')\n return clamp_css_float(parseFloat(str) / 100);\n return clamp_css_float(parseFloat(str));\n}\n\nfunction css_hue_to_rgb(m1, m2, h) {\n if (h < 0) h += 1;\n else if (h > 1) h -= 1;\n\n if (h * 6 < 1) return m1 + (m2 - m1) * h * 6;\n if (h * 2 < 1) return m2;\n if (h * 3 < 2) return m1 + (m2 - m1) * (2/3 - h) * 6;\n return m1;\n}\n\nfunction parseCSSColor(css_str) {\n // Remove all whitespace, not compliant, but should just be more accepting.\n var str = css_str.replace(/ /g, '').toLowerCase();\n\n // Color keywords (and transparent) lookup.\n if (str in kCSSColorTable) return kCSSColorTable[str].slice(); // dup.\n\n // #abc and #abc123 syntax.\n if (str[0] === '#') {\n if (str.length === 4) {\n var iv = parseInt(str.substr(1), 16); // TODO(deanm): Stricter parsing.\n if (!(iv >= 0 && iv <= 0xfff)) return null; // Covers NaN.\n return [((iv & 0xf00) >> 4) | ((iv & 0xf00) >> 8),\n (iv & 0xf0) | ((iv & 0xf0) >> 4),\n (iv & 0xf) | ((iv & 0xf) << 4),\n 1];\n } else if (str.length === 7) {\n var iv = parseInt(str.substr(1), 16); // TODO(deanm): Stricter parsing.\n if (!(iv >= 0 && iv <= 0xffffff)) return null; // Covers NaN.\n return [(iv & 0xff0000) >> 16,\n (iv & 0xff00) >> 8,\n iv & 0xff,\n 1];\n }\n\n return null;\n }\n\n var op = str.indexOf('('), ep = str.indexOf(')');\n if (op !== -1 && ep + 1 === str.length) {\n var fname = str.substr(0, op);\n var params = str.substr(op+1, ep-(op+1)).split(',');\n var alpha = 1; // To allow case fallthrough.\n switch (fname) {\n case 'rgba':\n if (params.length !== 4) return null;\n alpha = parse_css_float(params.pop());\n // Fall through.\n case 'rgb':\n if (params.length !== 3) return null;\n return [parse_css_int(params[0]),\n parse_css_int(params[1]),\n parse_css_int(params[2]),\n alpha];\n case 'hsla':\n if (params.length !== 4) return null;\n alpha = parse_css_float(params.pop());\n // Fall through.\n case 'hsl':\n if (params.length !== 3) return null;\n var h = (((parseFloat(params[0]) % 360) + 360) % 360) / 360; // 0 .. 1\n // NOTE(deanm): According to the CSS spec s/l should only be\n // percentages, but we don't bother and let float or percentage.\n var s = parse_css_float(params[1]);\n var l = parse_css_float(params[2]);\n var m2 = l <= 0.5 ? l * (s + 1) : l + s - l * s;\n var m1 = l * 2 - m2;\n return [clamp_css_byte(css_hue_to_rgb(m1, m2, h+1/3) * 255),\n clamp_css_byte(css_hue_to_rgb(m1, m2, h) * 255),\n clamp_css_byte(css_hue_to_rgb(m1, m2, h-1/3) * 255),\n alpha];\n default:\n return null;\n }\n }\n\n return null;\n}\n\ntry { exports.parseCSSColor = parseCSSColor } catch(e) { }\n",
"title": "$:/core/modules/utils/dom/csscolorparser.js",
"type": "application/javascript",
"module-type": "utils"
},
"$:/core/modules/utils/dom.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/dom.js\ntype: application/javascript\nmodule-type: utils\n\nVarious static DOM-related utility functions.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nDetermines whether element 'a' contains element 'b'\nCode thanks to John Resig, http://ejohn.org/blog/comparing-document-position/\n*/\nexports.domContains = function(a,b) {\n\treturn a.contains ?\n\t\ta !== b && a.contains(b) :\n\t\t!!(a.compareDocumentPosition(b) & 16);\n};\n\nexports.removeChildren = function(node) {\n\twhile(node.hasChildNodes()) {\n\t\tnode.removeChild(node.firstChild);\n\t}\n};\n\nexports.hasClass = function(el,className) {\n\treturn el && el.className && el.className.toString().split(\" \").indexOf(className) !== -1;\n};\n\nexports.addClass = function(el,className) {\n\tvar c = el.className.split(\" \");\n\tif(c.indexOf(className) === -1) {\n\t\tc.push(className);\n\t}\n\tel.className = c.join(\" \");\n};\n\nexports.removeClass = function(el,className) {\n\tvar c = el.className.split(\" \"),\n\t\tp = c.indexOf(className);\n\tif(p !== -1) {\n\t\tc.splice(p,1);\n\t\tel.className = c.join(\" \");\n\t}\n};\n\nexports.toggleClass = function(el,className,status) {\n\tif(status === undefined) {\n\t\tstatus = !exports.hasClass(el,className);\n\t}\n\tif(status) {\n\t\texports.addClass(el,className);\n\t} else {\n\t\texports.removeClass(el,className);\n\t}\n};\n\n/*\nGet the first parent element that has scrollbars or use the body as fallback.\n*/\nexports.getScrollContainer = function(el) {\n\tvar doc = el.ownerDocument;\n\twhile(el.parentNode) {\t\n\t\tel = el.parentNode;\n\t\tif(el.scrollTop) {\n\t\t\treturn el;\n\t\t}\n\t}\n\treturn doc.body;\n};\n\n/*\nGet the scroll position of the viewport\nReturns:\n\t{\n\t\tx: horizontal scroll position in pixels,\n\t\ty: vertical scroll position in pixels\n\t}\n*/\nexports.getScrollPosition = function() {\n\tif(\"scrollX\" in window) {\n\t\treturn {x: window.scrollX, y: window.scrollY};\n\t} else {\n\t\treturn {x: document.documentElement.scrollLeft, y: document.documentElement.scrollTop};\n\t}\n};\n\n/*\nAdjust the height of a textarea to fit its content, preserving scroll position, and return the height\n*/\nexports.resizeTextAreaToFit = function(domNode,minHeight) {\n\t// Get the scroll container and register the current scroll position\n\tvar container = $tw.utils.getScrollContainer(domNode),\n\t\tscrollTop = container.scrollTop;\n // Measure the specified minimum height\n\tdomNode.style.height = minHeight;\n\tvar measuredHeight = domNode.offsetHeight;\n\t// Set its height to auto so that it snaps to the correct height\n\tdomNode.style.height = \"auto\";\n\t// Calculate the revised height\n\tvar newHeight = Math.max(domNode.scrollHeight + domNode.offsetHeight - domNode.clientHeight,measuredHeight);\n\t// Only try to change the height if it has changed\n\tif(newHeight !== domNode.offsetHeight) {\n\t\tdomNode.style.height = newHeight + \"px\";\n\t\t// Make sure that the dimensions of the textarea are recalculated\n\t\t$tw.utils.forceLayout(domNode);\n\t\t// Set the container to the position we registered at the beginning\n\t\tcontainer.scrollTop = scrollTop;\n\t}\n\treturn newHeight;\n};\n\n/*\nGets the bounding rectangle of an element in absolute page coordinates\n*/\nexports.getBoundingPageRect = function(element) {\n\tvar scrollPos = $tw.utils.getScrollPosition(),\n\t\tclientRect = element.getBoundingClientRect();\n\treturn {\n\t\tleft: clientRect.left + 
scrollPos.x,\n\t\twidth: clientRect.width,\n\t\tright: clientRect.right + scrollPos.x,\n\t\ttop: clientRect.top + scrollPos.y,\n\t\theight: clientRect.height,\n\t\tbottom: clientRect.bottom + scrollPos.y\n\t};\n};\n\n/*\nSaves a named password in the browser\n*/\nexports.savePassword = function(name,password) {\n\ttry {\n\t\tif(window.localStorage) {\n\t\t\tlocalStorage.setItem(\"tw5-password-\" + name,password);\n\t\t}\n\t} catch(e) {\n\t}\n};\n\n/*\nRetrieve a named password from the browser\n*/\nexports.getPassword = function(name) {\n\ttry {\n\t\treturn window.localStorage ? localStorage.getItem(\"tw5-password-\" + name) : \"\";\n\t} catch(e) {\n\t\treturn \"\";\n\t}\n};\n\n/*\nForce layout of a dom node and its descendents\n*/\nexports.forceLayout = function(element) {\n\tvar dummy = element.offsetWidth;\n};\n\n/*\nPulse an element for debugging purposes\n*/\nexports.pulseElement = function(element) {\n\t// Event handler to remove the class at the end\n\telement.addEventListener($tw.browser.animationEnd,function handler(event) {\n\t\telement.removeEventListener($tw.browser.animationEnd,handler,false);\n\t\t$tw.utils.removeClass(element,\"pulse\");\n\t},false);\n\t// Apply the pulse class\n\t$tw.utils.removeClass(element,\"pulse\");\n\t$tw.utils.forceLayout(element);\n\t$tw.utils.addClass(element,\"pulse\");\n};\n\n/*\nAttach specified event handlers to a DOM node\ndomNode: where to attach the event handlers\nevents: array of event handlers to be added (see below)\nEach entry in the events array is an object with these properties:\nhandlerFunction: optional event handler function\nhandlerObject: optional event handler object\nhandlerMethod: optionally specifies object handler method name (defaults to `handleEvent`)\n*/\nexports.addEventListeners = function(domNode,events) {\n\t$tw.utils.each(events,function(eventInfo) {\n\t\tvar handler;\n\t\tif(eventInfo.handlerFunction) {\n\t\t\thandler = eventInfo.handlerFunction;\n\t\t} else if(eventInfo.handlerObject) {\n\t\t\tif(eventInfo.handlerMethod) {\n\t\t\t\thandler = function(event) {\n\t\t\t\t\teventInfo.handlerObject[eventInfo.handlerMethod].call(eventInfo.handlerObject,event);\n\t\t\t\t};\t\n\t\t\t} else {\n\t\t\t\thandler = eventInfo.handlerObject;\n\t\t\t}\n\t\t}\n\t\tdomNode.addEventListener(eventInfo.name,handler,false);\n\t});\n};\n\n/*\nGet the computed styles applied to an element as an array of strings of individual CSS properties\n*/\nexports.getComputedStyles = function(domNode) {\n\tvar textAreaStyles = window.getComputedStyle(domNode,null),\n\t\tstyleDefs = [],\n\t\tname;\n\tfor(var t=0; t<textAreaStyles.length; t++) {\n\t\tname = textAreaStyles[t];\n\t\tstyleDefs.push(name + \": \" + textAreaStyles.getPropertyValue(name) + \";\");\n\t}\n\treturn styleDefs;\n};\n\n/*\nApply a set of styles passed as an array of strings of individual CSS properties\n*/\nexports.setStyles = function(domNode,styleDefs) {\n\tdomNode.style.cssText = styleDefs.join(\"\");\n};\n\n/*\nCopy the computed styles from a source element to a destination element\n*/\nexports.copyStyles = function(srcDomNode,dstDomNode) {\n\t$tw.utils.setStyles(dstDomNode,$tw.utils.getComputedStyles(srcDomNode));\n};\n\n})();\n",
"title": "$:/core/modules/utils/dom.js",
"type": "application/javascript",
"module-type": "utils"
},
"$:/core/modules/utils/dom/http.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/dom/http.js\ntype: application/javascript\nmodule-type: utils\n\nBrowser HTTP support\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nA quick and dirty HTTP function; to be refactored later. Options are:\n\turl: URL to retrieve\n\ttype: GET, PUT, POST etc\n\tcallback: function invoked with (err,data)\n*/\nexports.httpRequest = function(options) {\n\tvar type = options.type || \"GET\",\n\t\theaders = options.headers || {accept: \"application/json\"},\n\t\trequest = new XMLHttpRequest(),\n\t\tdata = \"\",\n\t\tf,results;\n\t// Massage the data hashmap into a string\n\tif(options.data) {\n\t\tif(typeof options.data === \"string\") { // Already a string\n\t\t\tdata = options.data;\n\t\t} else { // A hashmap of strings\n\t\t\tresults = [];\n\t\t\t$tw.utils.each(options.data,function(dataItem,dataItemTitle) {\n\t\t\t\tresults.push(dataItemTitle + \"=\" + encodeURIComponent(dataItem));\n\t\t\t});\n\t\t\tdata = results.join(\"&\");\n\t\t}\n\t}\n\t// Set up the state change handler\n\trequest.onreadystatechange = function() {\n\t\tif(this.readyState === 4) {\n\t\t\tif(this.status === 200 || this.status === 201 || this.status === 204) {\n\t\t\t\t// Success!\n\t\t\t\toptions.callback(null,this.responseText,this);\n\t\t\t\treturn;\n\t\t\t}\n\t\t// Something went wrong\n\t\toptions.callback($tw.language.getString(\"Error/XMLHttpRequest\") + \": \" + this.status);\n\t\t}\n\t};\n\t// Make the request\n\trequest.open(type,options.url,true);\n\tif(headers) {\n\t\t$tw.utils.each(headers,function(header,headerTitle,object) {\n\t\t\trequest.setRequestHeader(headerTitle,header);\n\t\t});\n\t}\n\tif(data && !$tw.utils.hop(headers,\"Content-type\")) {\n\t\trequest.setRequestHeader(\"Content-type\",\"application/x-www-form-urlencoded; charset=UTF-8\");\n\t}\n\ttry {\n\t\trequest.send(data);\n\t} catch(e) {\n\t\toptions.callback(e);\n\t}\n\treturn request;\n};\n\n})();\n",
"title": "$:/core/modules/utils/dom/http.js",
"type": "application/javascript",
"module-type": "utils"
},
"$:/core/modules/utils/dom/keyboard.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/dom/keyboard.js\ntype: application/javascript\nmodule-type: utils\n\nKeyboard utilities; now deprecated. Instead, use $tw.keyboardManager\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n[\"parseKeyDescriptor\",\"checkKeyDescriptor\"].forEach(function(method) {\n\texports[method] = function() {\n\t\tif($tw.keyboardManager) {\n\t\t\treturn $tw.keyboardManager[method].apply($tw.keyboardManager,Array.prototype.slice.call(arguments,0));\n\t\t} else {\n\t\t\treturn null\n\t\t}\n\t};\n});\n\n})();\n",
"title": "$:/core/modules/utils/dom/keyboard.js",
"type": "application/javascript",
"module-type": "utils"
},
"$:/core/modules/utils/dom/modal.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/dom/modal.js\ntype: application/javascript\nmodule-type: utils\n\nModal message mechanism\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar widget = require(\"$:/core/modules/widgets/widget.js\");\n\nvar Modal = function(wiki) {\n\tthis.wiki = wiki;\n\tthis.modalCount = 0;\n};\n\n/*\nDisplay a modal dialogue\n\ttitle: Title of tiddler to display\n\toptions: see below\nOptions include:\n\tdownloadLink: Text of a big download link to include\n*/\nModal.prototype.display = function(title,options) {\n\toptions = options || {};\n\tvar self = this,\n\t\trefreshHandler,\n\t\tduration = $tw.utils.getAnimationDuration(),\n\t\ttiddler = this.wiki.getTiddler(title);\n\t// Don't do anything if the tiddler doesn't exist\n\tif(!tiddler) {\n\t\treturn;\n\t}\n\t// Create the variables\n\tvar variables = $tw.utils.extend({currentTiddler: title},options.variables);\n\t// Create the wrapper divs\n\tvar wrapper = document.createElement(\"div\"),\n\t\tmodalBackdrop = document.createElement(\"div\"),\n\t\tmodalWrapper = document.createElement(\"div\"),\n\t\tmodalHeader = document.createElement(\"div\"),\n\t\theaderTitle = document.createElement(\"h3\"),\n\t\tmodalBody = document.createElement(\"div\"),\n\t\tmodalLink = document.createElement(\"a\"),\n\t\tmodalFooter = document.createElement(\"div\"),\n\t\tmodalFooterHelp = document.createElement(\"span\"),\n\t\tmodalFooterButtons = document.createElement(\"span\");\n\t// Up the modal count and adjust the body class\n\tthis.modalCount++;\n\tthis.adjustPageClass();\n\t// Add classes\n\t$tw.utils.addClass(wrapper,\"tc-modal-wrapper\");\n\t$tw.utils.addClass(modalBackdrop,\"tc-modal-backdrop\");\n\t$tw.utils.addClass(modalWrapper,\"tc-modal\");\n\t$tw.utils.addClass(modalHeader,\"tc-modal-header\");\n\t$tw.utils.addClass(modalBody,\"tc-modal-body\");\n\t$tw.utils.addClass(modalFooter,\"tc-modal-footer\");\n\t// Join them together\n\twrapper.appendChild(modalBackdrop);\n\twrapper.appendChild(modalWrapper);\n\tmodalHeader.appendChild(headerTitle);\n\tmodalWrapper.appendChild(modalHeader);\n\tmodalWrapper.appendChild(modalBody);\n\tmodalFooter.appendChild(modalFooterHelp);\n\tmodalFooter.appendChild(modalFooterButtons);\n\tmodalWrapper.appendChild(modalFooter);\n\t// Render the title of the message\n\tvar headerWidgetNode = this.wiki.makeTranscludeWidget(title,{\n\t\tfield: \"subtitle\",\n\t\tmode: \"inline\",\n\t\tchildren: [{\n\t\t\ttype: \"text\",\n\t\t\tattributes: {\n\t\t\t\ttext: {\n\t\t\t\t\ttype: \"string\",\n\t\t\t\t\tvalue: title\n\t\t}}}],\n\t\tparentWidget: $tw.rootWidget,\n\t\tdocument: document,\n\t\tvariables: variables\n\t});\n\theaderWidgetNode.render(headerTitle,null);\n\t// Render the body of the message\n\tvar bodyWidgetNode = this.wiki.makeTranscludeWidget(title,{\n\t\tparentWidget: $tw.rootWidget,\n\t\tdocument: document,\n\t\tvariables: variables\n\t});\n\tbodyWidgetNode.render(modalBody,null);\n\t// Setup the link if present\n\tif(options.downloadLink) {\n\t\tmodalLink.href = options.downloadLink;\n\t\tmodalLink.appendChild(document.createTextNode(\"Right-click to save changes\"));\n\t\tmodalBody.appendChild(modalLink);\n\t}\n\t// Render the footer of the message\n\tif(tiddler && tiddler.fields && tiddler.fields.help) {\n\t\tvar link = document.createElement(\"a\");\n\t\tlink.setAttribute(\"href\",tiddler.fields.help);\n\t\tlink.setAttribute(\"target\",\"_blank\");\n\t\tlink.setAttribute(\"rel\",\"noopener 
noreferrer\");\n\t\tlink.appendChild(document.createTextNode(\"Help\"));\n\t\tmodalFooterHelp.appendChild(link);\n\t\tmodalFooterHelp.style.float = \"left\";\n\t}\n\tvar footerWidgetNode = this.wiki.makeTranscludeWidget(title,{\n\t\tfield: \"footer\",\n\t\tmode: \"inline\",\n\t\tchildren: [{\n\t\t\ttype: \"button\",\n\t\t\tattributes: {\n\t\t\t\tmessage: {\n\t\t\t\t\ttype: \"string\",\n\t\t\t\t\tvalue: \"tm-close-tiddler\"\n\t\t\t\t}\n\t\t\t},\n\t\t\tchildren: [{\n\t\t\t\ttype: \"text\",\n\t\t\t\tattributes: {\n\t\t\t\t\ttext: {\n\t\t\t\t\t\ttype: \"string\",\n\t\t\t\t\t\tvalue: $tw.language.getString(\"Buttons/Close/Caption\")\n\t\t\t}}}\n\t\t]}],\n\t\tparentWidget: $tw.rootWidget,\n\t\tdocument: document,\n\t\tvariables: variables\n\t});\n\tfooterWidgetNode.render(modalFooterButtons,null);\n\t// Set up the refresh handler\n\trefreshHandler = function(changes) {\n\t\theaderWidgetNode.refresh(changes,modalHeader,null);\n\t\tbodyWidgetNode.refresh(changes,modalBody,null);\n\t\tfooterWidgetNode.refresh(changes,modalFooterButtons,null);\n\t};\n\tthis.wiki.addEventListener(\"change\",refreshHandler);\n\t// Add the close event handler\n\tvar closeHandler = function(event) {\n\t\t// Remove our refresh handler\n\t\tself.wiki.removeEventListener(\"change\",refreshHandler);\n\t\t// Decrease the modal count and adjust the body class\n\t\tself.modalCount--;\n\t\tself.adjustPageClass();\n\t\t// Force layout and animate the modal message away\n\t\t$tw.utils.forceLayout(modalBackdrop);\n\t\t$tw.utils.forceLayout(modalWrapper);\n\t\t$tw.utils.setStyle(modalBackdrop,[\n\t\t\t{opacity: \"0\"}\n\t\t]);\n\t\t$tw.utils.setStyle(modalWrapper,[\n\t\t\t{transform: \"translateY(\" + window.innerHeight + \"px)\"}\n\t\t]);\n\t\t// Set up an event for the transition end\n\t\twindow.setTimeout(function() {\n\t\t\tif(wrapper.parentNode) {\n\t\t\t\t// Remove the modal message from the DOM\n\t\t\t\tdocument.body.removeChild(wrapper);\n\t\t\t}\n\t\t},duration);\n\t\t// Don't let anyone else handle the tm-close-tiddler message\n\t\treturn false;\n\t};\n\theaderWidgetNode.addEventListener(\"tm-close-tiddler\",closeHandler,false);\n\tbodyWidgetNode.addEventListener(\"tm-close-tiddler\",closeHandler,false);\n\tfooterWidgetNode.addEventListener(\"tm-close-tiddler\",closeHandler,false);\n\t// Set the initial styles for the message\n\t$tw.utils.setStyle(modalBackdrop,[\n\t\t{opacity: \"0\"}\n\t]);\n\t$tw.utils.setStyle(modalWrapper,[\n\t\t{transformOrigin: \"0% 0%\"},\n\t\t{transform: \"translateY(\" + (-window.innerHeight) + \"px)\"}\n\t]);\n\t// Put the message into the document\n\tdocument.body.appendChild(wrapper);\n\t// Set up animation for the styles\n\t$tw.utils.setStyle(modalBackdrop,[\n\t\t{transition: \"opacity \" + duration + \"ms ease-out\"}\n\t]);\n\t$tw.utils.setStyle(modalWrapper,[\n\t\t{transition: $tw.utils.roundTripPropertyName(\"transform\") + \" \" + duration + \"ms ease-in-out\"}\n\t]);\n\t// Force layout\n\t$tw.utils.forceLayout(modalBackdrop);\n\t$tw.utils.forceLayout(modalWrapper);\n\t// Set final animated styles\n\t$tw.utils.setStyle(modalBackdrop,[\n\t\t{opacity: \"0.7\"}\n\t]);\n\t$tw.utils.setStyle(modalWrapper,[\n\t\t{transform: \"translateY(0px)\"}\n\t]);\n};\n\nModal.prototype.adjustPageClass = function() {\n\tif($tw.pageContainer) {\n\t\t$tw.utils.toggleClass($tw.pageContainer,\"tc-modal-displayed\",this.modalCount > 0);\n\t}\n};\n\nexports.Modal = Modal;\n\n})();\n",
"title": "$:/core/modules/utils/dom/modal.js",
"type": "application/javascript",
"module-type": "utils"
},
"$:/core/modules/utils/dom/notifier.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/dom/notifier.js\ntype: application/javascript\nmodule-type: utils\n\nNotifier mechanism\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar widget = require(\"$:/core/modules/widgets/widget.js\");\n\nvar Notifier = function(wiki) {\n\tthis.wiki = wiki;\n};\n\n/*\nDisplay a notification\n\ttitle: Title of tiddler containing the notification text\n\toptions: see below\nOptions include:\n*/\nNotifier.prototype.display = function(title,options) {\n\toptions = options || {};\n\t// Create the wrapper divs\n\tvar self = this,\n\t\tnotification = document.createElement(\"div\"),\n\t\ttiddler = this.wiki.getTiddler(title),\n\t\tduration = $tw.utils.getAnimationDuration(),\n\t\trefreshHandler;\n\t// Don't do anything if the tiddler doesn't exist\n\tif(!tiddler) {\n\t\treturn;\n\t}\n\t// Add classes\n\t$tw.utils.addClass(notification,\"tc-notification\");\n\t// Create the variables\n\tvar variables = $tw.utils.extend({currentTiddler: title},options.variables);\n\t// Render the body of the notification\n\tvar widgetNode = this.wiki.makeTranscludeWidget(title,{parentWidget: $tw.rootWidget, document: document, variables: variables});\n\twidgetNode.render(notification,null);\n\trefreshHandler = function(changes) {\n\t\twidgetNode.refresh(changes,notification,null);\n\t};\n\tthis.wiki.addEventListener(\"change\",refreshHandler);\n\t// Set the initial styles for the notification\n\t$tw.utils.setStyle(notification,[\n\t\t{opacity: \"0\"},\n\t\t{transformOrigin: \"0% 0%\"},\n\t\t{transform: \"translateY(\" + (-window.innerHeight) + \"px)\"},\n\t\t{transition: \"opacity \" + duration + \"ms ease-out, \" + $tw.utils.roundTripPropertyName(\"transform\") + \" \" + duration + \"ms ease-in-out\"}\n\t]);\n\t// Add the notification to the DOM\n\tdocument.body.appendChild(notification);\n\t// Force layout\n\t$tw.utils.forceLayout(notification);\n\t// Set final animated styles\n\t$tw.utils.setStyle(notification,[\n\t\t{opacity: \"1.0\"},\n\t\t{transform: \"translateY(0px)\"}\n\t]);\n\t// Set a timer to remove the notification\n\twindow.setTimeout(function() {\n\t\t// Remove our change event handler\n\t\tself.wiki.removeEventListener(\"change\",refreshHandler);\n\t\t// Force layout and animate the notification away\n\t\t$tw.utils.forceLayout(notification);\n\t\t$tw.utils.setStyle(notification,[\n\t\t\t{opacity: \"0.0\"},\n\t\t\t{transform: \"translateX(\" + (notification.offsetWidth) + \"px)\"}\n\t\t]);\n\t\t// Remove the modal message from the DOM once the transition ends\n\t\tsetTimeout(function() {\n\t\t\tif(notification.parentNode) {\n\t\t\t\tdocument.body.removeChild(notification);\n\t\t\t}\n\t\t},duration);\n\t},$tw.config.preferences.notificationDuration);\n};\n\nexports.Notifier = Notifier;\n\n})();\n",
"title": "$:/core/modules/utils/dom/notifier.js",
"type": "application/javascript",
"module-type": "utils"
},
"$:/core/modules/utils/dom/popup.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/dom/popup.js\ntype: application/javascript\nmodule-type: utils\n\nModule that creates a $tw.utils.Popup object prototype that manages popups in the browser\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nCreates a Popup object with these options:\n\trootElement: the DOM element to which the popup zapper should be attached\n*/\nvar Popup = function(options) {\n\toptions = options || {};\n\tthis.rootElement = options.rootElement || document.documentElement;\n\tthis.popups = []; // Array of {title:,wiki:,domNode:} objects\n};\n\n/*\nTrigger a popup open or closed. Parameters are in a hashmap:\n\ttitle: title of the tiddler where the popup details are stored\n\tdomNode: dom node to which the popup will be positioned\n\twiki: wiki\n\tforce: if specified, forces the popup state to true or false (instead of toggling it)\n*/\nPopup.prototype.triggerPopup = function(options) {\n\t// Check if this popup is already active\n\tvar index = this.findPopup(options.title);\n\t// Compute the new state\n\tvar state = index === -1;\n\tif(options.force !== undefined) {\n\t\tstate = options.force;\n\t}\n\t// Show or cancel the popup according to the new state\n\tif(state) {\n\t\tthis.show(options);\n\t} else {\n\t\tthis.cancel(index);\n\t}\n};\n\nPopup.prototype.findPopup = function(title) {\n\tvar index = -1;\n\tfor(var t=0; t<this.popups.length; t++) {\n\t\tif(this.popups[t].title === title) {\n\t\t\tindex = t;\n\t\t}\n\t}\n\treturn index;\n};\n\nPopup.prototype.handleEvent = function(event) {\n\tif(event.type === \"click\") {\n\t\t// Find out what was clicked on\n\t\tvar info = this.popupInfo(event.target),\n\t\t\tcancelLevel = info.popupLevel - 1;\n\t\t// Don't remove the level that was clicked on if we clicked on a handle\n\t\tif(info.isHandle) {\n\t\t\tcancelLevel++;\n\t\t}\n\t\t// Cancel\n\t\tthis.cancel(cancelLevel);\n\t}\n};\n\n/*\nFind the popup level containing a DOM node. 
Returns:\npopupLevel: count of the number of nested popups containing the specified element\nisHandle: true if the specified element is within a popup handle\n*/\nPopup.prototype.popupInfo = function(domNode) {\n\tvar isHandle = false,\n\t\tpopupCount = 0,\n\t\tnode = domNode;\n\t// First check ancestors to see if we're within a popup handle\n\twhile(node) {\n\t\tif($tw.utils.hasClass(node,\"tc-popup-handle\")) {\n\t\t\tisHandle = true;\n\t\t\tpopupCount++;\n\t\t}\n\t\tif($tw.utils.hasClass(node,\"tc-popup-keep\")) {\n\t\t\tisHandle = true;\n\t\t}\n\t\tnode = node.parentNode;\n\t}\n\t// Then count the number of ancestor popups\n\tnode = domNode;\n\twhile(node) {\n\t\tif($tw.utils.hasClass(node,\"tc-popup\")) {\n\t\t\tpopupCount++;\n\t\t}\n\t\tnode = node.parentNode;\n\t}\n\tvar info = {\n\t\tpopupLevel: popupCount,\n\t\tisHandle: isHandle\n\t};\n\treturn info;\n};\n\n/*\nDisplay a popup by adding it to the stack\n*/\nPopup.prototype.show = function(options) {\n\t// Find out what was clicked on\n\tvar info = this.popupInfo(options.domNode);\n\t// Cancel any higher level popups\n\tthis.cancel(info.popupLevel);\n\t// Store the popup details if not already there\n\tif(this.findPopup(options.title) === -1) {\n\t\tthis.popups.push({\n\t\t\ttitle: options.title,\n\t\t\twiki: options.wiki,\n\t\t\tdomNode: options.domNode\n\t\t});\n\t}\n\t// Set the state tiddler\n\toptions.wiki.setTextReference(options.title,\n\t\t\t\"(\" + options.domNode.offsetLeft + \",\" + options.domNode.offsetTop + \",\" + \n\t\t\t\toptions.domNode.offsetWidth + \",\" + options.domNode.offsetHeight + \")\");\n\t// Add the click handler if we have any popups\n\tif(this.popups.length > 0) {\n\t\tthis.rootElement.addEventListener(\"click\",this,true);\t\t\n\t}\n};\n\n/*\nCancel all popups at or above a specified level or DOM node\nlevel: popup level to cancel (0 cancels all popups)\n*/\nPopup.prototype.cancel = function(level) {\n\tvar numPopups = this.popups.length;\n\tlevel = Math.max(0,Math.min(level,numPopups));\n\tfor(var t=level; t<numPopups; t++) {\n\t\tvar popup = this.popups.pop();\n\t\tif(popup.title) {\n\t\t\tpopup.wiki.deleteTiddler(popup.title);\n\t\t}\n\t}\n\tif(this.popups.length === 0) {\n\t\tthis.rootElement.removeEventListener(\"click\",this,false);\n\t}\n};\n\n/*\nReturns true if the specified title and text identifies an active popup\n*/\nPopup.prototype.readPopupState = function(text) {\n\tvar popupLocationRegExp = /^\\((-?[0-9\\.E]+),(-?[0-9\\.E]+),(-?[0-9\\.E]+),(-?[0-9\\.E]+)\\)$/;\n\treturn popupLocationRegExp.test(text);\n};\n\nexports.Popup = Popup;\n\n})();\n",
"title": "$:/core/modules/utils/dom/popup.js",
"type": "application/javascript",
"module-type": "utils"
},
"$:/core/modules/utils/dom/scroller.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/dom/scroller.js\ntype: application/javascript\nmodule-type: utils\n\nModule that creates a $tw.utils.Scroller object prototype that manages scrolling in the browser\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nEvent handler for when the `tm-scroll` event hits the document body\n*/\nvar PageScroller = function() {\n\tthis.idRequestFrame = null;\n\tthis.requestAnimationFrame = window.requestAnimationFrame ||\n\t\twindow.webkitRequestAnimationFrame ||\n\t\twindow.mozRequestAnimationFrame ||\n\t\tfunction(callback) {\n\t\t\treturn window.setTimeout(callback, 1000/60);\n\t\t};\n\tthis.cancelAnimationFrame = window.cancelAnimationFrame ||\n\t\twindow.webkitCancelAnimationFrame ||\n\t\twindow.webkitCancelRequestAnimationFrame ||\n\t\twindow.mozCancelAnimationFrame ||\n\t\twindow.mozCancelRequestAnimationFrame ||\n\t\tfunction(id) {\n\t\t\twindow.clearTimeout(id);\n\t\t};\n};\n\nPageScroller.prototype.cancelScroll = function() {\n\tif(this.idRequestFrame) {\n\t\tthis.cancelAnimationFrame.call(window,this.idRequestFrame);\n\t\tthis.idRequestFrame = null;\n\t}\n};\n\n/*\nHandle an event\n*/\nPageScroller.prototype.handleEvent = function(event) {\n\tif(event.type === \"tm-scroll\") {\n\t\treturn this.scrollIntoView(event.target);\n\t}\n\treturn true;\n};\n\n/*\nHandle a scroll event hitting the page document\n*/\nPageScroller.prototype.scrollIntoView = function(element) {\n\tvar duration = $tw.utils.getAnimationDuration();\n\t// Now get ready to scroll the body\n\tthis.cancelScroll();\n\tthis.startTime = Date.now();\n\tvar scrollPosition = $tw.utils.getScrollPosition();\n\t// Get the client bounds of the element and adjust by the scroll position\n\tvar clientBounds = element.getBoundingClientRect(),\n\t\tbounds = {\n\t\t\tleft: clientBounds.left + scrollPosition.x,\n\t\t\ttop: clientBounds.top + scrollPosition.y,\n\t\t\twidth: clientBounds.width,\n\t\t\theight: clientBounds.height\n\t\t};\n\t// We'll consider the horizontal and vertical scroll directions separately via this function\n\t// targetPos/targetSize - position and size of the target element\n\t// currentPos/currentSize - position and size of the current scroll viewport\n\t// returns: new position of the scroll viewport\n\tvar getEndPos = function(targetPos,targetSize,currentPos,currentSize) {\n\t\t\tvar newPos = currentPos;\n\t\t\t// If the target is above/left of the current view, then scroll to it's top/left\n\t\t\tif(targetPos <= currentPos) {\n\t\t\t\tnewPos = targetPos;\n\t\t\t// If the target is smaller than the window and the scroll position is too far up, then scroll till the target is at the bottom of the window\n\t\t\t} else if(targetSize < currentSize && currentPos < (targetPos + targetSize - currentSize)) {\n\t\t\t\tnewPos = targetPos + targetSize - currentSize;\n\t\t\t// If the target is big, then just scroll to the top\n\t\t\t} else if(currentPos < targetPos) {\n\t\t\t\tnewPos = targetPos;\n\t\t\t// Otherwise, stay where we are\n\t\t\t} else {\n\t\t\t\tnewPos = currentPos;\n\t\t\t}\n\t\t\t// If we are scrolling within 50 pixels of the top/left then snap to zero\n\t\t\tif(newPos < 50) {\n\t\t\t\tnewPos = 0;\n\t\t\t}\n\t\t\treturn newPos;\n\t\t},\n\t\tendX = getEndPos(bounds.left,bounds.width,scrollPosition.x,window.innerWidth),\n\t\tendY = getEndPos(bounds.top,bounds.height,scrollPosition.y,window.innerHeight);\n\t// Only scroll if the position has changed\n\tif(endX !== scrollPosition.x || endY !== scrollPosition.y) 
{\n\t\tvar self = this,\n\t\t\tdrawFrame;\n\t\tdrawFrame = function () {\n\t\t\tvar t;\n\t\t\tif(duration <= 0) {\n\t\t\t\tt = 1;\n\t\t\t} else {\n\t\t\t\tt = ((Date.now()) - self.startTime) / duration;\t\n\t\t\t}\n\t\t\tif(t >= 1) {\n\t\t\t\tself.cancelScroll();\n\t\t\t\tt = 1;\n\t\t\t}\n\t\t\tt = $tw.utils.slowInSlowOut(t);\n\t\t\twindow.scrollTo(scrollPosition.x + (endX - scrollPosition.x) * t,scrollPosition.y + (endY - scrollPosition.y) * t);\n\t\t\tif(t < 1) {\n\t\t\t\tself.idRequestFrame = self.requestAnimationFrame.call(window,drawFrame);\n\t\t\t}\n\t\t};\n\t\tdrawFrame();\n\t}\n};\n\nexports.PageScroller = PageScroller;\n\n})();\n",
"title": "$:/core/modules/utils/dom/scroller.js",
"type": "application/javascript",
"module-type": "utils"
},
"$:/core/modules/utils/edition-info.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/edition-info.js\ntype: application/javascript\nmodule-type: utils-node\n\nInformation about the available editions\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar fs = require(\"fs\"),\n\tpath = require(\"path\");\n\nvar editionInfo;\n\nexports.getEditionInfo = function() {\n\tif(!editionInfo) {\n\t\t// Enumerate the edition paths\n\t\tvar editionPaths = $tw.getLibraryItemSearchPaths($tw.config.editionsPath,$tw.config.editionsEnvVar);\n\t\teditionInfo = {};\n\t\tfor(var editionIndex=0; editionIndex<editionPaths.length; editionIndex++) {\n\t\t\tvar editionPath = editionPaths[editionIndex];\n\t\t\t// Enumerate the folders\n\t\t\tvar entries = fs.readdirSync(editionPath);\n\t\t\tfor(var entryIndex=0; entryIndex<entries.length; entryIndex++) {\n\t\t\t\tvar entry = entries[entryIndex];\n\t\t\t\t// Check if directories have a valid tiddlywiki.info\n\t\t\t\tif(!editionInfo[entry] && $tw.utils.isDirectory(path.resolve(editionPath,entry))) {\n\t\t\t\t\tvar info;\n\t\t\t\t\ttry {\n\t\t\t\t\t\tinfo = JSON.parse(fs.readFileSync(path.resolve(editionPath,entry,\"tiddlywiki.info\"),\"utf8\"));\n\t\t\t\t\t} catch(ex) {\n\t\t\t\t\t}\n\t\t\t\t\tif(info) {\n\t\t\t\t\t\teditionInfo[entry] = info;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\treturn editionInfo;\n};\n\n})();\n",
"title": "$:/core/modules/utils/edition-info.js",
"type": "application/javascript",
"module-type": "utils-node"
},
"$:/core/modules/utils/fakedom.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/fakedom.js\ntype: application/javascript\nmodule-type: global\n\nA barebones implementation of DOM interfaces needed by the rendering mechanism.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n// Sequence number used to enable us to track objects for testing\nvar sequenceNumber = null;\n\nvar bumpSequenceNumber = function(object) {\n\tif(sequenceNumber !== null) {\n\t\tobject.sequenceNumber = sequenceNumber++;\n\t}\n};\n\nvar TW_TextNode = function(text) {\n\tbumpSequenceNumber(this);\n\tthis.textContent = text;\n};\n\nObject.defineProperty(TW_TextNode.prototype, \"nodeType\", {\n\tget: function() {\n\t\treturn 3;\n\t}\n});\n\nObject.defineProperty(TW_TextNode.prototype, \"formattedTextContent\", {\n\tget: function() {\n\t\treturn this.textContent.replace(/(\\r?\\n)/g,\"\");\n\t}\n});\n\nvar TW_Element = function(tag,namespace) {\n\tbumpSequenceNumber(this);\n\tthis.isTiddlyWikiFakeDom = true;\n\tthis.tag = tag;\n\tthis.attributes = {};\n\tthis.isRaw = false;\n\tthis.children = [];\n\tthis.style = {};\n\tthis.namespaceURI = namespace || \"http://www.w3.org/1999/xhtml\";\n};\n\nObject.defineProperty(TW_Element.prototype, \"nodeType\", {\n\tget: function() {\n\t\treturn 1;\n\t}\n});\n\nTW_Element.prototype.getAttribute = function(name) {\n\tif(this.isRaw) {\n\t\tthrow \"Cannot getAttribute on a raw TW_Element\";\n\t}\n\treturn this.attributes[name];\n};\n\nTW_Element.prototype.setAttribute = function(name,value) {\n\tif(this.isRaw) {\n\t\tthrow \"Cannot setAttribute on a raw TW_Element\";\n\t}\n\tthis.attributes[name] = value;\n};\n\nTW_Element.prototype.setAttributeNS = function(namespace,name,value) {\n\tthis.setAttribute(name,value);\n};\n\nTW_Element.prototype.removeAttribute = function(name) {\n\tif(this.isRaw) {\n\t\tthrow \"Cannot removeAttribute on a raw TW_Element\";\n\t}\n\tif($tw.utils.hop(this.attributes,name)) {\n\t\tdelete this.attributes[name];\n\t}\n};\n\nTW_Element.prototype.appendChild = function(node) {\n\tthis.children.push(node);\n\tnode.parentNode = this;\n};\n\nTW_Element.prototype.insertBefore = function(node,nextSibling) {\n\tif(nextSibling) {\n\t\tvar p = this.children.indexOf(nextSibling);\n\t\tif(p !== -1) {\n\t\t\tthis.children.splice(p,0,node);\n\t\t\tnode.parentNode = this;\n\t\t} else {\n\t\t\tthis.appendChild(node);\n\t\t}\n\t} else {\n\t\tthis.appendChild(node);\n\t}\n};\n\nTW_Element.prototype.removeChild = function(node) {\n\tvar p = this.children.indexOf(node);\n\tif(p !== -1) {\n\t\tthis.children.splice(p,1);\n\t}\n};\n\nTW_Element.prototype.hasChildNodes = function() {\n\treturn !!this.children.length;\n};\n\nObject.defineProperty(TW_Element.prototype, \"childNodes\", {\n\tget: function() {\n\t\treturn this.children;\n\t}\n});\n\nObject.defineProperty(TW_Element.prototype, \"firstChild\", {\n\tget: function() {\n\t\treturn this.children[0];\n\t}\n});\n\nTW_Element.prototype.addEventListener = function(type,listener,useCapture) {\n\t// Do nothing\n};\n\nObject.defineProperty(TW_Element.prototype, \"tagName\", {\n\tget: function() {\n\t\treturn this.tag || \"\";\n\t}\n});\n\nObject.defineProperty(TW_Element.prototype, \"className\", {\n\tget: function() {\n\t\treturn this.attributes[\"class\"] || \"\";\n\t},\n\tset: function(value) {\n\t\tthis.attributes[\"class\"] = value;\n\t}\n});\n\nObject.defineProperty(TW_Element.prototype, \"value\", {\n\tget: function() {\n\t\treturn this.attributes.value || \"\";\n\t},\n\tset: function(value) 
{\n\t\tthis.attributes.value = value;\n\t}\n});\n\nObject.defineProperty(TW_Element.prototype, \"outerHTML\", {\n\tget: function() {\n\t\tvar output = [],attr,a,v;\n\t\toutput.push(\"<\",this.tag);\n\t\tif(this.attributes) {\n\t\t\tattr = [];\n\t\t\tfor(a in this.attributes) {\n\t\t\t\tattr.push(a);\n\t\t\t}\n\t\t\tattr.sort();\n\t\t\tfor(a=0; a<attr.length; a++) {\n\t\t\t\tv = this.attributes[attr[a]];\n\t\t\t\tif(v !== undefined) {\n\t\t\t\t\toutput.push(\" \",attr[a],\"=\\\"\",$tw.utils.htmlEncode(v),\"\\\"\");\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\tif(this.style) {\n\t\t\tvar style = [];\n\t\t\tfor(var s in this.style) {\n\t\t\t\tstyle.push(s + \":\" + this.style[s] + \";\");\n\t\t\t}\n\t\t\tif(style.length > 0) {\n\t\t\t\toutput.push(\" style=\\\"\",style.join(\"\"),\"\\\"\")\n\t\t\t}\n\t\t}\n\t\toutput.push(\">\");\n\t\tif($tw.config.htmlVoidElements.indexOf(this.tag) === -1) {\n\t\t\toutput.push(this.innerHTML);\n\t\t\toutput.push(\"</\",this.tag,\">\");\n\t\t}\n\t\treturn output.join(\"\");\n\t}\n});\n\nObject.defineProperty(TW_Element.prototype, \"innerHTML\", {\n\tget: function() {\n\t\tif(this.isRaw) {\n\t\t\treturn this.rawHTML;\n\t\t} else {\n\t\t\tvar b = [];\n\t\t\t$tw.utils.each(this.children,function(node) {\n\t\t\t\tif(node instanceof TW_Element) {\n\t\t\t\t\tb.push(node.outerHTML);\n\t\t\t\t} else if(node instanceof TW_TextNode) {\n\t\t\t\t\tb.push($tw.utils.htmlEncode(node.textContent));\n\t\t\t\t}\n\t\t\t});\n\t\t\treturn b.join(\"\");\n\t\t}\n\t},\n\tset: function(value) {\n\t\tthis.isRaw = true;\n\t\tthis.rawHTML = value;\n\t}\n});\n\nObject.defineProperty(TW_Element.prototype, \"textContent\", {\n\tget: function() {\n\t\tif(this.isRaw) {\n\t\t\tthrow \"Cannot get textContent on a raw TW_Element\";\n\t\t} else {\n\t\t\tvar b = [];\n\t\t\t$tw.utils.each(this.children,function(node) {\n\t\t\t\tb.push(node.textContent);\n\t\t\t});\n\t\t\treturn b.join(\"\");\n\t\t}\n\t},\n\tset: function(value) {\n\t\tthis.children = [new TW_TextNode(value)];\n\t}\n});\n\nObject.defineProperty(TW_Element.prototype, \"formattedTextContent\", {\n\tget: function() {\n\t\tif(this.isRaw) {\n\t\t\tthrow \"Cannot get formattedTextContent on a raw TW_Element\";\n\t\t} else {\n\t\t\tvar b = [],\n\t\t\t\tisBlock = $tw.config.htmlBlockElements.indexOf(this.tag) !== -1;\n\t\t\tif(isBlock) {\n\t\t\t\tb.push(\"\\n\");\n\t\t\t}\n\t\t\tif(this.tag === \"li\") {\n\t\t\t\tb.push(\"* \");\n\t\t\t}\n\t\t\t$tw.utils.each(this.children,function(node) {\n\t\t\t\tb.push(node.formattedTextContent);\n\t\t\t});\n\t\t\tif(isBlock) {\n\t\t\t\tb.push(\"\\n\");\n\t\t\t}\n\t\t\treturn b.join(\"\");\n\t\t}\n\t}\n});\n\nvar document = {\n\tsetSequenceNumber: function(value) {\n\t\tsequenceNumber = value;\n\t},\n\tcreateElementNS: function(namespace,tag) {\n\t\treturn new TW_Element(tag,namespace);\n\t},\n\tcreateElement: function(tag) {\n\t\treturn new TW_Element(tag);\n\t},\n\tcreateTextNode: function(text) {\n\t\treturn new TW_TextNode(text);\n\t},\n\tcompatMode: \"CSS1Compat\", // For KaTeX to know that we're not a browser in quirks mode\n\tisTiddlyWikiFakeDom: true\n};\n\nexports.fakeDocument = document;\n\n})();\n",
"title": "$:/core/modules/utils/fakedom.js",
"type": "application/javascript",
"module-type": "global"
},
"$:/core/modules/utils/filesystem.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/filesystem.js\ntype: application/javascript\nmodule-type: utils-node\n\nFile system utilities\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar fs = require(\"fs\"),\n\tpath = require(\"path\");\n\n/*\nRecursively (and synchronously) copy a directory and all its content\n*/\nexports.copyDirectory = function(srcPath,dstPath) {\n\t// Remove any trailing path separators\n\tsrcPath = $tw.utils.removeTrailingSeparator(srcPath);\n\tdstPath = $tw.utils.removeTrailingSeparator(dstPath);\n\t// Create the destination directory\n\tvar err = $tw.utils.createDirectory(dstPath);\n\tif(err) {\n\t\treturn err;\n\t}\n\t// Function to copy a folder full of files\n\tvar copy = function(srcPath,dstPath) {\n\t\tvar srcStats = fs.lstatSync(srcPath),\n\t\t\tdstExists = fs.existsSync(dstPath);\n\t\tif(srcStats.isFile()) {\n\t\t\t$tw.utils.copyFile(srcPath,dstPath);\n\t\t} else if(srcStats.isDirectory()) {\n\t\t\tvar items = fs.readdirSync(srcPath);\n\t\t\tfor(var t=0; t<items.length; t++) {\n\t\t\t\tvar item = items[t],\n\t\t\t\t\terr = copy(srcPath + path.sep + item,dstPath + path.sep + item);\n\t\t\t\tif(err) {\n\t\t\t\t\treturn err;\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t};\n\tcopy(srcPath,dstPath);\n\treturn null;\n};\n\n/*\nCopy a file\n*/\nvar FILE_BUFFER_LENGTH = 64 * 1024,\n\tfileBuffer;\n\nexports.copyFile = function(srcPath,dstPath) {\n\t// Create buffer if required\n\tif(!fileBuffer) {\n\t\tfileBuffer = new Buffer(FILE_BUFFER_LENGTH);\n\t}\n\t// Create any directories in the destination\n\t$tw.utils.createDirectory(path.dirname(dstPath));\n\t// Copy the file\n\tvar srcFile = fs.openSync(srcPath,\"r\"),\n\t\tdstFile = fs.openSync(dstPath,\"w\"),\n\t\tbytesRead = 1,\n\t\tpos = 0;\n\twhile (bytesRead > 0) {\n\t\tbytesRead = fs.readSync(srcFile,fileBuffer,0,FILE_BUFFER_LENGTH,pos);\n\t\tfs.writeSync(dstFile,fileBuffer,0,bytesRead);\n\t\tpos += bytesRead;\n\t}\n\tfs.closeSync(srcFile);\n\tfs.closeSync(dstFile);\n\treturn null;\n};\n\n/*\nRemove trailing path separator\n*/\nexports.removeTrailingSeparator = function(dirPath) {\n\tvar len = dirPath.length;\n\tif(dirPath.charAt(len-1) === path.sep) {\n\t\tdirPath = dirPath.substr(0,len-1);\n\t}\n\treturn dirPath;\n};\n\n/*\nRecursively create a directory\n*/\nexports.createDirectory = function(dirPath) {\n\tif(dirPath.substr(dirPath.length-1,1) !== path.sep) {\n\t\tdirPath = dirPath + path.sep;\n\t}\n\tvar pos = 1;\n\tpos = dirPath.indexOf(path.sep,pos);\n\twhile(pos !== -1) {\n\t\tvar subDirPath = dirPath.substr(0,pos);\n\t\tif(!$tw.utils.isDirectory(subDirPath)) {\n\t\t\ttry {\n\t\t\t\tfs.mkdirSync(subDirPath);\n\t\t\t} catch(e) {\n\t\t\t\treturn \"Error creating directory '\" + subDirPath + \"'\";\n\t\t\t}\n\t\t}\n\t\tpos = dirPath.indexOf(path.sep,pos + 1);\n\t}\n\treturn null;\n};\n\n/*\nRecursively create directories needed to contain a specified file\n*/\nexports.createFileDirectories = function(filePath) {\n\treturn $tw.utils.createDirectory(path.dirname(filePath));\n};\n\n/*\nRecursively delete a directory\n*/\nexports.deleteDirectory = function(dirPath) {\n\tif(fs.existsSync(dirPath)) {\n\t\tvar entries = fs.readdirSync(dirPath);\n\t\tfor(var entryIndex=0; entryIndex<entries.length; entryIndex++) {\n\t\t\tvar currPath = dirPath + path.sep + entries[entryIndex];\n\t\t\tif(fs.lstatSync(currPath).isDirectory()) {\n\t\t\t\t$tw.utils.deleteDirectory(currPath);\n\t\t\t} else 
{\n\t\t\t\tfs.unlinkSync(currPath);\n\t\t\t}\n\t\t}\n\tfs.rmdirSync(dirPath);\n\t}\n\treturn null;\n};\n\n/*\nCheck if a path identifies a directory\n*/\nexports.isDirectory = function(dirPath) {\n\treturn fs.existsSync(dirPath) && fs.statSync(dirPath).isDirectory();\n};\n\n/*\nCheck if a path identifies a directory that is empty\n*/\nexports.isDirectoryEmpty = function(dirPath) {\n\tif(!$tw.utils.isDirectory(dirPath)) {\n\t\treturn false;\n\t}\n\tvar files = fs.readdirSync(dirPath),\n\t\tempty = true;\n\t$tw.utils.each(files,function(file,index) {\n\t\tif(file.charAt(0) !== \".\") {\n\t\t\tempty = false;\n\t\t}\n\t});\n\treturn empty;\n};\n\n/*\nRecursively delete a tree of empty directories\n*/\nexports.deleteEmptyDirs = function(dirpath,callback) {\n\tvar self = this;\n\tfs.readdir(dirpath,function(err,files) {\n\t\tif(err) {\n\t\t\treturn callback(err);\n\t\t}\n\t\tif(files.length > 0) {\n\t\t\treturn callback(null);\n\t\t}\n\t\tfs.rmdir(dirpath,function(err) {\n\t\t\tif(err) {\n\t\t\t\treturn callback(err);\n\t\t\t}\n\t\t\tself.deleteEmptyDirs(path.dirname(dirpath),callback);\n\t\t});\n\t});\n};\n\n})();\n",
"title": "$:/core/modules/utils/filesystem.js",
"type": "application/javascript",
"module-type": "utils-node"
},
"$:/core/modules/utils/logger.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/logger.js\ntype: application/javascript\nmodule-type: utils\n\nA basic logging implementation\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar ALERT_TAG = \"$:/tags/Alert\";\n\n/*\nMake a new logger\n*/\nfunction Logger(componentName) {\n\tthis.componentName = componentName || \"\";\n}\n\n/*\nLog a message\n*/\nLogger.prototype.log = function(/* args */) {\n\tif(console !== undefined && console.log !== undefined) {\n\t\treturn Function.apply.call(console.log, console, [this.componentName + \":\"].concat(Array.prototype.slice.call(arguments,0)));\n\t}\n};\n\n/*\nAlert a message\n*/\nLogger.prototype.alert = function(/* args */) {\n\t// Prepare the text of the alert\n\tvar text = Array.prototype.join.call(arguments,\" \");\n\t// Create alert tiddlers in the browser\n\tif($tw.browser) {\n\t\t// Check if there is an existing alert with the same text and the same component\n\t\tvar existingAlerts = $tw.wiki.getTiddlersWithTag(ALERT_TAG),\n\t\t\talertFields,\n\t\t\texistingCount,\n\t\t\tself = this;\n\t\t$tw.utils.each(existingAlerts,function(title) {\n\t\t\tvar tiddler = $tw.wiki.getTiddler(title);\n\t\t\tif(tiddler.fields.text === text && tiddler.fields.component === self.componentName && tiddler.fields.modified && (!alertFields || tiddler.fields.modified < alertFields.modified)) {\n\t\t\t\t\talertFields = $tw.utils.extend({},tiddler.fields);\n\t\t\t}\n\t\t});\n\t\tif(alertFields) {\n\t\t\texistingCount = alertFields.count || 1;\n\t\t} else {\n\t\t\talertFields = {\n\t\t\t\ttitle: $tw.wiki.generateNewTitle(\"$:/temp/alerts/alert\",{prefix: \"\"}),\n\t\t\t\ttext: text,\n\t\t\t\ttags: [ALERT_TAG],\n\t\t\t\tcomponent: this.componentName\n\t\t\t};\n\t\t\texistingCount = 0;\n\t\t}\n\t\talertFields.modified = new Date();\n\t\tif(++existingCount > 1) {\n\t\t\talertFields.count = existingCount;\n\t\t} else {\n\t\t\talertFields.count = undefined;\n\t\t}\n\t\t$tw.wiki.addTiddler(new $tw.Tiddler(alertFields));\n\t\t// Log the alert as well\n\t\tthis.log.apply(this,Array.prototype.slice.call(arguments,0));\n\t} else {\n\t\t// Print an orange message to the console if not in the browser\n\t\tconsole.error(\"\\x1b[1;33m\" + text + \"\\x1b[0m\");\n\t}\n};\n\nexports.Logger = Logger;\n\n})();\n",
"title": "$:/core/modules/utils/logger.js",
"type": "application/javascript",
"module-type": "utils"
},
"$:/core/modules/utils/parsetree.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/parsetree.js\ntype: application/javascript\nmodule-type: utils\n\nParse tree utility functions.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nexports.addAttributeToParseTreeNode = function(node,name,value) {\n\tnode.attributes = node.attributes || {};\n\tnode.attributes[name] = {type: \"string\", value: value};\n};\n\nexports.getAttributeValueFromParseTreeNode = function(node,name,defaultValue) {\n\tif(node.attributes && node.attributes[name] && node.attributes[name].value !== undefined) {\n\t\treturn node.attributes[name].value;\n\t}\n\treturn defaultValue;\n};\n\nexports.addClassToParseTreeNode = function(node,classString) {\n\tvar classes = [];\n\tnode.attributes = node.attributes || {};\n\tnode.attributes[\"class\"] = node.attributes[\"class\"] || {type: \"string\", value: \"\"};\n\tif(node.attributes[\"class\"].type === \"string\") {\n\t\tif(node.attributes[\"class\"].value !== \"\") {\n\t\t\tclasses = node.attributes[\"class\"].value.split(\" \");\n\t\t}\n\t\tif(classString !== \"\") {\n\t\t\t$tw.utils.pushTop(classes,classString.split(\" \"));\n\t\t}\n\t\tnode.attributes[\"class\"].value = classes.join(\" \");\n\t}\n};\n\nexports.addStyleToParseTreeNode = function(node,name,value) {\n\t\tnode.attributes = node.attributes || {};\n\t\tnode.attributes.style = node.attributes.style || {type: \"string\", value: \"\"};\n\t\tif(node.attributes.style.type === \"string\") {\n\t\t\tnode.attributes.style.value += name + \":\" + value + \";\";\n\t\t}\n};\n\nexports.findParseTreeNode = function(nodeArray,search) {\n\tfor(var t=0; t<nodeArray.length; t++) {\n\t\tif(nodeArray[t].type === search.type && nodeArray[t].tag === search.tag) {\n\t\t\treturn nodeArray[t];\n\t\t}\n\t}\n\treturn undefined;\n};\n\n/*\nHelper to get the text of a parse tree node or array of nodes\n*/\nexports.getParseTreeText = function getParseTreeText(tree) {\n\tvar output = [];\n\tif($tw.utils.isArray(tree)) {\n\t\t$tw.utils.each(tree,function(node) {\n\t\t\toutput.push(getParseTreeText(node));\n\t\t});\n\t} else {\n\t\tif(tree.type === \"text\") {\n\t\t\toutput.push(tree.text);\n\t\t}\n\t\tif(tree.children) {\n\t\t\treturn getParseTreeText(tree.children);\n\t\t}\n\t}\n\treturn output.join(\"\");\n};\n\n})();\n",
"title": "$:/core/modules/utils/parsetree.js",
"type": "application/javascript",
"module-type": "utils"
},
"$:/core/modules/utils/performance.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/performance.js\ntype: application/javascript\nmodule-type: global\n\nPerformance measurement.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nfunction Performance(enabled) {\n\tthis.enabled = !!enabled;\n\tthis.measures = {}; // Hashmap of current values of measurements\n\tthis.logger = new $tw.utils.Logger(\"performance\");\n}\n\n/*\nWrap performance reporting around a top level function\n*/\nPerformance.prototype.report = function(name,fn) {\n\tvar self = this;\n\tif(this.enabled) {\n\t\treturn function() {\n\t\t\tself.measures = {};\n\t\t\tvar startTime = $tw.utils.timer(),\n\t\t\t\tresult = fn.apply(this,arguments);\n\t\t\tself.logger.log(name + \": \" + $tw.utils.timer(startTime).toFixed(2) + \"ms\");\n\t\t\tfor(var m in self.measures) {\n\t\t\t\tself.logger.log(\"+\" + m + \": \" + self.measures[m].toFixed(2) + \"ms\");\n\t\t\t}\n\t\t\treturn result;\n\t\t};\n\t} else {\n\t\treturn fn;\n\t}\n};\n\n/*\nWrap performance measurements around a subfunction\n*/\nPerformance.prototype.measure = function(name,fn) {\n\tvar self = this;\n\tif(this.enabled) {\n\t\treturn function() {\n\t\t\tvar startTime = $tw.utils.timer(),\n\t\t\t\tresult = fn.apply(this,arguments),\n\t\t\t\tvalue = self.measures[name] || 0;\n\t\t\tself.measures[name] = value + $tw.utils.timer(startTime);\n\t\t\treturn result;\n\t\t};\n\t} else {\n\t\treturn fn;\n\t}\n};\n\nexports.Performance = Performance;\n\n})();\n",
"title": "$:/core/modules/utils/performance.js",
"type": "application/javascript",
"module-type": "global"
},
"$:/core/modules/utils/pluginmaker.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/pluginmaker.js\ntype: application/javascript\nmodule-type: utils\n\nA quick and dirty way to pack up plugins within the browser.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nRepack a plugin, and then delete any non-shadow payload tiddlers\n*/\nexports.repackPlugin = function(title,additionalTiddlers,excludeTiddlers) {\n\tadditionalTiddlers = additionalTiddlers || [];\n\texcludeTiddlers = excludeTiddlers || [];\n\t// Get the plugin tiddler\n\tvar pluginTiddler = $tw.wiki.getTiddler(title);\n\tif(!pluginTiddler) {\n\t\tthrow \"No such tiddler as \" + title;\n\t}\n\t// Extract the JSON\n\tvar jsonPluginTiddler;\n\ttry {\n\t\tjsonPluginTiddler = JSON.parse(pluginTiddler.fields.text);\n\t} catch(e) {\n\t\tthrow \"Cannot parse plugin tiddler \" + title + \"\\n\" + $tw.language.getString(\"Error/Caption\") + \": \" + e;\n\t}\n\t// Get the list of tiddlers\n\tvar tiddlers = Object.keys(jsonPluginTiddler.tiddlers);\n\t// Add the additional tiddlers\n\t$tw.utils.pushTop(tiddlers,additionalTiddlers);\n\t// Remove any excluded tiddlers\n\tfor(var t=tiddlers.length-1; t>=0; t--) {\n\t\tif(excludeTiddlers.indexOf(tiddlers[t]) !== -1) {\n\t\t\ttiddlers.splice(t,1);\n\t\t}\n\t}\n\t// Pack up the tiddlers into a block of JSON\n\tvar plugins = {};\n\t$tw.utils.each(tiddlers,function(title) {\n\t\tvar tiddler = $tw.wiki.getTiddler(title),\n\t\t\tfields = {};\n\t\t$tw.utils.each(tiddler.fields,function (value,name) {\n\t\t\tfields[name] = tiddler.getFieldString(name);\n\t\t});\n\t\tplugins[title] = fields;\n\t});\n\t// Retrieve and bump the version number\n\tvar pluginVersion = $tw.utils.parseVersion(pluginTiddler.getFieldString(\"version\") || \"0.0.0\") || {\n\t\t\tmajor: \"0\",\n\t\t\tminor: \"0\",\n\t\t\tpatch: \"0\"\n\t\t};\n\tpluginVersion.patch++;\n\tvar version = pluginVersion.major + \".\" + pluginVersion.minor + \".\" + pluginVersion.patch;\n\tif(pluginVersion.prerelease) {\n\t\tversion += \"-\" + pluginVersion.prerelease;\n\t}\n\tif(pluginVersion.build) {\n\t\tversion += \"+\" + pluginVersion.build;\n\t}\n\t// Save the tiddler\n\t$tw.wiki.addTiddler(new $tw.Tiddler(pluginTiddler,{text: JSON.stringify({tiddlers: plugins},null,4), version: version}));\n\t// Delete any non-shadow constituent tiddlers\n\t$tw.utils.each(tiddlers,function(title) {\n\t\tif($tw.wiki.tiddlerExists(title)) {\n\t\t\t$tw.wiki.deleteTiddler(title);\n\t\t}\n\t});\n\t// Trigger an autosave\n\t$tw.rootWidget.dispatchEvent({type: \"tm-auto-save-wiki\"});\n\t// Return a heartwarming confirmation\n\treturn \"Plugin \" + title + \" successfully saved\";\n};\n\n})();\n",
"title": "$:/core/modules/utils/pluginmaker.js",
"type": "application/javascript",
"module-type": "utils"
},
"$:/core/modules/utils/utils.js": {
"text": "/*\\\ntitle: $:/core/modules/utils/utils.js\ntype: application/javascript\nmodule-type: utils\n\nVarious static utility functions.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nDisplay a warning, in colour if we're on a terminal\n*/\nexports.warning = function(text) {\n\tconsole.log($tw.node ? \"\\x1b[1;33m\" + text + \"\\x1b[0m\" : text);\n};\n\n/*\nRepeats a string\n*/\nexports.repeat = function(str,count) {\n\tvar result = \"\";\n\tfor(var t=0;t<count;t++) {\n\t\tresult += str;\n\t}\n\treturn result;\n};\n\n/*\nTrim whitespace from the start and end of a string\nThanks to Steven Levithan, http://blog.stevenlevithan.com/archives/faster-trim-javascript\n*/\nexports.trim = function(str) {\n\tif(typeof str === \"string\") {\n\t\treturn str.replace(/^\\s\\s*/, '').replace(/\\s\\s*$/, '');\n\t} else {\n\t\treturn str;\n\t}\n};\n\n/*\nFind the line break preceding a given position in a string\nReturns position immediately after that line break, or the start of the string\n*/\nexports.findPrecedingLineBreak = function(text,pos) {\n\tvar result = text.lastIndexOf(\"\\n\",pos - 1);\n\tif(result === -1) {\n\t\tresult = 0;\n\t} else {\n\t\tresult++;\n\t\tif(text.charAt(result) === \"\\r\") {\n\t\t\tresult++;\n\t\t}\n\t}\n\treturn result;\n};\n\n/*\nFind the line break following a given position in a string\n*/\nexports.findFollowingLineBreak = function(text,pos) {\n\t// Cut to just past the following line break, or to the end of the text\n\tvar result = text.indexOf(\"\\n\",pos);\n\tif(result === -1) {\n\t\tresult = text.length;\n\t} else {\n\t\tif(text.charAt(result) === \"\\r\") {\n\t\t\tresult++;\n\t\t}\n\t}\n\treturn result;\n};\n\n/*\nReturn the number of keys in an object\n*/\nexports.count = function(object) {\n\treturn Object.keys(object || {}).length;\n};\n\n/*\nCheck if an array is equal by value and by reference.\n*/\nexports.isArrayEqual = function(array1,array2) {\n\tif(array1 === array2) {\n\t\treturn true;\n\t}\n\tarray1 = array1 || [];\n\tarray2 = array2 || [];\n\tif(array1.length !== array2.length) {\n\t\treturn false;\n\t}\n\treturn array1.every(function(value,index) {\n\t\treturn value === array2[index];\n\t});\n};\n\n/*\nPush entries onto an array, removing them first if they already exist in the array\n\tarray: array to modify (assumed to be free of duplicates)\n\tvalue: a single value to push or an array of values to push\n*/\nexports.pushTop = function(array,value) {\n\tvar t,p;\n\tif($tw.utils.isArray(value)) {\n\t\t// Remove any array entries that are duplicated in the new values\n\t\tif(value.length !== 0) {\n\t\t\tif(array.length !== 0) {\n\t\t\t\tif(value.length < array.length) {\n\t\t\t\t\tfor(t=0; t<value.length; t++) {\n\t\t\t\t\t\tp = array.indexOf(value[t]);\n\t\t\t\t\t\tif(p !== -1) {\n\t\t\t\t\t\t\tarray.splice(p,1);\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t} else {\n\t\t\t\t\tfor(t=array.length-1; t>=0; t--) {\n\t\t\t\t\t\tp = value.indexOf(array[t]);\n\t\t\t\t\t\tif(p !== -1) {\n\t\t\t\t\t\t\tarray.splice(t,1);\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\t// Push the values on top of the main array\n\t\t\tarray.push.apply(array,value);\n\t\t}\n\t} else {\n\t\tp = array.indexOf(value);\n\t\tif(p !== -1) {\n\t\t\tarray.splice(p,1);\n\t\t}\n\t\tarray.push(value);\n\t}\n\treturn array;\n};\n\n/*\nRemove entries from an array\n\tarray: array to modify\n\tvalue: a single value to remove, or an array of values to remove\n*/\nexports.removeArrayEntries = function(array,value) {\n\tvar 
t,p;\n\tif($tw.utils.isArray(value)) {\n\t\tfor(t=0; t<value.length; t++) {\n\t\t\tp = array.indexOf(value[t]);\n\t\t\tif(p !== -1) {\n\t\t\t\tarray.splice(p,1);\n\t\t\t}\n\t\t}\n\t} else {\n\t\tp = array.indexOf(value);\n\t\tif(p !== -1) {\n\t\t\tarray.splice(p,1);\n\t\t}\n\t}\n};\n\n/*\nCheck whether any members of a hashmap are present in another hashmap\n*/\nexports.checkDependencies = function(dependencies,changes) {\n\tvar hit = false;\n\t$tw.utils.each(changes,function(change,title) {\n\t\tif($tw.utils.hop(dependencies,title)) {\n\t\t\thit = true;\n\t\t}\n\t});\n\treturn hit;\n};\n\nexports.extend = function(object /* [, src] */) {\n\t$tw.utils.each(Array.prototype.slice.call(arguments, 1), function(source) {\n\t\tif(source) {\n\t\t\tfor(var property in source) {\n\t\t\t\tobject[property] = source[property];\n\t\t\t}\n\t\t}\n\t});\n\treturn object;\n};\n\nexports.deepCopy = function(object) {\n\tvar result,t;\n\tif($tw.utils.isArray(object)) {\n\t\t// Copy arrays\n\t\tresult = object.slice(0);\n\t} else if(typeof object === \"object\") {\n\t\tresult = {};\n\t\tfor(t in object) {\n\t\t\tif(object[t] !== undefined) {\n\t\t\t\tresult[t] = $tw.utils.deepCopy(object[t]);\n\t\t\t}\n\t\t}\n\t} else {\n\t\tresult = object;\n\t}\n\treturn result;\n};\n\nexports.extendDeepCopy = function(object,extendedProperties) {\n\tvar result = $tw.utils.deepCopy(object),t;\n\tfor(t in extendedProperties) {\n\t\tif(extendedProperties[t] !== undefined) {\n\t\t\tresult[t] = $tw.utils.deepCopy(extendedProperties[t]);\n\t\t}\n\t}\n\treturn result;\n};\n\nexports.deepFreeze = function deepFreeze(object) {\n\tvar property, key;\n\tObject.freeze(object);\n\tfor(key in object) {\n\t\tproperty = object[key];\n\t\tif($tw.utils.hop(object,key) && (typeof property === \"object\") && !Object.isFrozen(property)) {\n\t\t\tdeepFreeze(property);\n\t\t}\n\t}\n};\n\nexports.slowInSlowOut = function(t) {\n\treturn (1 - ((Math.cos(t * Math.PI) + 1) / 2));\n};\n\nexports.formatDateString = function(date,template) {\n\tvar result = \"\",\n\t\tt = template,\n\t\tmatches = [\n\t\t\t[/^0hh12/, function() {\n\t\t\t\treturn $tw.utils.pad($tw.utils.getHours12(date));\n\t\t\t}],\n\t\t\t[/^wYYYY/, function() {\n\t\t\t\treturn $tw.utils.getYearForWeekNo(date);\n\t\t\t}],\n\t\t\t[/^hh12/, function() {\n\t\t\t\treturn $tw.utils.getHours12(date);\n\t\t\t}],\n\t\t\t[/^DDth/, function() {\n\t\t\t\treturn date.getDate() + $tw.utils.getDaySuffix(date);\n\t\t\t}],\n\t\t\t[/^YYYY/, function() {\n\t\t\t\treturn date.getFullYear();\n\t\t\t}],\n\t\t\t[/^0hh/, function() {\n\t\t\t\treturn $tw.utils.pad(date.getHours());\n\t\t\t}],\n\t\t\t[/^0mm/, function() {\n\t\t\t\treturn $tw.utils.pad(date.getMinutes());\n\t\t\t}],\n\t\t\t[/^0ss/, function() {\n\t\t\t\treturn $tw.utils.pad(date.getSeconds());\n\t\t\t}],\n\t\t\t[/^0DD/, function() {\n\t\t\t\treturn $tw.utils.pad(date.getDate());\n\t\t\t}],\n\t\t\t[/^0MM/, function() {\n\t\t\t\treturn $tw.utils.pad(date.getMonth()+1);\n\t\t\t}],\n\t\t\t[/^0WW/, function() {\n\t\t\t\treturn $tw.utils.pad($tw.utils.getWeek(date));\n\t\t\t}],\n\t\t\t[/^ddd/, function() {\n\t\t\t\treturn $tw.language.getString(\"Date/Short/Day/\" + date.getDay());\n\t\t\t}],\n\t\t\t[/^mmm/, function() {\n\t\t\t\treturn $tw.language.getString(\"Date/Short/Month/\" + (date.getMonth() + 1));\n\t\t\t}],\n\t\t\t[/^DDD/, function() {\n\t\t\t\treturn $tw.language.getString(\"Date/Long/Day/\" + date.getDay());\n\t\t\t}],\n\t\t\t[/^MMM/, function() {\n\t\t\t\treturn $tw.language.getString(\"Date/Long/Month/\" + (date.getMonth() + 
1));\n\t\t\t}],\n\t\t\t[/^TZD/, function() {\n\t\t\t\tvar tz = date.getTimezoneOffset(),\n\t\t\t\tatz = Math.abs(tz);\n\t\t\t\treturn (tz < 0 ? '+' : '-') + $tw.utils.pad(Math.floor(atz / 60)) + ':' + $tw.utils.pad(atz % 60);\n\t\t\t}],\n\t\t\t[/^wYY/, function() {\n\t\t\t\treturn $tw.utils.pad($tw.utils.getYearForWeekNo(date) - 2000);\n\t\t\t}],\n\t\t\t[/^[ap]m/, function() {\n\t\t\t\treturn $tw.utils.getAmPm(date).toLowerCase();\n\t\t\t}],\n\t\t\t[/^hh/, function() {\n\t\t\t\treturn date.getHours();\n\t\t\t}],\n\t\t\t[/^mm/, function() {\n\t\t\t\treturn date.getMinutes();\n\t\t\t}],\n\t\t\t[/^ss/, function() {\n\t\t\t\treturn date.getSeconds();\n\t\t\t}],\n\t\t\t[/^[AP]M/, function() {\n\t\t\t\treturn $tw.utils.getAmPm(date).toUpperCase();\n\t\t\t}],\n\t\t\t[/^DD/, function() {\n\t\t\t\treturn date.getDate();\n\t\t\t}],\n\t\t\t[/^MM/, function() {\n\t\t\t\treturn date.getMonth() + 1;\n\t\t\t}],\n\t\t\t[/^WW/, function() {\n\t\t\t\treturn $tw.utils.getWeek(date);\n\t\t\t}],\n\t\t\t[/^YY/, function() {\n\t\t\t\treturn $tw.utils.pad(date.getFullYear() - 2000);\n\t\t\t}]\n\t\t];\n\twhile(t.length){\n\t\tvar matchString = \"\";\n\t\t$tw.utils.each(matches, function(m) {\n\t\t\tvar match = m[0].exec(t);\n\t\t\tif(match) {\n\t\t\t\tmatchString = m[1].call();\n\t\t\t\tt = t.substr(match[0].length);\n\t\t\t\treturn false;\n\t\t\t}\n\t\t});\n\t\tif(matchString) {\n\t\t\tresult += matchString;\n\t\t} else {\n\t\t\tresult += t.charAt(0);\n\t\t\tt = t.substr(1);\n\t\t}\n\t}\n\tresult = result.replace(/\\\\(.)/g,\"$1\");\n\treturn result;\n};\n\nexports.getAmPm = function(date) {\n\treturn $tw.language.getString(\"Date/Period/\" + (date.getHours() >= 12 ? \"pm\" : \"am\"));\n};\n\nexports.getDaySuffix = function(date) {\n\treturn $tw.language.getString(\"Date/DaySuffix/\" + date.getDate());\n};\n\nexports.getWeek = function(date) {\n\tvar dt = new Date(date.getTime());\n\tvar d = dt.getDay();\n\tif(d === 0) {\n\t\td = 7; // JavaScript Sun=0, ISO Sun=7\n\t}\n\tdt.setTime(dt.getTime() + (4 - d) * 86400000);// shift day to Thurs of same week to calculate weekNo\n\tvar n = Math.floor((dt.getTime()-new Date(dt.getFullYear(),0,1) + 3600000) / 86400000);\n\treturn Math.floor(n / 7) + 1;\n};\n\nexports.getYearForWeekNo = function(date) {\n\tvar dt = new Date(date.getTime());\n\tvar d = dt.getDay();\n\tif(d === 0) {\n\t\td = 7; // JavaScript Sun=0, ISO Sun=7\n\t}\n\tdt.setTime(dt.getTime() + (4 - d) * 86400000);// shift day to Thurs of same week\n\treturn dt.getFullYear();\n};\n\nexports.getHours12 = function(date) {\n\tvar h = date.getHours();\n\treturn h > 12 ? h-12 : ( h > 0 ? 
h : 12 );\n};\n\n/*\nConvert a date delta in milliseconds into a string representation of \"23 seconds ago\", \"27 minutes ago\" etc.\n\tdelta: delta in milliseconds\nReturns an object with these members:\n\tdescription: string describing the delta period\n\tupdatePeriod: time in millisecond until the string will be inaccurate\n*/\nexports.getRelativeDate = function(delta) {\n\tvar futurep = false;\n\tif(delta < 0) {\n\t\tdelta = -1 * delta;\n\t\tfuturep = true;\n\t}\n\tvar units = [\n\t\t{name: \"Years\", duration: 365 * 24 * 60 * 60 * 1000},\n\t\t{name: \"Months\", duration: (365/12) * 24 * 60 * 60 * 1000},\n\t\t{name: \"Days\", duration: 24 * 60 * 60 * 1000},\n\t\t{name: \"Hours\", duration: 60 * 60 * 1000},\n\t\t{name: \"Minutes\", duration: 60 * 1000},\n\t\t{name: \"Seconds\", duration: 1000}\n\t];\n\tfor(var t=0; t<units.length; t++) {\n\t\tvar result = Math.floor(delta / units[t].duration);\n\t\tif(result >= 2) {\n\t\t\treturn {\n\t\t\t\tdelta: delta,\n\t\t\t\tdescription: $tw.language.getString(\n\t\t\t\t\t\"RelativeDate/\" + (futurep ? \"Future\" : \"Past\") + \"/\" + units[t].name,\n\t\t\t\t\t{variables:\n\t\t\t\t\t\t{period: result.toString()}\n\t\t\t\t\t}\n\t\t\t\t),\n\t\t\t\tupdatePeriod: units[t].duration\n\t\t\t};\n\t\t}\n\t}\n\treturn {\n\t\tdelta: delta,\n\t\tdescription: $tw.language.getString(\n\t\t\t\"RelativeDate/\" + (futurep ? \"Future\" : \"Past\") + \"/Second\",\n\t\t\t{variables:\n\t\t\t\t{period: \"1\"}\n\t\t\t}\n\t\t),\n\t\tupdatePeriod: 1000\n\t};\n};\n\n// Convert & to \"&\", < to \"<\", > to \">\", \" to \""\"\nexports.htmlEncode = function(s) {\n\tif(s) {\n\t\treturn s.toString().replace(/&/mg,\"&\").replace(/</mg,\"<\").replace(/>/mg,\">\").replace(/\\\"/mg,\""\");\n\t} else {\n\t\treturn \"\";\n\t}\n};\n\n// Converts all HTML entities to their character equivalents\nexports.entityDecode = function(s) {\n\tvar converter = String.fromCodePoint || String.fromCharCode,\n\t\te = s.substr(1,s.length-2); // Strip the & and the ;\n\tif(e.charAt(0) === \"#\") {\n\t\tif(e.charAt(1) === \"x\" || e.charAt(1) === \"X\") {\n\t\t\treturn converter(parseInt(e.substr(2),16));\t\n\t\t} else {\n\t\t\treturn converter(parseInt(e.substr(1),10));\n\t\t}\n\t} else {\n\t\tvar c = $tw.config.htmlEntities[e];\n\t\tif(c) {\n\t\t\treturn converter(c);\n\t\t} else {\n\t\t\treturn s; // Couldn't convert it as an entity, just return it raw\n\t\t}\n\t}\n};\n\nexports.unescapeLineBreaks = function(s) {\n\treturn s.replace(/\\\\n/mg,\"\\n\").replace(/\\\\b/mg,\" \").replace(/\\\\s/mg,\"\\\\\").replace(/\\r/mg,\"\");\n};\n\n/*\n * Returns an escape sequence for given character. Uses \\x for characters <=\n * 0xFF to save space, \\u for the rest.\n *\n * The code needs to be in sync with th code template in the compilation\n * function for \"action\" nodes.\n */\n// Copied from peg.js, thanks to David Majda\nexports.escape = function(ch) {\n\tvar charCode = ch.charCodeAt(0);\n\tif(charCode <= 0xFF) {\n\t\treturn '\\\\x' + $tw.utils.pad(charCode.toString(16).toUpperCase());\n\t} else {\n\t\treturn '\\\\u' + $tw.utils.pad(charCode.toString(16).toUpperCase(),4);\n\t}\n};\n\n// Turns a string into a legal JavaScript string\n// Copied from peg.js, thanks to David Majda\nexports.stringify = function(s) {\n\t/*\n\t* ECMA-262, 5th ed., 7.8.4: All characters may appear literally in a string\n\t* literal except for the closing quote character, backslash, carriage return,\n\t* line separator, paragraph separator, and line feed. 
Any character may\n\t* appear in the form of an escape sequence.\n\t*\n\t* For portability, we also escape all non-ASCII characters.\n\t*/\n\treturn (s || \"\")\n\t\t.replace(/\\\\/g, '\\\\\\\\') // backslash\n\t\t.replace(/\"/g, '\\\\\"') // double quote character\n\t\t.replace(/'/g, \"\\\\'\") // single quote character\n\t\t.replace(/\\r/g, '\\\\r') // carriage return\n\t\t.replace(/\\n/g, '\\\\n') // line feed\n\t\t.replace(/[\\x80-\\uFFFF]/g, exports.escape); // non-ASCII characters\n};\n\n/*\nEscape the RegExp special characters with a preceding backslash\n*/\nexports.escapeRegExp = function(s) {\n return s.replace(/[\\-\\/\\\\\\^\\$\\*\\+\\?\\.\\(\\)\\|\\[\\]\\{\\}]/g, '\\\\$&');\n};\n\n// Checks whether a link target is external, i.e. not a tiddler title\nexports.isLinkExternal = function(to) {\n\tvar externalRegExp = /^(?:file|http|https|mailto|ftp|irc|news|data|skype):[^\\s<>{}\\[\\]`|\"\\\\^]+(?:\\/|\\b)/i;\n\treturn externalRegExp.test(to);\n};\n\nexports.nextTick = function(fn) {\n/*global window: false */\n\tif(typeof process === \"undefined\") {\n\t\t// Apparently it would be faster to use postMessage - http://dbaron.org/log/20100309-faster-timeouts\n\t\twindow.setTimeout(fn,4);\n\t} else {\n\t\tprocess.nextTick(fn);\n\t}\n};\n\n/*\nConvert a hyphenated CSS property name into a camel case one\n*/\nexports.unHyphenateCss = function(propName) {\n\treturn propName.replace(/-([a-z])/gi, function(match0,match1) {\n\t\treturn match1.toUpperCase();\n\t});\n};\n\n/*\nConvert a camelcase CSS property name into a dashed one (\"backgroundColor\" --> \"background-color\")\n*/\nexports.hyphenateCss = function(propName) {\n\treturn propName.replace(/([A-Z])/g, function(match0,match1) {\n\t\treturn \"-\" + match1.toLowerCase();\n\t});\n};\n\n/*\nParse a text reference of one of these forms:\n* title\n* !!field\n* title!!field\n* title##index\n* etc\nReturns an object with the following fields, all optional:\n* title: tiddler title\n* field: tiddler field name\n* index: JSON property index\n*/\nexports.parseTextReference = function(textRef) {\n\t// Separate out the title, field name and/or JSON indices\n\tvar reTextRef = /(?:(.*?)!!(.+))|(?:(.*?)##(.+))|(.*)/mg,\n\t\tmatch = reTextRef.exec(textRef),\n\t\tresult = {};\n\tif(match && reTextRef.lastIndex === textRef.length) {\n\t\t// Return the parts\n\t\tif(match[1]) {\n\t\t\tresult.title = match[1];\n\t\t}\n\t\tif(match[2]) {\n\t\t\tresult.field = match[2];\n\t\t}\n\t\tif(match[3]) {\n\t\t\tresult.title = match[3];\n\t\t}\n\t\tif(match[4]) {\n\t\t\tresult.index = match[4];\n\t\t}\n\t\tif(match[5]) {\n\t\t\tresult.title = match[5];\n\t\t}\n\t} else {\n\t\t// If we couldn't parse it\n\t\tresult.title = textRef\n\t}\n\treturn result;\n};\n\n/*\nChecks whether a string is a valid fieldname\n*/\nexports.isValidFieldName = function(name) {\n\tif(!name || typeof name !== \"string\") {\n\t\treturn false;\n\t}\n\tname = name.toLowerCase().trim();\n\tvar fieldValidatorRegEx = /^[a-z0-9\\-\\._]+$/mg;\n\treturn fieldValidatorRegEx.test(name);\n};\n\n/*\nExtract the version number from the meta tag or from the boot file\n*/\n\n// Browser version\nexports.extractVersionInfo = function() {\n\tif($tw.packageInfo) {\n\t\treturn $tw.packageInfo.version;\n\t} else {\n\t\tvar metatags = document.getElementsByTagName(\"meta\");\n\t\tfor(var t=0; t<metatags.length; t++) {\n\t\t\tvar m = metatags[t];\n\t\t\tif(m.name === \"tiddlywiki-version\") {\n\t\t\t\treturn m.content;\n\t\t\t}\n\t\t}\n\t}\n\treturn null;\n};\n\n/*\nGet the animation duration in 
ms\n*/\nexports.getAnimationDuration = function() {\n\treturn parseInt($tw.wiki.getTiddlerText(\"$:/config/AnimationDuration\",\"400\"),10);\n};\n\n/*\nHash a string to a number\nDerived from http://stackoverflow.com/a/15710692\n*/\nexports.hashString = function(str) {\n\treturn str.split(\"\").reduce(function(a,b) {\n\t\ta = ((a << 5) - a) + b.charCodeAt(0);\n\t\treturn a & a;\n\t},0);\n};\n\n/*\nDecode a base64 string\n*/\nexports.base64Decode = function(string64) {\n\tif($tw.browser) {\n\t\t// TODO\n\t\tthrow \"$tw.utils.base64Decode() doesn't work in the browser\";\n\t} else {\n\t\treturn (new Buffer(string64,\"base64\")).toString();\n\t}\n};\n\n/*\nConvert a hashmap into a tiddler dictionary format sequence of name:value pairs\n*/\nexports.makeTiddlerDictionary = function(data) {\n\tvar output = [];\n\tfor(var name in data) {\n\t\toutput.push(name + \": \" + data[name]);\n\t}\n\treturn output.join(\"\\n\");\n};\n\n/*\nHigh resolution microsecond timer for profiling\n*/\nexports.timer = function(base) {\n\tvar m;\n\tif($tw.node) {\n\t\tvar r = process.hrtime();\t\t\n\t\tm = r[0] * 1e3 + (r[1] / 1e6);\n\t} else if(window.performance) {\n\t\tm = performance.now();\n\t} else {\n\t\tm = Date.now();\n\t}\n\tif(typeof base !== \"undefined\") {\n\t\tm = m - base;\n\t}\n\treturn m;\n};\n\n/*\nConvert text and content type to a data URI\n*/\nexports.makeDataUri = function(text,type) {\n\ttype = type || \"text/vnd.tiddlywiki\";\n\tvar typeInfo = $tw.config.contentTypeInfo[type] || $tw.config.contentTypeInfo[\"text/plain\"],\n\t\tisBase64 = typeInfo.encoding === \"base64\",\n\t\tparts = [];\n\tparts.push(\"data:\");\n\tparts.push(type);\n\tparts.push(isBase64 ? \";base64\" : \"\");\n\tparts.push(\",\");\n\tparts.push(isBase64 ? text : encodeURIComponent(text));\n\treturn parts.join(\"\");\n};\n\n/*\nUseful for finding out the fully escaped CSS selector equivalent to a given tag. For example:\n\n$tw.utils.tagToCssSelector(\"$:/tags/Stylesheet\") --> tc-tagged-\\%24\\%3A\\%2Ftags\\%2FStylesheet\n*/\nexports.tagToCssSelector = function(tagName) {\n\treturn \"tc-tagged-\" + encodeURIComponent(tagName).replace(/[!\"#$%&'()*+,\\-./:;<=>?@[\\\\\\]^`{\\|}~,]/mg,function(c) {\n\t\treturn \"\\\\\" + c;\n\t});\n};\n\n\n/*\nIE does not have sign function\n*/\nexports.sign = Math.sign || function(x) {\n\tx = +x; // convert to a number\n\tif (x === 0 || isNaN(x)) {\n\t\treturn x;\n\t}\n\treturn x > 0 ? 1 : -1;\n};\n\n/*\nIE does not have an endsWith function\n*/\nexports.strEndsWith = function(str,ending,position) {\n\tif(str.endsWith) {\n\t\treturn str.endsWith(ending,position);\n\t} else {\n\t\tif (typeof position !== 'number' || !isFinite(position) || Math.floor(position) !== position || position > str.length) {\n\t\t\tposition = str.length;\n\t\t}\n\t\tposition -= str.length;\n\t\tvar lastIndex = str.indexOf(ending, position);\n\t\treturn lastIndex !== -1 && lastIndex === position;\n\t}\n};\n\n})();\n",
"title": "$:/core/modules/utils/utils.js",
"type": "application/javascript",
"module-type": "utils"
},
"$:/core/modules/widgets/action-deletefield.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/action-deletefield.js\ntype: application/javascript\nmodule-type: widget\n\nAction widget to delete fields of a tiddler.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar DeleteFieldWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nDeleteFieldWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nDeleteFieldWidget.prototype.render = function(parent,nextSibling) {\n\tthis.computeAttributes();\n\tthis.execute();\n};\n\n/*\nCompute the internal state of the widget\n*/\nDeleteFieldWidget.prototype.execute = function() {\n\tthis.actionTiddler = this.getAttribute(\"$tiddler\",this.getVariable(\"currentTiddler\"));\n\tthis.actionField = this.getAttribute(\"$field\");\n};\n\n/*\nRefresh the widget by ensuring our attributes are up to date\n*/\nDeleteFieldWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes[\"$tiddler\"]) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t}\n\treturn this.refreshChildren(changedTiddlers);\n};\n\n/*\nInvoke the action associated with this widget\n*/\nDeleteFieldWidget.prototype.invokeAction = function(triggeringWidget,event) {\n\tvar self = this,\n\t\ttiddler = this.wiki.getTiddler(self.actionTiddler),\n\t\tremoveFields = {};\n\tif(this.actionField) {\n\t\tremoveFields[this.actionField] = undefined;\n\t}\n\tif(tiddler) {\n\t\t$tw.utils.each(this.attributes,function(attribute,name) {\n\t\t\tif(name.charAt(0) !== \"$\" && name !== \"title\") {\n\t\t\t\tremoveFields[name] = undefined;\n\t\t\t}\n\t\t});\n\t\tthis.wiki.addTiddler(new $tw.Tiddler(this.wiki.getModificationFields(),tiddler,removeFields,this.wiki.getCreationFields()));\n\t}\n\treturn true; // Action was invoked\n};\n\nexports[\"action-deletefield\"] = DeleteFieldWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/action-deletefield.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/action-deletetiddler.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/action-deletetiddler.js\ntype: application/javascript\nmodule-type: widget\n\nAction widget to delete a tiddler.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar DeleteTiddlerWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nDeleteTiddlerWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nDeleteTiddlerWidget.prototype.render = function(parent,nextSibling) {\n\tthis.computeAttributes();\n\tthis.execute();\n};\n\n/*\nCompute the internal state of the widget\n*/\nDeleteTiddlerWidget.prototype.execute = function() {\n\tthis.actionFilter = this.getAttribute(\"$filter\");\n\tthis.actionTiddler = this.getAttribute(\"$tiddler\");\n};\n\n/*\nRefresh the widget by ensuring our attributes are up to date\n*/\nDeleteTiddlerWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes[\"$filter\"] || changedAttributes[\"$tiddler\"]) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t}\n\treturn this.refreshChildren(changedTiddlers);\n};\n\n/*\nInvoke the action associated with this widget\n*/\nDeleteTiddlerWidget.prototype.invokeAction = function(triggeringWidget,event) {\n\tvar tiddlers = [];\n\tif(this.actionFilter) {\n\t\ttiddlers = this.wiki.filterTiddlers(this.actionFilter,this);\n\t}\n\tif(this.actionTiddler) {\n\t\ttiddlers.push(this.actionTiddler);\n\t}\n\tfor(var t=0; t<tiddlers.length; t++) {\n\t\tthis.wiki.deleteTiddler(tiddlers[t]);\n\t}\n\treturn true; // Action was invoked\n};\n\nexports[\"action-deletetiddler\"] = DeleteTiddlerWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/action-deletetiddler.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/action-listops.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/action-listops.js\ntype: application/javascript\nmodule-type: widget\n\nAction widget to apply list operations to any tiddler field (defaults to the 'list' field of the current tiddler)\n\n\\*/\n(function() {\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\nvar ActionListopsWidget = function(parseTreeNode, options) {\n\tthis.initialise(parseTreeNode, options);\n};\n/**\n * Inherit from the base widget class\n */\nActionListopsWidget.prototype = new Widget();\n/**\n * Render this widget into the DOM\n */\nActionListopsWidget.prototype.render = function(parent, nextSibling) {\n\tthis.computeAttributes();\n\tthis.execute();\n};\n/**\n * Compute the internal state of the widget\n */\nActionListopsWidget.prototype.execute = function() {\n\t// Get our parameters\n\tthis.target = this.getAttribute(\"$tiddler\", this.getVariable(\n\t\t\"currentTiddler\"));\n\tthis.filter = this.getAttribute(\"$filter\");\n\tthis.subfilter = this.getAttribute(\"$subfilter\");\n\tthis.listField = this.getAttribute(\"$field\", \"list\");\n\tthis.listIndex = this.getAttribute(\"$index\");\n\tthis.filtertags = this.getAttribute(\"$tags\");\n};\n/**\n * \tRefresh the widget by ensuring our attributes are up to date\n */\nActionListopsWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes.$tiddler || changedAttributes.$filter ||\n\t\tchangedAttributes.$subfilter || changedAttributes.$field ||\n\t\tchangedAttributes.$index || changedAttributes.$tags) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t}\n\treturn this.refreshChildren(changedTiddlers);\n};\n/**\n * \tInvoke the action associated with this widget\n */\nActionListopsWidget.prototype.invokeAction = function(triggeringWidget,\n\tevent) {\n\t//Apply the specified filters to the lists\n\tvar field = this.listField,\n\t\tindex,\n\t\ttype = \"!!\",\n\t\tlist = this.listField;\n\tif(this.listIndex) {\n\t\tfield = undefined;\n\t\tindex = this.listIndex;\n\t\ttype = \"##\";\n\t\tlist = this.listIndex;\n\t}\n\tif(this.filter) {\n\t\tthis.wiki.setText(this.target, field, index, $tw.utils.stringifyList(\n\t\t\tthis.wiki\n\t\t\t.filterTiddlers(this.filter, this)));\n\t}\n\tif(this.subfilter) {\n\t\tvar subfilter = \"[list[\" + this.target + type + list + \"]] \" + this.subfilter;\n\t\tthis.wiki.setText(this.target, field, index, $tw.utils.stringifyList(\n\t\t\tthis.wiki\n\t\t\t.filterTiddlers(subfilter, this)));\n\t}\n\tif(this.filtertags) {\n\t\tvar tagfilter = \"[list[\" + this.target + \"!!tags]] \" + this.filtertags;\n\t\tthis.wiki.setText(this.target, \"tags\", undefined, $tw.utils.stringifyList(\n\t\t\tthis.wiki.filterTiddlers(tagfilter, this)));\n\t}\n\treturn true; // Action was invoked\n};\n\nexports[\"action-listops\"] = ActionListopsWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/action-listops.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/action-navigate.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/action-navigate.js\ntype: application/javascript\nmodule-type: widget\n\nAction widget to navigate to a tiddler\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar NavigateWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nNavigateWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nNavigateWidget.prototype.render = function(parent,nextSibling) {\n\tthis.computeAttributes();\n\tthis.execute();\n};\n\n/*\nCompute the internal state of the widget\n*/\nNavigateWidget.prototype.execute = function() {\n\tthis.actionTo = this.getAttribute(\"$to\");\n\tthis.actionScroll = this.getAttribute(\"$scroll\");\n};\n\n/*\nRefresh the widget by ensuring our attributes are up to date\n*/\nNavigateWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes[\"$to\"] || changedAttributes[\"$scroll\"]) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t}\n\treturn this.refreshChildren(changedTiddlers);\n};\n\n/*\nInvoke the action associated with this widget\n*/\nNavigateWidget.prototype.invokeAction = function(triggeringWidget,event) {\n\tvar bounds = triggeringWidget && triggeringWidget.getBoundingClientRect && triggeringWidget.getBoundingClientRect(),\n\t\tsuppressNavigation = event.metaKey || event.ctrlKey || (event.button === 1);\n\tif(this.actionScroll === \"yes\") {\n\t\tsuppressNavigation = false;\n\t} else if(this.actionScroll === \"no\") {\n\t\tsuppressNavigation = true;\n\t}\n\tthis.dispatchEvent({\n\t\ttype: \"tm-navigate\",\n\t\tnavigateTo: this.actionTo === undefined ? this.getVariable(\"currentTiddler\") : this.actionTo,\n\t\tnavigateFromTitle: this.getVariable(\"storyTiddler\"),\n\t\tnavigateFromNode: triggeringWidget,\n\t\tnavigateFromClientRect: bounds && { top: bounds.top, left: bounds.left, width: bounds.width, right: bounds.right, bottom: bounds.bottom, height: bounds.height\n\t\t},\n\t\tnavigateSuppressNavigation: suppressNavigation\n\t});\n\treturn true; // Action was invoked\n};\n\nexports[\"action-navigate\"] = NavigateWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/action-navigate.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/action-sendmessage.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/action-sendmessage.js\ntype: application/javascript\nmodule-type: widget\n\nAction widget to send a message\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar SendMessageWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nSendMessageWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nSendMessageWidget.prototype.render = function(parent,nextSibling) {\n\tthis.computeAttributes();\n\tthis.execute();\n};\n\n/*\nCompute the internal state of the widget\n*/\nSendMessageWidget.prototype.execute = function() {\n\tthis.actionMessage = this.getAttribute(\"$message\");\n\tthis.actionParam = this.getAttribute(\"$param\");\n\tthis.actionName = this.getAttribute(\"$name\");\n\tthis.actionValue = this.getAttribute(\"$value\",\"\");\n};\n\n/*\nRefresh the widget by ensuring our attributes are up to date\n*/\nSendMessageWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(Object.keys(changedAttributes).length) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t}\n\treturn this.refreshChildren(changedTiddlers);\n};\n\n/*\nInvoke the action associated with this widget\n*/\nSendMessageWidget.prototype.invokeAction = function(triggeringWidget,event) {\n\t// Get the string parameter\n\tvar param = this.actionParam;\n\t// Assemble the attributes as a hashmap\n\tvar paramObject = Object.create(null);\n\tvar count = 0;\n\t$tw.utils.each(this.attributes,function(attribute,name) {\n\t\tif(name.charAt(0) !== \"$\") {\n\t\t\tparamObject[name] = attribute;\n\t\t\tcount++;\n\t\t}\n\t});\n\t// Add name/value pair if present\n\tif(this.actionName) {\n\t\tparamObject[this.actionName] = this.actionValue;\n\t}\n\t// Dispatch the message\n\tthis.dispatchEvent({\n\t\ttype: this.actionMessage,\n\t\tparam: param,\n\t\tparamObject: paramObject,\n\t\ttiddlerTitle: this.getVariable(\"currentTiddler\"),\n\t\tnavigateFromTitle: this.getVariable(\"storyTiddler\")\n\t});\n\treturn true; // Action was invoked\n};\n\nexports[\"action-sendmessage\"] = SendMessageWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/action-sendmessage.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/action-setfield.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/action-setfield.js\ntype: application/javascript\nmodule-type: widget\n\nAction widget to set a single field or index on a tiddler.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar SetFieldWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nSetFieldWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nSetFieldWidget.prototype.render = function(parent,nextSibling) {\n\tthis.computeAttributes();\n\tthis.execute();\n};\n\n/*\nCompute the internal state of the widget\n*/\nSetFieldWidget.prototype.execute = function() {\n\tthis.actionTiddler = this.getAttribute(\"$tiddler\",this.getVariable(\"currentTiddler\"));\n\tthis.actionField = this.getAttribute(\"$field\");\n\tthis.actionIndex = this.getAttribute(\"$index\");\n\tthis.actionValue = this.getAttribute(\"$value\");\n\tthis.actionTimestamp = this.getAttribute(\"$timestamp\",\"yes\") === \"yes\";\n};\n\n/*\nRefresh the widget by ensuring our attributes are up to date\n*/\nSetFieldWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes[\"$tiddler\"] || changedAttributes[\"$field\"] || changedAttributes[\"$index\"] || changedAttributes[\"$value\"]) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t}\n\treturn this.refreshChildren(changedTiddlers);\n};\n\n/*\nInvoke the action associated with this widget\n*/\nSetFieldWidget.prototype.invokeAction = function(triggeringWidget,event) {\n\tvar self = this,\n\t\toptions = {};\n\toptions.suppressTimestamp = !this.actionTimestamp;\n\tif((typeof this.actionField == \"string\") || (typeof this.actionIndex == \"string\") || (typeof this.actionValue == \"string\")) {\n\t\tthis.wiki.setText(this.actionTiddler,this.actionField,this.actionIndex,this.actionValue,options);\n\t}\n\t$tw.utils.each(this.attributes,function(attribute,name) {\n\t\tif(name.charAt(0) !== \"$\") {\n\t\t\tself.wiki.setText(self.actionTiddler,name,undefined,attribute,options);\n\t\t}\n\t});\n\treturn true; // Action was invoked\n};\n\nexports[\"action-setfield\"] = SetFieldWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/action-setfield.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/browse.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/browse.js\ntype: application/javascript\nmodule-type: widget\n\nBrowse widget for browsing for files to import\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar BrowseWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nBrowseWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nBrowseWidget.prototype.render = function(parent,nextSibling) {\n\tvar self = this;\n\t// Remember parent\n\tthis.parentDomNode = parent;\n\t// Compute attributes and execute state\n\tthis.computeAttributes();\n\tthis.execute();\n\t// Create element\n\tvar domNode = this.document.createElement(\"input\");\n\tdomNode.setAttribute(\"type\",\"file\");\n\tif(this.browseMultiple) {\n\t\tdomNode.setAttribute(\"multiple\",\"multiple\");\n\t}\n\tif(this.tooltip) {\n\t\tdomNode.setAttribute(\"title\",this.tooltip);\n\t}\n\t// Nw.js supports \"nwsaveas\" to force a \"save as\" dialogue that allows a new or existing file to be selected\n\tif(this.nwsaveas) {\n\t\tdomNode.setAttribute(\"nwsaveas\",this.nwsaveas);\n\t}\n\t// Nw.js supports \"webkitdirectory\" to allow a directory to be selected\n\tif(this.webkitdirectory) {\n\t\tdomNode.setAttribute(\"webkitdirectory\",this.webkitdirectory);\n\t}\n\t// Add a click event handler\n\tdomNode.addEventListener(\"change\",function (event) {\n\t\tif(self.message) {\n\t\t\tself.dispatchEvent({type: self.message, param: self.param, files: event.target.files});\n\t\t} else {\n\t\t\tself.wiki.readFiles(event.target.files,function(tiddlerFieldsArray) {\n\t\t\t\tself.dispatchEvent({type: \"tm-import-tiddlers\", param: JSON.stringify(tiddlerFieldsArray)});\n\t\t\t});\n\t\t}\n\t\treturn false;\n\t},false);\n\t// Insert element\n\tparent.insertBefore(domNode,nextSibling);\n\tthis.renderChildren(domNode,null);\n\tthis.domNodes.push(domNode);\n};\n\n/*\nCompute the internal state of the widget\n*/\nBrowseWidget.prototype.execute = function() {\n\tthis.browseMultiple = this.getAttribute(\"multiple\");\n\tthis.message = this.getAttribute(\"message\");\n\tthis.param = this.getAttribute(\"param\");\n\tthis.tooltip = this.getAttribute(\"tooltip\");\n\tthis.nwsaveas = this.getAttribute(\"nwsaveas\");\n\tthis.webkitdirectory = this.getAttribute(\"webkitdirectory\");\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nBrowseWidget.prototype.refresh = function(changedTiddlers) {\n\treturn false;\n};\n\nexports.browse = BrowseWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/browse.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/button.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/button.js\ntype: application/javascript\nmodule-type: widget\n\nButton widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar ButtonWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nButtonWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nButtonWidget.prototype.render = function(parent,nextSibling) {\n\tvar self = this;\n\t// Remember parent\n\tthis.parentDomNode = parent;\n\t// Compute attributes and execute state\n\tthis.computeAttributes();\n\tthis.execute();\n\t// Create element\n\tvar tag = \"button\";\n\tif(this.buttonTag && $tw.config.htmlUnsafeElements.indexOf(this.buttonTag) === -1) {\n\t\ttag = this.buttonTag;\n\t}\n\tvar domNode = this.document.createElement(tag);\n\t// Assign classes\n\tvar classes = this[\"class\"].split(\" \") || [],\n\t\tisPoppedUp = this.popup && this.isPoppedUp();\n\tif(this.selectedClass) {\n\t\tif(this.set && this.setTo && this.isSelected()) {\n\t\t\t$tw.utils.pushTop(classes,this.selectedClass.split(\" \"));\n\t\t}\n\t\tif(isPoppedUp) {\n\t\t\t$tw.utils.pushTop(classes,this.selectedClass.split(\" \"));\n\t\t}\n\t}\n\tif(isPoppedUp) {\n\t\t$tw.utils.pushTop(classes,\"tc-popup-handle\");\n\t}\n\tdomNode.className = classes.join(\" \");\n\t// Assign other attributes\n\tif(this.style) {\n\t\tdomNode.setAttribute(\"style\",this.style);\n\t}\n\tif(this.tooltip) {\n\t\tdomNode.setAttribute(\"title\",this.tooltip);\n\t}\n\tif(this[\"aria-label\"]) {\n\t\tdomNode.setAttribute(\"aria-label\",this[\"aria-label\"]);\n\t}\n\t// Add a click event handler\n\tdomNode.addEventListener(\"click\",function (event) {\n\t\tvar handled = false;\n\t\tif(self.invokeActions(this,event)) {\n\t\t\thandled = true;\n\t\t}\n\t\tif(self.to) {\n\t\t\tself.navigateTo(event);\n\t\t\thandled = true;\n\t\t}\n\t\tif(self.message) {\n\t\t\tself.dispatchMessage(event);\n\t\t\thandled = true;\n\t\t}\n\t\tif(self.popup) {\n\t\t\tself.triggerPopup(event);\n\t\t\thandled = true;\n\t\t}\n\t\tif(self.set) {\n\t\t\tself.setTiddler();\n\t\t\thandled = true;\n\t\t}\n\t\tif(self.actions) {\n\t\t\tself.invokeActionString(self.actions,self,event);\n\t\t}\n\t\tif(handled) {\n\t\t\tevent.preventDefault();\n\t\t\tevent.stopPropagation();\n\t\t}\n\t\treturn handled;\n\t},false);\n\t// Insert element\n\tparent.insertBefore(domNode,nextSibling);\n\tthis.renderChildren(domNode,null);\n\tthis.domNodes.push(domNode);\n};\n\n/*\nWe don't allow actions to propagate because we trigger actions ourselves\n*/\nButtonWidget.prototype.allowActionPropagation = function() {\n\treturn false;\n};\n\nButtonWidget.prototype.getBoundingClientRect = function() {\n\treturn this.domNodes[0].getBoundingClientRect();\n};\n\nButtonWidget.prototype.isSelected = function() {\n return this.wiki.getTextReference(this.set,this.defaultSetValue,this.getVariable(\"currentTiddler\")) === this.setTo;\n};\n\nButtonWidget.prototype.isPoppedUp = function() {\n\tvar tiddler = this.wiki.getTiddler(this.popup);\n\tvar result = tiddler && tiddler.fields.text ? 
$tw.popup.readPopupState(tiddler.fields.text) : false;\n\treturn result;\n};\n\nButtonWidget.prototype.navigateTo = function(event) {\n\tvar bounds = this.getBoundingClientRect();\n\tthis.dispatchEvent({\n\t\ttype: \"tm-navigate\",\n\t\tnavigateTo: this.to,\n\t\tnavigateFromTitle: this.getVariable(\"storyTiddler\"),\n\t\tnavigateFromNode: this,\n\t\tnavigateFromClientRect: { top: bounds.top, left: bounds.left, width: bounds.width, right: bounds.right, bottom: bounds.bottom, height: bounds.height\n\t\t},\n\t\tnavigateSuppressNavigation: event.metaKey || event.ctrlKey || (event.button === 1)\n\t});\n};\n\nButtonWidget.prototype.dispatchMessage = function(event) {\n\tthis.dispatchEvent({type: this.message, param: this.param, tiddlerTitle: this.getVariable(\"currentTiddler\")});\n};\n\nButtonWidget.prototype.triggerPopup = function(event) {\n\t$tw.popup.triggerPopup({\n\t\tdomNode: this.domNodes[0],\n\t\ttitle: this.popup,\n\t\twiki: this.wiki\n\t});\n};\n\nButtonWidget.prototype.setTiddler = function() {\n\tthis.wiki.setTextReference(this.set,this.setTo,this.getVariable(\"currentTiddler\"));\n};\n\n/*\nCompute the internal state of the widget\n*/\nButtonWidget.prototype.execute = function() {\n\t// Get attributes\n\tthis.actions = this.getAttribute(\"actions\");\n\tthis.to = this.getAttribute(\"to\");\n\tthis.message = this.getAttribute(\"message\");\n\tthis.param = this.getAttribute(\"param\");\n\tthis.set = this.getAttribute(\"set\");\n\tthis.setTo = this.getAttribute(\"setTo\");\n\tthis.popup = this.getAttribute(\"popup\");\n\tthis.hover = this.getAttribute(\"hover\");\n\tthis[\"class\"] = this.getAttribute(\"class\",\"\");\n\tthis[\"aria-label\"] = this.getAttribute(\"aria-label\");\n\tthis.tooltip = this.getAttribute(\"tooltip\");\n\tthis.style = this.getAttribute(\"style\");\n\tthis.selectedClass = this.getAttribute(\"selectedClass\");\n\tthis.defaultSetValue = this.getAttribute(\"default\",\"\");\n\tthis.buttonTag = this.getAttribute(\"tag\");\n\t// Make child widgets\n\tthis.makeChildWidgets();\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nButtonWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes.to || changedAttributes.message || changedAttributes.param || changedAttributes.set || changedAttributes.setTo || changedAttributes.popup || changedAttributes.hover || changedAttributes[\"class\"] || changedAttributes.selectedClass || changedAttributes.style || (this.set && changedTiddlers[this.set]) || (this.popup && changedTiddlers[this.popup])) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t}\n\treturn this.refreshChildren(changedTiddlers);\n};\n\nexports.button = ButtonWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/button.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/checkbox.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/checkbox.js\ntype: application/javascript\nmodule-type: widget\n\nCheckbox widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar CheckboxWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nCheckboxWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nCheckboxWidget.prototype.render = function(parent,nextSibling) {\n\t// Save the parent dom node\n\tthis.parentDomNode = parent;\n\t// Compute our attributes\n\tthis.computeAttributes();\n\t// Execute our logic\n\tthis.execute();\n\t// Create our elements\n\tthis.labelDomNode = this.document.createElement(\"label\");\n\tthis.labelDomNode.setAttribute(\"class\",this.checkboxClass);\n\tthis.inputDomNode = this.document.createElement(\"input\");\n\tthis.inputDomNode.setAttribute(\"type\",\"checkbox\");\n\tif(this.getValue()) {\n\t\tthis.inputDomNode.setAttribute(\"checked\",\"true\");\n\t}\n\tthis.labelDomNode.appendChild(this.inputDomNode);\n\tthis.spanDomNode = this.document.createElement(\"span\");\n\tthis.labelDomNode.appendChild(this.spanDomNode);\n\t// Add a click event handler\n\t$tw.utils.addEventListeners(this.inputDomNode,[\n\t\t{name: \"change\", handlerObject: this, handlerMethod: \"handleChangeEvent\"}\n\t]);\n\t// Insert the label into the DOM and render any children\n\tparent.insertBefore(this.labelDomNode,nextSibling);\n\tthis.renderChildren(this.spanDomNode,null);\n\tthis.domNodes.push(this.labelDomNode);\n};\n\nCheckboxWidget.prototype.getValue = function() {\n\tvar tiddler = this.wiki.getTiddler(this.checkboxTitle);\n\tif(tiddler) {\n\t\tif(this.checkboxTag) {\n\t\t\tif(this.checkboxInvertTag) {\n\t\t\t\treturn !tiddler.hasTag(this.checkboxTag);\n\t\t\t} else {\n\t\t\t\treturn tiddler.hasTag(this.checkboxTag);\n\t\t\t}\n\t\t}\n\t\tif(this.checkboxField) {\n\t\t\tvar value = tiddler.fields[this.checkboxField] || this.checkboxDefault || \"\";\n\t\t\tif(value === this.checkboxChecked) {\n\t\t\t\treturn true;\n\t\t\t}\n\t\t\tif(value === this.checkboxUnchecked) {\n\t\t\t\treturn false;\n\t\t\t}\n\t\t}\n\t} else {\n\t\tif(this.checkboxTag) {\n\t\t\treturn false;\n\t\t}\n\t\tif(this.checkboxField) {\n\t\t\tif(this.checkboxDefault === this.checkboxChecked) {\n\t\t\t\treturn true;\n\t\t\t}\n\t\t\tif(this.checkboxDefault === this.checkboxUnchecked) {\n\t\t\t\treturn false;\n\t\t\t}\n\t\t}\n\t}\n\treturn false;\n};\n\nCheckboxWidget.prototype.handleChangeEvent = function(event) {\n\tvar checked = this.inputDomNode.checked,\n\t\ttiddler = this.wiki.getTiddler(this.checkboxTitle),\n\t\tfallbackFields = {text: \"\"},\n\t\tnewFields = {title: this.checkboxTitle},\n\t\thasChanged = false,\n\t\ttagCheck = false,\n\t\thasTag = tiddler && tiddler.hasTag(this.checkboxTag);\n\tif(this.checkboxTag && this.checkboxInvertTag === \"yes\") {\n\t\ttagCheck = hasTag === checked;\n\t} else {\n\t\ttagCheck = hasTag !== checked;\n\t}\n\t// Set the tag if specified\n\tif(this.checkboxTag && (!tiddler || tagCheck)) {\n\t\tnewFields.tags = tiddler ? 
(tiddler.fields.tags || []).slice(0) : [];\n\t\tvar pos = newFields.tags.indexOf(this.checkboxTag);\n\t\tif(pos !== -1) {\n\t\t\tnewFields.tags.splice(pos,1);\n\t\t}\n\t\tif(this.checkboxInvertTag === \"yes\" && !checked) {\n\t\t\tnewFields.tags.push(this.checkboxTag);\n\t\t} else if(this.checkboxInvertTag !== \"yes\" && checked) {\n\t\t\tnewFields.tags.push(this.checkboxTag);\n\t\t}\n\t\thasChanged = true;\n\t}\n\t// Set the field if specified\n\tif(this.checkboxField) {\n\t\tvar value = checked ? this.checkboxChecked : this.checkboxUnchecked;\n\t\tif(!tiddler || tiddler.fields[this.checkboxField] !== value) {\n\t\t\tnewFields[this.checkboxField] = value;\n\t\t\thasChanged = true;\n\t\t}\n\t}\n\tif(hasChanged) {\n\t\tthis.wiki.addTiddler(new $tw.Tiddler(this.wiki.getCreationFields(),fallbackFields,tiddler,newFields,this.wiki.getModificationFields()));\n\t}\n};\n\n/*\nCompute the internal state of the widget\n*/\nCheckboxWidget.prototype.execute = function() {\n\t// Get the parameters from the attributes\n\tthis.checkboxTitle = this.getAttribute(\"tiddler\",this.getVariable(\"currentTiddler\"));\n\tthis.checkboxTag = this.getAttribute(\"tag\");\n\tthis.checkboxField = this.getAttribute(\"field\");\n\tthis.checkboxChecked = this.getAttribute(\"checked\");\n\tthis.checkboxUnchecked = this.getAttribute(\"unchecked\");\n\tthis.checkboxDefault = this.getAttribute(\"default\");\n\tthis.checkboxClass = this.getAttribute(\"class\",\"\");\n\tthis.checkboxInvertTag = this.getAttribute(\"invertTag\",\"\");\n\t// Make the child widgets\n\tthis.makeChildWidgets();\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nCheckboxWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes.tiddler || changedAttributes.tag || changedAttributes.invertTag || changedAttributes.field || changedAttributes.checked || changedAttributes.unchecked || changedAttributes[\"default\"] || changedAttributes[\"class\"]) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t} else {\n\t\tvar refreshed = false;\n\t\tif(changedTiddlers[this.checkboxTitle]) {\n\t\t\tthis.inputDomNode.checked = this.getValue();\n\t\t\trefreshed = true;\n\t\t}\n\t\treturn this.refreshChildren(changedTiddlers) || refreshed;\n\t}\n};\n\nexports.checkbox = CheckboxWidget;\n\n})();",
"title": "$:/core/modules/widgets/checkbox.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/codeblock.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/codeblock.js\ntype: application/javascript\nmodule-type: widget\n\nCode block node widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar CodeBlockWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nCodeBlockWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nCodeBlockWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\tvar codeNode = this.document.createElement(\"code\"),\n\t\tdomNode = this.document.createElement(\"pre\");\n\tcodeNode.appendChild(this.document.createTextNode(this.getAttribute(\"code\")));\n\tdomNode.appendChild(codeNode);\n\tparent.insertBefore(domNode,nextSibling);\n\tthis.domNodes.push(domNode);\n\tif(this.postRender) {\n\t\tthis.postRender();\n\t}\n};\n\n/*\nCompute the internal state of the widget\n*/\nCodeBlockWidget.prototype.execute = function() {\n\tthis.language = this.getAttribute(\"language\");\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nCodeBlockWidget.prototype.refresh = function(changedTiddlers) {\n\treturn false;\n};\n\nexports.codeblock = CodeBlockWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/codeblock.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/count.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/count.js\ntype: application/javascript\nmodule-type: widget\n\nCount widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar CountWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nCountWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nCountWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\tvar textNode = this.document.createTextNode(this.currentCount);\n\tparent.insertBefore(textNode,nextSibling);\n\tthis.domNodes.push(textNode);\n};\n\n/*\nCompute the internal state of the widget\n*/\nCountWidget.prototype.execute = function() {\n\t// Get parameters from our attributes\n\tthis.filter = this.getAttribute(\"filter\");\n\t// Execute the filter\n\tif(this.filter) {\n\t\tthis.currentCount = this.wiki.filterTiddlers(this.filter,this).length;\n\t} else {\n\t\tthis.currentCount = undefined;\n\t}\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nCountWidget.prototype.refresh = function(changedTiddlers) {\n\t// Re-execute the filter to get the count\n\tthis.computeAttributes();\n\tvar oldCount = this.currentCount;\n\tthis.execute();\n\tif(this.currentCount !== oldCount) {\n\t\t// Regenerate and rerender the widget and replace the existing DOM node\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t} else {\n\t\treturn false;\n\t}\n\n};\n\nexports.count = CountWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/count.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/dropzone.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/dropzone.js\ntype: application/javascript\nmodule-type: widget\n\nDropzone widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar DropZoneWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nDropZoneWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nDropZoneWidget.prototype.render = function(parent,nextSibling) {\n\tvar self = this;\n\t// Remember parent\n\tthis.parentDomNode = parent;\n\t// Compute attributes and execute state\n\tthis.computeAttributes();\n\tthis.execute();\n\t// Create element\n\tvar domNode = this.document.createElement(\"div\");\n\tdomNode.className = \"tc-dropzone\";\n\t// Add event handlers\n\t$tw.utils.addEventListeners(domNode,[\n\t\t{name: \"dragenter\", handlerObject: this, handlerMethod: \"handleDragEnterEvent\"},\n\t\t{name: \"dragover\", handlerObject: this, handlerMethod: \"handleDragOverEvent\"},\n\t\t{name: \"dragleave\", handlerObject: this, handlerMethod: \"handleDragLeaveEvent\"},\n\t\t{name: \"drop\", handlerObject: this, handlerMethod: \"handleDropEvent\"},\n\t\t{name: \"paste\", handlerObject: this, handlerMethod: \"handlePasteEvent\"}\n\t]);\n\tdomNode.addEventListener(\"click\",function (event) {\n\t},false);\n\t// Insert element\n\tparent.insertBefore(domNode,nextSibling);\n\tthis.renderChildren(domNode,null);\n\tthis.domNodes.push(domNode);\n};\n\nDropZoneWidget.prototype.enterDrag = function() {\n\t// Check for this window being the source of the drag\n\tif($tw.dragInProgress) {\n\t\treturn false;\n\t}\n\t// We count enter/leave events\n\tthis.dragEnterCount = (this.dragEnterCount || 0) + 1;\n\t// If we're entering for the first time we need to apply highlighting\n\tif(this.dragEnterCount === 1) {\n\t\t$tw.utils.addClass(this.domNodes[0],\"tc-dragover\");\n\t}\n};\n\nDropZoneWidget.prototype.leaveDrag = function() {\n\t// Reduce the enter count\n\tthis.dragEnterCount = (this.dragEnterCount || 0) - 1;\n\t// Remove highlighting if we're leaving externally\n\tif(this.dragEnterCount <= 0) {\n\t\t$tw.utils.removeClass(this.domNodes[0],\"tc-dragover\");\n\t}\n};\n\nDropZoneWidget.prototype.handleDragEnterEvent = function(event) {\n\tthis.enterDrag();\n\t// Tell the browser that we're ready to handle the drop\n\tevent.preventDefault();\n\t// Tell the browser not to ripple the drag up to any parent drop handlers\n\tevent.stopPropagation();\n};\n\nDropZoneWidget.prototype.handleDragOverEvent = function(event) {\n\t// Check for being over a TEXTAREA or INPUT\n\tif([\"TEXTAREA\",\"INPUT\"].indexOf(event.target.tagName) !== -1) {\n\t\treturn false;\n\t}\n\t// Check for this window being the source of the drag\n\tif($tw.dragInProgress) {\n\t\treturn false;\n\t}\n\t// Tell the browser that we're still interested in the drop\n\tevent.preventDefault();\n\tevent.dataTransfer.dropEffect = \"copy\"; // Explicitly show this is a copy\n};\n\nDropZoneWidget.prototype.handleDragLeaveEvent = function(event) {\n\tthis.leaveDrag();\n};\n\nDropZoneWidget.prototype.handleDropEvent = function(event) {\n\tthis.leaveDrag();\n\t// Check for being over a TEXTAREA or INPUT\n\tif([\"TEXTAREA\",\"INPUT\"].indexOf(event.target.tagName) !== -1) {\n\t\treturn false;\n\t}\n\t// Check for this window being the source of the drag\n\tif($tw.dragInProgress) {\n\t\treturn false;\n\t}\n\tvar self = 
this,\n\t\tdataTransfer = event.dataTransfer;\n\t// Reset the enter count\n\tthis.dragEnterCount = 0;\n\t// Remove highlighting\n\t$tw.utils.removeClass(this.domNodes[0],\"tc-dragover\");\n\t// Import any files in the drop\n\tvar numFiles = this.wiki.readFiles(dataTransfer.files,function(tiddlerFieldsArray) {\n\t\tself.dispatchEvent({type: \"tm-import-tiddlers\", param: JSON.stringify(tiddlerFieldsArray)});\n\t});\n\t// Try to import the various data types we understand\n\tif(numFiles === 0) {\n\t\tthis.importData(dataTransfer);\n\t}\n\t// Tell the browser that we handled the drop\n\tevent.preventDefault();\n\t// Stop the drop ripple up to any parent handlers\n\tevent.stopPropagation();\n};\n\nDropZoneWidget.prototype.importData = function(dataTransfer) {\n\t// Try each provided data type in turn\n\tfor(var t=0; t<this.importDataTypes.length; t++) {\n\t\tif(!$tw.browser.isIE || this.importDataTypes[t].IECompatible) {\n\t\t\t// Get the data\n\t\t\tvar dataType = this.importDataTypes[t];\n\t\t\t\tvar data = dataTransfer.getData(dataType.type);\n\t\t\t// Import the tiddlers in the data\n\t\t\tif(data !== \"\" && data !== null) {\n\t\t\t\tif($tw.log.IMPORT) {\n\t\t\t\t\tconsole.log(\"Importing data type '\" + dataType.type + \"', data: '\" + data + \"'\")\n\t\t\t\t}\n\t\t\t\tvar tiddlerFields = dataType.convertToFields(data);\n\t\t\t\tif(!tiddlerFields.title) {\n\t\t\t\t\ttiddlerFields.title = this.wiki.generateNewTitle(\"Untitled\");\n\t\t\t\t}\n\t\t\t\tthis.dispatchEvent({type: \"tm-import-tiddlers\", param: JSON.stringify([tiddlerFields])});\n\t\t\t\treturn;\n\t\t\t}\n\t\t}\n\t}\n};\n\nDropZoneWidget.prototype.importDataTypes = [\n\t{type: \"text/vnd.tiddler\", IECompatible: false, convertToFields: function(data) {\n\t\treturn JSON.parse(data);\n\t}},\n\t{type: \"URL\", IECompatible: true, convertToFields: function(data) {\n\t\t// Check for tiddler data URI\n\t\tvar match = decodeURIComponent(data).match(/^data\\:text\\/vnd\\.tiddler,(.*)/i);\n\t\tif(match) {\n\t\t\treturn JSON.parse(match[1]);\n\t\t} else {\n\t\t\treturn { // As URL string\n\t\t\t\ttext: data\n\t\t\t};\n\t\t}\n\t}},\n\t{type: \"text/x-moz-url\", IECompatible: false, convertToFields: function(data) {\n\t\t// Check for tiddler data URI\n\t\tvar match = decodeURIComponent(data).match(/^data\\:text\\/vnd\\.tiddler,(.*)/i);\n\t\tif(match) {\n\t\t\treturn JSON.parse(match[1]);\n\t\t} else {\n\t\t\treturn { // As URL string\n\t\t\t\ttext: data\n\t\t\t};\n\t\t}\n\t}},\n\t{type: \"text/html\", IECompatible: false, convertToFields: function(data) {\n\t\treturn {\n\t\t\ttext: data\n\t\t};\n\t}},\n\t{type: \"text/plain\", IECompatible: false, convertToFields: function(data) {\n\t\treturn {\n\t\t\ttext: data\n\t\t};\n\t}},\n\t{type: \"Text\", IECompatible: true, convertToFields: function(data) {\n\t\treturn {\n\t\t\ttext: data\n\t\t};\n\t}},\n\t{type: \"text/uri-list\", IECompatible: false, convertToFields: function(data) {\n\t\treturn {\n\t\t\ttext: data\n\t\t};\n\t}}\n];\n\nDropZoneWidget.prototype.handlePasteEvent = function(event) {\n\t// Let the browser handle it if we're in a textarea or input box\n\tif([\"TEXTAREA\",\"INPUT\"].indexOf(event.target.tagName) == -1) {\n\t\tvar self = this,\n\t\t\titems = event.clipboardData.items;\n\t\t// Enumerate the clipboard items\n\t\tfor(var t = 0; t<items.length; t++) {\n\t\t\tvar item = items[t];\n\t\t\tif(item.kind === \"file\") {\n\t\t\t\t// Import any files\n\t\t\t\tthis.wiki.readFile(item.getAsFile(),function(tiddlerFieldsArray) {\n\t\t\t\t\tself.dispatchEvent({type: 
\"tm-import-tiddlers\", param: JSON.stringify(tiddlerFieldsArray)});\n\t\t\t\t});\n\t\t\t} else if(item.kind === \"string\") {\n\t\t\t\t// Create tiddlers from string items\n\t\t\t\tvar type = item.type;\n\t\t\t\titem.getAsString(function(str) {\n\t\t\t\t\tvar tiddlerFields = {\n\t\t\t\t\t\ttitle: self.wiki.generateNewTitle(\"Untitled\"),\n\t\t\t\t\t\ttext: str,\n\t\t\t\t\t\ttype: type\n\t\t\t\t\t};\n\t\t\t\t\tif($tw.log.IMPORT) {\n\t\t\t\t\t\tconsole.log(\"Importing string '\" + str + \"', type: '\" + type + \"'\");\n\t\t\t\t\t}\n\t\t\t\t\tself.dispatchEvent({type: \"tm-import-tiddlers\", param: JSON.stringify([tiddlerFields])});\n\t\t\t\t});\n\t\t\t}\n\t\t}\n\t\t// Tell the browser that we've handled the paste\n\t\tevent.stopPropagation();\n\t\tevent.preventDefault();\n\t}\n};\n\n/*\nCompute the internal state of the widget\n*/\nDropZoneWidget.prototype.execute = function() {\n\t// Make child widgets\n\tthis.makeChildWidgets();\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nDropZoneWidget.prototype.refresh = function(changedTiddlers) {\n\treturn this.refreshChildren(changedTiddlers);\n};\n\nexports.dropzone = DropZoneWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/dropzone.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/edit-binary.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/edit-binary.js\ntype: application/javascript\nmodule-type: widget\n\nEdit-binary widget; placeholder for editing binary tiddlers\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar BINARY_WARNING_MESSAGE = \"$:/core/ui/BinaryWarning\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar EditBinaryWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nEditBinaryWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nEditBinaryWidget.prototype.render = function(parent,nextSibling) {\n\tvar self = this;\n\t// Save the parent dom node\n\tthis.parentDomNode = parent;\n\t// Compute our attributes\n\tthis.computeAttributes();\n\t// Execute our logic\n\tthis.execute();\n\tthis.renderChildren(parent,nextSibling);\n};\n\n/*\nCompute the internal state of the widget\n*/\nEditBinaryWidget.prototype.execute = function() {\n\t// Construct the child widgets\n\tthis.makeChildWidgets([{\n\t\ttype: \"transclude\",\n\t\tattributes: {\n\t\t\ttiddler: {type: \"string\", value: BINARY_WARNING_MESSAGE}\n\t\t}\n\t}]);\n};\n\n/*\nRefresh by refreshing our child widget\n*/\nEditBinaryWidget.prototype.refresh = function(changedTiddlers) {\n\treturn this.refreshChildren(changedTiddlers);\n};\n\nexports[\"edit-binary\"] = EditBinaryWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/edit-binary.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/edit-bitmap.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/edit-bitmap.js\ntype: application/javascript\nmodule-type: widget\n\nEdit-bitmap widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n// Default image sizes\nvar DEFAULT_IMAGE_WIDTH = 600,\n\tDEFAULT_IMAGE_HEIGHT = 370;\n\n// Configuration tiddlers\nvar LINE_WIDTH_TITLE = \"$:/config/BitmapEditor/LineWidth\",\n\tLINE_COLOUR_TITLE = \"$:/config/BitmapEditor/Colour\",\n\tLINE_OPACITY_TITLE = \"$:/config/BitmapEditor/Opacity\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar EditBitmapWidget = function(parseTreeNode,options) {\n\t// Initialise the editor operations if they've not been done already\n\tif(!this.editorOperations) {\n\t\tEditBitmapWidget.prototype.editorOperations = {};\n\t\t$tw.modules.applyMethods(\"bitmapeditoroperation\",this.editorOperations);\n\t}\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nEditBitmapWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nEditBitmapWidget.prototype.render = function(parent,nextSibling) {\n\tvar self = this;\n\t// Save the parent dom node\n\tthis.parentDomNode = parent;\n\t// Compute our attributes\n\tthis.computeAttributes();\n\t// Execute our logic\n\tthis.execute();\n\t// Create the wrapper for the toolbar and render its content\n\tthis.toolbarNode = this.document.createElement(\"div\");\n\tthis.toolbarNode.className = \"tc-editor-toolbar\";\n\tparent.insertBefore(this.toolbarNode,nextSibling);\n\tthis.domNodes.push(this.toolbarNode);\n\t// Create the on-screen canvas\n\tthis.canvasDomNode = $tw.utils.domMaker(\"canvas\",{\n\t\tdocument: this.document,\n\t\t\"class\":\"tc-edit-bitmapeditor\",\n\t\teventListeners: [{\n\t\t\tname: \"touchstart\", handlerObject: this, handlerMethod: \"handleTouchStartEvent\"\n\t\t},{\n\t\t\tname: \"touchmove\", handlerObject: this, handlerMethod: \"handleTouchMoveEvent\"\n\t\t},{\n\t\t\tname: \"touchend\", handlerObject: this, handlerMethod: \"handleTouchEndEvent\"\n\t\t},{\n\t\t\tname: \"mousedown\", handlerObject: this, handlerMethod: \"handleMouseDownEvent\"\n\t\t},{\n\t\t\tname: \"mousemove\", handlerObject: this, handlerMethod: \"handleMouseMoveEvent\"\n\t\t},{\n\t\t\tname: \"mouseup\", handlerObject: this, handlerMethod: \"handleMouseUpEvent\"\n\t\t}]\n\t});\n\t// Set the width and height variables\n\tthis.setVariable(\"tv-bitmap-editor-width\",this.canvasDomNode.width + \"px\");\n\tthis.setVariable(\"tv-bitmap-editor-height\",this.canvasDomNode.height + \"px\");\n\t// Render toolbar child widgets\n\tthis.renderChildren(this.toolbarNode,null);\n\t// // Insert the elements into the DOM\n\tparent.insertBefore(this.canvasDomNode,nextSibling);\n\tthis.domNodes.push(this.canvasDomNode);\n\t// Load the image into the canvas\n\tif($tw.browser) {\n\t\tthis.loadCanvas();\n\t}\n\t// Add widget message listeners\n\tthis.addEventListeners([\n\t\t{type: \"tm-edit-bitmap-operation\", handler: \"handleEditBitmapOperationMessage\"}\n\t]);\n};\n\n/*\nHandle an edit bitmap operation message from the toolbar\n*/\nEditBitmapWidget.prototype.handleEditBitmapOperationMessage = function(event) {\n\t// Invoke the handler\n\tvar handler = this.editorOperations[event.param];\n\tif(handler) {\n\t\thandler.call(this,event);\n\t}\n};\n\n/*\nCompute the internal state of the widget\n*/\nEditBitmapWidget.prototype.execute = function() {\n\t// Get our parameters\n\tthis.editTitle = 
this.getAttribute(\"tiddler\",this.getVariable(\"currentTiddler\"));\n\t// Make the child widgets\n\tthis.makeChildWidgets();\n};\n\n/*\nJust refresh the toolbar\n*/\nEditBitmapWidget.prototype.refresh = function(changedTiddlers) {\n\treturn this.refreshChildren(changedTiddlers);\n};\n\n/*\nSet the bitmap size variables and refresh the toolbar\n*/\nEditBitmapWidget.prototype.refreshToolbar = function() {\n\t// Set the width and height variables\n\tthis.setVariable(\"tv-bitmap-editor-width\",this.canvasDomNode.width + \"px\");\n\tthis.setVariable(\"tv-bitmap-editor-height\",this.canvasDomNode.height + \"px\");\n\t// Refresh each of our child widgets\n\t$tw.utils.each(this.children,function(childWidget) {\n\t\tchildWidget.refreshSelf();\n\t});\n};\n\nEditBitmapWidget.prototype.loadCanvas = function() {\n\tvar tiddler = this.wiki.getTiddler(this.editTitle),\n\t\tcurrImage = new Image();\n\t// Set up event handlers for loading the image\n\tvar self = this;\n\tcurrImage.onload = function() {\n\t\t// Copy the image to the on-screen canvas\n\t\tself.initCanvas(self.canvasDomNode,currImage.width,currImage.height,currImage);\n\t\t// And also copy the current bitmap to the off-screen canvas\n\t\tself.currCanvas = self.document.createElement(\"canvas\");\n\t\tself.initCanvas(self.currCanvas,currImage.width,currImage.height,currImage);\n\t\t// Set the width and height input boxes\n\t\tself.refreshToolbar();\n\t};\n\tcurrImage.onerror = function() {\n\t\t// Set the on-screen canvas size and clear it\n\t\tself.initCanvas(self.canvasDomNode,DEFAULT_IMAGE_WIDTH,DEFAULT_IMAGE_HEIGHT);\n\t\t// Set the off-screen canvas size and clear it\n\t\tself.currCanvas = self.document.createElement(\"canvas\");\n\t\tself.initCanvas(self.currCanvas,DEFAULT_IMAGE_WIDTH,DEFAULT_IMAGE_HEIGHT);\n\t\t// Set the width and height input boxes\n\t\tself.refreshToolbar();\n\t};\n\t// Get the current bitmap into an image object\n\tcurrImage.src = \"data:\" + tiddler.fields.type + \";base64,\" + tiddler.fields.text;\n};\n\nEditBitmapWidget.prototype.initCanvas = function(canvas,width,height,image) {\n\tcanvas.width = width;\n\tcanvas.height = height;\n\tvar ctx = canvas.getContext(\"2d\");\n\tif(image) {\n\t\tctx.drawImage(image,0,0);\n\t} else {\n\t\tctx.fillStyle = \"#fff\";\n\t\tctx.fillRect(0,0,canvas.width,canvas.height);\n\t}\n};\n\n/*\n** Change the size of the canvas, preserving the current image\n*/\nEditBitmapWidget.prototype.changeCanvasSize = function(newWidth,newHeight) {\n\t// Create and size a new canvas\n\tvar newCanvas = this.document.createElement(\"canvas\");\n\tthis.initCanvas(newCanvas,newWidth,newHeight);\n\t// Copy the old image\n\tvar ctx = newCanvas.getContext(\"2d\");\n\tctx.drawImage(this.currCanvas,0,0);\n\t// Set the new canvas as the current one\n\tthis.currCanvas = newCanvas;\n\t// Set the size of the onscreen canvas\n\tthis.canvasDomNode.width = newWidth;\n\tthis.canvasDomNode.height = newHeight;\n\t// Paint the onscreen canvas with the offscreen canvas\n\tctx = this.canvasDomNode.getContext(\"2d\");\n\tctx.drawImage(this.currCanvas,0,0);\n};\n\nEditBitmapWidget.prototype.handleTouchStartEvent = function(event) {\n\tthis.brushDown = true;\n\tthis.strokeStart(event.touches[0].clientX,event.touches[0].clientY);\n\tevent.preventDefault();\n\tevent.stopPropagation();\n\treturn false;\n};\n\nEditBitmapWidget.prototype.handleTouchMoveEvent = function(event) {\n\tif(this.brushDown) 
{\n\t\tthis.strokeMove(event.touches[0].clientX,event.touches[0].clientY);\n\t}\n\tevent.preventDefault();\n\tevent.stopPropagation();\n\treturn false;\n};\n\nEditBitmapWidget.prototype.handleTouchEndEvent = function(event) {\n\tif(this.brushDown) {\n\t\tthis.brushDown = false;\n\t\tthis.strokeEnd();\n\t}\n\tevent.preventDefault();\n\tevent.stopPropagation();\n\treturn false;\n};\n\nEditBitmapWidget.prototype.handleMouseDownEvent = function(event) {\n\tthis.strokeStart(event.clientX,event.clientY);\n\tthis.brushDown = true;\n\tevent.preventDefault();\n\tevent.stopPropagation();\n\treturn false;\n};\n\nEditBitmapWidget.prototype.handleMouseMoveEvent = function(event) {\n\tif(this.brushDown) {\n\t\tthis.strokeMove(event.clientX,event.clientY);\n\t\tevent.preventDefault();\n\t\tevent.stopPropagation();\n\t\treturn false;\n\t}\n\treturn true;\n};\n\nEditBitmapWidget.prototype.handleMouseUpEvent = function(event) {\n\tif(this.brushDown) {\n\t\tthis.brushDown = false;\n\t\tthis.strokeEnd();\n\t\tevent.preventDefault();\n\t\tevent.stopPropagation();\n\t\treturn false;\n\t}\n\treturn true;\n};\n\nEditBitmapWidget.prototype.adjustCoordinates = function(x,y) {\n\tvar canvasRect = this.canvasDomNode.getBoundingClientRect(),\n\t\tscale = this.canvasDomNode.width/canvasRect.width;\n\treturn {x: (x - canvasRect.left) * scale, y: (y - canvasRect.top) * scale};\n};\n\nEditBitmapWidget.prototype.strokeStart = function(x,y) {\n\t// Start off a new stroke\n\tthis.stroke = [this.adjustCoordinates(x,y)];\n};\n\nEditBitmapWidget.prototype.strokeMove = function(x,y) {\n\tvar ctx = this.canvasDomNode.getContext(\"2d\"),\n\t\tt;\n\t// Add the new position to the end of the stroke\n\tthis.stroke.push(this.adjustCoordinates(x,y));\n\t// Redraw the previous image\n\tctx.drawImage(this.currCanvas,0,0);\n\t// Render the stroke\n\tctx.globalAlpha = parseFloat(this.wiki.getTiddlerText(LINE_OPACITY_TITLE,\"1.0\"));\n\tctx.strokeStyle = this.wiki.getTiddlerText(LINE_COLOUR_TITLE,\"#ff0\");\n\tctx.lineWidth = parseFloat(this.wiki.getTiddlerText(LINE_WIDTH_TITLE,\"3\"));\n\tctx.lineCap = \"round\";\n\tctx.lineJoin = \"round\";\n\tctx.beginPath();\n\tctx.moveTo(this.stroke[0].x,this.stroke[0].y);\n\tfor(t=1; t<this.stroke.length-1; t++) {\n\t\tvar s1 = this.stroke[t],\n\t\t\ts2 = this.stroke[t-1],\n\t\t\ttx = (s1.x + s2.x)/2,\n\t\t\tty = (s1.y + s2.y)/2;\n\t\tctx.quadraticCurveTo(s2.x,s2.y,tx,ty);\n\t}\n\tctx.stroke();\n};\n\nEditBitmapWidget.prototype.strokeEnd = function() {\n\t// Copy the bitmap to the off-screen canvas\n\tvar ctx = this.currCanvas.getContext(\"2d\");\n\tctx.drawImage(this.canvasDomNode,0,0);\n\t// Save the image into the tiddler\n\tthis.saveChanges();\n};\n\nEditBitmapWidget.prototype.saveChanges = function() {\n\tvar tiddler = this.wiki.getTiddler(this.editTitle);\n\tif(tiddler) {\n\t\t// data URIs look like \"data:<type>;base64,<text>\"\n\t\tvar dataURL = this.canvasDomNode.toDataURL(tiddler.fields.type),\n\t\t\tposColon = dataURL.indexOf(\":\"),\n\t\t\tposSemiColon = dataURL.indexOf(\";\"),\n\t\t\tposComma = dataURL.indexOf(\",\"),\n\t\t\ttype = dataURL.substring(posColon+1,posSemiColon),\n\t\t\ttext = dataURL.substring(posComma+1);\n\t\tvar update = {type: type, text: text};\n\t\tthis.wiki.addTiddler(new $tw.Tiddler(this.wiki.getModificationFields(),tiddler,update,this.wiki.getCreationFields()));\n\t}\n};\n\nexports[\"edit-bitmap\"] = EditBitmapWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/edit-bitmap.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/edit-shortcut.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/edit-shortcut.js\ntype: application/javascript\nmodule-type: widget\n\nWidget to display an editable keyboard shortcut\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar EditShortcutWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nEditShortcutWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nEditShortcutWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\tthis.inputNode = this.document.createElement(\"input\");\n\t// Assign classes\n\tif(this.shortcutClass) {\n\t\tthis.inputNode.className = this.shortcutClass;\t\t\n\t}\n\t// Assign other attributes\n\tif(this.shortcutStyle) {\n\t\tthis.inputNode.setAttribute(\"style\",this.shortcutStyle);\n\t}\n\tif(this.shortcutTooltip) {\n\t\tthis.inputNode.setAttribute(\"title\",this.shortcutTooltip);\n\t}\n\tif(this.shortcutPlaceholder) {\n\t\tthis.inputNode.setAttribute(\"placeholder\",this.shortcutPlaceholder);\n\t}\n\tif(this.shortcutAriaLabel) {\n\t\tthis.inputNode.setAttribute(\"aria-label\",this.shortcutAriaLabel);\n\t}\n\t// Assign the current shortcut\n\tthis.updateInputNode();\n\t// Add event handlers\n\t$tw.utils.addEventListeners(this.inputNode,[\n\t\t{name: \"keydown\", handlerObject: this, handlerMethod: \"handleKeydownEvent\"}\n\t]);\n\t// Link into the DOM\n\tparent.insertBefore(this.inputNode,nextSibling);\n\tthis.domNodes.push(this.inputNode);\n};\n\n/*\nCompute the internal state of the widget\n*/\nEditShortcutWidget.prototype.execute = function() {\n\tthis.shortcutTiddler = this.getAttribute(\"tiddler\");\n\tthis.shortcutField = this.getAttribute(\"field\");\n\tthis.shortcutIndex = this.getAttribute(\"index\");\n\tthis.shortcutPlaceholder = this.getAttribute(\"placeholder\");\n\tthis.shortcutDefault = this.getAttribute(\"default\",\"\");\n\tthis.shortcutClass = this.getAttribute(\"class\");\n\tthis.shortcutStyle = this.getAttribute(\"style\");\n\tthis.shortcutTooltip = this.getAttribute(\"tooltip\");\n\tthis.shortcutAriaLabel = this.getAttribute(\"aria-label\");\n};\n\n/*\nUpdate the value of the input node\n*/\nEditShortcutWidget.prototype.updateInputNode = function() {\n\tif(this.shortcutField) {\n\t\tvar tiddler = this.wiki.getTiddler(this.shortcutTiddler);\n\t\tif(tiddler && $tw.utils.hop(tiddler.fields,this.shortcutField)) {\n\t\t\tthis.inputNode.value = tiddler.getFieldString(this.shortcutField);\n\t\t} else {\n\t\t\tthis.inputNode.value = this.shortcutDefault;\n\t\t}\n\t} else if(this.shortcutIndex) {\n\t\tthis.inputNode.value = this.wiki.extractTiddlerDataItem(this.shortcutTiddler,this.shortcutIndex,this.shortcutDefault);\n\t} else {\n\t\tthis.inputNode.value = this.wiki.getTiddlerText(this.shortcutTiddler,this.shortcutDefault);\n\t}\n};\n\n/*\nHandle a dom \"keydown\" event\n*/\nEditShortcutWidget.prototype.handleKeydownEvent = function(event) {\n\t// Ignore shift, ctrl, meta, alt\n\tif(event.keyCode && $tw.keyboardManager.getModifierKeys().indexOf(event.keyCode) === -1) {\n\t\t// Get the shortcut text representation\n\t\tvar value = $tw.keyboardManager.getPrintableShortcuts([{\n\t\t\tctrlKey: event.ctrlKey,\n\t\t\tshiftKey: event.shiftKey,\n\t\t\taltKey: event.altKey,\n\t\t\tmetaKey: event.metaKey,\n\t\t\tkeyCode: 
event.keyCode\n\t\t}]);\n\t\tif(value.length > 0) {\n\t\t\tthis.wiki.setText(this.shortcutTiddler,this.shortcutField,this.shortcutIndex,value[0]);\n\t\t}\n\t\t// Ignore the keydown if it was already handled\n\t\tevent.preventDefault();\n\t\tevent.stopPropagation();\n\t\treturn true;\t\t\n\t} else {\n\t\treturn false;\n\t}\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget needed re-rendering\n*/\nEditShortcutWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes.tiddler || changedAttributes.field || changedAttributes.index || changedAttributes.placeholder || changedAttributes[\"default\"] || changedAttributes[\"class\"] || changedAttributes.style || changedAttributes.tooltip || changedAttributes[\"aria-label\"]) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t} else if(changedTiddlers[this.shortcutTiddler]) {\n\t\tthis.updateInputNode();\n\t\treturn true;\n\t} else {\n\t\treturn false;\t\n\t}\n};\n\nexports[\"edit-shortcut\"] = EditShortcutWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/edit-shortcut.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/edit-text.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/edit-text.js\ntype: application/javascript\nmodule-type: widget\n\nEdit-text widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar editTextWidgetFactory = require(\"$:/core/modules/editor/factory.js\").editTextWidgetFactory,\n\tFramedEngine = require(\"$:/core/modules/editor/engines/framed.js\").FramedEngine,\n\tSimpleEngine = require(\"$:/core/modules/editor/engines/simple.js\").SimpleEngine;\n\nexports[\"edit-text\"] = editTextWidgetFactory(FramedEngine,SimpleEngine);\n\n})();\n",
"title": "$:/core/modules/widgets/edit-text.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/edit.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/edit.js\ntype: application/javascript\nmodule-type: widget\n\nEdit widget is a meta-widget chooses the appropriate actual editting widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar EditWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nEditWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nEditWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\tthis.renderChildren(parent,nextSibling);\n};\n\n// Mappings from content type to editor type are stored in tiddlers with this prefix\nvar EDITOR_MAPPING_PREFIX = \"$:/config/EditorTypeMappings/\";\n\n/*\nCompute the internal state of the widget\n*/\nEditWidget.prototype.execute = function() {\n\t// Get our parameters\n\tthis.editTitle = this.getAttribute(\"tiddler\",this.getVariable(\"currentTiddler\"));\n\tthis.editField = this.getAttribute(\"field\",\"text\");\n\tthis.editIndex = this.getAttribute(\"index\");\n\tthis.editClass = this.getAttribute(\"class\");\n\tthis.editPlaceholder = this.getAttribute(\"placeholder\");\n\t// Choose the appropriate edit widget\n\tthis.editorType = this.getEditorType();\n\t// Make the child widgets\n\tthis.makeChildWidgets([{\n\t\ttype: \"edit-\" + this.editorType,\n\t\tattributes: {\n\t\t\ttiddler: {type: \"string\", value: this.editTitle},\n\t\t\tfield: {type: \"string\", value: this.editField},\n\t\t\tindex: {type: \"string\", value: this.editIndex},\n\t\t\t\"class\": {type: \"string\", value: this.editClass},\n\t\t\t\"placeholder\": {type: \"string\", value: this.editPlaceholder}\n\t\t},\n\t\tchildren: this.parseTreeNode.children\n\t}]);\n};\n\nEditWidget.prototype.getEditorType = function() {\n\t// Get the content type of the thing we're editing\n\tvar type;\n\tif(this.editField === \"text\") {\n\t\tvar tiddler = this.wiki.getTiddler(this.editTitle);\n\t\tif(tiddler) {\n\t\t\ttype = tiddler.fields.type;\n\t\t}\n\t}\n\ttype = type || \"text/vnd.tiddlywiki\";\n\tvar editorType = this.wiki.getTiddlerText(EDITOR_MAPPING_PREFIX + type);\n\tif(!editorType) {\n\t\tvar typeInfo = $tw.config.contentTypeInfo[type];\n\t\tif(typeInfo && typeInfo.encoding === \"base64\") {\n\t\t\teditorType = \"binary\";\n\t\t} else {\n\t\t\teditorType = \"text\";\n\t\t}\n\t}\n\treturn editorType;\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nEditWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\t// Refresh if an attribute has changed, or the type associated with the target tiddler has changed\n\tif(changedAttributes.tiddler || changedAttributes.field || changedAttributes.index || (changedTiddlers[this.editTitle] && this.getEditorType() !== this.editorType)) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t} else {\n\t\treturn this.refreshChildren(changedTiddlers);\t\t\n\t}\n};\n\nexports.edit = EditWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/edit.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/element.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/element.js\ntype: application/javascript\nmodule-type: widget\n\nElement widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar ElementWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nElementWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nElementWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\t// Neuter blacklisted elements\n\tvar tag = this.parseTreeNode.tag;\n\tif($tw.config.htmlUnsafeElements.indexOf(tag) !== -1) {\n\t\ttag = \"safe-\" + tag;\n\t}\n\tvar domNode = this.document.createElementNS(this.namespace,tag);\n\tthis.assignAttributes(domNode,{excludeEventAttributes: true});\n\tparent.insertBefore(domNode,nextSibling);\n\tthis.renderChildren(domNode,null);\n\tthis.domNodes.push(domNode);\n};\n\n/*\nCompute the internal state of the widget\n*/\nElementWidget.prototype.execute = function() {\n\t// Select the namespace for the tag\n\tvar tagNamespaces = {\n\t\t\tsvg: \"http://www.w3.org/2000/svg\",\n\t\t\tmath: \"http://www.w3.org/1998/Math/MathML\",\n\t\t\tbody: \"http://www.w3.org/1999/xhtml\"\n\t\t};\n\tthis.namespace = tagNamespaces[this.parseTreeNode.tag];\n\tif(this.namespace) {\n\t\tthis.setVariable(\"namespace\",this.namespace);\n\t} else {\n\t\tthis.namespace = this.getVariable(\"namespace\",{defaultValue: \"http://www.w3.org/1999/xhtml\"});\n\t}\n\t// Make the child widgets\n\tthis.makeChildWidgets();\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nElementWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes(),\n\t\thasChangedAttributes = $tw.utils.count(changedAttributes) > 0;\n\tif(hasChangedAttributes) {\n\t\t// Update our attributes\n\t\tthis.assignAttributes(this.domNodes[0],{excludeEventAttributes: true});\n\t}\n\treturn this.refreshChildren(changedTiddlers) || hasChangedAttributes;\n};\n\nexports.element = ElementWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/element.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/encrypt.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/encrypt.js\ntype: application/javascript\nmodule-type: widget\n\nEncrypt widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar EncryptWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nEncryptWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nEncryptWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\tvar textNode = this.document.createTextNode(this.encryptedText);\n\tparent.insertBefore(textNode,nextSibling);\n\tthis.domNodes.push(textNode);\n};\n\n/*\nCompute the internal state of the widget\n*/\nEncryptWidget.prototype.execute = function() {\n\t// Get parameters from our attributes\n\tthis.filter = this.getAttribute(\"filter\",\"[!is[system]]\");\n\t// Encrypt the filtered tiddlers\n\tvar tiddlers = this.wiki.filterTiddlers(this.filter),\n\t\tjson = {},\n\t\tself = this;\n\t$tw.utils.each(tiddlers,function(title) {\n\t\tvar tiddler = self.wiki.getTiddler(title),\n\t\t\tjsonTiddler = {};\n\t\tfor(var f in tiddler.fields) {\n\t\t\tjsonTiddler[f] = tiddler.getFieldString(f);\n\t\t}\n\t\tjson[title] = jsonTiddler;\n\t});\n\tthis.encryptedText = $tw.utils.htmlEncode($tw.crypto.encrypt(JSON.stringify(json)));\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nEncryptWidget.prototype.refresh = function(changedTiddlers) {\n\t// We don't need to worry about refreshing because the encrypt widget isn't for interactive use\n\treturn false;\n};\n\nexports.encrypt = EncryptWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/encrypt.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/entity.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/entity.js\ntype: application/javascript\nmodule-type: widget\n\nHTML entity widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar EntityWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nEntityWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nEntityWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.execute();\n\tvar entityString = this.getAttribute(\"entity\",this.parseTreeNode.entity || \"\"),\n\t\ttextNode = this.document.createTextNode($tw.utils.entityDecode(entityString));\n\tparent.insertBefore(textNode,nextSibling);\n\tthis.domNodes.push(textNode);\n};\n\n/*\nCompute the internal state of the widget\n*/\nEntityWidget.prototype.execute = function() {\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nEntityWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes.entity) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t} else {\n\t\treturn false;\t\n\t}\n};\n\nexports.entity = EntityWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/entity.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/fieldmangler.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/fieldmangler.js\ntype: application/javascript\nmodule-type: widget\n\nField mangler widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar FieldManglerWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n\tthis.addEventListeners([\n\t\t{type: \"tm-remove-field\", handler: \"handleRemoveFieldEvent\"},\n\t\t{type: \"tm-add-field\", handler: \"handleAddFieldEvent\"},\n\t\t{type: \"tm-remove-tag\", handler: \"handleRemoveTagEvent\"},\n\t\t{type: \"tm-add-tag\", handler: \"handleAddTagEvent\"}\n\t]);\n};\n\n/*\nInherit from the base widget class\n*/\nFieldManglerWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nFieldManglerWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\tthis.renderChildren(parent,nextSibling);\n};\n\n/*\nCompute the internal state of the widget\n*/\nFieldManglerWidget.prototype.execute = function() {\n\t// Get our parameters\n\tthis.mangleTitle = this.getAttribute(\"tiddler\",this.getVariable(\"currentTiddler\"));\n\t// Construct the child widgets\n\tthis.makeChildWidgets();\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nFieldManglerWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes.tiddler) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t} else {\n\t\treturn this.refreshChildren(changedTiddlers);\t\t\n\t}\n};\n\nFieldManglerWidget.prototype.handleRemoveFieldEvent = function(event) {\n\tvar tiddler = this.wiki.getTiddler(this.mangleTitle),\n\t\tdeletion = {};\n\tdeletion[event.param] = undefined;\n\tthis.wiki.addTiddler(new $tw.Tiddler(tiddler,deletion));\n\treturn true;\n};\n\nFieldManglerWidget.prototype.handleAddFieldEvent = function(event) {\n\tvar tiddler = this.wiki.getTiddler(this.mangleTitle),\n\t\taddition = this.wiki.getModificationFields(),\n\t\thadInvalidFieldName = false,\n\t\taddField = function(name,value) {\n\t\t\tvar trimmedName = name.toLowerCase().trim();\n\t\t\tif(!$tw.utils.isValidFieldName(trimmedName)) {\n\t\t\t\tif(!hadInvalidFieldName) {\n\t\t\t\t\talert($tw.language.getString(\n\t\t\t\t\t\t\"InvalidFieldName\",\n\t\t\t\t\t\t{variables:\n\t\t\t\t\t\t\t{fieldName: trimmedName}\n\t\t\t\t\t\t}\n\t\t\t\t\t));\n\t\t\t\t\thadInvalidFieldName = true;\n\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tif(!value && tiddler) {\n\t\t\t\t\tvalue = tiddler.fields[trimmedName];\n\t\t\t\t}\n\t\t\t\taddition[trimmedName] = value || \"\";\n\t\t\t}\n\t\t\treturn;\n\t\t};\n\taddition.title = this.mangleTitle;\n\tif(typeof event.param === \"string\") {\n\t\taddField(event.param,\"\");\n\t}\n\tif(typeof event.paramObject === \"object\") {\n\t\tfor(var name in event.paramObject) {\n\t\t\taddField(name,event.paramObject[name]);\n\t\t}\n\t}\n\tthis.wiki.addTiddler(new $tw.Tiddler(tiddler,addition));\n\treturn true;\n};\n\nFieldManglerWidget.prototype.handleRemoveTagEvent = function(event) {\n\tvar tiddler = this.wiki.getTiddler(this.mangleTitle);\n\tif(tiddler && tiddler.fields.tags) {\n\t\tvar p = tiddler.fields.tags.indexOf(event.param);\n\t\tif(p !== -1) {\n\t\t\tvar modification = this.wiki.getModificationFields();\n\t\t\tmodification.tags = (tiddler.fields.tags || 
[]).slice(0);\n\t\t\tmodification.tags.splice(p,1);\n\t\t\tif(modification.tags.length === 0) {\n\t\t\t\tmodification.tags = undefined;\n\t\t\t}\n\t\tthis.wiki.addTiddler(new $tw.Tiddler(tiddler,modification));\n\t\t}\n\t}\n\treturn true;\n};\n\nFieldManglerWidget.prototype.handleAddTagEvent = function(event) {\n\tvar tiddler = this.wiki.getTiddler(this.mangleTitle);\n\tif(tiddler && typeof event.param === \"string\") {\n\t\tvar tag = event.param.trim();\n\t\tif(tag !== \"\") {\n\t\t\tvar modification = this.wiki.getModificationFields();\n\t\t\tmodification.tags = (tiddler.fields.tags || []).slice(0);\n\t\t\t$tw.utils.pushTop(modification.tags,tag);\n\t\t\tthis.wiki.addTiddler(new $tw.Tiddler(tiddler,modification));\t\t\t\n\t\t}\n\t} else if(typeof event.param === \"string\" && event.param.trim() !== \"\" && this.mangleTitle.trim() !== \"\") {\n\t\tvar tag = [];\n\t\ttag.push(event.param.trim());\n\t\tthis.wiki.addTiddler({title: this.mangleTitle, tags: tag});\t\t\n\t}\n\treturn true;\n};\n\nexports.fieldmangler = FieldManglerWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/fieldmangler.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/fields.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/fields.js\ntype: application/javascript\nmodule-type: widget\n\nFields widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar FieldsWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nFieldsWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nFieldsWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\tvar textNode = this.document.createTextNode(this.text);\n\tparent.insertBefore(textNode,nextSibling);\n\tthis.domNodes.push(textNode);\n};\n\n/*\nCompute the internal state of the widget\n*/\nFieldsWidget.prototype.execute = function() {\n\t// Get parameters from our attributes\n\tthis.tiddlerTitle = this.getAttribute(\"tiddler\",this.getVariable(\"currentTiddler\"));\n\tthis.template = this.getAttribute(\"template\");\n\tthis.exclude = this.getAttribute(\"exclude\");\n\tthis.stripTitlePrefix = this.getAttribute(\"stripTitlePrefix\",\"no\") === \"yes\";\n\t// Get the value to display\n\tvar tiddler = this.wiki.getTiddler(this.tiddlerTitle);\n\t// Get the exclusion list\n\tvar exclude;\n\tif(this.exclude) {\n\t\texclude = this.exclude.split(\" \");\n\t} else {\n\t\texclude = [\"text\"]; \n\t}\n\t// Compose the template\n\tvar text = [];\n\tif(this.template && tiddler) {\n\t\tvar fields = [];\n\t\tfor(var fieldName in tiddler.fields) {\n\t\t\tif(exclude.indexOf(fieldName) === -1) {\n\t\t\t\tfields.push(fieldName);\n\t\t\t}\n\t\t}\n\t\tfields.sort();\n\t\tfor(var f=0; f<fields.length; f++) {\n\t\t\tfieldName = fields[f];\n\t\t\tif(exclude.indexOf(fieldName) === -1) {\n\t\t\t\tvar row = this.template,\n\t\t\t\t\tvalue = tiddler.getFieldString(fieldName);\n\t\t\t\tif(this.stripTitlePrefix && fieldName === \"title\") {\n\t\t\t\t\tvar reStrip = /^\\{[^\\}]+\\}(.+)/mg,\n\t\t\t\t\t\treMatch = reStrip.exec(value);\n\t\t\t\t\tif(reMatch) {\n\t\t\t\t\t\tvalue = reMatch[1];\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\trow = row.replace(\"$name$\",fieldName);\n\t\t\t\trow = row.replace(\"$value$\",value);\n\t\t\t\trow = row.replace(\"$encoded_value$\",$tw.utils.htmlEncode(value));\n\t\t\t\ttext.push(row);\n\t\t\t}\n\t\t}\n\t}\n\tthis.text = text.join(\"\");\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nFieldsWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes.tiddler || changedAttributes.template || changedAttributes.exclude || changedAttributes.stripTitlePrefix || changedTiddlers[this.tiddlerTitle]) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t} else {\n\t\treturn false;\t\n\t}\n};\n\nexports.fields = FieldsWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/fields.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/image.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/image.js\ntype: application/javascript\nmodule-type: widget\n\nThe image widget displays an image referenced with an external URI or with a local tiddler title.\n\n```\n<$image src=\"TiddlerTitle\" width=\"320\" height=\"400\" class=\"classnames\">\n```\n\nThe image source can be the title of an existing tiddler or the URL of an external image.\n\nExternal images always generate an HTML `<img>` tag.\n\nTiddlers that have a _canonical_uri field generate an HTML `<img>` tag with the src attribute containing the URI.\n\nTiddlers that contain image data generate an HTML `<img>` tag with the src attribute containing a base64 representation of the image.\n\nTiddlers that contain wikitext could be rendered to a DIV of the usual size of a tiddler, and then transformed to the size requested.\n\nThe width and height attributes are interpreted as a number of pixels, and do not need to include the \"px\" suffix.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar ImageWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nImageWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nImageWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\t// Create element\n\t// Determine what type of image it is\n\tvar tag = \"img\", src = \"\",\n\t\ttiddler = this.wiki.getTiddler(this.imageSource);\n\tif(!tiddler) {\n\t\t// The source isn't the title of a tiddler, so we'll assume it's a URL\n\t\tsrc = this.getVariable(\"tv-get-export-image-link\",{params: [{name: \"src\",value: this.imageSource}],defaultValue: this.imageSource});\n\t} else {\n\t\t// Check if it is an image tiddler\n\t\tif(this.wiki.isImageTiddler(this.imageSource)) {\n\t\t\tvar type = tiddler.fields.type,\n\t\t\t\ttext = tiddler.fields.text,\n\t\t\t\t_canonical_uri = tiddler.fields._canonical_uri;\n\t\t\t// If the tiddler has body text then it doesn't need to be lazily loaded\n\t\t\tif(text) {\n\t\t\t\t// Render the appropriate element for the image type\n\t\t\t\tswitch(type) {\n\t\t\t\t\tcase \"application/pdf\":\n\t\t\t\t\t\ttag = \"embed\";\n\t\t\t\t\t\tsrc = \"data:application/pdf;base64,\" + text;\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase \"image/svg+xml\":\n\t\t\t\t\t\tsrc = \"data:image/svg+xml,\" + encodeURIComponent(text);\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tdefault:\n\t\t\t\t\t\tsrc = \"data:\" + type + \";base64,\" + text;\n\t\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t} else if(_canonical_uri) {\n\t\t\t\tswitch(type) {\n\t\t\t\t\tcase \"application/pdf\":\n\t\t\t\t\t\ttag = \"embed\";\n\t\t\t\t\t\tsrc = _canonical_uri;\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase \"image/svg+xml\":\n\t\t\t\t\t\tsrc = _canonical_uri;\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tdefault:\n\t\t\t\t\t\tsrc = _canonical_uri;\n\t\t\t\t\t\tbreak;\n\t\t\t\t}\t\n\t\t\t} else {\n\t\t\t\t// Just trigger loading of the tiddler\n\t\t\t\tthis.wiki.getTiddlerText(this.imageSource);\n\t\t\t}\n\t\t}\n\t}\n\t// Create the element and assign the attributes\n\tvar domNode = this.document.createElement(tag);\n\tdomNode.setAttribute(\"src\",src);\n\tif(this.imageClass) {\n\t\tdomNode.setAttribute(\"class\",this.imageClass);\t\t\n\t}\n\tif(this.imageWidth) {\n\t\tdomNode.setAttribute(\"width\",this.imageWidth);\n\t}\n\tif(this.imageHeight) 
{\n\t\tdomNode.setAttribute(\"height\",this.imageHeight);\n\t}\n\tif(this.imageTooltip) {\n\t\tdomNode.setAttribute(\"title\",this.imageTooltip);\t\t\n\t}\n\tif(this.imageAlt) {\n\t\tdomNode.setAttribute(\"alt\",this.imageAlt);\t\t\n\t}\n\t// Insert element\n\tparent.insertBefore(domNode,nextSibling);\n\tthis.domNodes.push(domNode);\n};\n\n/*\nCompute the internal state of the widget\n*/\nImageWidget.prototype.execute = function() {\n\t// Get our parameters\n\tthis.imageSource = this.getAttribute(\"source\");\n\tthis.imageWidth = this.getAttribute(\"width\");\n\tthis.imageHeight = this.getAttribute(\"height\");\n\tthis.imageClass = this.getAttribute(\"class\");\n\tthis.imageTooltip = this.getAttribute(\"tooltip\");\n\tthis.imageAlt = this.getAttribute(\"alt\");\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nImageWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes.source || changedAttributes.width || changedAttributes.height || changedAttributes[\"class\"] || changedAttributes.tooltip || changedTiddlers[this.imageSource]) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t} else {\n\t\treturn false;\t\t\n\t}\n};\n\nexports.image = ImageWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/image.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/importvariables.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/importvariables.js\ntype: application/javascript\nmodule-type: widget\n\nImport variable definitions from other tiddlers\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar ImportVariablesWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nImportVariablesWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nImportVariablesWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\tthis.renderChildren(parent,nextSibling);\n};\n\n/*\nCompute the internal state of the widget\n*/\nImportVariablesWidget.prototype.execute = function(tiddlerList) {\n\tvar self = this;\n\t// Get our parameters\n\tthis.filter = this.getAttribute(\"filter\");\n\t// Compute the filter\n\tthis.tiddlerList = tiddlerList || this.wiki.filterTiddlers(this.filter,this);\n\t// Accumulate the <$set> widgets from each tiddler\n\tvar widgetStackStart,widgetStackEnd;\n\tfunction addWidgetNode(widgetNode) {\n\t\tif(widgetNode) {\n\t\t\tif(!widgetStackStart && !widgetStackEnd) {\n\t\t\t\twidgetStackStart = widgetNode;\n\t\t\t\twidgetStackEnd = widgetNode;\n\t\t\t} else {\n\t\t\t\twidgetStackEnd.children = [widgetNode];\n\t\t\t\twidgetStackEnd = widgetNode;\n\t\t\t}\n\t\t}\n\t}\n\t$tw.utils.each(this.tiddlerList,function(title) {\n\t\tvar parser = self.wiki.parseTiddler(title);\n\t\tif(parser) {\n\t\t\tvar parseTreeNode = parser.tree[0];\n\t\t\twhile(parseTreeNode && parseTreeNode.type === \"set\") {\n\t\t\t\taddWidgetNode({\n\t\t\t\t\ttype: \"set\",\n\t\t\t\t\tattributes: parseTreeNode.attributes,\n\t\t\t\t\tparams: parseTreeNode.params\n\t\t\t\t});\n\t\t\t\tparseTreeNode = parseTreeNode.children[0];\n\t\t\t}\n\t\t} \n\t});\n\t// Add our own children to the end of the pile\n\tvar parseTreeNodes;\n\tif(widgetStackStart && widgetStackEnd) {\n\t\tparseTreeNodes = [widgetStackStart];\n\t\twidgetStackEnd.children = this.parseTreeNode.children;\n\t} else {\n\t\tparseTreeNodes = this.parseTreeNode.children;\n\t}\n\t// Construct the child widgets\n\tthis.makeChildWidgets(parseTreeNodes);\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nImportVariablesWidget.prototype.refresh = function(changedTiddlers) {\n\t// Recompute our attributes and the filter list\n\tvar changedAttributes = this.computeAttributes(),\n\t\ttiddlerList = this.wiki.filterTiddlers(this.getAttribute(\"filter\"),this);\n\t// Refresh if the filter has changed, or the list of tiddlers has changed, or any of the tiddlers in the list has changed\n\tfunction haveListedTiddlersChanged() {\n\t\tvar changed = false;\n\t\ttiddlerList.forEach(function(title) {\n\t\t\tif(changedTiddlers[title]) {\n\t\t\t\tchanged = true;\n\t\t\t}\n\t\t});\n\t\treturn changed;\n\t}\n\tif(changedAttributes.filter || !$tw.utils.isArrayEqual(this.tiddlerList,tiddlerList) || haveListedTiddlersChanged()) {\n\t\t// Compute the filter\n\t\tthis.removeChildDomNodes();\n\t\tthis.execute(tiddlerList);\n\t\tthis.renderChildren(this.parentDomNode,this.findNextSiblingDomNode());\n\t\treturn true;\n\t} else {\n\t\treturn this.refreshChildren(changedTiddlers);\t\t\n\t}\n};\n\nexports.importvariables = ImportVariablesWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/importvariables.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/keyboard.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/keyboard.js\ntype: application/javascript\nmodule-type: widget\n\nKeyboard shortcut widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar KeyboardWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nKeyboardWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nKeyboardWidget.prototype.render = function(parent,nextSibling) {\n\tvar self = this;\n\t// Remember parent\n\tthis.parentDomNode = parent;\n\t// Compute attributes and execute state\n\tthis.computeAttributes();\n\tthis.execute();\n\t// Create element\n\tvar domNode = this.document.createElement(\"div\");\n\t// Assign classes\n\tvar classes = (this[\"class\"] || \"\").split(\" \");\n\tclasses.push(\"tc-keyboard\");\n\tdomNode.className = classes.join(\" \");\n\t// Add a keyboard event handler\n\tdomNode.addEventListener(\"keydown\",function (event) {\n\t\tif($tw.keyboardManager.checkKeyDescriptors(event,self.keyInfoArray)) {\n\t\t\tself.invokeActions(self,event);\n\t\t\tif(self.actions) {\n\t\t\t\tself.invokeActionString(self.actions,self,event);\n\t\t\t}\n\t\t\tself.dispatchMessage(event);\n\t\t\tevent.preventDefault();\n\t\t\tevent.stopPropagation();\n\t\t\treturn true;\n\t\t}\n\t\treturn false;\n\t},false);\n\t// Insert element\n\tparent.insertBefore(domNode,nextSibling);\n\tthis.renderChildren(domNode,null);\n\tthis.domNodes.push(domNode);\n};\n\nKeyboardWidget.prototype.dispatchMessage = function(event) {\n\tthis.dispatchEvent({type: this.message, param: this.param, tiddlerTitle: this.getVariable(\"currentTiddler\")});\n};\n\n/*\nCompute the internal state of the widget\n*/\nKeyboardWidget.prototype.execute = function() {\n\t// Get attributes\n\tthis.actions = this.getAttribute(\"actions\");\n\tthis.message = this.getAttribute(\"message\");\n\tthis.param = this.getAttribute(\"param\");\n\tthis.key = this.getAttribute(\"key\");\n\tthis.keyInfoArray = $tw.keyboardManager.parseKeyDescriptors(this.key);\n\tthis[\"class\"] = this.getAttribute(\"class\");\n\t// Make child widgets\n\tthis.makeChildWidgets();\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nKeyboardWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes.message || changedAttributes.param || changedAttributes.key || changedAttributes[\"class\"]) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t}\n\treturn this.refreshChildren(changedTiddlers);\n};\n\nexports.keyboard = KeyboardWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/keyboard.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/link.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/link.js\ntype: application/javascript\nmodule-type: widget\n\nLink widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\nvar MISSING_LINK_CONFIG_TITLE = \"$:/config/MissingLinks\";\n\nvar LinkWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nLinkWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nLinkWidget.prototype.render = function(parent,nextSibling) {\n\t// Save the parent dom node\n\tthis.parentDomNode = parent;\n\t// Compute our attributes\n\tthis.computeAttributes();\n\t// Execute our logic\n\tthis.execute();\n\t// Get the value of the tv-wikilinks configuration macro\n\tvar wikiLinksMacro = this.getVariable(\"tv-wikilinks\"),\n\t\tuseWikiLinks = wikiLinksMacro ? (wikiLinksMacro.trim() !== \"no\") : true,\n\t\tmissingLinksEnabled = !(this.hideMissingLinks && this.isMissing && !this.isShadow);\n\t// Render the link if required\n\tif(useWikiLinks && missingLinksEnabled) {\n\t\tthis.renderLink(parent,nextSibling);\n\t} else {\n\t\t// Just insert the link text\n\t\tvar domNode = this.document.createElement(\"span\");\n\t\tparent.insertBefore(domNode,nextSibling);\n\t\tthis.renderChildren(domNode,null);\n\t\tthis.domNodes.push(domNode);\n\t}\n};\n\n/*\nRender this widget into the DOM\n*/\nLinkWidget.prototype.renderLink = function(parent,nextSibling) {\n\tvar self = this;\n\t// Sanitise the specified tag\n\tvar tag = this.linkTag;\n\tif($tw.config.htmlUnsafeElements.indexOf(tag) !== -1) {\n\t\ttag = \"a\";\n\t}\n\t// Create our element\n\tvar domNode = this.document.createElement(tag);\n\t// Assign classes\n\tvar classes = [];\n\tif(this.linkClasses) {\n\t\tclasses.push(this.linkClasses);\n\t}\n\tclasses.push(\"tc-tiddlylink\");\n\tif(this.isShadow) {\n\t\tclasses.push(\"tc-tiddlylink-shadow\");\n\t}\n\tif(this.isMissing && !this.isShadow) {\n\t\tclasses.push(\"tc-tiddlylink-missing\");\n\t} else {\n\t\tif(!this.isMissing) {\n\t\t\tclasses.push(\"tc-tiddlylink-resolves\");\n\t\t}\n\t}\n\tdomNode.setAttribute(\"class\",classes.join(\" \"));\n\t// Set an href\n\tvar wikiLinkTemplateMacro = this.getVariable(\"tv-wikilink-template\"),\n\t\twikiLinkTemplate = wikiLinkTemplateMacro ? 
wikiLinkTemplateMacro.trim() : \"#$uri_encoded$\",\n\t\twikiLinkText = wikiLinkTemplate.replace(\"$uri_encoded$\",encodeURIComponent(this.to));\n\twikiLinkText = wikiLinkText.replace(\"$uri_doubleencoded$\",encodeURIComponent(encodeURIComponent(this.to)));\n\twikiLinkText = this.getVariable(\"tv-get-export-link\",{params: [{name: \"to\",value: this.to}],defaultValue: wikiLinkText});\n\tif(tag === \"a\") {\n\t\tdomNode.setAttribute(\"href\",wikiLinkText);\n\t}\n\tif(this.tabIndex) {\n\t\tdomNode.setAttribute(\"tabindex\",this.tabIndex);\n\t}\n\t// Set the tooltip\n\t// HACK: Performance issues with re-parsing the tooltip prevent us defaulting the tooltip to \"<$transclude field='tooltip'><$transclude field='title'/></$transclude>\"\n\tvar tooltipWikiText = this.tooltip || this.getVariable(\"tv-wikilink-tooltip\");\n\tif(tooltipWikiText) {\n\t\tvar tooltipText = this.wiki.renderText(\"text/plain\",\"text/vnd.tiddlywiki\",tooltipWikiText,{\n\t\t\t\tparseAsInline: true,\n\t\t\t\tvariables: {\n\t\t\t\t\tcurrentTiddler: this.to\n\t\t\t\t},\n\t\t\t\tparentWidget: this\n\t\t\t});\n\t\tdomNode.setAttribute(\"title\",tooltipText);\n\t}\n\tif(this[\"aria-label\"]) {\n\t\tdomNode.setAttribute(\"aria-label\",this[\"aria-label\"]);\n\t}\n\t// Add a click event handler\n\t$tw.utils.addEventListeners(domNode,[\n\t\t{name: \"click\", handlerObject: this, handlerMethod: \"handleClickEvent\"},\n\t]);\n\tif(this.draggable === \"yes\") {\n\t\t$tw.utils.addEventListeners(domNode,[\n\t\t\t{name: \"dragstart\", handlerObject: this, handlerMethod: \"handleDragStartEvent\"},\n\t\t\t{name: \"dragend\", handlerObject: this, handlerMethod: \"handleDragEndEvent\"}\n\t\t]);\n\t}\n\t// Insert the link into the DOM and render any children\n\tparent.insertBefore(domNode,nextSibling);\n\tthis.renderChildren(domNode,null);\n\tthis.domNodes.push(domNode);\n};\n\nLinkWidget.prototype.handleClickEvent = function(event) {\n\t// Send the click on its way as a navigate event\n\tvar bounds = this.domNodes[0].getBoundingClientRect();\n\tthis.dispatchEvent({\n\t\ttype: \"tm-navigate\",\n\t\tnavigateTo: this.to,\n\t\tnavigateFromTitle: this.getVariable(\"storyTiddler\"),\n\t\tnavigateFromNode: this,\n\t\tnavigateFromClientRect: { top: bounds.top, left: bounds.left, width: bounds.width, right: bounds.right, bottom: bounds.bottom, height: bounds.height\n\t\t},\n\t\tnavigateSuppressNavigation: event.metaKey || event.ctrlKey || (event.button === 1)\n\t});\n\tif(this.domNodes[0].hasAttribute(\"href\")) {\n\t\tevent.preventDefault();\n\t}\n\tevent.stopPropagation();\n\treturn false;\n};\n\nLinkWidget.prototype.handleDragStartEvent = function(event) {\n\tif(event.target === this.domNodes[0]) {\n\t\tif(this.to) {\n\t\t\t$tw.dragInProgress = true;\n\t\t\t// Set the dragging class on the element being dragged\n\t\t\t$tw.utils.addClass(event.target,\"tc-tiddlylink-dragging\");\n\t\t\t// Create the drag image elements\n\t\t\tthis.dragImage = this.document.createElement(\"div\");\n\t\t\tthis.dragImage.className = \"tc-tiddler-dragger\";\n\t\t\tvar inner = this.document.createElement(\"div\");\n\t\t\tinner.className = \"tc-tiddler-dragger-inner\";\n\t\t\tinner.appendChild(this.document.createTextNode(this.to));\n\t\t\tthis.dragImage.appendChild(inner);\n\t\t\tthis.document.body.appendChild(this.dragImage);\n\t\t\t// Astoundingly, we need to cover the dragger up: http://www.kryogenix.org/code/browser/custom-drag-image.html\n\t\t\tvar cover = this.document.createElement(\"div\");\n\t\t\tcover.className = 
\"tc-tiddler-dragger-cover\";\n\t\t\tcover.style.left = (inner.offsetLeft - 16) + \"px\";\n\t\t\tcover.style.top = (inner.offsetTop - 16) + \"px\";\n\t\t\tcover.style.width = (inner.offsetWidth + 32) + \"px\";\n\t\t\tcover.style.height = (inner.offsetHeight + 32) + \"px\";\n\t\t\tthis.dragImage.appendChild(cover);\n\t\t\t// Set the data transfer properties\n\t\t\tvar dataTransfer = event.dataTransfer;\n\t\t\t// First the image\n\t\t\tdataTransfer.effectAllowed = \"copy\";\n\t\t\tif(dataTransfer.setDragImage) {\n\t\t\t\tdataTransfer.setDragImage(this.dragImage.firstChild,-16,-16);\n\t\t\t}\n\t\t\t// Then the data\n\t\t\tdataTransfer.clearData();\n\t\t\tvar jsonData = this.wiki.getTiddlerAsJson(this.to),\n\t\t\t\ttextData = this.wiki.getTiddlerText(this.to,\"\"),\n\t\t\t\ttitle = (new RegExp(\"^\" + $tw.config.textPrimitives.wikiLink + \"$\",\"mg\")).exec(this.to) ? this.to : \"[[\" + this.to + \"]]\";\n\t\t\t// IE doesn't like these content types\n\t\t\tif(!$tw.browser.isIE) {\n\t\t\t\tdataTransfer.setData(\"text/vnd.tiddler\",jsonData);\n\t\t\t\tdataTransfer.setData(\"text/plain\",title);\n\t\t\t\tdataTransfer.setData(\"text/x-moz-url\",\"data:text/vnd.tiddler,\" + encodeURIComponent(jsonData));\n\t\t\t}\n\t\t\tdataTransfer.setData(\"URL\",\"data:text/vnd.tiddler,\" + encodeURIComponent(jsonData));\n\t\t\tdataTransfer.setData(\"Text\",title);\n\t\t\tevent.stopPropagation();\n\t\t} else {\n\t\t\tevent.preventDefault();\n\t\t}\n\t}\n};\n\nLinkWidget.prototype.handleDragEndEvent = function(event) {\n\tif(event.target === this.domNodes[0]) {\n\t\t$tw.dragInProgress = false;\n\t\t// Remove the dragging class on the element being dragged\n\t\t$tw.utils.removeClass(event.target,\"tc-tiddlylink-dragging\");\n\t\t// Delete the drag image element\n\t\tif(this.dragImage) {\n\t\t\tthis.dragImage.parentNode.removeChild(this.dragImage);\n\t\t}\n\t}\n};\n\n/*\nCompute the internal state of the widget\n*/\nLinkWidget.prototype.execute = function() {\n\t// Pick up our attributes\n\tthis.to = this.getAttribute(\"to\",this.getVariable(\"currentTiddler\"));\n\tthis.tooltip = this.getAttribute(\"tooltip\");\n\tthis[\"aria-label\"] = this.getAttribute(\"aria-label\");\n\tthis.linkClasses = this.getAttribute(\"class\");\n\tthis.tabIndex = this.getAttribute(\"tabindex\");\n\tthis.draggable = this.getAttribute(\"draggable\",\"yes\");\n\tthis.linkTag = this.getAttribute(\"tag\",\"a\");\n\t// Determine the link characteristics\n\tthis.isMissing = !this.wiki.tiddlerExists(this.to);\n\tthis.isShadow = this.wiki.isShadowTiddler(this.to);\n\tthis.hideMissingLinks = ($tw.wiki.getTiddlerText(MISSING_LINK_CONFIG_TITLE,\"yes\") === \"no\");\n\t// Make the child widgets\n\tthis.makeChildWidgets();\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nLinkWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes.to || changedTiddlers[this.to] || changedAttributes[\"aria-label\"] || changedAttributes.tooltip || changedTiddlers[MISSING_LINK_CONFIG_TITLE]) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t}\n\treturn this.refreshChildren(changedTiddlers);\n};\n\nexports.link = LinkWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/link.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/linkcatcher.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/linkcatcher.js\ntype: application/javascript\nmodule-type: widget\n\nLinkcatcher widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar LinkCatcherWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n\tthis.addEventListeners([\n\t\t{type: \"tm-navigate\", handler: \"handleNavigateEvent\"}\n\t]);\n};\n\n/*\nInherit from the base widget class\n*/\nLinkCatcherWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nLinkCatcherWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\tthis.renderChildren(parent,nextSibling);\n};\n\n/*\nCompute the internal state of the widget\n*/\nLinkCatcherWidget.prototype.execute = function() {\n\t// Get our parameters\n\tthis.catchTo = this.getAttribute(\"to\");\n\tthis.catchMessage = this.getAttribute(\"message\");\n\tthis.catchSet = this.getAttribute(\"set\");\n\tthis.catchSetTo = this.getAttribute(\"setTo\");\n\tthis.catchActions = this.getAttribute(\"actions\");\n\t// Construct the child widgets\n\tthis.makeChildWidgets();\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nLinkCatcherWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes.to || changedAttributes.message || changedAttributes.set || changedAttributes.setTo) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t} else {\n\t\treturn this.refreshChildren(changedTiddlers);\t\t\n\t}\n};\n\n/*\nHandle a tm-navigate event\n*/\nLinkCatcherWidget.prototype.handleNavigateEvent = function(event) {\n\tif(this.catchTo) {\n\t\tthis.wiki.setTextReference(this.catchTo,event.navigateTo,this.getVariable(\"currentTiddler\"));\n\t}\n\tif(this.catchMessage && this.parentWidget) {\n\t\tthis.parentWidget.dispatchEvent({\n\t\t\ttype: this.catchMessage,\n\t\t\tparam: event.navigateTo,\n\t\t\tnavigateTo: event.navigateTo\n\t\t});\n\t}\n\tif(this.catchSet) {\n\t\tvar tiddler = this.wiki.getTiddler(this.catchSet);\n\t\tthis.wiki.addTiddler(new $tw.Tiddler(tiddler,{title: this.catchSet, text: this.catchSetTo}));\n\t}\n\tif(this.catchActions) {\n\t\tthis.invokeActionString(this.catchActions,this);\n\t}\n\treturn false;\n};\n\nexports.linkcatcher = LinkCatcherWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/linkcatcher.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/list.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/list.js\ntype: application/javascript\nmodule-type: widget\n\nList and list item widgets\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\n/*\nThe list widget creates list element sub-widgets that reach back into the list widget for their configuration\n*/\n\nvar ListWidget = function(parseTreeNode,options) {\n\t// Initialise the storyviews if they've not been done already\n\tif(!this.storyViews) {\n\t\tListWidget.prototype.storyViews = {};\n\t\t$tw.modules.applyMethods(\"storyview\",this.storyViews);\n\t}\n\t// Main initialisation inherited from widget.js\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nListWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nListWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\tthis.renderChildren(parent,nextSibling);\n\t// Construct the storyview\n\tvar StoryView = this.storyViews[this.storyViewName];\n\tif(StoryView && !this.document.isTiddlyWikiFakeDom) {\n\t\tthis.storyview = new StoryView(this);\n\t} else {\n\t\tthis.storyview = null;\n\t}\n};\n\n/*\nCompute the internal state of the widget\n*/\nListWidget.prototype.execute = function() {\n\t// Get our attributes\n\tthis.template = this.getAttribute(\"template\");\n\tthis.editTemplate = this.getAttribute(\"editTemplate\");\n\tthis.variableName = this.getAttribute(\"variable\",\"currentTiddler\");\n\tthis.storyViewName = this.getAttribute(\"storyview\");\n\tthis.historyTitle = this.getAttribute(\"history\");\n\t// Compose the list elements\n\tthis.list = this.getTiddlerList();\n\tvar members = [],\n\t\tself = this;\n\t// Check for an empty list\n\tif(this.list.length === 0) {\n\t\tmembers = this.getEmptyMessage();\n\t} else {\n\t\t$tw.utils.each(this.list,function(title,index) {\n\t\t\tmembers.push(self.makeItemTemplate(title));\n\t\t});\n\t}\n\t// Construct the child widgets\n\tthis.makeChildWidgets(members);\n\t// Clear the last history\n\tthis.history = [];\n};\n\nListWidget.prototype.getTiddlerList = function() {\n\tvar defaultFilter = \"[!is[system]sort[title]]\";\n\treturn this.wiki.filterTiddlers(this.getAttribute(\"filter\",defaultFilter),this);\n};\n\nListWidget.prototype.getEmptyMessage = function() {\n\tvar emptyMessage = this.getAttribute(\"emptyMessage\",\"\"),\n\t\tparser = this.wiki.parseText(\"text/vnd.tiddlywiki\",emptyMessage,{parseAsInline: true});\n\tif(parser) {\n\t\treturn parser.tree;\n\t} else {\n\t\treturn [];\n\t}\n};\n\n/*\nCompose the template for a list item\n*/\nListWidget.prototype.makeItemTemplate = function(title) {\n\t// Check if the tiddler is a draft\n\tvar tiddler = this.wiki.getTiddler(title),\n\t\tisDraft = tiddler && tiddler.hasField(\"draft.of\"),\n\t\ttemplate = this.template,\n\t\ttemplateTree;\n\tif(isDraft && this.editTemplate) {\n\t\ttemplate = this.editTemplate;\n\t}\n\t// Compose the transclusion of the template\n\tif(template) {\n\t\ttemplateTree = [{type: \"transclude\", attributes: {tiddler: {type: \"string\", value: template}}}];\n\t} else {\n\t\tif(this.parseTreeNode.children && this.parseTreeNode.children.length > 0) {\n\t\t\ttemplateTree = this.parseTreeNode.children;\n\t\t} else {\n\t\t\t// Default template is a link to the title\n\t\t\ttemplateTree = [{type: \"element\", tag: this.parseTreeNode.isBlock ? 
\"div\" : \"span\", children: [{type: \"link\", attributes: {to: {type: \"string\", value: title}}, children: [\n\t\t\t\t\t{type: \"text\", text: title}\n\t\t\t]}]}];\n\t\t}\n\t}\n\t// Return the list item\n\treturn {type: \"listitem\", itemTitle: title, variableName: this.variableName, children: templateTree};\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nListWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes(),\n\t\tresult;\n\t// Call the storyview\n\tif(this.storyview && this.storyview.refreshStart) {\n\t\tthis.storyview.refreshStart(changedTiddlers,changedAttributes);\n\t}\n\t// Completely refresh if any of our attributes have changed\n\tif(changedAttributes.filter || changedAttributes.template || changedAttributes.editTemplate || changedAttributes.emptyMessage || changedAttributes.storyview || changedAttributes.history) {\n\t\tthis.refreshSelf();\n\t\tresult = true;\n\t} else {\n\t\t// Handle any changes to the list\n\t\tresult = this.handleListChanges(changedTiddlers);\n\t\t// Handle any changes to the history stack\n\t\tif(this.historyTitle && changedTiddlers[this.historyTitle]) {\n\t\t\tthis.handleHistoryChanges();\n\t\t}\n\t}\n\t// Call the storyview\n\tif(this.storyview && this.storyview.refreshEnd) {\n\t\tthis.storyview.refreshEnd(changedTiddlers,changedAttributes);\n\t}\n\treturn result;\n};\n\n/*\nHandle any changes to the history list\n*/\nListWidget.prototype.handleHistoryChanges = function() {\n\t// Get the history data\n\tvar newHistory = this.wiki.getTiddlerDataCached(this.historyTitle,[]);\n\t// Ignore any entries of the history that match the previous history\n\tvar entry = 0;\n\twhile(entry < newHistory.length && entry < this.history.length && newHistory[entry].title === this.history[entry].title) {\n\t\tentry++;\n\t}\n\t// Navigate forwards to each of the new tiddlers\n\twhile(entry < newHistory.length) {\n\t\tif(this.storyview && this.storyview.navigateTo) {\n\t\t\tthis.storyview.navigateTo(newHistory[entry]);\n\t\t}\n\t\tentry++;\n\t}\n\t// Update the history\n\tthis.history = newHistory;\n};\n\n/*\nProcess any changes to the list\n*/\nListWidget.prototype.handleListChanges = function(changedTiddlers) {\n\t// Get the new list\n\tvar prevList = this.list;\n\tthis.list = this.getTiddlerList();\n\t// Check for an empty list\n\tif(this.list.length === 0) {\n\t\t// Check if it was empty before\n\t\tif(prevList.length === 0) {\n\t\t\t// If so, just refresh the empty message\n\t\t\treturn this.refreshChildren(changedTiddlers);\n\t\t} else {\n\t\t\t// Replace the previous content with the empty message\n\t\t\tfor(t=this.children.length-1; t>=0; t--) {\n\t\t\t\tthis.removeListItem(t);\n\t\t\t}\n\t\t\tvar nextSibling = this.findNextSiblingDomNode();\n\t\t\tthis.makeChildWidgets(this.getEmptyMessage());\n\t\t\tthis.renderChildren(this.parentDomNode,nextSibling);\n\t\t\treturn true;\n\t\t}\n\t} else {\n\t\t// If the list was empty then we need to remove the empty message\n\t\tif(prevList.length === 0) {\n\t\t\tthis.removeChildDomNodes();\n\t\t\tthis.children = [];\n\t\t}\n\t\t// Cycle through the list, inserting and removing list items as needed\n\t\tvar hasRefreshed = false;\n\t\tfor(var t=0; t<this.list.length; t++) {\n\t\t\tvar index = this.findListItem(t,this.list[t]);\n\t\t\tif(index === undefined) {\n\t\t\t\t// The list item must be inserted\n\t\t\t\tthis.insertListItem(t,this.list[t]);\n\t\t\t\thasRefreshed = true;\n\t\t\t} else 
{\n\t\t\t\t// There are intervening list items that must be removed\n\t\t\t\tfor(var n=index-1; n>=t; n--) {\n\t\t\t\t\tthis.removeListItem(n);\n\t\t\t\t\thasRefreshed = true;\n\t\t\t\t}\n\t\t\t\t// Refresh the item we're reusing\n\t\t\t\tvar refreshed = this.children[t].refresh(changedTiddlers);\n\t\t\t\thasRefreshed = hasRefreshed || refreshed;\n\t\t\t}\n\t\t}\n\t\t// Remove any left over items\n\t\tfor(t=this.children.length-1; t>=this.list.length; t--) {\n\t\t\tthis.removeListItem(t);\n\t\t\thasRefreshed = true;\n\t\t}\n\t\treturn hasRefreshed;\n\t}\n};\n\n/*\nFind the list item with a given title, starting from a specified position\n*/\nListWidget.prototype.findListItem = function(startIndex,title) {\n\twhile(startIndex < this.children.length) {\n\t\tif(this.children[startIndex].parseTreeNode.itemTitle === title) {\n\t\t\treturn startIndex;\n\t\t}\n\t\tstartIndex++;\n\t}\n\treturn undefined;\n};\n\n/*\nInsert a new list item at the specified index\n*/\nListWidget.prototype.insertListItem = function(index,title) {\n\t// Create, insert and render the new child widgets\n\tvar widget = this.makeChildWidget(this.makeItemTemplate(title));\n\twidget.parentDomNode = this.parentDomNode; // Hack to enable findNextSiblingDomNode() to work\n\tthis.children.splice(index,0,widget);\n\tvar nextSibling = widget.findNextSiblingDomNode();\n\twidget.render(this.parentDomNode,nextSibling);\n\t// Animate the insertion if required\n\tif(this.storyview && this.storyview.insert) {\n\t\tthis.storyview.insert(widget);\n\t}\n\treturn true;\n};\n\n/*\nRemove the specified list item\n*/\nListWidget.prototype.removeListItem = function(index) {\n\tvar widget = this.children[index];\n\t// Animate the removal if required\n\tif(this.storyview && this.storyview.remove) {\n\t\tthis.storyview.remove(widget);\n\t} else {\n\t\twidget.removeChildDomNodes();\n\t}\n\t// Remove the child widget\n\tthis.children.splice(index,1);\n};\n\nexports.list = ListWidget;\n\nvar ListItemWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nListItemWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nListItemWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\tthis.renderChildren(parent,nextSibling);\n};\n\n/*\nCompute the internal state of the widget\n*/\nListItemWidget.prototype.execute = function() {\n\t// Set the current list item title\n\tthis.setVariable(this.parseTreeNode.variableName,this.parseTreeNode.itemTitle);\n\t// Construct the child widgets\n\tthis.makeChildWidgets();\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nListItemWidget.prototype.refresh = function(changedTiddlers) {\n\treturn this.refreshChildren(changedTiddlers);\n};\n\nexports.listitem = ListItemWidget;\n\n})();",
"title": "$:/core/modules/widgets/list.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/macrocall.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/macrocall.js\ntype: application/javascript\nmodule-type: widget\n\nMacrocall widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar MacroCallWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nMacroCallWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nMacroCallWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\tthis.renderChildren(parent,nextSibling);\n};\n\n/*\nCompute the internal state of the widget\n*/\nMacroCallWidget.prototype.execute = function() {\n\t// Get the parse type if specified\n\tthis.parseType = this.getAttribute(\"$type\",\"text/vnd.tiddlywiki\");\n\tthis.renderOutput = this.getAttribute(\"$output\",\"text/html\");\n\t// Merge together the parameters specified in the parse tree with the specified attributes\n\tvar params = this.parseTreeNode.params ? this.parseTreeNode.params.slice(0) : [];\n\t$tw.utils.each(this.attributes,function(attribute,name) {\n\t\tif(name.charAt(0) !== \"$\") {\n\t\t\tparams.push({name: name, value: attribute});\t\t\t\n\t\t}\n\t});\n\t// Get the macro value\n\tvar text = this.getVariable(this.parseTreeNode.name || this.getAttribute(\"$name\"),{params: params}),\n\t\tparseTreeNodes;\n\t// Are we rendering to HTML?\n\tif(this.renderOutput === \"text/html\") {\n\t\t// If so we'll return the parsed macro\n\t\tvar parser = this.wiki.parseText(this.parseType,text,\n\t\t\t\t\t\t\t{parseAsInline: !this.parseTreeNode.isBlock});\n\t\tparseTreeNodes = parser ? parser.tree : [];\n\t} else {\n\t\t// Otherwise, we'll render the text\n\t\tvar plainText = this.wiki.renderText(\"text/plain\",this.parseType,text,{parentWidget: this});\n\t\tparseTreeNodes = [{type: \"text\", text: plainText}];\n\t}\n\t// Construct the child widgets\n\tthis.makeChildWidgets(parseTreeNodes);\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nMacroCallWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif($tw.utils.count(changedAttributes) > 0) {\n\t\t// Rerender ourselves\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t} else {\n\t\treturn this.refreshChildren(changedTiddlers);\n\t}\n};\n\nexports.macrocall = MacroCallWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/macrocall.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/navigator.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/navigator.js\ntype: application/javascript\nmodule-type: widget\n\nNavigator widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar IMPORT_TITLE = \"$:/Import\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar NavigatorWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n\tthis.addEventListeners([\n\t\t{type: \"tm-navigate\", handler: \"handleNavigateEvent\"},\n\t\t{type: \"tm-edit-tiddler\", handler: \"handleEditTiddlerEvent\"},\n\t\t{type: \"tm-delete-tiddler\", handler: \"handleDeleteTiddlerEvent\"},\n\t\t{type: \"tm-save-tiddler\", handler: \"handleSaveTiddlerEvent\"},\n\t\t{type: \"tm-cancel-tiddler\", handler: \"handleCancelTiddlerEvent\"},\n\t\t{type: \"tm-close-tiddler\", handler: \"handleCloseTiddlerEvent\"},\n\t\t{type: \"tm-close-all-tiddlers\", handler: \"handleCloseAllTiddlersEvent\"},\n\t\t{type: \"tm-close-other-tiddlers\", handler: \"handleCloseOtherTiddlersEvent\"},\n\t\t{type: \"tm-new-tiddler\", handler: \"handleNewTiddlerEvent\"},\n\t\t{type: \"tm-import-tiddlers\", handler: \"handleImportTiddlersEvent\"},\n\t\t{type: \"tm-perform-import\", handler: \"handlePerformImportEvent\"},\n\t\t{type: \"tm-fold-tiddler\", handler: \"handleFoldTiddlerEvent\"},\n\t\t{type: \"tm-fold-other-tiddlers\", handler: \"handleFoldOtherTiddlersEvent\"},\n\t\t{type: \"tm-fold-all-tiddlers\", handler: \"handleFoldAllTiddlersEvent\"},\n\t\t{type: \"tm-unfold-all-tiddlers\", handler: \"handleUnfoldAllTiddlersEvent\"},\n\t\t{type: \"tm-rename-tiddler\", handler: \"handleRenameTiddlerEvent\"}\n\t]);\n};\n\n/*\nInherit from the base widget class\n*/\nNavigatorWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nNavigatorWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\tthis.renderChildren(parent,nextSibling);\n};\n\n/*\nCompute the internal state of the widget\n*/\nNavigatorWidget.prototype.execute = function() {\n\t// Get our parameters\n\tthis.storyTitle = this.getAttribute(\"story\");\n\tthis.historyTitle = this.getAttribute(\"history\");\n\t// Construct the child widgets\n\tthis.makeChildWidgets();\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nNavigatorWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes.story || changedAttributes.history) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t} else {\n\t\treturn this.refreshChildren(changedTiddlers);\t\t\n\t}\n};\n\nNavigatorWidget.prototype.getStoryList = function() {\n\treturn this.storyTitle ? 
this.wiki.getTiddlerList(this.storyTitle) : null;\n};\n\nNavigatorWidget.prototype.saveStoryList = function(storyList) {\n\tvar storyTiddler = this.wiki.getTiddler(this.storyTitle);\n\tthis.wiki.addTiddler(new $tw.Tiddler(\n\t\t{title: this.storyTitle},\n\t\tstoryTiddler,\n\t\t{list: storyList}\n\t));\n};\n\nNavigatorWidget.prototype.removeTitleFromStory = function(storyList,title) {\n\tvar p = storyList.indexOf(title);\n\twhile(p !== -1) {\n\t\tstoryList.splice(p,1);\n\t\tp = storyList.indexOf(title);\n\t}\n};\n\nNavigatorWidget.prototype.replaceFirstTitleInStory = function(storyList,oldTitle,newTitle) {\n\tvar pos = storyList.indexOf(oldTitle);\n\tif(pos !== -1) {\n\t\tstoryList[pos] = newTitle;\n\t\tdo {\n\t\t\tpos = storyList.indexOf(oldTitle,pos + 1);\n\t\t\tif(pos !== -1) {\n\t\t\t\tstoryList.splice(pos,1);\n\t\t\t}\n\t\t} while(pos !== -1);\n\t} else {\n\t\tstoryList.splice(0,0,newTitle);\n\t}\n};\n\nNavigatorWidget.prototype.addToStory = function(title,fromTitle) {\n\tvar storyList = this.getStoryList();\n\t// Quit if we cannot get hold of the story list\n\tif(!storyList) {\n\t\treturn;\n\t}\n\t// See if the tiddler is already there\n\tvar slot = storyList.indexOf(title);\n\t// Quit if it already exists in the story river\n\tif(slot >= 0) {\n\t\treturn;\n\t}\n\t// First we try to find the position of the story element we navigated from\n\tvar fromIndex = storyList.indexOf(fromTitle);\n\tif(fromIndex >= 0) {\n\t\t// The tiddler is added from inside the river\n\t\t// Determine where to insert the tiddler; Fallback is \"below\"\n\t\tswitch(this.getAttribute(\"openLinkFromInsideRiver\",\"below\")) {\n\t\t\tcase \"top\":\n\t\t\t\tslot = 0;\n\t\t\t\tbreak;\n\t\t\tcase \"bottom\":\n\t\t\t\tslot = storyList.length;\n\t\t\t\tbreak;\n\t\t\tcase \"above\":\n\t\t\t\tslot = fromIndex;\n\t\t\t\tbreak;\n\t\t\tcase \"below\": // Intentional fall-through\n\t\t\tdefault:\n\t\t\t\tslot = fromIndex + 1;\n\t\t\t\tbreak;\n\t\t}\n\t} else {\n\t\t// The tiddler is opened from outside the river. 
Determine where to insert the tiddler; default is \"top\"\n\t\tif(this.getAttribute(\"openLinkFromOutsideRiver\",\"top\") === \"bottom\") {\n\t\t\t// Insert at bottom\n\t\t\tslot = storyList.length;\n\t\t} else {\n\t\t\t// Insert at top\n\t\t\tslot = 0;\n\t\t}\n\t}\n\t// Add the tiddler\n\tstoryList.splice(slot,0,title);\n\t// Save the story\n\tthis.saveStoryList(storyList);\n};\n\n/*\nAdd a new record to the top of the history stack\ntitle: a title string or an array of title strings\nfromPageRect: page coordinates of the origin of the navigation\n*/\nNavigatorWidget.prototype.addToHistory = function(title,fromPageRect) {\n\tthis.wiki.addToHistory(title,fromPageRect,this.historyTitle);\n};\n\n/*\nHandle a tm-navigate event\n*/\nNavigatorWidget.prototype.handleNavigateEvent = function(event) {\n\tif(event.navigateTo) {\n\t\tthis.addToStory(event.navigateTo,event.navigateFromTitle);\n\t\tif(!event.navigateSuppressNavigation) {\n\t\t\tthis.addToHistory(event.navigateTo,event.navigateFromClientRect);\n\t\t}\n\t}\n\treturn false;\n};\n\n// Close a specified tiddler\nNavigatorWidget.prototype.handleCloseTiddlerEvent = function(event) {\n\tvar title = event.param || event.tiddlerTitle,\n\t\tstoryList = this.getStoryList();\n\t// Look for tiddlers with this title to close\n\tthis.removeTitleFromStory(storyList,title);\n\tthis.saveStoryList(storyList);\n\treturn false;\n};\n\n// Close all tiddlers\nNavigatorWidget.prototype.handleCloseAllTiddlersEvent = function(event) {\n\tthis.saveStoryList([]);\n\treturn false;\n};\n\n// Close other tiddlers\nNavigatorWidget.prototype.handleCloseOtherTiddlersEvent = function(event) {\n\tvar title = event.param || event.tiddlerTitle;\n\tthis.saveStoryList([title]);\n\treturn false;\n};\n\n// Place a tiddler in edit mode\nNavigatorWidget.prototype.handleEditTiddlerEvent = function(event) {\n\tvar self = this;\n\tfunction isUnmodifiedShadow(title) {\n\t\treturn self.wiki.isShadowTiddler(title) && !self.wiki.tiddlerExists(title);\n\t}\n\tfunction confirmEditShadow(title) {\n\t\treturn confirm($tw.language.getString(\n\t\t\t\"ConfirmEditShadowTiddler\",\n\t\t\t{variables:\n\t\t\t\t{title: title}\n\t\t\t}\n\t\t));\n\t}\n\tvar title = event.param || event.tiddlerTitle;\n\tif(isUnmodifiedShadow(title) && !confirmEditShadow(title)) {\n\t\treturn false;\n\t}\n\t// Replace the specified tiddler with a draft in edit mode\n\tvar draftTiddler = this.makeDraftTiddler(title);\n\t// Update the story and history if required\n\tif(!event.paramObject || event.paramObject.suppressNavigation !== \"yes\") {\n\t\tvar draftTitle = draftTiddler.fields.title,\n\t\t\tstoryList = this.getStoryList();\n\t\tthis.removeTitleFromStory(storyList,draftTitle);\n\t\tthis.replaceFirstTitleInStory(storyList,title,draftTitle);\n\t\tthis.addToHistory(draftTitle,event.navigateFromClientRect);\n\t\tthis.saveStoryList(storyList);\n\t\treturn false;\n\t}\n};\n\n// Delete a tiddler\nNavigatorWidget.prototype.handleDeleteTiddlerEvent = function(event) {\n\t// Get the tiddler we're deleting\n\tvar title = event.param || event.tiddlerTitle,\n\t\ttiddler = this.wiki.getTiddler(title),\n\t\tstoryList = this.getStoryList(),\n\t\toriginalTitle = tiddler ? 
tiddler.fields[\"draft.of\"] : \"\",\n\t\tconfirmationTitle;\n\tif(!tiddler) {\n\t\treturn false;\n\t}\n\t// Check if the tiddler we're deleting is in draft mode\n\tif(originalTitle) {\n\t\t// If so, we'll prompt for confirmation referencing the original tiddler\n\t\tconfirmationTitle = originalTitle;\n\t} else {\n\t\t// If not a draft, then prompt for confirmation referencing the specified tiddler\n\t\tconfirmationTitle = title;\n\t}\n\t// Seek confirmation\n\tif((this.wiki.getTiddler(originalTitle) || (tiddler.fields.text || \"\") !== \"\") && !confirm($tw.language.getString(\n\t\t\t\t\"ConfirmDeleteTiddler\",\n\t\t\t\t{variables:\n\t\t\t\t\t{title: confirmationTitle}\n\t\t\t\t}\n\t\t\t))) {\n\t\treturn false;\n\t}\n\t// Delete the original tiddler\n\tif(originalTitle) {\n\t\tthis.wiki.deleteTiddler(originalTitle);\n\t\tthis.removeTitleFromStory(storyList,originalTitle);\n\t}\n\t// Delete this tiddler\n\tthis.wiki.deleteTiddler(title);\n\t// Remove the closed tiddler from the story\n\tthis.removeTitleFromStory(storyList,title);\n\tthis.saveStoryList(storyList);\n\t// Trigger an autosave\n\t$tw.rootWidget.dispatchEvent({type: \"tm-auto-save-wiki\"});\n\treturn false;\n};\n\n/*\nCreate/reuse the draft tiddler for a given title\n*/\nNavigatorWidget.prototype.makeDraftTiddler = function(targetTitle) {\n\t// See if there is already a draft tiddler for this tiddler\n\tvar draftTitle = this.wiki.findDraft(targetTitle);\n\tif(draftTitle) {\n\t\treturn this.wiki.getTiddler(draftTitle);\n\t}\n\t// Get the current value of the tiddler we're editing\n\tvar tiddler = this.wiki.getTiddler(targetTitle);\n\t// Save the initial value of the draft tiddler\n\tdraftTitle = this.generateDraftTitle(targetTitle);\n\tvar draftTiddler = new $tw.Tiddler(\n\t\t\ttiddler,\n\t\t\t{\n\t\t\t\ttitle: draftTitle,\n\t\t\t\t\"draft.title\": targetTitle,\n\t\t\t\t\"draft.of\": targetTitle\n\t\t\t},\n\t\t\tthis.wiki.getModificationFields()\n\t\t);\n\tthis.wiki.addTiddler(draftTiddler);\n\treturn draftTiddler;\n};\n\n/*\nGenerate a title for the draft of a given tiddler\n*/\nNavigatorWidget.prototype.generateDraftTitle = function(title) {\n\tvar c = 0,\n\t\tdraftTitle;\n\tdo {\n\t\tdraftTitle = \"Draft \" + (c ? 
(c + 1) + \" \" : \"\") + \"of '\" + title + \"'\";\n\t\tc++;\n\t} while(this.wiki.tiddlerExists(draftTitle));\n\treturn draftTitle;\n};\n\n// Take a tiddler out of edit mode, saving the changes\nNavigatorWidget.prototype.handleSaveTiddlerEvent = function(event) {\n\tvar title = event.param || event.tiddlerTitle,\n\t\ttiddler = this.wiki.getTiddler(title),\n\t\tstoryList = this.getStoryList();\n\t// Replace the original tiddler with the draft\n\tif(tiddler) {\n\t\tvar draftTitle = (tiddler.fields[\"draft.title\"] || \"\").trim(),\n\t\t\tdraftOf = (tiddler.fields[\"draft.of\"] || \"\").trim();\n\t\tif(draftTitle) {\n\t\t\tvar isRename = draftOf !== draftTitle,\n\t\t\t\tisConfirmed = true;\n\t\t\tif(isRename && this.wiki.tiddlerExists(draftTitle)) {\n\t\t\t\tisConfirmed = confirm($tw.language.getString(\n\t\t\t\t\t\"ConfirmOverwriteTiddler\",\n\t\t\t\t\t{variables:\n\t\t\t\t\t\t{title: draftTitle}\n\t\t\t\t\t}\n\t\t\t\t));\n\t\t\t}\n\t\t\tif(isConfirmed) {\n\t\t\t\t// Create the new tiddler and pass it through the th-saving-tiddler hook\n\t\t\t\tvar newTiddler = new $tw.Tiddler(this.wiki.getCreationFields(),tiddler,{\n\t\t\t\t\ttitle: draftTitle,\n\t\t\t\t\t\"draft.title\": undefined,\n\t\t\t\t\t\"draft.of\": undefined\n\t\t\t\t},this.wiki.getModificationFields());\n\t\t\t\tnewTiddler = $tw.hooks.invokeHook(\"th-saving-tiddler\",newTiddler);\n\t\t\t\tthis.wiki.addTiddler(newTiddler);\n\t\t\t\t// Remove the draft tiddler\n\t\t\t\tthis.wiki.deleteTiddler(title);\n\t\t\t\t// Remove the original tiddler if we're renaming it\n\t\t\t\tif(isRename) {\n\t\t\t\t\tthis.wiki.deleteTiddler(draftOf);\n\t\t\t\t}\n\t\t\t\tif(!event.paramObject || event.paramObject.suppressNavigation !== \"yes\") {\n\t\t\t\t\t// Replace the draft in the story with the original\n\t\t\t\t\tthis.replaceFirstTitleInStory(storyList,title,draftTitle);\n\t\t\t\t\tthis.addToHistory(draftTitle,event.navigateFromClientRect);\n\t\t\t\t\tif(draftTitle !== this.storyTitle) {\n\t\t\t\t\t\tthis.saveStoryList(storyList);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\t// Trigger an autosave\n\t\t\t\t$tw.rootWidget.dispatchEvent({type: \"tm-auto-save-wiki\"});\n\t\t\t}\n\t\t}\n\t}\n\treturn false;\n};\n\n// Take a tiddler out of edit mode without saving the changes\nNavigatorWidget.prototype.handleCancelTiddlerEvent = function(event) {\n\t// Flip the specified tiddler from draft back to the original\n\tvar draftTitle = event.param || event.tiddlerTitle,\n\t\tdraftTiddler = this.wiki.getTiddler(draftTitle),\n\t\toriginalTitle = draftTiddler && draftTiddler.fields[\"draft.of\"];\n\tif(draftTiddler && originalTitle) {\n\t\t// Ask for confirmation if the tiddler text has changed\n\t\tvar isConfirmed = true,\n\t\t\toriginalTiddler = this.wiki.getTiddler(originalTitle),\n\t\t\tstoryList = this.getStoryList();\n\t\tif(this.wiki.isDraftModified(draftTitle)) {\n\t\t\tisConfirmed = confirm($tw.language.getString(\n\t\t\t\t\"ConfirmCancelTiddler\",\n\t\t\t\t{variables:\n\t\t\t\t\t{title: draftTitle}\n\t\t\t\t}\n\t\t\t));\n\t\t}\n\t\t// Remove the draft tiddler\n\t\tif(isConfirmed) {\n\t\t\tthis.wiki.deleteTiddler(draftTitle);\n\t\t\tif(!event.paramObject || event.paramObject.suppressNavigation !== \"yes\") {\n\t\t\t\tif(originalTiddler) {\n\t\t\t\t\tthis.replaceFirstTitleInStory(storyList,draftTitle,originalTitle);\n\t\t\t\t\tthis.addToHistory(originalTitle,event.navigateFromClientRect);\n\t\t\t\t} else {\n\t\t\t\t\tthis.removeTitleFromStory(storyList,draftTitle);\n\t\t\t\t}\n\t\t\t\tthis.saveStoryList(storyList);\n\t\t\t}\n\t\t}\n\t}\n\treturn false;\n};\n\n// 
Create a new draft tiddler\n// event.param can either be the title of a template tiddler, or a hashmap of fields.\n//\n// The title of the newly created tiddler follows these rules:\n// * If a hashmap was used and a title field was specified, use that title\n// * If a hashmap was used without a title field, use a default title, if necessary making it unique with a numeric suffix\n// * If a template tiddler was used, use the title of the template, if necessary making it unique with a numeric suffix\n//\n// If a draft of the target tiddler already exists then it is reused\nNavigatorWidget.prototype.handleNewTiddlerEvent = function(event) {\n\t// Get the story details\n\tvar storyList = this.getStoryList(),\n\t\ttemplateTiddler, additionalFields, title, draftTitle, existingTiddler;\n\t// Get the template tiddler (if any)\n\tif(typeof event.param === \"string\") {\n\t\t// Get the template tiddler\n\t\ttemplateTiddler = this.wiki.getTiddler(event.param);\n\t\t// Generate a new title\n\t\ttitle = this.wiki.generateNewTitle(event.param || $tw.language.getString(\"DefaultNewTiddlerTitle\"));\n\t}\n\t// Get the specified additional fields\n\tif(typeof event.paramObject === \"object\") {\n\t\tadditionalFields = event.paramObject;\n\t}\n\tif(typeof event.param === \"object\") { // Backwards compatibility with 5.1.3\n\t\tadditionalFields = event.param;\n\t}\n\tif(additionalFields && additionalFields.title) {\n\t\ttitle = additionalFields.title;\n\t}\n\t// Generate a title if we don't have one\n\ttitle = title || this.wiki.generateNewTitle($tw.language.getString(\"DefaultNewTiddlerTitle\"));\n\t// Find any existing draft for this tiddler\n\tdraftTitle = this.wiki.findDraft(title);\n\t// Pull in any existing tiddler\n\tif(draftTitle) {\n\t\texistingTiddler = this.wiki.getTiddler(draftTitle);\n\t} else {\n\t\tdraftTitle = this.generateDraftTitle(title);\n\t\texistingTiddler = this.wiki.getTiddler(title);\n\t}\n\t// Merge the tags\n\tvar mergedTags = [];\n\tif(existingTiddler && existingTiddler.fields.tags) {\n\t\t$tw.utils.pushTop(mergedTags,existingTiddler.fields.tags)\n\t}\n\tif(additionalFields && additionalFields.tags) {\n\t\t// Merge tags\n\t\tmergedTags = $tw.utils.pushTop(mergedTags,$tw.utils.parseStringArray(additionalFields.tags));\n\t}\n\tif(templateTiddler && templateTiddler.fields.tags) {\n\t\t// Merge tags\n\t\tmergedTags = $tw.utils.pushTop(mergedTags,templateTiddler.fields.tags);\n\t}\n\t// Save the draft tiddler\n\tvar draftTiddler = new $tw.Tiddler({\n\t\t\ttext: \"\",\n\t\t\t\"draft.title\": title\n\t\t},\n\t\ttemplateTiddler,\n\t\texistingTiddler,\n\t\tadditionalFields,\n\t\tthis.wiki.getCreationFields(),\n\t\t{\n\t\t\ttitle: draftTitle,\n\t\t\t\"draft.of\": title,\n\t\t\ttags: mergedTags\n\t\t},this.wiki.getModificationFields());\n\tthis.wiki.addTiddler(draftTiddler);\n\t// Update the story to insert the new draft at the top and remove any existing tiddler\n\tif(storyList.indexOf(draftTitle) === -1) {\n\t\tvar slot = storyList.indexOf(event.navigateFromTitle);\n\t\tstoryList.splice(slot + 1,0,draftTitle);\n\t}\n\tif(storyList.indexOf(title) !== -1) {\n\t\tstoryList.splice(storyList.indexOf(title),1);\t\t\n\t}\n\tthis.saveStoryList(storyList);\n\t// Add a new record to the top of the history stack\n\tthis.addToHistory(draftTitle);\n\treturn false;\n};\n\n// Import JSON tiddlers into a pending import tiddler\nNavigatorWidget.prototype.handleImportTiddlersEvent = function(event) {\n\tvar self = this;\n\t// Get the tiddlers\n\tvar tiddlers = [];\n\ttry {\n\t\ttiddlers = 
JSON.parse(event.param);\t\n\t} catch(e) {\n\t}\n\t// Get the current $:/Import tiddler\n\tvar importTiddler = this.wiki.getTiddler(IMPORT_TITLE),\n\t\timportData = this.wiki.getTiddlerData(IMPORT_TITLE,{}),\n\t\tnewFields = new Object({\n\t\t\ttitle: IMPORT_TITLE,\n\t\t\ttype: \"application/json\",\n\t\t\t\"plugin-type\": \"import\",\n\t\t\t\"status\": \"pending\"\n\t\t}),\n\t\tincomingTiddlers = [];\n\t// Process each tiddler\n\timportData.tiddlers = importData.tiddlers || {};\n\t$tw.utils.each(tiddlers,function(tiddlerFields) {\n\t\tvar title = tiddlerFields.title;\n\t\tif(title) {\n\t\t\tincomingTiddlers.push(title);\n\t\t\timportData.tiddlers[title] = tiddlerFields;\n\t\t}\n\t});\n\t// Give the active upgrader modules a chance to process the incoming tiddlers\n\tvar messages = this.wiki.invokeUpgraders(incomingTiddlers,importData.tiddlers);\n\t$tw.utils.each(messages,function(message,title) {\n\t\tnewFields[\"message-\" + title] = message;\n\t});\n\t// Deselect any suppressed tiddlers\n\t$tw.utils.each(importData.tiddlers,function(tiddler,title) {\n\t\tif($tw.utils.count(tiddler) === 0) {\n\t\t\tnewFields[\"selection-\" + title] = \"unchecked\";\n\t\t}\n\t});\n\t// Save the $:/Import tiddler\n\tnewFields.text = JSON.stringify(importData,null,$tw.config.preferences.jsonSpaces);\n\tthis.wiki.addTiddler(new $tw.Tiddler(importTiddler,newFields));\n\t// Update the story and history details\n\tif(this.getVariable(\"tv-auto-open-on-import\") !== \"no\") {\n\t\tvar storyList = this.getStoryList(),\n\t\t\thistory = [];\n\t\t// Add it to the story\n\t\tif(storyList.indexOf(IMPORT_TITLE) === -1) {\n\t\t\tstoryList.unshift(IMPORT_TITLE);\n\t\t}\n\t\t// And to history\n\t\thistory.push(IMPORT_TITLE);\n\t\t// Save the updated story and history\n\t\tthis.saveStoryList(storyList);\n\t\tthis.addToHistory(history);\t\t\n\t}\n\treturn false;\n};\n\n// \nNavigatorWidget.prototype.handlePerformImportEvent = function(event) {\n\tvar self = this,\n\t\timportTiddler = this.wiki.getTiddler(event.param),\n\t\timportData = this.wiki.getTiddlerDataCached(event.param,{tiddlers: {}}),\n\t\timportReport = [];\n\t// Add the tiddlers to the store\n\timportReport.push($tw.language.getString(\"Import/Imported/Hint\") + \"\\n\");\n\t$tw.utils.each(importData.tiddlers,function(tiddlerFields) {\n\t\tvar title = tiddlerFields.title;\n\t\tif(title && importTiddler && importTiddler.fields[\"selection-\" + title] !== \"unchecked\") {\n\t\t\tself.wiki.addTiddler(new $tw.Tiddler(tiddlerFields));\n\t\t\timportReport.push(\"# [[\" + tiddlerFields.title + \"]]\");\n\t\t}\n\t});\n\t// Replace the $:/Import tiddler with an import report\n\tthis.wiki.addTiddler(new $tw.Tiddler({\n\t\ttitle: event.param,\n\t\ttext: importReport.join(\"\\n\"),\n\t\t\"status\": \"complete\"\n\t}));\n\t// Navigate to the $:/Import tiddler\n\tthis.addToHistory([event.param]);\n\t// Trigger an autosave\n\t$tw.rootWidget.dispatchEvent({type: \"tm-auto-save-wiki\"});\n};\n\nNavigatorWidget.prototype.handleFoldTiddlerEvent = function(event) {\n\tvar self = this,\n\t\tparamObject = event.paramObject || {};\n\tif(paramObject.foldedState) {\n\t\tvar foldedState = this.wiki.getTiddlerText(paramObject.foldedState,\"show\") === \"show\" ? 
\"hide\" : \"show\";\n\t\tthis.wiki.setText(paramObject.foldedState,\"text\",null,foldedState);\n\t}\n};\n\nNavigatorWidget.prototype.handleFoldOtherTiddlersEvent = function(event) {\n\tvar self = this,\n\t\tparamObject = event.paramObject || {},\n\t\tprefix = paramObject.foldedStatePrefix;\n\t$tw.utils.each(this.getStoryList(),function(title) {\n\t\tself.wiki.setText(prefix + title,\"text\",null,event.param === title ? \"show\" : \"hide\");\n\t});\n};\n\nNavigatorWidget.prototype.handleFoldAllTiddlersEvent = function(event) {\n\tvar self = this,\n\t\tparamObject = event.paramObject || {},\n\t\tprefix = paramObject.foldedStatePrefix;\n\t$tw.utils.each(this.getStoryList(),function(title) {\n\t\tself.wiki.setText(prefix + title,\"text\",null,\"hide\");\n\t});\n};\n\nNavigatorWidget.prototype.handleUnfoldAllTiddlersEvent = function(event) {\n\tvar self = this,\n\t\tparamObject = event.paramObject || {},\n\t\tprefix = paramObject.foldedStatePrefix;\n\t$tw.utils.each(this.getStoryList(),function(title) {\n\t\tself.wiki.setText(prefix + title,\"text\",null,\"show\");\n\t});\n};\n\nNavigatorWidget.prototype.handleRenameTiddlerEvent = function(event) {\n\tvar self = this,\n\t\tparamObject = event.paramObject || {},\n\t\tfrom = paramObject.from || event.tiddlerTitle,\n\t\tto = paramObject.to;\n\t$tw.wiki.renameTiddler(from,to);\n};\n\nexports.navigator = NavigatorWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/navigator.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/password.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/password.js\ntype: application/javascript\nmodule-type: widget\n\nPassword widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar PasswordWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nPasswordWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nPasswordWidget.prototype.render = function(parent,nextSibling) {\n\t// Save the parent dom node\n\tthis.parentDomNode = parent;\n\t// Compute our attributes\n\tthis.computeAttributes();\n\t// Execute our logic\n\tthis.execute();\n\t// Get the current password\n\tvar password = $tw.browser ? $tw.utils.getPassword(this.passwordName) || \"\" : \"\";\n\t// Create our element\n\tvar domNode = this.document.createElement(\"input\");\n\tdomNode.setAttribute(\"type\",\"password\");\n\tdomNode.setAttribute(\"value\",password);\n\t// Add a click event handler\n\t$tw.utils.addEventListeners(domNode,[\n\t\t{name: \"change\", handlerObject: this, handlerMethod: \"handleChangeEvent\"}\n\t]);\n\t// Insert the label into the DOM and render any children\n\tparent.insertBefore(domNode,nextSibling);\n\tthis.renderChildren(domNode,null);\n\tthis.domNodes.push(domNode);\n};\n\nPasswordWidget.prototype.handleChangeEvent = function(event) {\n\tvar password = this.domNodes[0].value;\n\treturn $tw.utils.savePassword(this.passwordName,password);\n};\n\n/*\nCompute the internal state of the widget\n*/\nPasswordWidget.prototype.execute = function() {\n\t// Get the parameters from the attributes\n\tthis.passwordName = this.getAttribute(\"name\",\"\");\n\t// Make the child widgets\n\tthis.makeChildWidgets();\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nPasswordWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes.name) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t} else {\n\t\treturn this.refreshChildren(changedTiddlers);\n\t}\n};\n\nexports.password = PasswordWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/password.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/radio.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/radio.js\ntype: application/javascript\nmodule-type: widget\n\nRadio widget\n\nWill set a field to the selected value:\n\n```\n\t<$radio field=\"myfield\" value=\"check 1\">one</$radio>\n\t<$radio field=\"myfield\" value=\"check 2\">two</$radio>\n\t<$radio field=\"myfield\" value=\"check 3\">three</$radio>\n```\n\n|Parameter |Description |h\n|tiddler |Name of the tiddler in which the field should be set. Defaults to current tiddler |\n|field |The name of the field to be set |\n|value |The value to set |\n|class |Optional class name(s) |\n\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar RadioWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nRadioWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nRadioWidget.prototype.render = function(parent,nextSibling) {\n\t// Save the parent dom node\n\tthis.parentDomNode = parent;\n\t// Compute our attributes\n\tthis.computeAttributes();\n\t// Execute our logic\n\tthis.execute();\n\t// Create our elements\n\tthis.labelDomNode = this.document.createElement(\"label\");\n\tthis.labelDomNode.setAttribute(\"class\",this.radioClass);\n\tthis.inputDomNode = this.document.createElement(\"input\");\n\tthis.inputDomNode.setAttribute(\"type\",\"radio\");\n\tif(this.getValue() == this.radioValue) {\n\t\tthis.inputDomNode.setAttribute(\"checked\",\"true\");\n\t}\n\tthis.labelDomNode.appendChild(this.inputDomNode);\n\tthis.spanDomNode = this.document.createElement(\"span\");\n\tthis.labelDomNode.appendChild(this.spanDomNode);\n\t// Add a click event handler\n\t$tw.utils.addEventListeners(this.inputDomNode,[\n\t\t{name: \"change\", handlerObject: this, handlerMethod: \"handleChangeEvent\"}\n\t]);\n\t// Insert the label into the DOM and render any children\n\tparent.insertBefore(this.labelDomNode,nextSibling);\n\tthis.renderChildren(this.spanDomNode,null);\n\tthis.domNodes.push(this.labelDomNode);\n};\n\nRadioWidget.prototype.getValue = function() {\n\tvar tiddler = this.wiki.getTiddler(this.radioTitle);\n\treturn tiddler && tiddler.getFieldString(this.radioField);\n};\n\nRadioWidget.prototype.setValue = function() {\n\tif(this.radioField) {\n\t\tvar tiddler = this.wiki.getTiddler(this.radioTitle),\n\t\t\taddition = {};\n\t\taddition[this.radioField] = this.radioValue;\n\t\tthis.wiki.addTiddler(new $tw.Tiddler(this.wiki.getCreationFields(),{title: this.radioTitle},tiddler,addition,this.wiki.getModificationFields()));\n\t}\n};\n\nRadioWidget.prototype.handleChangeEvent = function(event) {\n\tif(this.inputDomNode.checked) {\n\t\tthis.setValue();\n\t}\n};\n\n/*\nCompute the internal state of the widget\n*/\nRadioWidget.prototype.execute = function() {\n\t// Get the parameters from the attributes\n\tthis.radioTitle = this.getAttribute(\"tiddler\",this.getVariable(\"currentTiddler\"));\n\tthis.radioField = this.getAttribute(\"field\",\"text\");\n\tthis.radioValue = this.getAttribute(\"value\");\n\tthis.radioClass = this.getAttribute(\"class\",\"\");\n\tif(this.radioClass !== \"\") {\n\t\tthis.radioClass += \" \";\n\t}\n\tthis.radioClass += \"tc-radio\";\n\t// Make the child widgets\n\tthis.makeChildWidgets();\n};\n\n/*\nSelectively refreshes the widget if needed. 
Returns true if the widget or any of its children needed re-rendering\n*/\nRadioWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes.tiddler || changedAttributes.field || changedAttributes.value || changedAttributes[\"class\"]) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t} else {\n\t\tvar refreshed = false;\n\t\tif(changedTiddlers[this.radioTitle]) {\n\t\t\tthis.inputDomNode.checked = this.getValue() === this.radioValue;\n\t\t\trefreshed = true;\n\t\t}\n\t\treturn this.refreshChildren(changedTiddlers) || refreshed;\n\t}\n};\n\nexports.radio = RadioWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/radio.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/raw.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/raw.js\ntype: application/javascript\nmodule-type: widget\n\nRaw widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar RawWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nRawWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nRawWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.execute();\n\tvar div = this.document.createElement(\"div\");\n\tdiv.innerHTML=this.parseTreeNode.html;\n\tparent.insertBefore(div,nextSibling);\n\tthis.domNodes.push(div);\t\n};\n\n/*\nCompute the internal state of the widget\n*/\nRawWidget.prototype.execute = function() {\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nRawWidget.prototype.refresh = function(changedTiddlers) {\n\treturn false;\n};\n\nexports.raw = RawWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/raw.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/reveal.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/reveal.js\ntype: application/javascript\nmodule-type: widget\n\nReveal widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar RevealWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nRevealWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nRevealWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\tvar tag = this.parseTreeNode.isBlock ? \"div\" : \"span\";\n\tif(this.revealTag && $tw.config.htmlUnsafeElements.indexOf(this.revealTag) === -1) {\n\t\ttag = this.revealTag;\n\t}\n\tvar domNode = this.document.createElement(tag);\n\tvar classes = this[\"class\"].split(\" \") || [];\n\tclasses.push(\"tc-reveal\");\n\tdomNode.className = classes.join(\" \");\n\tif(this.style) {\n\t\tdomNode.setAttribute(\"style\",this.style);\n\t}\n\tparent.insertBefore(domNode,nextSibling);\n\tthis.renderChildren(domNode,null);\n\tif(!domNode.isTiddlyWikiFakeDom && this.type === \"popup\" && this.isOpen) {\n\t\tthis.positionPopup(domNode);\n\t\t$tw.utils.addClass(domNode,\"tc-popup\"); // Make sure that clicks don't dismiss popups within the revealed content\n\t}\n\tif(!this.isOpen) {\n\t\tdomNode.setAttribute(\"hidden\",\"true\");\n\t}\n\tthis.domNodes.push(domNode);\n};\n\nRevealWidget.prototype.positionPopup = function(domNode) {\n\tdomNode.style.position = \"absolute\";\n\tdomNode.style.zIndex = \"1000\";\n\tswitch(this.position) {\n\t\tcase \"left\":\n\t\t\tdomNode.style.left = (this.popup.left - domNode.offsetWidth) + \"px\";\n\t\t\tdomNode.style.top = this.popup.top + \"px\";\n\t\t\tbreak;\n\t\tcase \"above\":\n\t\t\tdomNode.style.left = this.popup.left + \"px\";\n\t\t\tdomNode.style.top = (this.popup.top - domNode.offsetHeight) + \"px\";\n\t\t\tbreak;\n\t\tcase \"aboveright\":\n\t\t\tdomNode.style.left = (this.popup.left + this.popup.width) + \"px\";\n\t\t\tdomNode.style.top = (this.popup.top + this.popup.height - domNode.offsetHeight) + \"px\";\n\t\t\tbreak;\n\t\tcase \"right\":\n\t\t\tdomNode.style.left = (this.popup.left + this.popup.width) + \"px\";\n\t\t\tdomNode.style.top = this.popup.top + \"px\";\n\t\t\tbreak;\n\t\tcase \"belowleft\":\n\t\t\tdomNode.style.left = (this.popup.left + this.popup.width - domNode.offsetWidth) + \"px\";\n\t\t\tdomNode.style.top = (this.popup.top + this.popup.height) + \"px\";\n\t\t\tbreak;\n\t\tdefault: // Below\n\t\t\tdomNode.style.left = this.popup.left + \"px\";\n\t\t\tdomNode.style.top = (this.popup.top + this.popup.height) + \"px\";\n\t\t\tbreak;\n\t}\n};\n\n/*\nCompute the internal state of the widget\n*/\nRevealWidget.prototype.execute = function() {\n\t// Get our parameters\n\tthis.state = this.getAttribute(\"state\");\n\tthis.revealTag = this.getAttribute(\"tag\");\n\tthis.type = this.getAttribute(\"type\");\n\tthis.text = this.getAttribute(\"text\");\n\tthis.position = this.getAttribute(\"position\");\n\tthis[\"class\"] = this.getAttribute(\"class\",\"\");\n\tthis.style = this.getAttribute(\"style\",\"\");\n\tthis[\"default\"] = this.getAttribute(\"default\",\"\");\n\tthis.animate = this.getAttribute(\"animate\",\"no\");\n\tthis.retain = this.getAttribute(\"retain\",\"no\");\n\tthis.openAnimation = this.animate === \"no\" ? 
undefined : \"open\";\n\tthis.closeAnimation = this.animate === \"no\" ? undefined : \"close\";\n\t// Compute the title of the state tiddler and read it\n\tthis.stateTitle = this.state;\n\tthis.readState();\n\t// Construct the child widgets\n\tvar childNodes = this.isOpen ? this.parseTreeNode.children : [];\n\tthis.hasChildNodes = this.isOpen;\n\tthis.makeChildWidgets(childNodes);\n};\n\n/*\nRead the state tiddler\n*/\nRevealWidget.prototype.readState = function() {\n\t// Read the information from the state tiddler\n\tvar state = this.stateTitle ? this.wiki.getTextReference(this.stateTitle,this[\"default\"],this.getVariable(\"currentTiddler\")) : this[\"default\"];\n\tswitch(this.type) {\n\t\tcase \"popup\":\n\t\t\tthis.readPopupState(state);\n\t\t\tbreak;\n\t\tcase \"match\":\n\t\t\tthis.readMatchState(state);\n\t\t\tbreak;\n\t\tcase \"nomatch\":\n\t\t\tthis.readMatchState(state);\n\t\t\tthis.isOpen = !this.isOpen;\n\t\t\tbreak;\n\t}\n};\n\nRevealWidget.prototype.readMatchState = function(state) {\n\tthis.isOpen = state === this.text;\n};\n\nRevealWidget.prototype.readPopupState = function(state) {\n\tvar popupLocationRegExp = /^\\((-?[0-9\\.E]+),(-?[0-9\\.E]+),(-?[0-9\\.E]+),(-?[0-9\\.E]+)\\)$/,\n\t\tmatch = popupLocationRegExp.exec(state);\n\t// Check if the state matches the location regexp\n\tif(match) {\n\t\t// If so, we're open\n\t\tthis.isOpen = true;\n\t\t// Get the location\n\t\tthis.popup = {\n\t\t\tleft: parseFloat(match[1]),\n\t\t\ttop: parseFloat(match[2]),\n\t\t\twidth: parseFloat(match[3]),\n\t\t\theight: parseFloat(match[4])\n\t\t};\n\t} else {\n\t\t// If not, we're closed\n\t\tthis.isOpen = false;\n\t}\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nRevealWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes.state || changedAttributes.type || changedAttributes.text || changedAttributes.position || changedAttributes[\"default\"] || changedAttributes.animate) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t} else {\n\t\tvar refreshed = false,\n\t\t\tcurrentlyOpen = this.isOpen;\n\t\tthis.readState();\n\t\tif(this.isOpen !== currentlyOpen) {\n\t\t\tif(this.retain === \"yes\") {\n\t\t\t\tthis.updateState();\n\t\t\t} else {\n\t\t\t\tthis.refreshSelf();\n\t\t\t\trefreshed = true;\n\t\t\t}\n\t\t}\n\t\treturn this.refreshChildren(changedTiddlers) || refreshed;\n\t}\n};\n\n/*\nCalled by refresh() to dynamically show or hide the content\n*/\nRevealWidget.prototype.updateState = function() {\n\t// Read the current state\n\tthis.readState();\n\t// Construct the child nodes if needed\n\tvar domNode = this.domNodes[0];\n\tif(this.isOpen && !this.hasChildNodes) {\n\t\tthis.hasChildNodes = true;\n\t\tthis.makeChildWidgets(this.parseTreeNode.children);\n\t\tthis.renderChildren(domNode,null);\n\t}\n\t// Animate our DOM node\n\tif(!domNode.isTiddlyWikiFakeDom && this.type === \"popup\" && this.isOpen) {\n\t\tthis.positionPopup(domNode);\n\t\t$tw.utils.addClass(domNode,\"tc-popup\"); // Make sure that clicks don't dismiss popups within the revealed content\n\n\t}\n\tif(this.isOpen) {\n\t\tdomNode.removeAttribute(\"hidden\");\n $tw.anim.perform(this.openAnimation,domNode);\n\t} else {\n\t\t$tw.anim.perform(this.closeAnimation,domNode,{callback: function() {\n\t\t\tdomNode.setAttribute(\"hidden\",\"true\");\n }});\n\t}\n};\n\nexports.reveal = RevealWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/reveal.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/scrollable.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/scrollable.js\ntype: application/javascript\nmodule-type: widget\n\nScrollable widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar ScrollableWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n\tthis.scaleFactor = 1;\n\tthis.addEventListeners([\n\t\t{type: \"tm-scroll\", handler: \"handleScrollEvent\"}\n\t]);\n\tif($tw.browser) {\n\t\tthis.requestAnimationFrame = window.requestAnimationFrame ||\n\t\t\twindow.webkitRequestAnimationFrame ||\n\t\t\twindow.mozRequestAnimationFrame ||\n\t\t\tfunction(callback) {\n\t\t\t\treturn window.setTimeout(callback, 1000/60);\n\t\t\t};\n\t\tthis.cancelAnimationFrame = window.cancelAnimationFrame ||\n\t\t\twindow.webkitCancelAnimationFrame ||\n\t\t\twindow.webkitCancelRequestAnimationFrame ||\n\t\t\twindow.mozCancelAnimationFrame ||\n\t\t\twindow.mozCancelRequestAnimationFrame ||\n\t\t\tfunction(id) {\n\t\t\t\twindow.clearTimeout(id);\n\t\t\t};\n\t}\n};\n\n/*\nInherit from the base widget class\n*/\nScrollableWidget.prototype = new Widget();\n\nScrollableWidget.prototype.cancelScroll = function() {\n\tif(this.idRequestFrame) {\n\t\tthis.cancelAnimationFrame.call(window,this.idRequestFrame);\n\t\tthis.idRequestFrame = null;\n\t}\n};\n\n/*\nHandle a scroll event\n*/\nScrollableWidget.prototype.handleScrollEvent = function(event) {\n\t// Pass the scroll event through if our offsetsize is larger than our scrollsize\n\tif(this.outerDomNode.scrollWidth <= this.outerDomNode.offsetWidth && this.outerDomNode.scrollHeight <= this.outerDomNode.offsetHeight && this.fallthrough === \"yes\") {\n\t\treturn true;\n\t}\n\tthis.scrollIntoView(event.target);\n\treturn false; // Handled event\n};\n\n/*\nScroll an element into view\n*/\nScrollableWidget.prototype.scrollIntoView = function(element) {\n\tvar duration = $tw.utils.getAnimationDuration();\n\tthis.cancelScroll();\n\tthis.startTime = Date.now();\n\tvar scrollPosition = {\n\t\tx: this.outerDomNode.scrollLeft,\n\t\ty: this.outerDomNode.scrollTop\n\t};\n\t// Get the client bounds of the element and adjust by the scroll position\n\tvar scrollableBounds = this.outerDomNode.getBoundingClientRect(),\n\t\tclientTargetBounds = element.getBoundingClientRect(),\n\t\tbounds = {\n\t\t\tleft: clientTargetBounds.left + scrollPosition.x - scrollableBounds.left,\n\t\t\ttop: clientTargetBounds.top + scrollPosition.y - scrollableBounds.top,\n\t\t\twidth: clientTargetBounds.width,\n\t\t\theight: clientTargetBounds.height\n\t\t};\n\t// We'll consider the horizontal and vertical scroll directions separately via this function\n\tvar getEndPos = function(targetPos,targetSize,currentPos,currentSize) {\n\t\t\t// If the target is already visible then stay where we are\n\t\t\tif(targetPos >= currentPos && (targetPos + targetSize) <= (currentPos + currentSize)) {\n\t\t\t\treturn currentPos;\n\t\t\t// If the target is above/left of the current view, then scroll to its top/left\n\t\t\t} else if(targetPos <= currentPos) {\n\t\t\t\treturn targetPos;\n\t\t\t// If the target is smaller than the window and the scroll position is too far up, then scroll till the target is at the bottom of the window\n\t\t\t} else if(targetSize < currentSize && currentPos < (targetPos + targetSize - currentSize)) {\n\t\t\t\treturn targetPos + targetSize - currentSize;\n\t\t\t// If the target is big, then just scroll to the top\n\t\t\t} else if(currentPos < 
targetPos) {\n\t\t\t\treturn targetPos;\n\t\t\t// Otherwise, stay where we are\n\t\t\t} else {\n\t\t\t\treturn currentPos;\n\t\t\t}\n\t\t},\n\t\tendX = getEndPos(bounds.left,bounds.width,scrollPosition.x,this.outerDomNode.offsetWidth),\n\t\tendY = getEndPos(bounds.top,bounds.height,scrollPosition.y,this.outerDomNode.offsetHeight);\n\t// Only scroll if necessary\n\tif(endX !== scrollPosition.x || endY !== scrollPosition.y) {\n\t\tvar self = this,\n\t\t\tdrawFrame;\n\t\tdrawFrame = function () {\n\t\t\tvar t;\n\t\t\tif(duration <= 0) {\n\t\t\t\tt = 1;\n\t\t\t} else {\n\t\t\t\tt = ((Date.now()) - self.startTime) / duration;\t\n\t\t\t}\n\t\t\tif(t >= 1) {\n\t\t\t\tself.cancelScroll();\n\t\t\t\tt = 1;\n\t\t\t}\n\t\t\tt = $tw.utils.slowInSlowOut(t);\n\t\t\tself.outerDomNode.scrollLeft = scrollPosition.x + (endX - scrollPosition.x) * t;\n\t\t\tself.outerDomNode.scrollTop = scrollPosition.y + (endY - scrollPosition.y) * t;\n\t\t\tif(t < 1) {\n\t\t\t\tself.idRequestFrame = self.requestAnimationFrame.call(window,drawFrame);\n\t\t\t}\n\t\t};\n\t\tdrawFrame();\n\t}\n};\n\n/*\nRender this widget into the DOM\n*/\nScrollableWidget.prototype.render = function(parent,nextSibling) {\n\tvar self = this;\n\t// Remember parent\n\tthis.parentDomNode = parent;\n\t// Compute attributes and execute state\n\tthis.computeAttributes();\n\tthis.execute();\n\t// Create elements\n\tthis.outerDomNode = this.document.createElement(\"div\");\n\t$tw.utils.setStyle(this.outerDomNode,[\n\t\t{overflowY: \"auto\"},\n\t\t{overflowX: \"auto\"},\n\t\t{webkitOverflowScrolling: \"touch\"}\n\t]);\n\tthis.innerDomNode = this.document.createElement(\"div\");\n\tthis.outerDomNode.appendChild(this.innerDomNode);\n\t// Assign classes\n\tthis.outerDomNode.className = this[\"class\"] || \"\";\n\t// Insert element\n\tparent.insertBefore(this.outerDomNode,nextSibling);\n\tthis.renderChildren(this.innerDomNode,null);\n\tthis.domNodes.push(this.outerDomNode);\n};\n\n/*\nCompute the internal state of the widget\n*/\nScrollableWidget.prototype.execute = function() {\n\t// Get attributes\n\tthis.fallthrough = this.getAttribute(\"fallthrough\",\"yes\");\n\tthis[\"class\"] = this.getAttribute(\"class\");\n\t// Make child widgets\n\tthis.makeChildWidgets();\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nScrollableWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes[\"class\"]) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t}\n\treturn this.refreshChildren(changedTiddlers);\n};\n\nexports.scrollable = ScrollableWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/scrollable.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/select.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/select.js\ntype: application/javascript\nmodule-type: widget\n\nSelect widget:\n\n```\n<$select tiddler=\"MyTiddler\" field=\"text\">\n<$list filter=\"[tag[chapter]]\">\n<option value=<<currentTiddler>>>\n<$view field=\"description\"/>\n</option>\n</$list>\n</$select>\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar SelectWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nSelectWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nSelectWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\tthis.renderChildren(parent,nextSibling);\n\tthis.setSelectValue();\n\t$tw.utils.addEventListeners(this.getSelectDomNode(),[\n\t\t{name: \"change\", handlerObject: this, handlerMethod: \"handleChangeEvent\"}\n\t]);\n};\n\n/*\nHandle a change event\n*/\nSelectWidget.prototype.handleChangeEvent = function(event) {\n\t// Get the new value and assign it to the tiddler\n\tif(this.selectMultiple == false) {\n\t\tvar value = this.getSelectDomNode().value;\n\t} else {\n\t\tvar value = this.getSelectValues()\n\t\t\t\tvalue = $tw.utils.stringifyList(value);\n\t}\n\tthis.wiki.setText(this.selectTitle,this.selectField,this.selectIndex,value);\n\t// Trigger actions\n\tif(this.selectActions) {\n\t\tthis.invokeActionString(this.selectActions,this,event);\n\t}\n};\n\n/*\nIf necessary, set the value of the select element to the current value\n*/\nSelectWidget.prototype.setSelectValue = function() {\n\tvar value = this.selectDefault;\n\t// Get the value\n\tif(this.selectIndex) {\n\t\tvalue = this.wiki.extractTiddlerDataItem(this.selectTitle,this.selectIndex);\n\t} else {\n\t\tvar tiddler = this.wiki.getTiddler(this.selectTitle);\n\t\tif(tiddler) {\n\t\t\tif(this.selectField === \"text\") {\n\t\t\t\t// Calling getTiddlerText() triggers lazy loading of skinny tiddlers\n\t\t\t\tvalue = this.wiki.getTiddlerText(this.selectTitle);\n\t\t\t} else {\n\t\t\t\tif($tw.utils.hop(tiddler.fields,this.selectField)) {\n\t\t\t\t\tvalue = tiddler.getFieldString(this.selectField);\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tif(this.selectField === \"title\") {\n\t\t\t\tvalue = this.selectTitle;\n\t\t\t}\n\t\t}\n\t}\n\t// Assign it to the select element if it's different than the current value\n\tif (this.selectMultiple) {\n\t\tvalue = value === undefined ? \"\" : value;\n\t\tvar select = this.getSelectDomNode();\n\t\tvar values = Array.isArray(value) ? 
value : $tw.utils.parseStringArray(value);\n\t\tfor(var i=0; i < select.children.length; i++){\n\t\t\tif(values.indexOf(select.children[i].value) != -1) {\n\t\t\t\tselect.children[i].selected = true;\n\t\t\t}\n\t\t}\n\t\t\n\t} else {\n\t\tvar domNode = this.getSelectDomNode();\n\t\tif(domNode.value !== value) {\n\t\t\tdomNode.value = value;\n\t\t}\n\t}\n};\n\n/*\nGet the DOM node of the select element\n*/\nSelectWidget.prototype.getSelectDomNode = function() {\n\treturn this.children[0].domNodes[0];\n};\n\n// Return an array of the selected opion values\n// select is an HTML select element\nSelectWidget.prototype.getSelectValues = function() {\n\tvar select, result, options, opt;\n\tselect = this.getSelectDomNode();\n\tresult = [];\n\toptions = select && select.options;\n\tfor (var i=0; i<options.length; i++) {\n\t\topt = options[i];\n\t\tif (opt.selected) {\n\t\t\tresult.push(opt.value || opt.text);\n\t\t}\n\t}\n\treturn result;\n}\n\n/*\nCompute the internal state of the widget\n*/\nSelectWidget.prototype.execute = function() {\n\t// Get our parameters\n\tthis.selectActions = this.getAttribute(\"actions\");\n\tthis.selectTitle = this.getAttribute(\"tiddler\",this.getVariable(\"currentTiddler\"));\n\tthis.selectField = this.getAttribute(\"field\",\"text\");\n\tthis.selectIndex = this.getAttribute(\"index\");\n\tthis.selectClass = this.getAttribute(\"class\");\n\tthis.selectDefault = this.getAttribute(\"default\");\n\tthis.selectMultiple = this.getAttribute(\"multiple\", false);\n\tthis.selectSize = this.getAttribute(\"size\");\n\t// Make the child widgets\n\tvar selectNode = {\n\t\ttype: \"element\",\n\t\ttag: \"select\",\n\t\tchildren: this.parseTreeNode.children\n\t};\n\tif(this.selectClass) {\n\t\t$tw.utils.addAttributeToParseTreeNode(selectNode,\"class\",this.selectClass);\n\t}\n\tif(this.selectMultiple) {\n\t\t$tw.utils.addAttributeToParseTreeNode(selectNode,\"multiple\",\"multiple\");\n\t}\n\tif(this.selectSize) {\n\t\t$tw.utils.addAttributeToParseTreeNode(selectNode,\"size\",this.selectSize);\n\t}\n\tthis.makeChildWidgets([selectNode]);\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nSelectWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\t// If we're using a different tiddler/field/index then completely refresh ourselves\n\tif(changedAttributes.selectTitle || changedAttributes.selectField || changedAttributes.selectIndex) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t// If the target tiddler value has changed, just update setting and refresh the children\n\t} else {\n\t\tvar childrenRefreshed = this.refreshChildren(changedTiddlers);\n\t\tif(changedTiddlers[this.selectTitle] || childrenRefreshed) {\n\t\t\tthis.setSelectValue();\n\t\t} \n\t\treturn childrenRefreshed;\n\t}\n};\n\nexports.select = SelectWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/select.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/set.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/set.js\ntype: application/javascript\nmodule-type: widget\n\nSet variable widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar SetWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nSetWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nSetWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\tthis.renderChildren(parent,nextSibling);\n};\n\n/*\nCompute the internal state of the widget\n*/\nSetWidget.prototype.execute = function() {\n\t// Get our parameters\n\tthis.setName = this.getAttribute(\"name\",\"currentTiddler\");\n\tthis.setFilter = this.getAttribute(\"filter\");\n\tthis.setValue = this.getAttribute(\"value\");\n\tthis.setEmptyValue = this.getAttribute(\"emptyValue\");\n\t// Set context variable\n\tthis.setVariable(this.setName,this.getValue(),this.parseTreeNode.params);\n\t// Construct the child widgets\n\tthis.makeChildWidgets();\n};\n\n/*\nGet the value to be assigned\n*/\nSetWidget.prototype.getValue = function() {\n\tvar value = this.setValue;\n\tif(this.setFilter) {\n\t\tvar results = this.wiki.filterTiddlers(this.setFilter,this);\n\t\tif(!this.setValue) {\n\t\t\tvalue = $tw.utils.stringifyList(results);\n\t\t}\n\t\tif(results.length === 0 && this.setEmptyValue !== undefined) {\n\t\t\tvalue = this.setEmptyValue;\n\t\t}\n\t} else if(!value && this.setEmptyValue) {\n\t\tvalue = this.setEmptyValue;\n\t}\n\treturn value;\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nSetWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes.name || changedAttributes.filter || changedAttributes.value || changedAttributes.emptyValue ||\n\t (this.setFilter && this.getValue() != this.variables[this.setName].value)) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t} else {\n\t\treturn this.refreshChildren(changedTiddlers);\n\t}\n};\n\nexports.setvariable = SetWidget;\nexports.set = SetWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/set.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/text.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/text.js\ntype: application/javascript\nmodule-type: widget\n\nText node widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar TextNodeWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nTextNodeWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nTextNodeWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\tvar text = this.getAttribute(\"text\",this.parseTreeNode.text || \"\");\n\ttext = text.replace(/\\r/mg,\"\");\n\tvar textNode = this.document.createTextNode(text);\n\tparent.insertBefore(textNode,nextSibling);\n\tthis.domNodes.push(textNode);\n};\n\n/*\nCompute the internal state of the widget\n*/\nTextNodeWidget.prototype.execute = function() {\n\t// Nothing to do for a text node\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nTextNodeWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes.text) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t} else {\n\t\treturn false;\t\n\t}\n};\n\nexports.text = TextNodeWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/text.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/tiddler.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/tiddler.js\ntype: application/javascript\nmodule-type: widget\n\nTiddler widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar TiddlerWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nTiddlerWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nTiddlerWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\tthis.renderChildren(parent,nextSibling);\n};\n\n/*\nCompute the internal state of the widget\n*/\nTiddlerWidget.prototype.execute = function() {\n\tthis.tiddlerState = this.computeTiddlerState();\n\tthis.setVariable(\"currentTiddler\",this.tiddlerState.currentTiddler);\n\tthis.setVariable(\"missingTiddlerClass\",this.tiddlerState.missingTiddlerClass);\n\tthis.setVariable(\"shadowTiddlerClass\",this.tiddlerState.shadowTiddlerClass);\n\tthis.setVariable(\"systemTiddlerClass\",this.tiddlerState.systemTiddlerClass);\n\tthis.setVariable(\"tiddlerTagClasses\",this.tiddlerState.tiddlerTagClasses);\n\t// Construct the child widgets\n\tthis.makeChildWidgets();\n};\n\n/*\nCompute the tiddler state flags\n*/\nTiddlerWidget.prototype.computeTiddlerState = function() {\n\t// Get our parameters\n\tthis.tiddlerTitle = this.getAttribute(\"tiddler\",this.getVariable(\"currentTiddler\"));\n\t// Compute the state\n\tvar state = {\n\t\tcurrentTiddler: this.tiddlerTitle || \"\",\n\t\tmissingTiddlerClass: (this.wiki.tiddlerExists(this.tiddlerTitle) || this.wiki.isShadowTiddler(this.tiddlerTitle)) ? \"tc-tiddler-exists\" : \"tc-tiddler-missing\",\n\t\tshadowTiddlerClass: this.wiki.isShadowTiddler(this.tiddlerTitle) ? \"tc-tiddler-shadow\" : \"\",\n\t\tsystemTiddlerClass: this.wiki.isSystemTiddler(this.tiddlerTitle) ? \"tc-tiddler-system\" : \"\",\n\t\ttiddlerTagClasses: this.getTagClasses()\n\t};\n\t// Compute a simple hash to make it easier to detect changes\n\tstate.hash = state.currentTiddler + state.missingTiddlerClass + state.shadowTiddlerClass + state.systemTiddlerClass + state.tiddlerTagClasses;\n\treturn state;\n};\n\n/*\nCreate a string of CSS classes derived from the tags of the current tiddler\n*/\nTiddlerWidget.prototype.getTagClasses = function() {\n\tvar tiddler = this.wiki.getTiddler(this.tiddlerTitle);\n\tif(tiddler) {\n\t\tvar tags = [];\n\t\t$tw.utils.each(tiddler.fields.tags,function(tag) {\n\t\t\ttags.push(\"tc-tagged-\" + encodeURIComponent(tag));\n\t\t});\n\t\treturn tags.join(\" \");\n\t} else {\n\t\treturn \"\";\n\t}\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nTiddlerWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes(),\n\t\tnewTiddlerState = this.computeTiddlerState();\n\tif(changedAttributes.tiddler || newTiddlerState.hash !== this.tiddlerState.hash) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t} else {\n\t\treturn this.refreshChildren(changedTiddlers);\t\t\n\t}\n};\n\nexports.tiddler = TiddlerWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/tiddler.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/transclude.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/transclude.js\ntype: application/javascript\nmodule-type: widget\n\nTransclude widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar TranscludeWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nTranscludeWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nTranscludeWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\tthis.renderChildren(parent,nextSibling);\n};\n\n/*\nCompute the internal state of the widget\n*/\nTranscludeWidget.prototype.execute = function() {\n\t// Get our parameters\n\tthis.transcludeTitle = this.getAttribute(\"tiddler\",this.getVariable(\"currentTiddler\"));\n\tthis.transcludeSubTiddler = this.getAttribute(\"subtiddler\");\n\tthis.transcludeField = this.getAttribute(\"field\");\n\tthis.transcludeIndex = this.getAttribute(\"index\");\n\tthis.transcludeMode = this.getAttribute(\"mode\");\n\t// Parse the text reference\n\tvar parseAsInline = !this.parseTreeNode.isBlock;\n\tif(this.transcludeMode === \"inline\") {\n\t\tparseAsInline = true;\n\t} else if(this.transcludeMode === \"block\") {\n\t\tparseAsInline = false;\n\t}\n\tvar parser = this.wiki.parseTextReference(\n\t\t\t\t\t\tthis.transcludeTitle,\n\t\t\t\t\t\tthis.transcludeField,\n\t\t\t\t\t\tthis.transcludeIndex,\n\t\t\t\t\t\t{\n\t\t\t\t\t\t\tparseAsInline: parseAsInline,\n\t\t\t\t\t\t\tsubTiddler: this.transcludeSubTiddler\n\t\t\t\t\t\t}),\n\t\tparseTreeNodes = parser ? parser.tree : this.parseTreeNode.children;\n\t// Set context variables for recursion detection\n\tvar recursionMarker = this.makeRecursionMarker();\n\tthis.setVariable(\"transclusion\",recursionMarker);\n\t// Check for recursion\n\tif(parser) {\n\t\tif(this.parentWidget && this.parentWidget.hasVariable(\"transclusion\",recursionMarker)) {\n\t\t\tparseTreeNodes = [{type: \"element\", tag: \"span\", attributes: {\n\t\t\t\t\"class\": {type: \"string\", value: \"tc-error\"}\n\t\t\t}, children: [\n\t\t\t\t{type: \"text\", text: $tw.language.getString(\"Error/RecursiveTransclusion\")}\n\t\t\t]}];\n\t\t}\n\t}\n\t// Construct the child widgets\n\tthis.makeChildWidgets(parseTreeNodes);\n};\n\n/*\nCompose a string comprising the title, field and/or index to identify this transclusion for recursion detection\n*/\nTranscludeWidget.prototype.makeRecursionMarker = function() {\n\tvar output = [];\n\toutput.push(\"{\");\n\toutput.push(this.getVariable(\"currentTiddler\",{defaultValue: \"\"}));\n\toutput.push(\"|\");\n\toutput.push(this.transcludeTitle || \"\");\n\toutput.push(\"|\");\n\toutput.push(this.transcludeField || \"\");\n\toutput.push(\"|\");\n\toutput.push(this.transcludeIndex || \"\");\n\toutput.push(\"|\");\n\toutput.push(this.transcludeSubTiddler || \"\");\n\toutput.push(\"}\");\n\treturn output.join(\"\");\n};\n\n/*\nSelectively refreshes the widget if needed. 
Returns true if the widget or any of its children needed re-rendering\n*/\nTranscludeWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes.tiddler || changedAttributes.field || changedAttributes.index || changedTiddlers[this.transcludeTitle]) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t} else {\n\t\treturn this.refreshChildren(changedTiddlers);\t\t\n\t}\n};\n\nexports.transclude = TranscludeWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/transclude.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/vars.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/vars.js\ntype: application/javascript\nmodule-type: widget\n\nThis widget allows multiple variables to be set in one go:\n\n```\n\\define helloworld() Hello world!\n<$vars greeting=\"Hi\" me={{!!title}} sentence=<<helloworld>>>\n <<greeting>>! I am <<me>> and I say: <<sentence>>\n</$vars>\n```\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar VarsWidget = function(parseTreeNode,options) {\n\t// Call the constructor\n\tWidget.call(this);\n\t// Initialise\t\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nVarsWidget.prototype = Object.create(Widget.prototype);\n\n/*\nRender this widget into the DOM\n*/\nVarsWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\tthis.renderChildren(parent,nextSibling);\n};\n\n/*\nCompute the internal state of the widget\n*/\nVarsWidget.prototype.execute = function() {\n\t// Parse variables\n\tvar self = this;\n\t$tw.utils.each(this.attributes,function(val,key) {\n\t\tif(key.charAt(0) !== \"$\") {\n\t\t\tself.setVariable(key,val);\n\t\t}\n\t});\n\t// Construct the child widgets\n\tthis.makeChildWidgets();\n};\n\n/*\nRefresh the widget by ensuring our attributes are up to date\n*/\nVarsWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(Object.keys(changedAttributes).length) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t}\n\treturn this.refreshChildren(changedTiddlers);\n};\n\nexports[\"vars\"] = VarsWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/vars.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/view.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/view.js\ntype: application/javascript\nmodule-type: widget\n\nView widget\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar ViewWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nViewWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nViewWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\tif(this.text) {\n\t\tvar textNode = this.document.createTextNode(this.text);\n\t\tparent.insertBefore(textNode,nextSibling);\n\t\tthis.domNodes.push(textNode);\n\t} else {\n\t\tthis.makeChildWidgets();\n\t\tthis.renderChildren(parent,nextSibling);\n\t}\n};\n\n/*\nCompute the internal state of the widget\n*/\nViewWidget.prototype.execute = function() {\n\t// Get parameters from our attributes\n\tthis.viewTitle = this.getAttribute(\"tiddler\",this.getVariable(\"currentTiddler\"));\n\tthis.viewSubtiddler = this.getAttribute(\"subtiddler\");\n\tthis.viewField = this.getAttribute(\"field\",\"text\");\n\tthis.viewIndex = this.getAttribute(\"index\");\n\tthis.viewFormat = this.getAttribute(\"format\",\"text\");\n\tthis.viewTemplate = this.getAttribute(\"template\",\"\");\n\tswitch(this.viewFormat) {\n\t\tcase \"htmlwikified\":\n\t\t\tthis.text = this.getValueAsHtmlWikified();\n\t\t\tbreak;\n\t\tcase \"plainwikified\":\n\t\t\tthis.text = this.getValueAsPlainWikified();\n\t\t\tbreak;\n\t\tcase \"htmlencodedplainwikified\":\n\t\t\tthis.text = this.getValueAsHtmlEncodedPlainWikified();\n\t\t\tbreak;\n\t\tcase \"htmlencoded\":\n\t\t\tthis.text = this.getValueAsHtmlEncoded();\n\t\t\tbreak;\n\t\tcase \"urlencoded\":\n\t\t\tthis.text = this.getValueAsUrlEncoded();\n\t\t\tbreak;\n\t\tcase \"doubleurlencoded\":\n\t\t\tthis.text = this.getValueAsDoubleUrlEncoded();\n\t\t\tbreak;\n\t\tcase \"date\":\n\t\t\tthis.text = this.getValueAsDate(this.viewTemplate);\n\t\t\tbreak;\n\t\tcase \"relativedate\":\n\t\t\tthis.text = this.getValueAsRelativeDate();\n\t\t\tbreak;\n\t\tcase \"stripcomments\":\n\t\t\tthis.text = this.getValueAsStrippedComments();\n\t\t\tbreak;\n\t\tcase \"jsencoded\":\n\t\t\tthis.text = this.getValueAsJsEncoded();\n\t\t\tbreak;\n\t\tdefault: // \"text\"\n\t\t\tthis.text = this.getValueAsText();\n\t\t\tbreak;\n\t}\n};\n\n/*\nThe various formatter functions are baked into this widget for the moment. Eventually they will be replaced by macro functions\n*/\n\n/*\nRetrieve the value of the widget. Options are:\nasString: Optionally return the value as a string\n*/\nViewWidget.prototype.getValue = function(options) {\n\toptions = options || {};\n\tvar value = options.asString ? 
\"\" : undefined;\n\tif(this.viewIndex) {\n\t\tvalue = this.wiki.extractTiddlerDataItem(this.viewTitle,this.viewIndex);\n\t} else {\n\t\tvar tiddler;\n\t\tif(this.viewSubtiddler) {\n\t\t\ttiddler = this.wiki.getSubTiddler(this.viewTitle,this.viewSubtiddler);\t\n\t\t} else {\n\t\t\ttiddler = this.wiki.getTiddler(this.viewTitle);\n\t\t}\n\t\tif(tiddler) {\n\t\t\tif(this.viewField === \"text\" && !this.viewSubtiddler) {\n\t\t\t\t// Calling getTiddlerText() triggers lazy loading of skinny tiddlers\n\t\t\t\tvalue = this.wiki.getTiddlerText(this.viewTitle);\n\t\t\t} else {\n\t\t\t\tif($tw.utils.hop(tiddler.fields,this.viewField)) {\n\t\t\t\t\tif(options.asString) {\n\t\t\t\t\t\tvalue = tiddler.getFieldString(this.viewField);\n\t\t\t\t\t} else {\n\t\t\t\t\t\tvalue = tiddler.fields[this.viewField];\t\t\t\t\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tif(this.viewField === \"title\") {\n\t\t\t\tvalue = this.viewTitle;\n\t\t\t}\n\t\t}\n\t}\n\treturn value;\n};\n\nViewWidget.prototype.getValueAsText = function() {\n\treturn this.getValue({asString: true});\n};\n\nViewWidget.prototype.getValueAsHtmlWikified = function() {\n\treturn this.wiki.renderText(\"text/html\",\"text/vnd.tiddlywiki\",this.getValueAsText(),{parentWidget: this});\n};\n\nViewWidget.prototype.getValueAsPlainWikified = function() {\n\treturn this.wiki.renderText(\"text/plain\",\"text/vnd.tiddlywiki\",this.getValueAsText(),{parentWidget: this});\n};\n\nViewWidget.prototype.getValueAsHtmlEncodedPlainWikified = function() {\n\treturn $tw.utils.htmlEncode(this.wiki.renderText(\"text/plain\",\"text/vnd.tiddlywiki\",this.getValueAsText(),{parentWidget: this}));\n};\n\nViewWidget.prototype.getValueAsHtmlEncoded = function() {\n\treturn $tw.utils.htmlEncode(this.getValueAsText());\n};\n\nViewWidget.prototype.getValueAsUrlEncoded = function() {\n\treturn encodeURIComponent(this.getValueAsText());\n};\n\nViewWidget.prototype.getValueAsDoubleUrlEncoded = function() {\n\treturn encodeURIComponent(encodeURIComponent(this.getValueAsText()));\n};\n\nViewWidget.prototype.getValueAsDate = function(format) {\n\tformat = format || \"YYYY MM DD 0hh:0mm\";\n\tvar value = $tw.utils.parseDate(this.getValue());\n\tif(value && $tw.utils.isDate(value) && value.toString() !== \"Invalid Date\") {\n\t\treturn $tw.utils.formatDateString(value,format);\n\t} else {\n\t\treturn \"\";\n\t}\n};\n\nViewWidget.prototype.getValueAsRelativeDate = function(format) {\n\tvar value = $tw.utils.parseDate(this.getValue());\n\tif(value && $tw.utils.isDate(value) && value.toString() !== \"Invalid Date\") {\n\t\treturn $tw.utils.getRelativeDate((new Date()) - (new Date(value))).description;\n\t} else {\n\t\treturn \"\";\n\t}\n};\n\nViewWidget.prototype.getValueAsStrippedComments = function() {\n\tvar lines = this.getValueAsText().split(\"\\n\"),\n\t\tout = [];\n\tfor(var line=0; line<lines.length; line++) {\n\t\tvar text = lines[line];\n\t\tif(!/^\\s*\\/\\/#/.test(text)) {\n\t\t\tout.push(text);\n\t\t}\n\t}\n\treturn out.join(\"\\n\");\n};\n\nViewWidget.prototype.getValueAsJsEncoded = function() {\n\treturn $tw.utils.stringify(this.getValueAsText());\n};\n\n/*\nSelectively refreshes the widget if needed. 
Returns true if the widget or any of its children needed re-rendering\n*/\nViewWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\tif(changedAttributes.tiddler || changedAttributes.field || changedAttributes.index || changedAttributes.template || changedAttributes.format || changedTiddlers[this.viewTitle]) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t} else {\n\t\treturn false;\t\n\t}\n};\n\nexports.view = ViewWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/view.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/widget.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/widget.js\ntype: application/javascript\nmodule-type: widget\n\nWidget base class\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nCreate a widget object for a parse tree node\n\tparseTreeNode: reference to the parse tree node to be rendered\n\toptions: see below\nOptions include:\n\twiki: mandatory reference to wiki associated with this render tree\n\tparentWidget: optional reference to a parent renderer node for the context chain\n\tdocument: optional document object to use instead of global document\n*/\nvar Widget = function(parseTreeNode,options) {\n\tif(arguments.length > 0) {\n\t\tthis.initialise(parseTreeNode,options);\n\t}\n};\n\n/*\nInitialise widget properties. These steps are pulled out of the constructor so that we can reuse them in subclasses\n*/\nWidget.prototype.initialise = function(parseTreeNode,options) {\n\toptions = options || {};\n\t// Save widget info\n\tthis.parseTreeNode = parseTreeNode;\n\tthis.wiki = options.wiki;\n\tthis.parentWidget = options.parentWidget;\n\tthis.variablesConstructor = function() {};\n\tthis.variablesConstructor.prototype = this.parentWidget ? this.parentWidget.variables : {};\n\tthis.variables = new this.variablesConstructor();\n\tthis.document = options.document;\n\tthis.attributes = {};\n\tthis.children = [];\n\tthis.domNodes = [];\n\tthis.eventListeners = {};\n\t// Hashmap of the widget classes\n\tif(!this.widgetClasses) {\n\t\tWidget.prototype.widgetClasses = $tw.modules.applyMethods(\"widget\");\n\t}\n};\n\n/*\nRender this widget into the DOM\n*/\nWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.execute();\n\tthis.renderChildren(parent,nextSibling);\n};\n\n/*\nCompute the internal state of the widget\n*/\nWidget.prototype.execute = function() {\n\tthis.makeChildWidgets();\n};\n\n/*\nSet the value of a context variable\nname: name of the variable\nvalue: value of the variable\nparams: array of {name:, default:} for each parameter\n*/\nWidget.prototype.setVariable = function(name,value,params) {\n\tthis.variables[name] = {value: value, params: params};\n};\n\n/*\nGet the prevailing value of a context variable\nname: name of variable\noptions: see below\nOptions include\nparams: array of {name:, value:} for each parameter\ndefaultValue: default value if the variable is not defined\n*/\nWidget.prototype.getVariable = function(name,options) {\n\toptions = options || {};\n\tvar actualParams = options.params || [],\n\t\tparentWidget = this.parentWidget;\n\t// Check for the variable defined in the parent widget (or an ancestor in the prototype chain)\n\tif(parentWidget && name in parentWidget.variables) {\n\t\tvar variable = parentWidget.variables[name],\n\t\t\tvalue = variable.value;\n\t\t// Substitute any parameters specified in the definition\n\t\tvalue = this.substituteVariableParameters(value,variable.params,actualParams);\n\t\tvalue = this.substituteVariableReferences(value);\n\t\treturn value;\n\t}\n\t// If the variable doesn't exist in the parent widget then look for a macro module\n\treturn this.evaluateMacroModule(name,actualParams,options.defaultValue);\n};\n\nWidget.prototype.substituteVariableParameters = function(text,formalParams,actualParams) {\n\tif(formalParams) {\n\t\tvar nextAnonParameter = 0, // Next candidate anonymous parameter in macro call\n\t\t\tparamInfo, paramValue;\n\t\t// Step through each of the parameters in the macro definition\n\t\tfor(var p=0; 
p<formalParams.length; p++) {\n\t\t\t// Check if we've got a macro call parameter with the same name\n\t\t\tparamInfo = formalParams[p];\n\t\t\tparamValue = undefined;\n\t\t\tfor(var m=0; m<actualParams.length; m++) {\n\t\t\t\tif(actualParams[m].name === paramInfo.name) {\n\t\t\t\t\tparamValue = actualParams[m].value;\n\t\t\t\t}\n\t\t\t}\n\t\t\t// If not, use the next available anonymous macro call parameter\n\t\t\twhile(nextAnonParameter < actualParams.length && actualParams[nextAnonParameter].name) {\n\t\t\t\tnextAnonParameter++;\n\t\t\t}\n\t\t\tif(paramValue === undefined && nextAnonParameter < actualParams.length) {\n\t\t\t\tparamValue = actualParams[nextAnonParameter++].value;\n\t\t\t}\n\t\t\t// If we've still not got a value, use the default, if any\n\t\t\tparamValue = paramValue || paramInfo[\"default\"] || \"\";\n\t\t\t// Replace any instances of this parameter\n\t\t\ttext = text.replace(new RegExp(\"\\\\$\" + $tw.utils.escapeRegExp(paramInfo.name) + \"\\\\$\",\"mg\"),paramValue);\n\t\t}\n\t}\n\treturn text;\n};\n\nWidget.prototype.substituteVariableReferences = function(text) {\n\tvar self = this;\n\treturn (text || \"\").replace(/\\$\\(([^\\)\\$]+)\\)\\$/g,function(match,p1,offset,string) {\n\t\treturn self.getVariable(p1,{defaultValue: \"\"});\n\t});\n};\n\nWidget.prototype.evaluateMacroModule = function(name,actualParams,defaultValue) {\n\tif($tw.utils.hop($tw.macros,name)) {\n\t\tvar macro = $tw.macros[name],\n\t\t\targs = [];\n\t\tif(macro.params.length > 0) {\n\t\t\tvar nextAnonParameter = 0, // Next candidate anonymous parameter in macro call\n\t\t\t\tparamInfo, paramValue;\n\t\t\t// Step through each of the parameters in the macro definition\n\t\t\tfor(var p=0; p<macro.params.length; p++) {\n\t\t\t\t// Check if we've got a macro call parameter with the same name\n\t\t\t\tparamInfo = macro.params[p];\n\t\t\t\tparamValue = undefined;\n\t\t\t\tfor(var m=0; m<actualParams.length; m++) {\n\t\t\t\t\tif(actualParams[m].name === paramInfo.name) {\n\t\t\t\t\t\tparamValue = actualParams[m].value;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\t// If not, use the next available anonymous macro call parameter\n\t\t\t\twhile(nextAnonParameter < actualParams.length && actualParams[nextAnonParameter].name) {\n\t\t\t\t\tnextAnonParameter++;\n\t\t\t\t}\n\t\t\t\tif(paramValue === undefined && nextAnonParameter < actualParams.length) {\n\t\t\t\t\tparamValue = actualParams[nextAnonParameter++].value;\n\t\t\t\t}\n\t\t\t\t// If we've still not got a value, use the default, if any\n\t\t\t\tparamValue = paramValue || paramInfo[\"default\"] || \"\";\n\t\t\t\t// Save the parameter\n\t\t\t\targs.push(paramValue);\n\t\t\t}\n\t\t}\n\t\telse for(var i=0; i<actualParams.length; ++i) {\n\t\t\targs.push(actualParams[i].value);\n\t\t}\n\t\treturn (macro.run.apply(this,args) || \"\").toString();\n\t} else {\n\t\treturn defaultValue;\n\t}\n};\n\n/*\nCheck whether a given context variable value exists in the parent chain\n*/\nWidget.prototype.hasVariable = function(name,value) {\n\tvar node = this;\n\twhile(node) {\n\t\tif($tw.utils.hop(node.variables,name) && node.variables[name].value === value) {\n\t\t\treturn true;\n\t\t}\n\t\tnode = node.parentWidget;\n\t}\n\treturn false;\n};\n\n/*\nConstruct a qualifying string based on a hash of concatenating the values of a given variable in the parent chain\n*/\nWidget.prototype.getStateQualifier = function(name) {\n\tthis.qualifiers = this.qualifiers || Object.create(null);\n\tname = name || \"transclusion\";\n\tif(this.qualifiers[name]) {\n\t\treturn this.qualifiers[name];\n\t} 
else {\n\t\tvar output = [],\n\t\t\tnode = this;\n\t\twhile(node && node.parentWidget) {\n\t\t\tif($tw.utils.hop(node.parentWidget.variables,name)) {\n\t\t\t\toutput.push(node.getVariable(name));\n\t\t\t}\n\t\t\tnode = node.parentWidget;\n\t\t}\n\t\tvar value = $tw.utils.hashString(output.join(\"\"));\n\t\tthis.qualifiers[name] = value;\n\t\treturn value;\n\t}\n};\n\n/*\nCompute the current values of the attributes of the widget. Returns a hashmap of the names of the attributes that have changed\n*/\nWidget.prototype.computeAttributes = function() {\n\tvar changedAttributes = {},\n\t\tself = this,\n\t\tvalue;\n\t$tw.utils.each(this.parseTreeNode.attributes,function(attribute,name) {\n\t\tif(attribute.type === \"indirect\") {\n\t\t\tvalue = self.wiki.getTextReference(attribute.textReference,\"\",self.getVariable(\"currentTiddler\"));\n\t\t} else if(attribute.type === \"macro\") {\n\t\t\tvalue = self.getVariable(attribute.value.name,{params: attribute.value.params});\n\t\t} else { // String attribute\n\t\t\tvalue = attribute.value;\n\t\t}\n\t\t// Check whether the attribute has changed\n\t\tif(self.attributes[name] !== value) {\n\t\t\tself.attributes[name] = value;\n\t\t\tchangedAttributes[name] = true;\n\t\t}\n\t});\n\treturn changedAttributes;\n};\n\n/*\nCheck for the presence of an attribute\n*/\nWidget.prototype.hasAttribute = function(name) {\n\treturn $tw.utils.hop(this.attributes,name);\n};\n\n/*\nGet the value of an attribute\n*/\nWidget.prototype.getAttribute = function(name,defaultText) {\n\tif($tw.utils.hop(this.attributes,name)) {\n\t\treturn this.attributes[name];\n\t} else {\n\t\treturn defaultText;\n\t}\n};\n\n/*\nAssign the computed attributes of the widget to a domNode\noptions include:\nexcludeEventAttributes: ignores attributes whose name begins with \"on\"\n*/\nWidget.prototype.assignAttributes = function(domNode,options) {\n\toptions = options || {};\n\tvar self = this;\n\t$tw.utils.each(this.attributes,function(v,a) {\n\t\t// Check exclusions\n\t\tif(options.excludeEventAttributes && a.substr(0,2) === \"on\") {\n\t\t\tv = undefined;\n\t\t}\n\t\tif(v !== undefined) {\n\t\t\tvar b = a.split(\":\");\n\t\t\t// Setting certain attributes can cause a DOM error (eg xmlns on the svg element)\n\t\t\ttry {\n\t\t\t\tif (b.length == 2 && b[0] == \"xlink\"){\n\t\t\t\t\tdomNode.setAttributeNS(\"http://www.w3.org/1999/xlink\",b[1],v);\n\t\t\t\t} else {\n\t\t\t\t\tdomNode.setAttributeNS(null,a,v);\n\t\t\t\t}\n\t\t\t} catch(e) {\n\t\t\t}\n\t\t}\n\t});\n};\n\n/*\nMake child widgets correspondng to specified parseTreeNodes\n*/\nWidget.prototype.makeChildWidgets = function(parseTreeNodes) {\n\tthis.children = [];\n\tvar self = this;\n\t$tw.utils.each(parseTreeNodes || (this.parseTreeNode && this.parseTreeNode.children),function(childNode) {\n\t\tself.children.push(self.makeChildWidget(childNode));\n\t});\n};\n\n/*\nConstruct the widget object for a parse tree node\n*/\nWidget.prototype.makeChildWidget = function(parseTreeNode) {\n\tvar WidgetClass = this.widgetClasses[parseTreeNode.type];\n\tif(!WidgetClass) {\n\t\tWidgetClass = this.widgetClasses.text;\n\t\tparseTreeNode = {type: \"text\", text: \"Undefined widget '\" + parseTreeNode.type + \"'\"};\n\t}\n\treturn new WidgetClass(parseTreeNode,{\n\t\twiki: this.wiki,\n\t\tvariables: {},\n\t\tparentWidget: this,\n\t\tdocument: this.document\n\t});\n};\n\n/*\nGet the next sibling of this widget\n*/\nWidget.prototype.nextSibling = function() {\n\tif(this.parentWidget) {\n\t\tvar index = this.parentWidget.children.indexOf(this);\n\t\tif(index 
!== -1 && index < this.parentWidget.children.length-1) {\n\t\t\treturn this.parentWidget.children[index+1];\n\t\t}\n\t}\n\treturn null;\n};\n\n/*\nGet the previous sibling of this widget\n*/\nWidget.prototype.previousSibling = function() {\n\tif(this.parentWidget) {\n\t\tvar index = this.parentWidget.children.indexOf(this);\n\t\tif(index !== -1 && index > 0) {\n\t\t\treturn this.parentWidget.children[index-1];\n\t\t}\n\t}\n\treturn null;\n};\n\n/*\nRender the children of this widget into the DOM\n*/\nWidget.prototype.renderChildren = function(parent,nextSibling) {\n\t$tw.utils.each(this.children,function(childWidget) {\n\t\tchildWidget.render(parent,nextSibling);\n\t});\n};\n\n/*\nAdd a list of event listeners from an array [{type:,handler:},...]\n*/\nWidget.prototype.addEventListeners = function(listeners) {\n\tvar self = this;\n\t$tw.utils.each(listeners,function(listenerInfo) {\n\t\tself.addEventListener(listenerInfo.type,listenerInfo.handler);\n\t});\n};\n\n/*\nAdd an event listener\n*/\nWidget.prototype.addEventListener = function(type,handler) {\n\tvar self = this;\n\tif(typeof handler === \"string\") { // The handler is a method name on this widget\n\t\tthis.eventListeners[type] = function(event) {\n\t\t\treturn self[handler].call(self,event);\n\t\t};\n\t} else { // The handler is a function\n\t\tthis.eventListeners[type] = function(event) {\n\t\t\treturn handler.call(self,event);\n\t\t};\n\t}\n};\n\n/*\nDispatch an event to a widget. If the widget doesn't handle the event then it is also dispatched to the parent widget\n*/\nWidget.prototype.dispatchEvent = function(event) {\n\t// Dispatch the event if this widget handles it\n\tvar listener = this.eventListeners[event.type];\n\tif(listener) {\n\t\t// Don't propagate the event if the listener returned false\n\t\tif(!listener(event)) {\n\t\t\treturn false;\n\t\t}\n\t}\n\t// Dispatch the event to the parent widget\n\tif(this.parentWidget) {\n\t\treturn this.parentWidget.dispatchEvent(event);\n\t}\n\treturn true;\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nWidget.prototype.refresh = function(changedTiddlers) {\n\treturn this.refreshChildren(changedTiddlers);\n};\n\n/*\nRebuild a previously rendered widget\n*/\nWidget.prototype.refreshSelf = function() {\n\tvar nextSibling = this.findNextSiblingDomNode();\n\tthis.removeChildDomNodes();\n\tthis.render(this.parentDomNode,nextSibling);\n};\n\n/*\nRefresh all the children of a widget\n*/\nWidget.prototype.refreshChildren = function(changedTiddlers) {\n\tvar self = this,\n\t\trefreshed = false;\n\t$tw.utils.each(this.children,function(childWidget) {\n\t\trefreshed = childWidget.refresh(changedTiddlers) || refreshed;\n\t});\n\treturn refreshed;\n};\n\n/*\nFind the next sibling in the DOM to this widget. This is done by scanning the widget tree through all next siblings and their descendents that share the same parent DOM node\n*/\nWidget.prototype.findNextSiblingDomNode = function(startIndex) {\n\t// Refer to this widget by its index within its parents children\n\tvar parent = this.parentWidget,\n\t\tindex = startIndex !== undefined ? 
startIndex : parent.children.indexOf(this);\nif(index === -1) {\n\tthrow \"node not found in parents children\";\n}\n\t// Look for a DOM node in the later siblings\n\twhile(++index < parent.children.length) {\n\t\tvar domNode = parent.children[index].findFirstDomNode();\n\t\tif(domNode) {\n\t\t\treturn domNode;\n\t\t}\n\t}\n\t// Go back and look for later siblings of our parent if it has the same parent dom node\n\tvar grandParent = parent.parentWidget;\n\tif(grandParent && parent.parentDomNode === this.parentDomNode) {\n\t\tindex = grandParent.children.indexOf(parent);\n\t\tif(index !== -1) {\n\t\t\treturn parent.findNextSiblingDomNode(index);\n\t\t}\n\t}\n\treturn null;\n};\n\n/*\nFind the first DOM node generated by a widget or its children\n*/\nWidget.prototype.findFirstDomNode = function() {\n\t// Return the first dom node of this widget, if we've got one\n\tif(this.domNodes.length > 0) {\n\t\treturn this.domNodes[0];\n\t}\n\t// Otherwise, recursively call our children\n\tfor(var t=0; t<this.children.length; t++) {\n\t\tvar domNode = this.children[t].findFirstDomNode();\n\t\tif(domNode) {\n\t\t\treturn domNode;\n\t\t}\n\t}\n\treturn null;\n};\n\n/*\nRemove any DOM nodes created by this widget or its children\n*/\nWidget.prototype.removeChildDomNodes = function() {\n\t// If this widget has directly created DOM nodes, delete them and exit. This assumes that any child widgets are contained within the created DOM nodes, which would normally be the case\n\tif(this.domNodes.length > 0) {\n\t\t$tw.utils.each(this.domNodes,function(domNode) {\n\t\t\tdomNode.parentNode.removeChild(domNode);\n\t\t});\n\t\tthis.domNodes = [];\n\t} else {\n\t\t// Otherwise, ask the child widgets to delete their DOM nodes\n\t\t$tw.utils.each(this.children,function(childWidget) {\n\t\t\tchildWidget.removeChildDomNodes();\n\t\t});\n\t}\n};\n\n/*\nInvoke the action widgets that are descendents of the current widget.\n*/\nWidget.prototype.invokeActions = function(triggeringWidget,event) {\n\tvar handled = false;\n\t// For each child widget\n\tfor(var t=0; t<this.children.length; t++) {\n\t\tvar child = this.children[t];\n\t\t// Invoke the child if it is an action widget\n\t\tif(child.invokeAction && child.invokeAction(triggeringWidget,event)) {\n\t\t\thandled = true;\n\t\t}\n\t\t// Propagate through through the child if it permits it\n\t\tif(child.allowActionPropagation() && child.invokeActions(triggeringWidget,event)) {\n\t\t\thandled = true;\n\t\t}\n\t}\n\treturn handled;\n};\n\n/*\nInvoke the action widgets defined in a string\n*/\nWidget.prototype.invokeActionString = function(actions,triggeringWidget,event) {\n\tactions = actions || \"\";\n\tvar parser = this.wiki.parseText(\"text/vnd.tiddlywiki\",actions,{\n\t\t\tparentWidget: this,\n\t\t\tdocument: this.document\n\t\t}),\n\t\twidgetNode = this.wiki.makeWidget(parser,{\n\t\t\tparentWidget: this,\n\t\t\tdocument: this.document\n\t\t});\n\tvar container = this.document.createElement(\"div\");\n\twidgetNode.render(container,null);\n\treturn widgetNode.invokeActions(this,event);\n};\n\nWidget.prototype.allowActionPropagation = function() {\n\treturn true;\n};\n\nexports.widget = Widget;\n\n})();\n",
"title": "$:/core/modules/widgets/widget.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/widgets/wikify.js": {
"text": "/*\\\ntitle: $:/core/modules/widgets/wikify.js\ntype: application/javascript\nmodule-type: widget\n\nWidget to wikify text into a variable\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar Widget = require(\"$:/core/modules/widgets/widget.js\").widget;\n\nvar WikifyWidget = function(parseTreeNode,options) {\n\tthis.initialise(parseTreeNode,options);\n};\n\n/*\nInherit from the base widget class\n*/\nWikifyWidget.prototype = new Widget();\n\n/*\nRender this widget into the DOM\n*/\nWikifyWidget.prototype.render = function(parent,nextSibling) {\n\tthis.parentDomNode = parent;\n\tthis.computeAttributes();\n\tthis.execute();\n\tthis.renderChildren(parent,nextSibling);\n};\n\n/*\nCompute the internal state of the widget\n*/\nWikifyWidget.prototype.execute = function() {\n\t// Get our parameters\n\tthis.wikifyName = this.getAttribute(\"name\");\n\tthis.wikifyText = this.getAttribute(\"text\");\n\tthis.wikifyType = this.getAttribute(\"type\");\n\tthis.wikifyMode = this.getAttribute(\"mode\",\"block\");\n\tthis.wikifyOutput = this.getAttribute(\"output\",\"text\");\n\t// Create the parse tree\n\tthis.wikifyParser = this.wiki.parseText(this.wikifyType,this.wikifyText,{\n\t\t\tparseAsInline: this.wikifyMode === \"inline\"\n\t\t});\n\t// Create the widget tree \n\tthis.wikifyWidgetNode = this.wiki.makeWidget(this.wikifyParser,{\n\t\t\tdocument: $tw.fakeDocument,\n\t\t\tparentWidget: this\n\t\t});\n\t// Render the widget tree to the container\n\tthis.wikifyContainer = $tw.fakeDocument.createElement(\"div\");\n\tthis.wikifyWidgetNode.render(this.wikifyContainer,null);\n\tthis.wikifyResult = this.getResult();\n\t// Set context variable\n\tthis.setVariable(this.wikifyName,this.wikifyResult);\n\t// Construct the child widgets\n\tthis.makeChildWidgets();\n};\n\n/*\nReturn the result string\n*/\nWikifyWidget.prototype.getResult = function() {\n\tvar result;\n\tswitch(this.wikifyOutput) {\n\t\tcase \"text\":\n\t\t\tresult = this.wikifyContainer.textContent;\n\t\t\tbreak;\n\t\tcase \"html\":\n\t\t\tresult = this.wikifyContainer.innerHTML;\n\t\t\tbreak;\n\t\tcase \"parsetree\":\n\t\t\tresult = JSON.stringify(this.wikifyParser.tree,0,$tw.config.preferences.jsonSpaces);\n\t\t\tbreak;\n\t\tcase \"widgettree\":\n\t\t\tresult = JSON.stringify(this.getWidgetTree(),0,$tw.config.preferences.jsonSpaces);\n\t\t\tbreak;\n\t}\n\treturn result;\n};\n\n/*\nReturn a string of the widget tree\n*/\nWikifyWidget.prototype.getWidgetTree = function() {\n\tvar copyNode = function(widgetNode,resultNode) {\n\t\t\tvar type = widgetNode.parseTreeNode.type;\n\t\t\tresultNode.type = type;\n\t\t\tswitch(type) {\n\t\t\t\tcase \"element\":\n\t\t\t\t\tresultNode.tag = widgetNode.parseTreeNode.tag;\n\t\t\t\t\tbreak;\n\t\t\t\tcase \"text\":\n\t\t\t\t\tresultNode.text = widgetNode.parseTreeNode.text;\n\t\t\t\t\tbreak;\t\n\t\t\t}\n\t\t\tif(Object.keys(widgetNode.attributes || {}).length > 0) {\n\t\t\t\tresultNode.attributes = {};\n\t\t\t\t$tw.utils.each(widgetNode.attributes,function(attr,attrName) {\n\t\t\t\t\tresultNode.attributes[attrName] = widgetNode.getAttribute(attrName);\n\t\t\t\t});\n\t\t\t}\n\t\t\tif(Object.keys(widgetNode.children || {}).length > 0) {\n\t\t\t\tresultNode.children = [];\n\t\t\t\t$tw.utils.each(widgetNode.children,function(widgetChildNode) {\n\t\t\t\t\tvar node = {};\n\t\t\t\t\tresultNode.children.push(node);\n\t\t\t\t\tcopyNode(widgetChildNode,node);\n\t\t\t\t});\n\t\t\t}\n\t\t},\n\t\tresults = 
{};\n\tcopyNode(this.wikifyWidgetNode,results);\n\treturn results;\n};\n\n/*\nSelectively refreshes the widget if needed. Returns true if the widget or any of its children needed re-rendering\n*/\nWikifyWidget.prototype.refresh = function(changedTiddlers) {\n\tvar changedAttributes = this.computeAttributes();\n\t// Refresh ourselves entirely if any of our attributes have changed\n\tif(changedAttributes.name || changedAttributes.text || changedAttributes.type || changedAttributes.mode || changedAttributes.output) {\n\t\tthis.refreshSelf();\n\t\treturn true;\n\t} else {\n\t\t// Refresh the widget tree\n\t\tif(this.wikifyWidgetNode.refresh(changedTiddlers)) {\n\t\t\t// Check if there was any change\n\t\t\tvar result = this.getResult();\n\t\t\tif(result !== this.wikifyResult) {\n\t\t\t\t// If so, save the change\n\t\t\t\tthis.wikifyResult = result;\n\t\t\t\tthis.setVariable(this.wikifyName,this.wikifyResult);\n\t\t\t\t// Refresh each of our child widgets\n\t\t\t\t$tw.utils.each(this.children,function(childWidget) {\n\t\t\t\t\tchildWidget.refreshSelf();\n\t\t\t\t});\n\t\t\t\treturn true;\n\t\t\t}\n\t\t}\n\t\t// Just refresh the children\n\t\treturn this.refreshChildren(changedTiddlers);\n\t}\n};\n\nexports.wikify = WikifyWidget;\n\n})();\n",
"title": "$:/core/modules/widgets/wikify.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/core/modules/wiki-bulkops.js": {
"text": "/*\\\ntitle: $:/core/modules/wiki-bulkops.js\ntype: application/javascript\nmodule-type: wikimethod\n\nBulk tiddler operations such as rename.\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\n/*\nRename a tiddler, and relink any tags or lists that reference it.\n*/\nexports.renameTiddler = function(fromTitle,toTitle) {\n\tvar self = this;\n\tfromTitle = (fromTitle || \"\").trim();\n\ttoTitle = (toTitle || \"\").trim();\n\tif(fromTitle && toTitle && fromTitle !== toTitle) {\n\t\t// Rename the tiddler itself\n\t\tvar tiddler = this.getTiddler(fromTitle);\n\t\tthis.addTiddler(new $tw.Tiddler(tiddler,{title: toTitle},this.getModificationFields()));\n\t\tthis.deleteTiddler(fromTitle);\n\t\t// Rename any tags or lists that reference it\n\t\tthis.each(function(tiddler,title) {\n\t\t\tvar tags = (tiddler.fields.tags || []).slice(0),\n\t\t\t\tlist = (tiddler.fields.list || []).slice(0),\n\t\t\t\tisModified = false;\n\t\t\t// Rename tags\n\t\t\t$tw.utils.each(tags,function (title,index) {\n\t\t\t\tif(title === fromTitle) {\n\t\t\t\t\ttags[index] = toTitle;\n\t\t\t\t\tisModified = true;\n\t\t\t\t}\n\t\t\t});\n\t\t\t// Rename lists\n\t\t\t$tw.utils.each(list,function (title,index) {\n\t\t\t\tif(title === fromTitle) {\n\t\t\t\t\tlist[index] = toTitle;\n\t\t\t\t\tisModified = true;\n\t\t\t\t}\n\t\t\t});\n\t\t\tif(isModified) {\n\t\t\t\tself.addTiddler(new $tw.Tiddler(tiddler,{tags: tags, list: list},self.getModificationFields()));\n\t\t\t}\n\t\t});\n\t}\n}\n\n})();\n",
"title": "$:/core/modules/wiki-bulkops.js",
"type": "application/javascript",
"module-type": "wikimethod"
},
"$:/core/modules/wiki.js": {
"text": "/*\\\ntitle: $:/core/modules/wiki.js\ntype: application/javascript\nmodule-type: wikimethod\n\nExtension methods for the $tw.Wiki object\n\nAdds the following properties to the wiki object:\n\n* `eventListeners` is a hashmap by type of arrays of listener functions\n* `changedTiddlers` is a hashmap describing changes to named tiddlers since wiki change events were last dispatched. Each entry is a hashmap containing two fields:\n\tmodified: true/false\n\tdeleted: true/false\n* `changeCount` is a hashmap by tiddler title containing a numerical index that starts at zero and is incremented each time a tiddler is created changed or deleted\n* `caches` is a hashmap by tiddler title containing a further hashmap of named cache objects. Caches are automatically cleared when a tiddler is modified or deleted\n* `globalCache` is a hashmap by cache name of cache objects that are cleared whenever any tiddler change occurs\n\n\\*/\n(function(){\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar widget = require(\"$:/core/modules/widgets/widget.js\");\n\nvar USER_NAME_TITLE = \"$:/status/UserName\";\n\n/*\nGet the value of a text reference. Text references can have any of these forms:\n\t<tiddlertitle>\n\t<tiddlertitle>!!<fieldname>\n\t!!<fieldname> - specifies a field of the current tiddlers\n\t<tiddlertitle>##<index>\n*/\nexports.getTextReference = function(textRef,defaultText,currTiddlerTitle) {\n\tvar tr = $tw.utils.parseTextReference(textRef),\n\t\ttitle = tr.title || currTiddlerTitle;\n\tif(tr.field) {\n\t\tvar tiddler = this.getTiddler(title);\n\t\tif(tr.field === \"title\") { // Special case so we can return the title of a non-existent tiddler\n\t\t\treturn title;\n\t\t} else if(tiddler && $tw.utils.hop(tiddler.fields,tr.field)) {\n\t\t\treturn tiddler.getFieldString(tr.field);\n\t\t} else {\n\t\t\treturn defaultText;\n\t\t}\n\t} else if(tr.index) {\n\t\treturn this.extractTiddlerDataItem(title,tr.index,defaultText);\n\t} else {\n\t\treturn this.getTiddlerText(title,defaultText);\n\t}\n};\n\nexports.setTextReference = function(textRef,value,currTiddlerTitle) {\n\tvar tr = $tw.utils.parseTextReference(textRef),\n\t\ttitle = tr.title || currTiddlerTitle;\n\tthis.setText(title,tr.field,tr.index,value);\n};\n\nexports.setText = function(title,field,index,value,options) {\n\toptions = options || {};\n\tvar creationFields = options.suppressTimestamp ? {} : this.getCreationFields(),\n\t\tmodificationFields = options.suppressTimestamp ? 
{} : this.getModificationFields();\n\t// Check if it is a reference to a tiddler field\n\tif(index) {\n\t\tvar data = this.getTiddlerData(title,Object.create(null));\n\t\tif(value !== undefined) {\n\t\t\tdata[index] = value;\n\t\t} else {\n\t\t\tdelete data[index];\n\t\t}\n\t\tthis.setTiddlerData(title,data,modificationFields);\n\t} else {\n\t\tvar tiddler = this.getTiddler(title),\n\t\t\tfields = {title: title};\n\t\tfields[field || \"text\"] = value;\n\t\tthis.addTiddler(new $tw.Tiddler(creationFields,tiddler,fields,modificationFields));\n\t}\n};\n\nexports.deleteTextReference = function(textRef,currTiddlerTitle) {\n\tvar tr = $tw.utils.parseTextReference(textRef),\n\t\ttitle,tiddler,fields;\n\t// Check if it is a reference to a tiddler\n\tif(tr.title && !tr.field) {\n\t\tthis.deleteTiddler(tr.title);\n\t// Else check for a field reference\n\t} else if(tr.field) {\n\t\ttitle = tr.title || currTiddlerTitle;\n\t\ttiddler = this.getTiddler(title);\n\t\tif(tiddler && $tw.utils.hop(tiddler.fields,tr.field)) {\n\t\t\tfields = Object.create(null);\n\t\t\tfields[tr.field] = undefined;\n\t\t\tthis.addTiddler(new $tw.Tiddler(tiddler,fields,this.getModificationFields()));\n\t\t}\n\t}\n};\n\nexports.addEventListener = function(type,listener) {\n\tthis.eventListeners = this.eventListeners || {};\n\tthis.eventListeners[type] = this.eventListeners[type] || [];\n\tthis.eventListeners[type].push(listener);\t\n};\n\nexports.removeEventListener = function(type,listener) {\n\tvar listeners = this.eventListeners[type];\n\tif(listeners) {\n\t\tvar p = listeners.indexOf(listener);\n\t\tif(p !== -1) {\n\t\t\tlisteners.splice(p,1);\n\t\t}\n\t}\n};\n\nexports.dispatchEvent = function(type /*, args */) {\n\tvar args = Array.prototype.slice.call(arguments,1),\n\t\tlisteners = this.eventListeners[type];\n\tif(listeners) {\n\t\tfor(var p=0; p<listeners.length; p++) {\n\t\t\tvar listener = listeners[p];\n\t\t\tlistener.apply(listener,args);\n\t\t}\n\t}\n};\n\n/*\nCauses a tiddler to be marked as changed, incrementing the change count, and triggers event handlers.\nThis method should be called after the changes it describes have been made to the wiki.tiddlers[] array.\n\ttitle: Title of tiddler\n\tisDeleted: defaults to false (meaning the tiddler has been created or modified),\n\t\ttrue if the tiddler has been deleted\n*/\nexports.enqueueTiddlerEvent = function(title,isDeleted) {\n\t// Record the touch in the list of changed tiddlers\n\tthis.changedTiddlers = this.changedTiddlers || Object.create(null);\n\tthis.changedTiddlers[title] = this.changedTiddlers[title] || Object.create(null);\n\tthis.changedTiddlers[title][isDeleted ? 
\"deleted\" : \"modified\"] = true;\n\t// Increment the change count\n\tthis.changeCount = this.changeCount || Object.create(null);\n\tif($tw.utils.hop(this.changeCount,title)) {\n\t\tthis.changeCount[title]++;\n\t} else {\n\t\tthis.changeCount[title] = 1;\n\t}\n\t// Trigger events\n\tthis.eventListeners = this.eventListeners || {};\n\tif(!this.eventsTriggered) {\n\t\tvar self = this;\n\t\t$tw.utils.nextTick(function() {\n\t\t\tvar changes = self.changedTiddlers;\n\t\t\tself.changedTiddlers = Object.create(null);\n\t\t\tself.eventsTriggered = false;\n\t\t\tif($tw.utils.count(changes) > 0) {\n\t\t\t\tself.dispatchEvent(\"change\",changes);\n\t\t\t}\n\t\t});\n\t\tthis.eventsTriggered = true;\n\t}\n};\n\nexports.getSizeOfTiddlerEventQueue = function() {\n\treturn $tw.utils.count(this.changedTiddlers);\n};\n\nexports.clearTiddlerEventQueue = function() {\n\tthis.changedTiddlers = Object.create(null);\n\tthis.changeCount = Object.create(null);\n};\n\nexports.getChangeCount = function(title) {\n\tthis.changeCount = this.changeCount || Object.create(null);\n\tif($tw.utils.hop(this.changeCount,title)) {\n\t\treturn this.changeCount[title];\n\t} else {\n\t\treturn 0;\n\t}\n};\n\n/*\nGenerate an unused title from the specified base\n*/\nexports.generateNewTitle = function(baseTitle,options) {\n\toptions = options || {};\n\tvar c = 0,\n\t\ttitle = baseTitle;\n\twhile(this.tiddlerExists(title) || this.isShadowTiddler(title) || this.findDraft(title)) {\n\t\ttitle = baseTitle + \n\t\t\t(options.prefix || \" \") + \n\t\t\t(++c);\n\t}\n\treturn title;\n};\n\nexports.isSystemTiddler = function(title) {\n\treturn title && title.indexOf(\"$:/\") === 0;\n};\n\nexports.isTemporaryTiddler = function(title) {\n\treturn title && title.indexOf(\"$:/temp/\") === 0;\n};\n\nexports.isImageTiddler = function(title) {\n\tvar tiddler = this.getTiddler(title);\n\tif(tiddler) {\t\t\n\t\tvar contentTypeInfo = $tw.config.contentTypeInfo[tiddler.fields.type || \"text/vnd.tiddlywiki\"];\n\t\treturn !!contentTypeInfo && contentTypeInfo.flags.indexOf(\"image\") !== -1;\n\t} else {\n\t\treturn null;\n\t}\n};\n\n/*\nLike addTiddler() except it will silently reject any plugin tiddlers that are older than the currently loaded version. 
Returns true if the tiddler was imported\n*/\nexports.importTiddler = function(tiddler) {\n\tvar existingTiddler = this.getTiddler(tiddler.fields.title);\n\t// Check if we're dealing with a plugin\n\tif(tiddler && tiddler.hasField(\"plugin-type\") && tiddler.hasField(\"version\") && existingTiddler && existingTiddler.hasField(\"plugin-type\") && existingTiddler.hasField(\"version\")) {\n\t\t// Reject the incoming plugin if it is older\n\t\tif(!$tw.utils.checkVersions(tiddler.fields.version,existingTiddler.fields.version)) {\n\t\t\treturn false;\n\t\t}\n\t}\n\t// Fall through to adding the tiddler\n\tthis.addTiddler(tiddler);\n\treturn true;\n};\n\n/*\nReturn a hashmap of the fields that should be set when a tiddler is created\n*/\nexports.getCreationFields = function() {\n\tvar fields = {\n\t\t\tcreated: new Date()\n\t\t},\n\t\tcreator = this.getTiddlerText(USER_NAME_TITLE);\n\tif(creator) {\n\t\tfields.creator = creator;\n\t}\n\treturn fields;\n};\n\n/*\nReturn a hashmap of the fields that should be set when a tiddler is modified\n*/\nexports.getModificationFields = function() {\n\tvar fields = Object.create(null),\n\t\tmodifier = this.getTiddlerText(USER_NAME_TITLE);\n\tfields.modified = new Date();\n\tif(modifier) {\n\t\tfields.modifier = modifier;\n\t}\n\treturn fields;\n};\n\n/*\nReturn a sorted array of tiddler titles. Options include:\nsortField: field to sort by\nexcludeTag: tag to exclude\nincludeSystem: whether to include system tiddlers (defaults to false)\n*/\nexports.getTiddlers = function(options) {\n\toptions = options || Object.create(null);\n\tvar self = this,\n\t\tsortField = options.sortField || \"title\",\n\t\ttiddlers = [], t, titles = [];\n\tthis.each(function(tiddler,title) {\n\t\tif(options.includeSystem || !self.isSystemTiddler(title)) {\n\t\t\tif(!options.excludeTag || !tiddler.hasTag(options.excludeTag)) {\n\t\t\t\ttiddlers.push(tiddler);\n\t\t\t}\n\t\t}\n\t});\n\ttiddlers.sort(function(a,b) {\n\t\tvar aa = a.fields[sortField].toLowerCase() || \"\",\n\t\t\tbb = b.fields[sortField].toLowerCase() || \"\";\n\t\tif(aa < bb) {\n\t\t\treturn -1;\n\t\t} else {\n\t\t\tif(aa > bb) {\n\t\t\t\treturn 1;\n\t\t\t} else {\n\t\t\t\treturn 0;\n\t\t\t}\n\t\t}\n\t});\n\tfor(t=0; t<tiddlers.length; t++) {\n\t\ttitles.push(tiddlers[t].fields.title);\n\t}\n\treturn titles;\n};\n\nexports.countTiddlers = function(excludeTag) {\n\tvar tiddlers = this.getTiddlers({excludeTag: excludeTag});\n\treturn $tw.utils.count(tiddlers);\n};\n\n/*\nReturns a function iterator(callback) that iterates through the specified titles, and invokes the callback with callback(tiddler,title)\n*/\nexports.makeTiddlerIterator = function(titles) {\n\tvar self = this;\n\tif(!$tw.utils.isArray(titles)) {\n\t\ttitles = Object.keys(titles);\n\t} else {\n\t\ttitles = titles.slice(0);\n\t}\n\treturn function(callback) {\n\t\ttitles.forEach(function(title) {\n\t\t\tcallback(self.getTiddler(title),title);\n\t\t});\n\t};\n};\n\n/*\nSort an array of tiddler titles by a specified field\n\ttitles: array of titles (sorted in place)\n\tsortField: name of field to sort by\n\tisDescending: true if the sort should be descending\n\tisCaseSensitive: true if the sort should consider upper and lower case letters to be different\n*/\nexports.sortTiddlers = function(titles,sortField,isDescending,isCaseSensitive,isNumeric) {\n\tvar self = this;\n\ttitles.sort(function(a,b) {\n\t\tvar x,y,\n\t\t\tcompareNumbers = function(x,y) {\n\t\t\t\tvar result = \n\t\t\t\t\tisNaN(x) && !isNaN(y) ? (isDescending ? 
-1 : 1) :\n\t\t\t\t\t!isNaN(x) && isNaN(y) ? (isDescending ? 1 : -1) :\n\t\t\t\t\t (isDescending ? y - x : x - y);\n\t\t\t\treturn result;\n\t\t\t};\n\t\tif(sortField !== \"title\") {\n\t\t\tvar tiddlerA = self.getTiddler(a),\n\t\t\t\ttiddlerB = self.getTiddler(b);\n\t\t\tif(tiddlerA) {\n\t\t\t\ta = tiddlerA.fields[sortField] || \"\";\n\t\t\t} else {\n\t\t\t\ta = \"\";\n\t\t\t}\n\t\t\tif(tiddlerB) {\n\t\t\t\tb = tiddlerB.fields[sortField] || \"\";\n\t\t\t} else {\n\t\t\t\tb = \"\";\n\t\t\t}\n\t\t}\n\t\tx = Number(a);\n\t\ty = Number(b);\n\t\tif(isNumeric && (!isNaN(x) || !isNaN(y))) {\n\t\t\treturn compareNumbers(x,y);\n\t\t} else if($tw.utils.isDate(a) && $tw.utils.isDate(b)) {\n\t\t\treturn isDescending ? b - a : a - b;\n\t\t} else {\n\t\t\ta = String(a);\n\t\t\tb = String(b);\n\t\t\tif(!isCaseSensitive) {\n\t\t\t\ta = a.toLowerCase();\n\t\t\t\tb = b.toLowerCase();\n\t\t\t}\n\t\t\treturn isDescending ? b.localeCompare(a) : a.localeCompare(b);\n\t\t}\n\t});\n};\n\n/*\nFor every tiddler invoke a callback(title,tiddler) with `this` set to the wiki object. Options include:\nsortField: field to sort by\nexcludeTag: tag to exclude\nincludeSystem: whether to include system tiddlers (defaults to false)\n*/\nexports.forEachTiddler = function(/* [options,]callback */) {\n\tvar arg = 0,\n\t\toptions = arguments.length >= 2 ? arguments[arg++] : {},\n\t\tcallback = arguments[arg++],\n\t\ttitles = this.getTiddlers(options),\n\t\tt, tiddler;\n\tfor(t=0; t<titles.length; t++) {\n\t\ttiddler = this.getTiddler(titles[t]);\n\t\tif(tiddler) {\n\t\t\tcallback.call(this,tiddler.fields.title,tiddler);\n\t\t}\n\t}\n};\n\n/*\nReturn an array of tiddler titles that are directly linked from the specified tiddler\n*/\nexports.getTiddlerLinks = function(title) {\n\tvar self = this;\n\t// We'll cache the links so they only get computed if the tiddler changes\n\treturn this.getCacheForTiddler(title,\"links\",function() {\n\t\t// Parse the tiddler\n\t\tvar parser = self.parseTiddler(title);\n\t\t// Count up the links\n\t\tvar links = [],\n\t\t\tcheckParseTree = function(parseTree) {\n\t\t\t\tfor(var t=0; t<parseTree.length; t++) {\n\t\t\t\t\tvar parseTreeNode = parseTree[t];\n\t\t\t\t\tif(parseTreeNode.type === \"link\" && parseTreeNode.attributes.to && parseTreeNode.attributes.to.type === \"string\") {\n\t\t\t\t\t\tvar value = parseTreeNode.attributes.to.value;\n\t\t\t\t\t\tif(links.indexOf(value) === -1) {\n\t\t\t\t\t\t\tlinks.push(value);\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\tif(parseTreeNode.children) {\n\t\t\t\t\t\tcheckParseTree(parseTreeNode.children);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t};\n\t\tif(parser) {\n\t\t\tcheckParseTree(parser.tree);\n\t\t}\n\t\treturn links;\n\t});\n};\n\n/*\nReturn an array of tiddler titles that link to the specified tiddler\n*/\nexports.getTiddlerBacklinks = function(targetTitle) {\n\tvar self = this,\n\t\tbacklinks = [];\n\tthis.forEachTiddler(function(title,tiddler) {\n\t\tvar links = self.getTiddlerLinks(title);\n\t\tif(links.indexOf(targetTitle) !== -1) {\n\t\t\tbacklinks.push(title);\n\t\t}\n\t});\n\treturn backlinks;\n};\n\n/*\nReturn a hashmap of tiddler titles that are referenced but not defined. 
Each value is the number of times the missing tiddler is referenced\n*/\nexports.getMissingTitles = function() {\n\tvar self = this,\n\t\tmissing = [];\n// We should cache the missing tiddler list, even if we recreate it every time any tiddler is modified\n\tthis.forEachTiddler(function(title,tiddler) {\n\t\tvar links = self.getTiddlerLinks(title);\n\t\t$tw.utils.each(links,function(link) {\n\t\t\tif((!self.tiddlerExists(link) && !self.isShadowTiddler(link)) && missing.indexOf(link) === -1) {\n\t\t\t\tmissing.push(link);\n\t\t\t}\n\t\t});\n\t});\n\treturn missing;\n};\n\nexports.getOrphanTitles = function() {\n\tvar self = this,\n\t\torphans = this.getTiddlers();\n\tthis.forEachTiddler(function(title,tiddler) {\n\t\tvar links = self.getTiddlerLinks(title);\n\t\t$tw.utils.each(links,function(link) {\n\t\t\tvar p = orphans.indexOf(link);\n\t\t\tif(p !== -1) {\n\t\t\t\torphans.splice(p,1);\n\t\t\t}\n\t\t});\n\t});\n\treturn orphans; // Todo\n};\n\n/*\nRetrieves a list of the tiddler titles that are tagged with a given tag\n*/\nexports.getTiddlersWithTag = function(tag) {\n\tvar self = this;\n\treturn this.getGlobalCache(\"taglist-\" + tag,function() {\n\t\tvar tagmap = self.getTagMap();\n\t\treturn self.sortByList(tagmap[tag],tag);\n\t});\n};\n\n/*\nGet a hashmap by tag of arrays of tiddler titles\n*/\nexports.getTagMap = function() {\n\tvar self = this;\n\treturn this.getGlobalCache(\"tagmap\",function() {\n\t\tvar tags = Object.create(null),\n\t\t\tstoreTags = function(tagArray,title) {\n\t\t\t\tif(tagArray) {\n\t\t\t\t\tfor(var index=0; index<tagArray.length; index++) {\n\t\t\t\t\t\tvar tag = tagArray[index];\n\t\t\t\t\t\tif($tw.utils.hop(tags,tag)) {\n\t\t\t\t\t\t\ttags[tag].push(title);\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\ttags[tag] = [title];\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t},\n\t\t\ttitle, tiddler;\n\t\t// Collect up all the tags\n\t\tself.eachShadow(function(tiddler,title) {\n\t\t\tif(!self.tiddlerExists(title)) {\n\t\t\t\ttiddler = self.getTiddler(title);\n\t\t\t\tstoreTags(tiddler.fields.tags,title);\n\t\t\t}\n\t\t});\n\t\tself.each(function(tiddler,title) {\n\t\t\tstoreTags(tiddler.fields.tags,title);\n\t\t});\n\t\treturn tags;\n\t});\n};\n\n/*\nLookup a given tiddler and return a list of all the tiddlers that include it in the specified list field\n*/\nexports.findListingsOfTiddler = function(targetTitle,fieldName) {\n\tfieldName = fieldName || \"list\";\n\tvar titles = [];\n\tthis.each(function(tiddler,title) {\n\t\tvar list = $tw.utils.parseStringArray(tiddler.fields[fieldName]);\n\t\tif(list && list.indexOf(targetTitle) !== -1) {\n\t\t\ttitles.push(title);\n\t\t}\n\t});\n\treturn titles;\n};\n\n/*\nSorts an array of tiddler titles according to an ordered list\n*/\nexports.sortByList = function(array,listTitle) {\n\tvar list = this.getTiddlerList(listTitle);\n\tif(!array || array.length === 0) {\n\t\treturn [];\n\t} else {\n\t\tvar titles = [], t, title;\n\t\t// First place any entries that are present in the list\n\t\tfor(t=0; t<list.length; t++) {\n\t\t\ttitle = list[t];\n\t\t\tif(array.indexOf(title) !== -1) {\n\t\t\t\ttitles.push(title);\n\t\t\t}\n\t\t}\n\t\t// Then place any remaining entries\n\t\tfor(t=0; t<array.length; t++) {\n\t\t\ttitle = array[t];\n\t\t\tif(list.indexOf(title) === -1) {\n\t\t\t\ttitles.push(title);\n\t\t\t}\n\t\t}\n\t\t// Finally obey the list-before and list-after fields of each tiddler in turn\n\t\tvar sortedTitles = titles.slice(0);\n\t\tfor(t=0; t<sortedTitles.length; t++) {\n\t\t\ttitle = sortedTitles[t];\n\t\t\tvar currPos = 
titles.indexOf(title),\n\t\t\t\tnewPos = -1,\n\t\t\t\ttiddler = this.getTiddler(title);\n\t\t\tif(tiddler) {\n\t\t\t\tvar beforeTitle = tiddler.fields[\"list-before\"],\n\t\t\t\t\tafterTitle = tiddler.fields[\"list-after\"];\n\t\t\t\tif(beforeTitle === \"\") {\n\t\t\t\t\tnewPos = 0;\n\t\t\t\t} else if(beforeTitle) {\n\t\t\t\t\tnewPos = titles.indexOf(beforeTitle);\n\t\t\t\t} else if(afterTitle) {\n\t\t\t\t\tnewPos = titles.indexOf(afterTitle);\n\t\t\t\t\tif(newPos >= 0) {\n\t\t\t\t\t\t++newPos;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif(newPos === -1) {\n\t\t\t\t\tnewPos = currPos;\n\t\t\t\t}\n\t\t\t\tif(newPos !== currPos) {\n\t\t\t\t\ttitles.splice(currPos,1);\n\t\t\t\t\tif(newPos >= currPos) {\n\t\t\t\t\t\tnewPos--;\n\t\t\t\t\t}\n\t\t\t\t\ttitles.splice(newPos,0,title);\n\t\t\t\t}\n\t\t\t}\n\n\t\t}\n\t\treturn titles;\n\t}\n};\n\nexports.getSubTiddler = function(title,subTiddlerTitle) {\n\tvar bundleInfo = this.getPluginInfo(title) || this.getTiddlerDataCached(title);\n\tif(bundleInfo && bundleInfo.tiddlers) {\n\t\tvar subTiddler = bundleInfo.tiddlers[subTiddlerTitle];\n\t\tif(subTiddler) {\n\t\t\treturn new $tw.Tiddler(subTiddler);\n\t\t}\n\t}\n\treturn null;\n};\n\n/*\nRetrieve a tiddler as a JSON string of the fields\n*/\nexports.getTiddlerAsJson = function(title) {\n\tvar tiddler = this.getTiddler(title);\n\tif(tiddler) {\n\t\tvar fields = Object.create(null);\n\t\t$tw.utils.each(tiddler.fields,function(value,name) {\n\t\t\tfields[name] = tiddler.getFieldString(name);\n\t\t});\n\t\treturn JSON.stringify(fields);\n\t} else {\n\t\treturn JSON.stringify({title: title});\n\t}\n};\n\n/*\nGet the content of a tiddler as a JavaScript object. How this is done depends on the type of the tiddler:\n\napplication/json: the tiddler JSON is parsed into an object\napplication/x-tiddler-dictionary: the tiddler is parsed as sequence of name:value pairs\n\nOther types currently just return null.\n\ntitleOrTiddler: string tiddler title or a tiddler object\ndefaultData: default data to be returned if the tiddler is missing or doesn't contain data\n\nNote that the same value is returned for repeated calls for the same tiddler data. The value is frozen to prevent modification; otherwise modifications would be visible to all callers\n*/\nexports.getTiddlerDataCached = function(titleOrTiddler,defaultData) {\n\tvar self = this,\n\t\ttiddler = titleOrTiddler;\n\tif(!(tiddler instanceof $tw.Tiddler)) {\n\t\ttiddler = this.getTiddler(tiddler);\t\n\t}\n\tif(tiddler) {\n\t\treturn this.getCacheForTiddler(tiddler.fields.title,\"data\",function() {\n\t\t\t// Return the frozen value\n\t\t\tvar value = self.getTiddlerData(tiddler.fields.title,defaultData);\n\t\t\t$tw.utils.deepFreeze(value);\n\t\t\treturn value;\n\t\t});\n\t} else {\n\t\treturn defaultData;\n\t}\n};\n\n/*\nAlternative, uncached version of getTiddlerDataCached(). 
The return value can be mutated freely and reused\n*/\nexports.getTiddlerData = function(titleOrTiddler,defaultData) {\n\tvar tiddler = titleOrTiddler,\n\t\tdata;\n\tif(!(tiddler instanceof $tw.Tiddler)) {\n\t\ttiddler = this.getTiddler(tiddler);\t\n\t}\n\tif(tiddler && tiddler.fields.text) {\n\t\tswitch(tiddler.fields.type) {\n\t\t\tcase \"application/json\":\n\t\t\t\t// JSON tiddler\n\t\t\t\ttry {\n\t\t\t\t\tdata = JSON.parse(tiddler.fields.text);\n\t\t\t\t} catch(ex) {\n\t\t\t\t\treturn defaultData;\n\t\t\t\t}\n\t\t\t\treturn data;\n\t\t\tcase \"application/x-tiddler-dictionary\":\n\t\t\t\treturn $tw.utils.parseFields(tiddler.fields.text);\n\t\t}\n\t}\n\treturn defaultData;\n};\n\n/*\nExtract an indexed field from within a data tiddler\n*/\nexports.extractTiddlerDataItem = function(titleOrTiddler,index,defaultText) {\n\tvar data = this.getTiddlerData(titleOrTiddler,Object.create(null)),\n\t\ttext;\n\tif(data && $tw.utils.hop(data,index)) {\n\t\ttext = data[index];\n\t}\n\tif(typeof text === \"string\" || typeof text === \"number\") {\n\t\treturn text.toString();\n\t} else {\n\t\treturn defaultText;\n\t}\n};\n\n/*\nSet a tiddlers content to a JavaScript object. Currently this is done by setting the tiddler's type to \"application/json\" and setting the text to the JSON text of the data.\ntitle: title of tiddler\ndata: object that can be serialised to JSON\nfields: optional hashmap of additional tiddler fields to be set\n*/\nexports.setTiddlerData = function(title,data,fields) {\n\tvar existingTiddler = this.getTiddler(title),\n\t\tnewFields = {\n\t\t\ttitle: title\n\t};\n\tif(existingTiddler && existingTiddler.fields.type === \"application/x-tiddler-dictionary\") {\n\t\tnewFields.text = $tw.utils.makeTiddlerDictionary(data);\n\t} else {\n\t\tnewFields.type = \"application/json\";\n\t\tnewFields.text = JSON.stringify(data,null,$tw.config.preferences.jsonSpaces);\n\t}\n\tthis.addTiddler(new $tw.Tiddler(this.getCreationFields(),existingTiddler,fields,newFields,this.getModificationFields()));\n};\n\n/*\nReturn the content of a tiddler as an array containing each line\n*/\nexports.getTiddlerList = function(title,field,index) {\n\tif(index) {\n\t\treturn $tw.utils.parseStringArray(this.extractTiddlerDataItem(title,index,\"\"));\n\t}\n\tfield = field || \"list\";\n\tvar tiddler = this.getTiddler(title);\n\tif(tiddler) {\n\t\treturn ($tw.utils.parseStringArray(tiddler.fields[field]) || []).slice(0);\n\t}\n\treturn [];\n};\n\n// Return a named global cache object. Global cache objects are cleared whenever a tiddler change occurs\nexports.getGlobalCache = function(cacheName,initializer) {\n\tthis.globalCache = this.globalCache || Object.create(null);\n\tif($tw.utils.hop(this.globalCache,cacheName)) {\n\t\treturn this.globalCache[cacheName];\n\t} else {\n\t\tthis.globalCache[cacheName] = initializer();\n\t\treturn this.globalCache[cacheName];\n\t}\n};\n\nexports.clearGlobalCache = function() {\n\tthis.globalCache = Object.create(null);\n};\n\n// Return the named cache object for a tiddler. 
If the cache doesn't exist then the initializer function is invoked to create it\nexports.getCacheForTiddler = function(title,cacheName,initializer) {\n\tthis.caches = this.caches || Object.create(null);\n\tvar caches = this.caches[title];\n\tif(caches && caches[cacheName]) {\n\t\treturn caches[cacheName];\n\t} else {\n\t\tif(!caches) {\n\t\t\tcaches = Object.create(null);\n\t\t\tthis.caches[title] = caches;\n\t\t}\n\t\tcaches[cacheName] = initializer();\n\t\treturn caches[cacheName];\n\t}\n};\n\n// Clear all caches associated with a particular tiddler, or, if the title is null, clear all the caches for all the tiddlers\nexports.clearCache = function(title) {\n\tif(title) {\n\t\tthis.caches = this.caches || Object.create(null);\n\t\tif($tw.utils.hop(this.caches,title)) {\n\t\t\tdelete this.caches[title];\n\t\t}\n\t} else {\n\t\tthis.caches = Object.create(null);\n\t}\n};\n\nexports.initParsers = function(moduleType) {\n\t// Install the parser modules\n\t$tw.Wiki.parsers = {};\n\tvar self = this;\n\t$tw.modules.forEachModuleOfType(\"parser\",function(title,module) {\n\t\tfor(var f in module) {\n\t\t\tif($tw.utils.hop(module,f)) {\n\t\t\t\t$tw.Wiki.parsers[f] = module[f]; // Store the parser class\n\t\t\t}\n\t\t}\n\t});\n};\n\n/*\nParse a block of text of a specified MIME type\n\ttype: content type of text to be parsed\n\ttext: text\n\toptions: see below\nOptions include:\n\tparseAsInline: if true, the text of the tiddler will be parsed as an inline run\n\t_canonical_uri: optional string of the canonical URI of this content\n*/\nexports.parseText = function(type,text,options) {\n\ttext = text || \"\";\n\toptions = options || {};\n\t// Select a parser\n\tvar Parser = $tw.Wiki.parsers[type];\n\tif(!Parser && $tw.utils.getFileExtensionInfo(type)) {\n\t\tParser = $tw.Wiki.parsers[$tw.utils.getFileExtensionInfo(type).type];\n\t}\n\tif(!Parser) {\n\t\tParser = $tw.Wiki.parsers[options.defaultType || \"text/vnd.tiddlywiki\"];\n\t}\n\tif(!Parser) {\n\t\treturn null;\n\t}\n\t// Return the parser instance\n\treturn new Parser(type,text,{\n\t\tparseAsInline: options.parseAsInline,\n\t\twiki: this,\n\t\t_canonical_uri: options._canonical_uri\n\t});\n};\n\n/*\nParse a tiddler according to its MIME type\n*/\nexports.parseTiddler = function(title,options) {\n\toptions = $tw.utils.extend({},options);\n\tvar cacheType = options.parseAsInline ? \"inlineParseTree\" : \"blockParseTree\",\n\t\ttiddler = this.getTiddler(title),\n\t\tself = this;\n\treturn tiddler ? 
this.getCacheForTiddler(title,cacheType,function() {\n\t\t\tif(tiddler.hasField(\"_canonical_uri\")) {\n\t\t\t\toptions._canonical_uri = tiddler.fields._canonical_uri;\n\t\t\t}\n\t\t\treturn self.parseText(tiddler.fields.type,tiddler.fields.text,options);\n\t\t}) : null;\n};\n\nexports.parseTextReference = function(title,field,index,options) {\n\tvar tiddler,text;\n\tif(options.subTiddler) {\n\t\ttiddler = this.getSubTiddler(title,options.subTiddler);\n\t} else {\n\t\ttiddler = this.getTiddler(title);\n\t\tif(field === \"text\" || (!field && !index)) {\n\t\t\tthis.getTiddlerText(title); // Force the tiddler to be lazily loaded\n\t\t\treturn this.parseTiddler(title,options);\n\t\t}\n\t}\n\tif(field === \"text\" || (!field && !index)) {\n\t\tif(tiddler && tiddler.fields) {\n\t\t\treturn this.parseText(tiddler.fields.type || \"text/vnd.tiddlywiki\",tiddler.fields.text,options);\t\t\t\n\t\t} else {\n\t\t\treturn null;\n\t\t}\n\t} else if(field) {\n\t\tif(field === \"title\") {\n\t\t\ttext = title;\n\t\t} else {\n\t\t\tif(!tiddler || !tiddler.hasField(field)) {\n\t\t\t\treturn null;\n\t\t\t}\n\t\t\ttext = tiddler.fields[field];\n\t\t}\n\t\treturn this.parseText(\"text/vnd.tiddlywiki\",text.toString(),options);\n\t} else if(index) {\n\t\tthis.getTiddlerText(title); // Force the tiddler to be lazily loaded\n\t\ttext = this.extractTiddlerDataItem(tiddler,index,undefined);\n\t\tif(text === undefined) {\n\t\t\treturn null;\n\t\t}\n\t\treturn this.parseText(\"text/vnd.tiddlywiki\",text,options);\n\t}\n};\n\n/*\nMake a widget tree for a parse tree\nparser: parser object\noptions: see below\nOptions include:\ndocument: optional document to use\nvariables: hashmap of variables to set\nparentWidget: optional parent widget for the root node\n*/\nexports.makeWidget = function(parser,options) {\n\toptions = options || {};\n\tvar widgetNode = {\n\t\t\ttype: \"widget\",\n\t\t\tchildren: []\n\t\t},\n\t\tcurrWidgetNode = widgetNode;\n\t// Create set variable widgets for each variable\n\t$tw.utils.each(options.variables,function(value,name) {\n\t\tvar setVariableWidget = {\n\t\t\ttype: \"set\",\n\t\t\tattributes: {\n\t\t\t\tname: {type: \"string\", value: name},\n\t\t\t\tvalue: {type: \"string\", value: value}\n\t\t\t},\n\t\t\tchildren: []\n\t\t};\n\t\tcurrWidgetNode.children = [setVariableWidget];\n\t\tcurrWidgetNode = setVariableWidget;\n\t});\n\t// Add in the supplied parse tree nodes\n\tcurrWidgetNode.children = parser ? 
parser.tree : [];\n\t// Create the widget\n\treturn new widget.widget(widgetNode,{\n\t\twiki: this,\n\t\tdocument: options.document || $tw.fakeDocument,\n\t\tparentWidget: options.parentWidget\n\t});\n};\n\n/*\nMake a widget tree for transclusion\ntitle: target tiddler title\noptions: as for wiki.makeWidget() plus:\noptions.field: optional field to transclude (defaults to \"text\")\noptions.mode: transclusion mode \"inline\" or \"block\"\noptions.children: optional array of children for the transclude widget\n*/\nexports.makeTranscludeWidget = function(title,options) {\n\toptions = options || {};\n\tvar parseTree = {tree: [{\n\t\t\ttype: \"element\",\n\t\t\ttag: \"div\",\n\t\t\tchildren: [{\n\t\t\t\ttype: \"transclude\",\n\t\t\t\tattributes: {\n\t\t\t\t\ttiddler: {\n\t\t\t\t\t\tname: \"tiddler\",\n\t\t\t\t\t\ttype: \"string\",\n\t\t\t\t\t\tvalue: title}},\n\t\t\t\tisBlock: !options.parseAsInline}]}\n\t]};\n\tif(options.field) {\n\t\tparseTree.tree[0].children[0].attributes.field = {type: \"string\", value: options.field};\n\t}\n\tif(options.mode) {\n\t\tparseTree.tree[0].children[0].attributes.mode = {type: \"string\", value: options.mode};\n\t}\n\tif(options.children) {\n\t\tparseTree.tree[0].children[0].children = options.children;\n\t}\n\treturn $tw.wiki.makeWidget(parseTree,options);\n};\n\n/*\nParse text in a specified format and render it into another format\n\toutputType: content type for the output\n\ttextType: content type of the input text\n\ttext: input text\n\toptions: see below\nOptions include:\nvariables: hashmap of variables to set\nparentWidget: optional parent widget for the root node\n*/\nexports.renderText = function(outputType,textType,text,options) {\n\toptions = options || {};\n\tvar parser = this.parseText(textType,text,options),\n\t\twidgetNode = this.makeWidget(parser,options);\n\tvar container = $tw.fakeDocument.createElement(\"div\");\n\twidgetNode.render(container,null);\n\treturn outputType === \"text/html\" ? container.innerHTML : container.textContent;\n};\n\n/*\nParse text from a tiddler and render it into another format\n\toutputType: content type for the output\n\ttitle: title of the tiddler to be rendered\n\toptions: see below\nOptions include:\nvariables: hashmap of variables to set\nparentWidget: optional parent widget for the root node\n*/\nexports.renderTiddler = function(outputType,title,options) {\n\toptions = options || {};\n\tvar parser = this.parseTiddler(title,options),\n\t\twidgetNode = this.makeWidget(parser,options);\n\tvar container = $tw.fakeDocument.createElement(\"div\");\n\twidgetNode.render(container,null);\n\treturn outputType === \"text/html\" ? container.innerHTML : (outputType === \"text/plain-formatted\" ? 
container.formattedTextContent : container.textContent);\n};\n\n/*\nReturn an array of tiddler titles that match a search string\n\ttext: The text string to search for\n\toptions: see below\nOptions available:\n\tsource: an iterator function for the source tiddlers, called source(iterator), where iterator is called as iterator(tiddler,title)\n\texclude: An array of tiddler titles to exclude from the search\n\tinvert: If true returns tiddlers that do not contain the specified string\n\tcaseSensitive: If true forces a case sensitive search\n\tliteral: If true, searches for literal string, rather than separate search terms\n\tfield: If specified, restricts the search to the specified field\n*/\nexports.search = function(text,options) {\n\toptions = options || {};\n\tvar self = this,\n\t\tt,\n\t\tinvert = !!options.invert;\n\t// Convert the search string into a regexp for each term\n\tvar terms, searchTermsRegExps,\n\t\tflags = options.caseSensitive ? \"\" : \"i\";\n\tif(options.literal) {\n\t\tif(text.length === 0) {\n\t\t\tsearchTermsRegExps = null;\n\t\t} else {\n\t\t\tsearchTermsRegExps = [new RegExp(\"(\" + $tw.utils.escapeRegExp(text) + \")\",flags)];\n\t\t}\n\t} else {\n\t\tterms = text.split(/ +/);\n\t\tif(terms.length === 1 && terms[0] === \"\") {\n\t\t\tsearchTermsRegExps = null;\n\t\t} else {\n\t\t\tsearchTermsRegExps = [];\n\t\t\tfor(t=0; t<terms.length; t++) {\n\t\t\t\tsearchTermsRegExps.push(new RegExp(\"(\" + $tw.utils.escapeRegExp(terms[t]) + \")\",flags));\n\t\t\t}\n\t\t}\n\t}\n\t// Function to check a given tiddler for the search term\n\tvar searchTiddler = function(title) {\n\t\tif(!searchTermsRegExps) {\n\t\t\treturn true;\n\t\t}\n\t\tvar tiddler = self.getTiddler(title);\n\t\tif(!tiddler) {\n\t\t\ttiddler = new $tw.Tiddler({title: title, text: \"\", type: \"text/vnd.tiddlywiki\"});\n\t\t}\n\t\tvar contentTypeInfo = $tw.config.contentTypeInfo[tiddler.fields.type] || $tw.config.contentTypeInfo[\"text/vnd.tiddlywiki\"],\n\t\t\tmatch;\n\t\tfor(var t=0; t<searchTermsRegExps.length; t++) {\n\t\t\tmatch = false;\n\t\t\tif(options.field) {\n\t\t\t\tmatch = searchTermsRegExps[t].test(tiddler.getFieldString(options.field));\n\t\t\t} else {\n\t\t\t\t// Search title, tags and body\n\t\t\t\tif(contentTypeInfo.encoding === \"utf8\") {\n\t\t\t\t\tmatch = match || searchTermsRegExps[t].test(tiddler.fields.text);\n\t\t\t\t}\n\t\t\t\tvar tags = tiddler.fields.tags ? tiddler.fields.tags.join(\"\\0\") : \"\";\n\t\t\t\tmatch = match || searchTermsRegExps[t].test(tags) || searchTermsRegExps[t].test(tiddler.fields.title);\n\t\t\t}\n\t\t\tif(!match) {\n\t\t\t\treturn false;\n\t\t\t}\n\t\t}\n\t\treturn true;\n\t};\n\t// Loop through all the tiddlers doing the search\n\tvar results = [],\n\t\tsource = options.source || this.each;\n\tsource(function(tiddler,title) {\n\t\tif(searchTiddler(title) !== options.invert) {\n\t\t\tresults.push(title);\n\t\t}\n\t});\n\t// Remove any of the results we have to exclude\n\tif(options.exclude) {\n\t\tfor(t=0; t<options.exclude.length; t++) {\n\t\t\tvar p = results.indexOf(options.exclude[t]);\n\t\t\tif(p !== -1) {\n\t\t\t\tresults.splice(p,1);\n\t\t\t}\n\t\t}\n\t}\n\treturn results;\n};\n\n/*\nTrigger a load for a tiddler if it is skinny. 
Returns the text, or undefined if the tiddler is missing, null if the tiddler is being lazily loaded.\n*/\nexports.getTiddlerText = function(title,defaultText) {\n\tvar tiddler = this.getTiddler(title);\n\t// Return undefined if the tiddler isn't found\n\tif(!tiddler) {\n\t\treturn defaultText;\n\t}\n\tif(tiddler.fields.text !== undefined) {\n\t\t// Just return the text if we've got it\n\t\treturn tiddler.fields.text;\n\t} else {\n\t\t// Tell any listeners about the need to lazily load this tiddler\n\t\tthis.dispatchEvent(\"lazyLoad\",title);\n\t\t// Indicate that the text is being loaded\n\t\treturn null;\n\t}\n};\n\n/*\nRead an array of browser File objects, invoking callback(tiddlerFieldsArray) once they're all read\n*/\nexports.readFiles = function(files,callback) {\n\tvar result = [],\n\t\toutstanding = files.length;\n\tfor(var f=0; f<files.length; f++) {\n\t\tthis.readFile(files[f],function(tiddlerFieldsArray) {\n\t\t\tresult.push.apply(result,tiddlerFieldsArray);\n\t\t\tif(--outstanding === 0) {\n\t\t\t\tcallback(result);\n\t\t\t}\n\t\t});\n\t}\n\treturn files.length;\n};\n\n/*\nRead a browser File object, invoking callback(tiddlerFieldsArray) with an array of tiddler fields objects\n*/\nexports.readFile = function(file,callback) {\n\t// Get the type, falling back to the filename extension\n\tvar self = this,\n\t\ttype = file.type;\n\tif(type === \"\" || !type) {\n\t\tvar dotPos = file.name.lastIndexOf(\".\");\n\t\tif(dotPos !== -1) {\n\t\t\tvar fileExtensionInfo = $tw.utils.getFileExtensionInfo(file.name.substr(dotPos));\n\t\t\tif(fileExtensionInfo) {\n\t\t\t\ttype = fileExtensionInfo.type;\n\t\t\t}\n\t\t}\n\t}\n\t// Figure out if we're reading a binary file\n\tvar contentTypeInfo = $tw.config.contentTypeInfo[type],\n\t\tisBinary = contentTypeInfo ? 
contentTypeInfo.encoding === \"base64\" : false;\n\t// Log some debugging information\n\tif($tw.log.IMPORT) {\n\t\tconsole.log(\"Importing file '\" + file.name + \"', type: '\" + type + \"', isBinary: \" + isBinary);\n\t}\n\t// Create the FileReader\n\tvar reader = new FileReader();\n\t// Onload\n\treader.onload = function(event) {\n\t\t// Deserialise the file contents\n\t\tvar text = event.target.result,\n\t\t\ttiddlerFields = {title: file.name || \"Untitled\", type: type};\n\t\t// Are we binary?\n\t\tif(isBinary) {\n\t\t\t// The base64 section starts after the first comma in the data URI\n\t\t\tvar commaPos = text.indexOf(\",\");\n\t\t\tif(commaPos !== -1) {\n\t\t\t\ttiddlerFields.text = text.substr(commaPos+1);\n\t\t\t\tcallback([tiddlerFields]);\n\t\t\t}\n\t\t} else {\n\t\t\t// Check whether this is an encrypted TiddlyWiki file\n\t\t\tvar encryptedJson = $tw.utils.extractEncryptedStoreArea(text);\n\t\t\tif(encryptedJson) {\n\t\t\t\t// If so, attempt to decrypt it with the current password\n\t\t\t\t$tw.utils.decryptStoreAreaInteractive(encryptedJson,function(tiddlers) {\n\t\t\t\t\tcallback(tiddlers);\n\t\t\t\t});\n\t\t\t} else {\n\t\t\t\t// Otherwise, just try to deserialise any tiddlers in the file\n\t\t\t\tcallback(self.deserializeTiddlers(type,text,tiddlerFields));\n\t\t\t}\n\t\t}\n\t};\n\t// Kick off the read\n\tif(isBinary) {\n\t\treader.readAsDataURL(file);\n\t} else {\n\t\treader.readAsText(file);\n\t}\n};\n\n/*\nFind any existing draft of a specified tiddler\n*/\nexports.findDraft = function(targetTitle) {\n\tvar draftTitle = undefined;\n\tthis.forEachTiddler({includeSystem: true},function(title,tiddler) {\n\t\tif(tiddler.fields[\"draft.title\"] && tiddler.fields[\"draft.of\"] === targetTitle) {\n\t\t\tdraftTitle = title;\n\t\t}\n\t});\n\treturn draftTitle;\n}\n\n/*\nCheck whether the specified draft tiddler has been modified.\nIf the original tiddler doesn't exist, create a vanilla tiddler variable,\nto check if additional fields have been added.\n*/\nexports.isDraftModified = function(title) {\n\tvar tiddler = this.getTiddler(title);\n\tif(!tiddler.isDraft()) {\n\t\treturn false;\n\t}\n\tvar ignoredFields = [\"created\", \"modified\", \"title\", \"draft.title\", \"draft.of\"],\n\t\torigTiddler = this.getTiddler(tiddler.fields[\"draft.of\"]) || new $tw.Tiddler({text:\"\", tags:[]}),\n\t\ttitleModified = tiddler.fields[\"draft.title\"] !== tiddler.fields[\"draft.of\"];\n\treturn titleModified || !tiddler.isEqual(origTiddler,ignoredFields);\n};\n\n/*\nAdd a new record to the top of the history stack\ntitle: a title string or an array of title strings\nfromPageRect: page coordinates of the origin of the navigation\nhistoryTitle: title of history tiddler (defaults to $:/HistoryList)\n*/\nexports.addToHistory = function(title,fromPageRect,historyTitle) {\n\tvar story = new $tw.Story({wiki: this, historyTitle: historyTitle});\n\tstory.addToHistory(title,fromPageRect);\n};\n\n/*\nInvoke the available upgrader modules\ntitles: array of tiddler titles to be processed\ntiddlers: hashmap by title of tiddler fields of pending import tiddlers. These can be modified by the upgraders. An entry with no fields indicates a tiddler that was pending import has been suppressed. 
When entries are added to the pending import the tiddlers hashmap may have entries that are not present in the titles array\nReturns a hashmap of messages keyed by tiddler title.\n*/\nexports.invokeUpgraders = function(titles,tiddlers) {\n\t// Collect up the available upgrader modules\n\tvar self = this;\n\tif(!this.upgraderModules) {\n\t\tthis.upgraderModules = [];\n\t\t$tw.modules.forEachModuleOfType(\"upgrader\",function(title,module) {\n\t\t\tif(module.upgrade) {\n\t\t\t\tself.upgraderModules.push(module);\n\t\t\t}\n\t\t});\n\t}\n\t// Invoke each upgrader in turn\n\tvar messages = {};\n\tfor(var t=0; t<this.upgraderModules.length; t++) {\n\t\tvar upgrader = this.upgraderModules[t],\n\t\t\tupgraderMessages = upgrader.upgrade(this,titles,tiddlers);\n\t\t$tw.utils.extend(messages,upgraderMessages);\n\t}\n\treturn messages;\n};\n\n})();\n",
"title": "$:/core/modules/wiki.js",
"type": "application/javascript",
"module-type": "wikimethod"
},
"$:/palettes/Blanca": {
"title": "$:/palettes/Blanca",
"name": "Blanca",
"description": "A clean white palette to let you focus",
"tags": "$:/tags/Palette",
"type": "application/x-tiddler-dictionary",
"text": "alert-background: #ffe476\nalert-border: #b99e2f\nalert-highlight: #881122\nalert-muted-foreground: #b99e2f\nbackground: #ffffff\nblockquote-bar: <<colour muted-foreground>>\nbutton-background:\nbutton-foreground:\nbutton-border:\ncode-background: #f7f7f9\ncode-border: #e1e1e8\ncode-foreground: #dd1144\ndirty-indicator: #ff0000\ndownload-background: #66cccc\ndownload-foreground: <<colour background>>\ndragger-background: <<colour foreground>>\ndragger-foreground: <<colour background>>\ndropdown-background: <<colour background>>\ndropdown-border: <<colour muted-foreground>>\ndropdown-tab-background-selected: #fff\ndropdown-tab-background: #ececec\ndropzone-background: rgba(0,200,0,0.7)\nexternal-link-background-hover: inherit\nexternal-link-background-visited: inherit\nexternal-link-background: inherit\nexternal-link-foreground-hover: inherit\nexternal-link-foreground-visited: #0000aa\nexternal-link-foreground: #0000ee\nforeground: #333333\nmessage-background: #ecf2ff\nmessage-border: #cfd6e6\nmessage-foreground: #547599\nmodal-backdrop: <<colour foreground>>\nmodal-background: <<colour background>>\nmodal-border: #999999\nmodal-footer-background: #f5f5f5\nmodal-footer-border: #dddddd\nmodal-header-border: #eeeeee\nmuted-foreground: #999999\nnotification-background: #ffffdd\nnotification-border: #999999\npage-background: #ffffff\npre-background: #f5f5f5\npre-border: #cccccc\nprimary: #7897f3\nsidebar-button-foreground: <<colour foreground>>\nsidebar-controls-foreground-hover: #000000\nsidebar-controls-foreground: #ccc\nsidebar-foreground-shadow: rgba(255,255,255, 0.8)\nsidebar-foreground: #acacac\nsidebar-muted-foreground-hover: #444444\nsidebar-muted-foreground: #c0c0c0\nsidebar-tab-background-selected: #ffffff\nsidebar-tab-background: <<colour tab-background>>\nsidebar-tab-border-selected: <<colour tab-border-selected>>\nsidebar-tab-border: <<colour tab-border>>\nsidebar-tab-divider: <<colour tab-divider>>\nsidebar-tab-foreground-selected: \nsidebar-tab-foreground: <<colour tab-foreground>>\nsidebar-tiddler-link-foreground-hover: #444444\nsidebar-tiddler-link-foreground: #7897f3\nsite-title-foreground: <<colour tiddler-title-foreground>>\nstatic-alert-foreground: #aaaaaa\ntab-background-selected: #ffffff\ntab-background: #eeeeee\ntab-border-selected: #cccccc\ntab-border: #cccccc\ntab-divider: #d8d8d8\ntab-foreground-selected: <<colour tab-foreground>>\ntab-foreground: #666666\ntable-border: #dddddd\ntable-footer-background: #a8a8a8\ntable-header-background: #f0f0f0\ntag-background: #ffeedd\ntag-foreground: #000\ntiddler-background: <<colour background>>\ntiddler-border: #eee\ntiddler-controls-foreground-hover: #888888\ntiddler-controls-foreground-selected: #444444\ntiddler-controls-foreground: #cccccc\ntiddler-editor-background: #f8f8f8\ntiddler-editor-border-image: #ffffff\ntiddler-editor-border: #cccccc\ntiddler-editor-fields-even: #e0e8e0\ntiddler-editor-fields-odd: #f0f4f0\ntiddler-info-background: #f8f8f8\ntiddler-info-border: #dddddd\ntiddler-info-tab-background: #f8f8f8\ntiddler-link-background: <<colour background>>\ntiddler-link-foreground: <<colour primary>>\ntiddler-subtitle-foreground: #c0c0c0\ntiddler-title-foreground: #ff9900\ntoolbar-new-button:\ntoolbar-options-button:\ntoolbar-save-button:\ntoolbar-info-button:\ntoolbar-edit-button:\ntoolbar-close-button:\ntoolbar-delete-button:\ntoolbar-cancel-button:\ntoolbar-done-button:\nuntagged-background: #999999\nvery-muted-foreground: #888888\n"
},
"$:/palettes/Blue": {
"title": "$:/palettes/Blue",
"name": "Blue",
"description": "A blue theme",
"tags": "$:/tags/Palette",
"type": "application/x-tiddler-dictionary",
"text": "alert-background: #ffe476\nalert-border: #b99e2f\nalert-highlight: #881122\nalert-muted-foreground: #b99e2f\nbackground: #fff\nblockquote-bar: <<colour muted-foreground>>\nbutton-background:\nbutton-foreground:\nbutton-border:\ncode-background: #f7f7f9\ncode-border: #e1e1e8\ncode-foreground: #dd1144\ndirty-indicator: #ff0000\ndownload-background: #34c734\ndownload-foreground: <<colour foreground>>\ndragger-background: <<colour foreground>>\ndragger-foreground: <<colour background>>\ndropdown-background: <<colour background>>\ndropdown-border: <<colour muted-foreground>>\ndropdown-tab-background-selected: #fff\ndropdown-tab-background: #ececec\ndropzone-background: rgba(0,200,0,0.7)\nexternal-link-background-hover: inherit\nexternal-link-background-visited: inherit\nexternal-link-background: inherit\nexternal-link-foreground-hover: inherit\nexternal-link-foreground-visited: #0000aa\nexternal-link-foreground: #0000ee\nforeground: #333353\nmessage-background: #ecf2ff\nmessage-border: #cfd6e6\nmessage-foreground: #547599\nmodal-backdrop: <<colour foreground>>\nmodal-background: <<colour background>>\nmodal-border: #999999\nmodal-footer-background: #f5f5f5\nmodal-footer-border: #dddddd\nmodal-header-border: #eeeeee\nmuted-foreground: #999999\nnotification-background: #ffffdd\nnotification-border: #999999\npage-background: #ddddff\npre-background: #f5f5f5\npre-border: #cccccc\nprimary: #5778d8\nsidebar-button-foreground: <<colour foreground>>\nsidebar-controls-foreground-hover: #000000\nsidebar-controls-foreground: #ffffff\nsidebar-foreground-shadow: rgba(255,255,255, 0.8)\nsidebar-foreground: #acacac\nsidebar-muted-foreground-hover: #444444\nsidebar-muted-foreground: #c0c0c0\nsidebar-tab-background-selected: <<colour page-background>>\nsidebar-tab-background: <<colour tab-background>>\nsidebar-tab-border-selected: <<colour tab-border-selected>>\nsidebar-tab-border: <<colour tab-border>>\nsidebar-tab-divider: <<colour tab-divider>>\nsidebar-tab-foreground-selected: \nsidebar-tab-foreground: <<colour tab-foreground>>\nsidebar-tiddler-link-foreground-hover: #444444\nsidebar-tiddler-link-foreground: #5959c0\nsite-title-foreground: <<colour tiddler-title-foreground>>\nstatic-alert-foreground: #aaaaaa\ntab-background-selected: <<colour background>>\ntab-background: #ccccdd\ntab-border-selected: #ccccdd\ntab-border: #cccccc\ntab-divider: #d8d8d8\ntab-foreground-selected: <<colour tab-foreground>>\ntab-foreground: #666666\ntable-border: #dddddd\ntable-footer-background: #a8a8a8\ntable-header-background: #f0f0f0\ntag-background: #eeeeff\ntag-foreground: #000\ntiddler-background: <<colour background>>\ntiddler-border: <<colour background>>\ntiddler-controls-foreground-hover: #666666\ntiddler-controls-foreground-selected: #444444\ntiddler-controls-foreground: #cccccc\ntiddler-editor-background: #f8f8f8\ntiddler-editor-border-image: #ffffff\ntiddler-editor-border: #cccccc\ntiddler-editor-fields-even: #e0e8e0\ntiddler-editor-fields-odd: #f0f4f0\ntiddler-info-background: #ffffff\ntiddler-info-border: #dddddd\ntiddler-info-tab-background: #ffffff\ntiddler-link-background: <<colour background>>\ntiddler-link-foreground: <<colour primary>>\ntiddler-subtitle-foreground: #c0c0c0\ntiddler-title-foreground: #5959c0\ntoolbar-new-button: #5eb95e\ntoolbar-options-button: rgb(128, 88, 165)\ntoolbar-save-button: #0e90d2\ntoolbar-info-button: #0e90d2\ntoolbar-edit-button: rgb(243, 123, 29)\ntoolbar-close-button: #dd514c\ntoolbar-delete-button: #dd514c\ntoolbar-cancel-button: rgb(243, 123, 
29)\ntoolbar-done-button: #5eb95e\nuntagged-background: #999999\nvery-muted-foreground: #888888\n"
},
"$:/palettes/Muted": {
"title": "$:/palettes/Muted",
"name": "Muted",
"description": "Bright tiddlers on a muted background",
"tags": "$:/tags/Palette",
"type": "application/x-tiddler-dictionary",
"text": "alert-background: #ffe476\nalert-border: #b99e2f\nalert-highlight: #881122\nalert-muted-foreground: #b99e2f\nbackground: #ffffff\nblockquote-bar: <<colour muted-foreground>>\nbutton-background:\nbutton-foreground:\nbutton-border:\ncode-background: #f7f7f9\ncode-border: #e1e1e8\ncode-foreground: #dd1144\ndirty-indicator: #ff0000\ndownload-background: #34c734\ndownload-foreground: <<colour background>>\ndragger-background: <<colour foreground>>\ndragger-foreground: <<colour background>>\ndropdown-background: <<colour background>>\ndropdown-border: <<colour muted-foreground>>\ndropdown-tab-background-selected: #fff\ndropdown-tab-background: #ececec\ndropzone-background: rgba(0,200,0,0.7)\nexternal-link-background-hover: inherit\nexternal-link-background-visited: inherit\nexternal-link-background: inherit\nexternal-link-foreground-hover: inherit\nexternal-link-foreground-visited: #0000aa\nexternal-link-foreground: #0000ee\nforeground: #333333\nmessage-background: #ecf2ff\nmessage-border: #cfd6e6\nmessage-foreground: #547599\nmodal-backdrop: <<colour foreground>>\nmodal-background: <<colour background>>\nmodal-border: #999999\nmodal-footer-background: #f5f5f5\nmodal-footer-border: #dddddd\nmodal-header-border: #eeeeee\nmuted-foreground: #bbb\nnotification-background: #ffffdd\nnotification-border: #999999\npage-background: #6f6f70\npre-background: #f5f5f5\npre-border: #cccccc\nprimary: #29a6ee\nsidebar-button-foreground: <<colour foreground>>\nsidebar-controls-foreground-hover: #000000\nsidebar-controls-foreground: #c2c1c2\nsidebar-foreground-shadow: rgba(255,255,255,0)\nsidebar-foreground: #d3d2d4\nsidebar-muted-foreground-hover: #444444\nsidebar-muted-foreground: #c0c0c0\nsidebar-tab-background-selected: #6f6f70\nsidebar-tab-background: #666667\nsidebar-tab-border-selected: #999\nsidebar-tab-border: #515151\nsidebar-tab-divider: #999\nsidebar-tab-foreground-selected: \nsidebar-tab-foreground: #999\nsidebar-tiddler-link-foreground-hover: #444444\nsidebar-tiddler-link-foreground: #d1d0d2\nsite-title-foreground: <<colour tiddler-title-foreground>>\nstatic-alert-foreground: #aaaaaa\ntab-background-selected: #ffffff\ntab-background: #d8d8d8\ntab-border-selected: #d8d8d8\ntab-border: #cccccc\ntab-divider: #d8d8d8\ntab-foreground-selected: <<colour tab-foreground>>\ntab-foreground: #666666\ntable-border: #dddddd\ntable-footer-background: #a8a8a8\ntable-header-background: #f0f0f0\ntag-background: #d5ad34\ntag-foreground: #ffffff\ntiddler-background: <<colour background>>\ntiddler-border: <<colour background>>\ntiddler-controls-foreground-hover: #888888\ntiddler-controls-foreground-selected: #444444\ntiddler-controls-foreground: #cccccc\ntiddler-editor-background: #f8f8f8\ntiddler-editor-border-image: #ffffff\ntiddler-editor-border: #cccccc\ntiddler-editor-fields-even: #e0e8e0\ntiddler-editor-fields-odd: #f0f4f0\ntiddler-info-background: #f8f8f8\ntiddler-info-border: #dddddd\ntiddler-info-tab-background: #f8f8f8\ntiddler-link-background: <<colour background>>\ntiddler-link-foreground: <<colour primary>>\ntiddler-subtitle-foreground: #c0c0c0\ntiddler-title-foreground: #182955\ntoolbar-new-button: \ntoolbar-options-button: \ntoolbar-save-button: \ntoolbar-info-button: \ntoolbar-edit-button: \ntoolbar-close-button: \ntoolbar-delete-button: \ntoolbar-cancel-button: \ntoolbar-done-button: \nuntagged-background: #999999\nvery-muted-foreground: #888888\n"
},
"$:/palettes/ContrastLight": {
"title": "$:/palettes/ContrastLight",
"name": "Contrast (Light)",
"description": "High contrast and unambiguous (light version)",
"tags": "$:/tags/Palette",
"type": "application/x-tiddler-dictionary",
"text": "alert-background: #f00\nalert-border: <<colour background>>\nalert-highlight: <<colour foreground>>\nalert-muted-foreground: #800\nbackground: #fff\nblockquote-bar: <<colour muted-foreground>>\nbutton-background: <<colour background>>\nbutton-foreground: <<colour foreground>>\nbutton-border: <<colour foreground>>\ncode-background: <<colour background>>\ncode-border: <<colour foreground>>\ncode-foreground: <<colour foreground>>\ndirty-indicator: #f00\ndownload-background: #080\ndownload-foreground: <<colour background>>\ndragger-background: <<colour foreground>>\ndragger-foreground: <<colour background>>\ndropdown-background: <<colour background>>\ndropdown-border: <<colour muted-foreground>>\ndropdown-tab-background-selected: <<colour foreground>>\ndropdown-tab-background: <<colour foreground>>\ndropzone-background: rgba(0,200,0,0.7)\nexternal-link-background-hover: inherit\nexternal-link-background-visited: inherit\nexternal-link-background: inherit\nexternal-link-foreground-hover: inherit\nexternal-link-foreground-visited: #00a\nexternal-link-foreground: #00e\nforeground: #000\nmessage-background: <<colour foreground>>\nmessage-border: <<colour background>>\nmessage-foreground: <<colour background>>\nmodal-backdrop: <<colour foreground>>\nmodal-background: <<colour background>>\nmodal-border: <<colour foreground>>\nmodal-footer-background: <<colour background>>\nmodal-footer-border: <<colour foreground>>\nmodal-header-border: <<colour foreground>>\nmuted-foreground: <<colour foreground>>\nnotification-background: <<colour background>>\nnotification-border: <<colour foreground>>\npage-background: <<colour background>>\npre-background: <<colour background>>\npre-border: <<colour foreground>>\nprimary: #00f\nsidebar-button-foreground: <<colour foreground>>\nsidebar-controls-foreground-hover: <<colour background>>\nsidebar-controls-foreground: <<colour foreground>>\nsidebar-foreground-shadow: rgba(0,0,0, 0)\nsidebar-foreground: <<colour foreground>>\nsidebar-muted-foreground-hover: #444444\nsidebar-muted-foreground: <<colour foreground>>\nsidebar-tab-background-selected: <<colour background>>\nsidebar-tab-background: <<colour tab-background>>\nsidebar-tab-border-selected: <<colour tab-border-selected>>\nsidebar-tab-border: <<colour tab-border>>\nsidebar-tab-divider: <<colour tab-divider>>\nsidebar-tab-foreground-selected: <<colour foreground>>\nsidebar-tab-foreground: <<colour tab-foreground>>\nsidebar-tiddler-link-foreground-hover: <<colour foreground>>\nsidebar-tiddler-link-foreground: <<colour primary>>\nsite-title-foreground: <<colour tiddler-title-foreground>>\nstatic-alert-foreground: #aaaaaa\ntab-background-selected: <<colour background>>\ntab-background: <<colour foreground>>\ntab-border-selected: <<colour foreground>>\ntab-border: <<colour foreground>>\ntab-divider: <<colour foreground>>\ntab-foreground-selected: <<colour foreground>>\ntab-foreground: <<colour background>>\ntable-border: #dddddd\ntable-footer-background: #a8a8a8\ntable-header-background: #f0f0f0\ntag-background: #000\ntag-foreground: #fff\ntiddler-background: <<colour background>>\ntiddler-border: <<colour foreground>>\ntiddler-controls-foreground-hover: #ddd\ntiddler-controls-foreground-selected: #fdd\ntiddler-controls-foreground: <<colour foreground>>\ntiddler-editor-background: <<colour background>>\ntiddler-editor-border-image: <<colour foreground>>\ntiddler-editor-border: #cccccc\ntiddler-editor-fields-even: <<colour background>>\ntiddler-editor-fields-odd: <<colour 
background>>\ntiddler-info-background: <<colour background>>\ntiddler-info-border: <<colour foreground>>\ntiddler-info-tab-background: <<colour background>>\ntiddler-link-background: <<colour background>>\ntiddler-link-foreground: <<colour primary>>\ntiddler-subtitle-foreground: <<colour foreground>>\ntiddler-title-foreground: <<colour foreground>>\ntoolbar-new-button: \ntoolbar-options-button: \ntoolbar-save-button: \ntoolbar-info-button: \ntoolbar-edit-button: \ntoolbar-close-button: \ntoolbar-delete-button: \ntoolbar-cancel-button: \ntoolbar-done-button: \nuntagged-background: <<colour foreground>>\nvery-muted-foreground: #888888\n"
},
"$:/palettes/ContrastDark": {
"title": "$:/palettes/ContrastDark",
"name": "Contrast (Dark)",
"description": "High contrast and unambiguous (dark version)",
"tags": "$:/tags/Palette",
"type": "application/x-tiddler-dictionary",
"text": "alert-background: #f00\nalert-border: <<colour background>>\nalert-highlight: <<colour foreground>>\nalert-muted-foreground: #800\nbackground: #000\nblockquote-bar: <<colour muted-foreground>>\nbutton-background: <<colour background>>\nbutton-foreground: <<colour foreground>>\nbutton-border: <<colour foreground>>\ncode-background: <<colour background>>\ncode-border: <<colour foreground>>\ncode-foreground: <<colour foreground>>\ndirty-indicator: #f00\ndownload-background: #080\ndownload-foreground: <<colour background>>\ndragger-background: <<colour foreground>>\ndragger-foreground: <<colour background>>\ndropdown-background: <<colour background>>\ndropdown-border: <<colour muted-foreground>>\ndropdown-tab-background-selected: <<colour foreground>>\ndropdown-tab-background: <<colour foreground>>\ndropzone-background: rgba(0,200,0,0.7)\nexternal-link-background-hover: inherit\nexternal-link-background-visited: inherit\nexternal-link-background: inherit\nexternal-link-foreground-hover: inherit\nexternal-link-foreground-visited: #00a\nexternal-link-foreground: #00e\nforeground: #fff\nmessage-background: <<colour foreground>>\nmessage-border: <<colour background>>\nmessage-foreground: <<colour background>>\nmodal-backdrop: <<colour foreground>>\nmodal-background: <<colour background>>\nmodal-border: <<colour foreground>>\nmodal-footer-background: <<colour background>>\nmodal-footer-border: <<colour foreground>>\nmodal-header-border: <<colour foreground>>\nmuted-foreground: <<colour foreground>>\nnotification-background: <<colour background>>\nnotification-border: <<colour foreground>>\npage-background: <<colour background>>\npre-background: <<colour background>>\npre-border: <<colour foreground>>\nprimary: #00f\nsidebar-button-foreground: <<colour foreground>>\nsidebar-controls-foreground-hover: <<colour background>>\nsidebar-controls-foreground: <<colour foreground>>\nsidebar-foreground-shadow: rgba(0,0,0, 0)\nsidebar-foreground: <<colour foreground>>\nsidebar-muted-foreground-hover: #444444\nsidebar-muted-foreground: <<colour foreground>>\nsidebar-tab-background-selected: <<colour background>>\nsidebar-tab-background: <<colour tab-background>>\nsidebar-tab-border-selected: <<colour tab-border-selected>>\nsidebar-tab-border: <<colour tab-border>>\nsidebar-tab-divider: <<colour tab-divider>>\nsidebar-tab-foreground-selected: <<colour foreground>>\nsidebar-tab-foreground: <<colour tab-foreground>>\nsidebar-tiddler-link-foreground-hover: <<colour foreground>>\nsidebar-tiddler-link-foreground: <<colour primary>>\nsite-title-foreground: <<colour tiddler-title-foreground>>\nstatic-alert-foreground: #aaaaaa\ntab-background-selected: <<colour background>>\ntab-background: <<colour foreground>>\ntab-border-selected: <<colour foreground>>\ntab-border: <<colour foreground>>\ntab-divider: <<colour foreground>>\ntab-foreground-selected: <<colour foreground>>\ntab-foreground: <<colour background>>\ntable-border: #dddddd\ntable-footer-background: #a8a8a8\ntable-header-background: #f0f0f0\ntag-background: #fff\ntag-foreground: #000\ntiddler-background: <<colour background>>\ntiddler-border: <<colour foreground>>\ntiddler-controls-foreground-hover: #ddd\ntiddler-controls-foreground-selected: #fdd\ntiddler-controls-foreground: <<colour foreground>>\ntiddler-editor-background: <<colour background>>\ntiddler-editor-border-image: <<colour foreground>>\ntiddler-editor-border: #cccccc\ntiddler-editor-fields-even: <<colour background>>\ntiddler-editor-fields-odd: <<colour 
background>>\ntiddler-info-background: <<colour background>>\ntiddler-info-border: <<colour foreground>>\ntiddler-info-tab-background: <<colour background>>\ntiddler-link-background: <<colour background>>\ntiddler-link-foreground: <<colour primary>>\ntiddler-subtitle-foreground: <<colour foreground>>\ntiddler-title-foreground: <<colour foreground>>\ntoolbar-new-button: \ntoolbar-options-button: \ntoolbar-save-button: \ntoolbar-info-button: \ntoolbar-edit-button: \ntoolbar-close-button: \ntoolbar-delete-button: \ntoolbar-cancel-button: \ntoolbar-done-button: \nuntagged-background: <<colour foreground>>\nvery-muted-foreground: #888888\n"
},
"$:/palettes/DarkPhotos": {
"created": "20150402111612188",
"description": "Good with dark photo backgrounds",
"modified": "20150402112344080",
"name": "DarkPhotos",
"tags": "$:/tags/Palette",
"title": "$:/palettes/DarkPhotos",
"type": "application/x-tiddler-dictionary",
"text": "alert-background: #ffe476\nalert-border: #b99e2f\nalert-highlight: #881122\nalert-muted-foreground: #b99e2f\nbackground: #ffffff\nblockquote-bar: <<colour muted-foreground>>\nbutton-background: \nbutton-foreground: \nbutton-border: \ncode-background: #f7f7f9\ncode-border: #e1e1e8\ncode-foreground: #dd1144\ndirty-indicator: #ff0000\ndownload-background: #34c734\ndownload-foreground: <<colour background>>\ndragger-background: <<colour foreground>>\ndragger-foreground: <<colour background>>\ndropdown-background: <<colour background>>\ndropdown-border: <<colour muted-foreground>>\ndropdown-tab-background-selected: #fff\ndropdown-tab-background: #ececec\ndropzone-background: rgba(0,200,0,0.7)\nexternal-link-background-hover: inherit\nexternal-link-background-visited: inherit\nexternal-link-background: inherit\nexternal-link-foreground-hover: inherit\nexternal-link-foreground-visited: #0000aa\nexternal-link-foreground: #0000ee\nforeground: #333333\nmessage-background: #ecf2ff\nmessage-border: #cfd6e6\nmessage-foreground: #547599\nmodal-backdrop: <<colour foreground>>\nmodal-background: <<colour background>>\nmodal-border: #999999\nmodal-footer-background: #f5f5f5\nmodal-footer-border: #dddddd\nmodal-header-border: #eeeeee\nmuted-foreground: #ddd\nnotification-background: #ffffdd\nnotification-border: #999999\npage-background: #336438\npre-background: #f5f5f5\npre-border: #cccccc\nprimary: #5778d8\nsidebar-button-foreground: <<colour foreground>>\nsidebar-controls-foreground-hover: #ccf\nsidebar-controls-foreground: #fff\nsidebar-foreground-shadow: rgba(0,0,0, 0.5)\nsidebar-foreground: #fff\nsidebar-muted-foreground-hover: #444444\nsidebar-muted-foreground: #eee\nsidebar-tab-background-selected: rgba(255,255,255, 0.8)\nsidebar-tab-background: rgba(255,255,255, 0.4)\nsidebar-tab-border-selected: <<colour tab-border-selected>>\nsidebar-tab-border: <<colour tab-border>>\nsidebar-tab-divider: rgba(255,255,255, 0.2)\nsidebar-tab-foreground-selected: \nsidebar-tab-foreground: <<colour tab-foreground>>\nsidebar-tiddler-link-foreground-hover: #aaf\nsidebar-tiddler-link-foreground: #ddf\nsite-title-foreground: #fff\nstatic-alert-foreground: #aaaaaa\ntab-background-selected: #ffffff\ntab-background: #d8d8d8\ntab-border-selected: #d8d8d8\ntab-border: #cccccc\ntab-divider: #d8d8d8\ntab-foreground-selected: <<colour tab-foreground>>\ntab-foreground: #666666\ntable-border: #dddddd\ntable-footer-background: #a8a8a8\ntable-header-background: #f0f0f0\ntag-background: #ec6\ntag-foreground: #ffffff\ntiddler-background: <<colour background>>\ntiddler-border: <<colour background>>\ntiddler-controls-foreground-hover: #888888\ntiddler-controls-foreground-selected: #444444\ntiddler-controls-foreground: #cccccc\ntiddler-editor-background: #f8f8f8\ntiddler-editor-border-image: #ffffff\ntiddler-editor-border: #cccccc\ntiddler-editor-fields-even: #e0e8e0\ntiddler-editor-fields-odd: #f0f4f0\ntiddler-info-background: #f8f8f8\ntiddler-info-border: #dddddd\ntiddler-info-tab-background: #f8f8f8\ntiddler-link-background: <<colour background>>\ntiddler-link-foreground: <<colour primary>>\ntiddler-subtitle-foreground: #c0c0c0\ntiddler-title-foreground: #182955\ntoolbar-new-button: \ntoolbar-options-button: \ntoolbar-save-button: \ntoolbar-info-button: \ntoolbar-edit-button: \ntoolbar-close-button: \ntoolbar-delete-button: \ntoolbar-cancel-button: \ntoolbar-done-button: \nuntagged-background: #999999\nvery-muted-foreground: #888888\n"
},
"$:/palettes/Rocker": {
"title": "$:/palettes/Rocker",
"name": "Rocker",
"description": "A dark theme",
"tags": "$:/tags/Palette",
"type": "application/x-tiddler-dictionary",
"text": "alert-background: #ffe476\nalert-border: #b99e2f\nalert-highlight: #881122\nalert-muted-foreground: #b99e2f\nbackground: #ffffff\nblockquote-bar: <<colour muted-foreground>>\nbutton-background:\nbutton-foreground:\nbutton-border:\ncode-background: #f7f7f9\ncode-border: #e1e1e8\ncode-foreground: #dd1144\ndirty-indicator: #ff0000\ndownload-background: #34c734\ndownload-foreground: <<colour background>>\ndragger-background: <<colour foreground>>\ndragger-foreground: <<colour background>>\ndropdown-background: <<colour background>>\ndropdown-border: <<colour muted-foreground>>\ndropdown-tab-background-selected: #fff\ndropdown-tab-background: #ececec\ndropzone-background: rgba(0,200,0,0.7)\nexternal-link-background-hover: inherit\nexternal-link-background-visited: inherit\nexternal-link-background: inherit\nexternal-link-foreground-hover: inherit\nexternal-link-foreground-visited: #0000aa\nexternal-link-foreground: #0000ee\nforeground: #333333\nmessage-background: #ecf2ff\nmessage-border: #cfd6e6\nmessage-foreground: #547599\nmodal-backdrop: <<colour foreground>>\nmodal-background: <<colour background>>\nmodal-border: #999999\nmodal-footer-background: #f5f5f5\nmodal-footer-border: #dddddd\nmodal-header-border: #eeeeee\nmuted-foreground: #999999\nnotification-background: #ffffdd\nnotification-border: #999999\npage-background: #000\npre-background: #f5f5f5\npre-border: #cccccc\nprimary: #cc0000\nsidebar-button-foreground: <<colour foreground>>\nsidebar-controls-foreground-hover: #000000\nsidebar-controls-foreground: #ffffff\nsidebar-foreground-shadow: rgba(255,255,255, 0.0)\nsidebar-foreground: #acacac\nsidebar-muted-foreground-hover: #444444\nsidebar-muted-foreground: #c0c0c0\nsidebar-tab-background-selected: #000\nsidebar-tab-background: <<colour tab-background>>\nsidebar-tab-border-selected: <<colour tab-border-selected>>\nsidebar-tab-border: <<colour tab-border>>\nsidebar-tab-divider: <<colour tab-divider>>\nsidebar-tab-foreground-selected: \nsidebar-tab-foreground: <<colour tab-foreground>>\nsidebar-tiddler-link-foreground-hover: #ffbb99\nsidebar-tiddler-link-foreground: #cc0000\nsite-title-foreground: <<colour tiddler-title-foreground>>\nstatic-alert-foreground: #aaaaaa\ntab-background-selected: #ffffff\ntab-background: #d8d8d8\ntab-border-selected: #d8d8d8\ntab-border: #cccccc\ntab-divider: #d8d8d8\ntab-foreground-selected: <<colour tab-foreground>>\ntab-foreground: #666666\ntable-border: #dddddd\ntable-footer-background: #a8a8a8\ntable-header-background: #f0f0f0\ntag-background: #ffbb99\ntag-foreground: #000\ntiddler-background: <<colour background>>\ntiddler-border: <<colour background>>\ntiddler-controls-foreground-hover: #888888\ntiddler-controls-foreground-selected: #444444\ntiddler-controls-foreground: #cccccc\ntiddler-editor-background: #f8f8f8\ntiddler-editor-border-image: #ffffff\ntiddler-editor-border: #cccccc\ntiddler-editor-fields-even: #e0e8e0\ntiddler-editor-fields-odd: #f0f4f0\ntiddler-info-background: #f8f8f8\ntiddler-info-border: #dddddd\ntiddler-info-tab-background: #f8f8f8\ntiddler-link-background: <<colour background>>\ntiddler-link-foreground: <<colour primary>>\ntiddler-subtitle-foreground: #c0c0c0\ntiddler-title-foreground: #cc0000\ntoolbar-new-button:\ntoolbar-options-button:\ntoolbar-save-button:\ntoolbar-info-button:\ntoolbar-edit-button:\ntoolbar-close-button:\ntoolbar-delete-button:\ntoolbar-cancel-button:\ntoolbar-done-button:\nuntagged-background: #999999\nvery-muted-foreground: #888888\n"
},
"$:/palettes/SolarFlare": {
"title": "$:/palettes/SolarFlare",
"name": "Solar Flare",
"description": "Warm, relaxing earth colours",
"tags": "$:/tags/Palette",
"type": "application/x-tiddler-dictionary",
"text": ": Background Tones\n\nbase03: #002b36\nbase02: #073642\n\n: Content Tones\n\nbase01: #586e75\nbase00: #657b83\nbase0: #839496\nbase1: #93a1a1\n\n: Background Tones\n\nbase2: #eee8d5\nbase3: #fdf6e3\n\n: Accent Colors\n\nyellow: #b58900\norange: #cb4b16\nred: #dc322f\nmagenta: #d33682\nviolet: #6c71c4\nblue: #268bd2\ncyan: #2aa198\ngreen: #859900\n\n: Additional Tones (RA)\n\nbase10: #c0c4bb\nviolet-muted: #7c81b0\nblue-muted: #4e7baa\n\nyellow-hot: #ffcc44\norange-hot: #eb6d20\nred-hot: #ff2222\nblue-hot: #2298ee\ngreen-hot: #98ee22\n\n: Palette\n\n: Do not use colour macro for background and foreground\nbackground: #fdf6e3\n download-foreground: <<colour background>>\n dragger-foreground: <<colour background>>\n dropdown-background: <<colour background>>\n modal-background: <<colour background>>\n sidebar-foreground-shadow: <<colour background>>\n tiddler-background: <<colour background>>\n tiddler-border: <<colour background>>\n tiddler-link-background: <<colour background>>\n tab-background-selected: <<colour background>>\n dropdown-tab-background-selected: <<colour tab-background-selected>>\nforeground: #657b83\n dragger-background: <<colour foreground>>\n tab-foreground: <<colour foreground>>\n tab-foreground-selected: <<colour tab-foreground>>\n sidebar-tab-foreground-selected: <<colour tab-foreground-selected>>\n sidebar-tab-foreground: <<colour tab-foreground>>\n sidebar-button-foreground: <<colour foreground>>\n sidebar-controls-foreground: <<colour foreground>>\n sidebar-foreground: <<colour foreground>>\n: base03\n: base02\n: base01\n alert-muted-foreground: <<colour base01>>\n: base00\n code-foreground: <<colour base00>>\n message-foreground: <<colour base00>>\n tag-foreground: <<colour base00>>\n: base0\n sidebar-tiddler-link-foreground: <<colour base0>>\n: base1\n muted-foreground: <<colour base1>>\n blockquote-bar: <<colour muted-foreground>>\n dropdown-border: <<colour muted-foreground>>\n sidebar-muted-foreground: <<colour muted-foreground>>\n tiddler-title-foreground: <<colour muted-foreground>>\n site-title-foreground: <<colour tiddler-title-foreground>>\n: base2\n modal-footer-background: <<colour base2>>\n page-background: <<colour base2>>\n modal-backdrop: <<colour page-background>>\n notification-background: <<colour page-background>>\n code-background: <<colour page-background>>\n code-border: <<colour code-background>>\n pre-background: <<colour page-background>>\n pre-border: <<colour pre-background>>\n sidebar-tab-background-selected: <<colour page-background>>\n table-header-background: <<colour base2>>\n tag-background: <<colour base2>>\n tiddler-editor-background: <<colour base2>>\n tiddler-info-background: <<colour base2>>\n tiddler-info-tab-background: <<colour base2>>\n tab-background: <<colour base2>>\n dropdown-tab-background: <<colour tab-background>>\n: base3\n alert-background: <<colour base3>>\n message-background: <<colour base3>>\n: yellow\n: orange\n: red\n: magenta\n alert-highlight: <<colour magenta>>\n: violet\n external-link-foreground: <<colour violet>>\n: blue\n: cyan\n: green\n: base10\n tiddler-controls-foreground: <<colour base10>>\n: violet-muted\n external-link-foreground-visited: <<colour violet-muted>>\n: blue-muted\n primary: <<colour blue-muted>>\n download-background: <<colour primary>>\n tiddler-link-foreground: <<colour primary>>\n\nalert-border: #b99e2f\ndirty-indicator: #ff0000\ndropzone-background: rgba(0,200,0,0.7)\nexternal-link-background-hover: inherit\nexternal-link-background-visited: 
inherit\nexternal-link-background: inherit\nexternal-link-foreground-hover: inherit\nmessage-border: #cfd6e6\nmodal-border: #999999\nsidebar-controls-foreground-hover:\nsidebar-muted-foreground-hover:\nsidebar-tab-background: #ded8c5\nsidebar-tiddler-link-foreground-hover:\nstatic-alert-foreground: #aaaaaa\ntab-border: #cccccc\n modal-footer-border: <<colour tab-border>>\n modal-header-border: <<colour tab-border>>\n notification-border: <<colour tab-border>>\n sidebar-tab-border: <<colour tab-border>>\n tab-border-selected: <<colour tab-border>>\n sidebar-tab-border-selected: <<colour tab-border-selected>>\ntab-divider: #d8d8d8\n sidebar-tab-divider: <<colour tab-divider>>\ntable-border: #dddddd\ntable-footer-background: #a8a8a8\ntiddler-controls-foreground-hover: #888888\ntiddler-controls-foreground-selected: #444444\ntiddler-editor-border-image: #ffffff\ntiddler-editor-border: #cccccc\ntiddler-editor-fields-even: #e0e8e0\ntiddler-editor-fields-odd: #f0f4f0\ntiddler-info-border: #dddddd\ntiddler-subtitle-foreground: #c0c0c0\ntoolbar-new-button:\ntoolbar-options-button:\ntoolbar-save-button:\ntoolbar-info-button:\ntoolbar-edit-button:\ntoolbar-close-button:\ntoolbar-delete-button:\ntoolbar-cancel-button:\ntoolbar-done-button:\nuntagged-background: #999999\nvery-muted-foreground: #888888\n"
},
"$:/palettes/Vanilla": {
"title": "$:/palettes/Vanilla",
"name": "Vanilla",
"description": "Pale and unobtrusive",
"tags": "$:/tags/Palette",
"type": "application/x-tiddler-dictionary",
"text": "alert-background: #ffe476\nalert-border: #b99e2f\nalert-highlight: #881122\nalert-muted-foreground: #b99e2f\nbackground: #ffffff\nblockquote-bar: <<colour muted-foreground>>\nbutton-background:\nbutton-foreground:\nbutton-border:\ncode-background: #f7f7f9\ncode-border: #e1e1e8\ncode-foreground: #dd1144\ndirty-indicator: #ff0000\ndownload-background: #34c734\ndownload-foreground: <<colour background>>\ndragger-background: <<colour foreground>>\ndragger-foreground: <<colour background>>\ndropdown-background: <<colour background>>\ndropdown-border: <<colour muted-foreground>>\ndropdown-tab-background-selected: #fff\ndropdown-tab-background: #ececec\ndropzone-background: rgba(0,200,0,0.7)\nexternal-link-background-hover: inherit\nexternal-link-background-visited: inherit\nexternal-link-background: inherit\nexternal-link-foreground-hover: inherit\nexternal-link-foreground-visited: #0000aa\nexternal-link-foreground: #0000ee\nforeground: #333333\nmessage-background: #ecf2ff\nmessage-border: #cfd6e6\nmessage-foreground: #547599\nmodal-backdrop: <<colour foreground>>\nmodal-background: <<colour background>>\nmodal-border: #999999\nmodal-footer-background: #f5f5f5\nmodal-footer-border: #dddddd\nmodal-header-border: #eeeeee\nmuted-foreground: #bbb\nnotification-background: #ffffdd\nnotification-border: #999999\npage-background: #f4f4f4\npre-background: #f5f5f5\npre-border: #cccccc\nprimary: #5778d8\nsidebar-button-foreground: <<colour foreground>>\nsidebar-controls-foreground-hover: #000000\nsidebar-controls-foreground: #aaaaaa\nsidebar-foreground-shadow: rgba(255,255,255, 0.8)\nsidebar-foreground: #acacac\nsidebar-muted-foreground-hover: #444444\nsidebar-muted-foreground: #c0c0c0\nsidebar-tab-background-selected: #f4f4f4\nsidebar-tab-background: #e0e0e0\nsidebar-tab-border-selected: <<colour tab-border-selected>>\nsidebar-tab-border: <<colour tab-border>>\nsidebar-tab-divider: #e4e4e4\nsidebar-tab-foreground-selected:\nsidebar-tab-foreground: <<colour tab-foreground>>\nsidebar-tiddler-link-foreground-hover: #444444\nsidebar-tiddler-link-foreground: #999999\nsite-title-foreground: <<colour tiddler-title-foreground>>\nstatic-alert-foreground: #aaaaaa\ntab-background-selected: #ffffff\ntab-background: #d8d8d8\ntab-border-selected: #d8d8d8\ntab-border: #cccccc\ntab-divider: #d8d8d8\ntab-foreground-selected: <<colour tab-foreground>>\ntab-foreground: #666666\ntable-border: #dddddd\ntable-footer-background: #a8a8a8\ntable-header-background: #f0f0f0\ntag-background: #ec6\ntag-foreground: #ffffff\ntiddler-background: <<colour background>>\ntiddler-border: <<colour background>>\ntiddler-controls-foreground-hover: #888888\ntiddler-controls-foreground-selected: #444444\ntiddler-controls-foreground: #cccccc\ntiddler-editor-background: #f8f8f8\ntiddler-editor-border-image: #ffffff\ntiddler-editor-border: #cccccc\ntiddler-editor-fields-even: #e0e8e0\ntiddler-editor-fields-odd: #f0f4f0\ntiddler-info-background: #f8f8f8\ntiddler-info-border: #dddddd\ntiddler-info-tab-background: #f8f8f8\ntiddler-link-background: <<colour background>>\ntiddler-link-foreground: <<colour primary>>\ntiddler-subtitle-foreground: #c0c0c0\ntiddler-title-foreground: #182955\ntoolbar-new-button:\ntoolbar-options-button:\ntoolbar-save-button:\ntoolbar-info-button:\ntoolbar-edit-button:\ntoolbar-close-button:\ntoolbar-delete-button:\ntoolbar-cancel-button:\ntoolbar-done-button:\nuntagged-background: #999999\nvery-muted-foreground: #888888\n"
},
"$:/core/readme": {
"title": "$:/core/readme",
"text": "This plugin contains TiddlyWiki's core components, comprising:\n\n* JavaScript code modules\n* Icons\n* Templates needed to create TiddlyWiki's user interface\n* British English (''en-GB'') translations of the localisable strings used by the core\n"
},
"$:/core/templates/alltiddlers.template.html": {
"title": "$:/core/templates/alltiddlers.template.html",
"type": "text/vnd.tiddlywiki-html",
"text": "<!-- This template is provided for backwards compatibility with older versions of TiddlyWiki -->\n\n<$set name=\"exportFilter\" value=\"[!is[system]sort[title]]\">\n\n{{$:/core/templates/exporters/StaticRiver}}\n\n</$set>\n"
},
"$:/core/templates/canonical-uri-external-image": {
"title": "$:/core/templates/canonical-uri-external-image",
"text": "<!--\n\nThis template is used to assign the ''_canonical_uri'' field to external images.\n\nChange the `./images/` part to a different base URI. The URI can be relative or absolute.\n\n-->\n./images/<$view field=\"title\" format=\"doubleurlencoded\"/>"
},
"$:/core/templates/canonical-uri-external-text": {
"title": "$:/core/templates/canonical-uri-external-text",
"text": "<!--\n\nThis template is used to assign the ''_canonical_uri'' field to external text files.\n\nChange the `./text/` part to a different base URI. The URI can be relative or absolute.\n\n-->\n./text/<$view field=\"title\" format=\"doubleurlencoded\"/>.tid"
},
"$:/core/templates/css-tiddler": {
"title": "$:/core/templates/css-tiddler",
"text": "<!--\n\nThis template is used for saving CSS tiddlers as a style tag with data attributes representing the tiddler fields.\n\n-->`<style`<$fields template=' data-tiddler-$name$=\"$encoded_value$\"'></$fields>` type=\"text/css\">`<$view field=\"text\" format=\"text\" />`</style>`"
},
"$:/core/templates/exporters/CsvFile": {
"title": "$:/core/templates/exporters/CsvFile",
"tags": "$:/tags/Exporter",
"description": "{{$:/language/Exporters/CsvFile}}",
"extension": ".csv",
"text": "\\define renderContent()\n<$text text=<<csvtiddlers filter:\"\"\"$(exportFilter)$\"\"\" format:\"quoted-comma-sep\">>/>\n\\end\n<<renderContent>>\n"
},
"$:/core/templates/exporters/JsonFile": {
"title": "$:/core/templates/exporters/JsonFile",
"tags": "$:/tags/Exporter",
"description": "{{$:/language/Exporters/JsonFile}}",
"extension": ".json",
"text": "\\define renderContent()\n<$text text=<<jsontiddlers filter:\"\"\"$(exportFilter)$\"\"\">>/>\n\\end\n<<renderContent>>\n"
},
"$:/core/templates/exporters/StaticRiver": {
"title": "$:/core/templates/exporters/StaticRiver",
"tags": "$:/tags/Exporter",
"description": "{{$:/language/Exporters/StaticRiver}}",
"extension": ".html",
"text": "\\define tv-wikilink-template() #$uri_encoded$\n\\define tv-config-toolbar-icons() no\n\\define tv-config-toolbar-text() no\n\\define tv-config-toolbar-class() tc-btn-invisible\n\\rules only filteredtranscludeinline transcludeinline\n<!doctype html>\n<html>\n<head>\n<meta http-equiv=\"Content-Type\" content=\"text/html;charset=utf-8\" />\n<meta name=\"generator\" content=\"TiddlyWiki\" />\n<meta name=\"tiddlywiki-version\" content=\"{{$:/core/templates/version}}\" />\n<meta name=\"format-detection\" content=\"telephone=no\">\n<link id=\"faviconLink\" rel=\"shortcut icon\" href=\"favicon.ico\">\n<title>{{$:/core/wiki/title}}</title>\n<div id=\"styleArea\">\n{{$:/boot/boot.css||$:/core/templates/css-tiddler}}\n</div>\n<style type=\"text/css\">\n{{$:/core/ui/PageStylesheet||$:/core/templates/wikified-tiddler}}\n</style>\n</head>\n<body class=\"tc-body\">\n{{$:/StaticBanner||$:/core/templates/html-tiddler}}\n<section class=\"tc-story-river\">\n{{$:/core/templates/exporters/StaticRiver/Content||$:/core/templates/html-tiddler}}\n</section>\n</body>\n</html>\n"
},
"$:/core/templates/exporters/StaticRiver/Content": {
"title": "$:/core/templates/exporters/StaticRiver/Content",
"text": "\\define renderContent()\n{{{ $(exportFilter)$ ||$:/core/templates/static-tiddler}}}\n\\end\n<$importvariables filter=\"[[$:/core/ui/PageMacros]] [all[shadows+tiddlers]tag[$:/tags/Macro]!has[draft.of]]\">\n<<renderContent>>\n</$importvariables>\n"
},
"$:/core/templates/exporters/TidFile": {
"title": "$:/core/templates/exporters/TidFile",
"tags": "$:/tags/Exporter",
"description": "{{$:/language/Exporters/TidFile}}",
"extension": ".tid",
"text": "\\define renderContent()\n{{{ $(exportFilter)$ +[limit[1]] ||$:/core/templates/tid-tiddler}}}\n\\end\n<$importvariables filter=\"[[$:/core/ui/PageMacros]] [all[shadows+tiddlers]tag[$:/tags/Macro]!has[draft.of]]\"><<renderContent>></$importvariables>"
},
"$:/core/templates/html-div-tiddler": {
"title": "$:/core/templates/html-div-tiddler",
"text": "<!--\n\nThis template is used for saving tiddlers as an HTML DIV tag with attributes representing the tiddler fields.\n\n-->`<div`<$fields template=' $name$=\"$encoded_value$\"'></$fields>`>\n<pre>`<$view field=\"text\" format=\"htmlencoded\" />`</pre>\n</div>`\n"
},
"$:/core/templates/html-tiddler": {
"title": "$:/core/templates/html-tiddler",
"text": "<!--\n\nThis template is used for saving tiddlers as raw HTML\n\n--><$view field=\"text\" format=\"htmlwikified\" />"
},
"$:/core/templates/javascript-tiddler": {
"title": "$:/core/templates/javascript-tiddler",
"text": "<!--\n\nThis template is used for saving JavaScript tiddlers as a script tag with data attributes representing the tiddler fields.\n\n-->`<script`<$fields template=' data-tiddler-$name$=\"$encoded_value$\"'></$fields>` type=\"text/javascript\">`<$view field=\"text\" format=\"text\" />`</script>`"
},
"$:/core/templates/module-tiddler": {
"title": "$:/core/templates/module-tiddler",
"text": "<!--\n\nThis template is used for saving JavaScript tiddlers as a script tag with data attributes representing the tiddler fields. The body of the tiddler is wrapped in a call to the `$tw.modules.define` function in order to define the body of the tiddler as a module\n\n-->`<script`<$fields template=' data-tiddler-$name$=\"$encoded_value$\"'></$fields>` type=\"text/javascript\" data-module=\"yes\">$tw.modules.define(\"`<$view field=\"title\" format=\"jsencoded\" />`\",\"`<$view field=\"module-type\" format=\"jsencoded\" />`\",function(module,exports,require) {`<$view field=\"text\" format=\"text\" />`});\n</script>`"
},
"$:/core/templates/MOTW.html": {
"title": "$:/core/templates/MOTW.html",
"text": "\\rules only filteredtranscludeinline transcludeinline entity\n<!-- The following comment is called a MOTW comment and is necessary for the TiddlyIE Internet Explorer extension -->\n<!-- saved from url=(0021)http://tiddlywiki.com --> "
},
"$:/core/templates/plain-text-tiddler": {
"title": "$:/core/templates/plain-text-tiddler",
"text": "<$view field=\"text\" format=\"text\" />"
},
"$:/core/templates/raw-static-tiddler": {
"title": "$:/core/templates/raw-static-tiddler",
"text": "<!--\n\nThis template is used for saving tiddlers as static HTML\n\n--><$view field=\"text\" format=\"plainwikified\" />"
},
"$:/core/save/all": {
"title": "$:/core/save/all",
"text": "\\define saveTiddlerFilter()\n[is[tiddler]] -[prefix[$:/state/popup/]] -[[$:/HistoryList]] -[[$:/boot/boot.css]] -[type[application/javascript]library[yes]] -[[$:/boot/boot.js]] -[[$:/boot/bootprefix.js]] +[sort[title]] $(publishFilter)$\n\\end\n{{$:/core/templates/tiddlywiki5.html}}\n"
},
"$:/core/save/empty": {
"title": "$:/core/save/empty",
"text": "\\define saveTiddlerFilter()\n[is[system]] -[prefix[$:/state/popup/]] -[[$:/boot/boot.css]] -[type[application/javascript]library[yes]] -[[$:/boot/boot.js]] -[[$:/boot/bootprefix.js]] +[sort[title]]\n\\end\n{{$:/core/templates/tiddlywiki5.html}}\n"
},
"$:/core/save/lazy-all": {
"title": "$:/core/save/lazy-all",
"text": "\\define saveTiddlerFilter()\n[is[system]] -[prefix[$:/state/popup/]] -[[$:/HistoryList]] -[[$:/boot/boot.css]] -[type[application/javascript]library[yes]] -[[$:/boot/boot.js]] -[[$:/boot/bootprefix.js]] +[sort[title]] \n\\end\n{{$:/core/templates/tiddlywiki5.html}}\n"
},
"$:/core/save/lazy-images": {
"title": "$:/core/save/lazy-images",
"text": "\\define saveTiddlerFilter()\n[is[tiddler]] -[prefix[$:/state/popup/]] -[[$:/HistoryList]] -[[$:/boot/boot.css]] -[type[application/javascript]library[yes]] -[[$:/boot/boot.js]] -[[$:/boot/bootprefix.js]] -[!is[system]is[image]] +[sort[title]] \n\\end\n{{$:/core/templates/tiddlywiki5.html}}\n"
},
"$:/core/templates/single.tiddler.window": {
"title": "$:/core/templates/single.tiddler.window",
"text": "<$set name=\"themeTitle\" value={{$:/view}}>\n\n<$set name=\"tempCurrentTiddler\" value=<<currentTiddler>>>\n\n<$set name=\"currentTiddler\" value={{$:/language}}>\n\n<$set name=\"languageTitle\" value={{!!name}}>\n\n<$set name=\"currentTiddler\" value=<<tempCurrentTiddler>>>\n\n<$importvariables filter=\"[[$:/core/ui/PageMacros]] [all[shadows+tiddlers]tag[$:/tags/Macro]!has[draft.of]]\">\n\n<$navigator story=\"$:/StoryList\" history=\"$:/HistoryList\">\n\n<$transclude mode=\"block\"/>\n\n</$navigator>\n\n</$importvariables>\n\n</$set>\n\n</$set>\n\n</$set>\n\n</$set>\n\n</$set>\n\n"
},
"$:/core/templates/split-recipe": {
"title": "$:/core/templates/split-recipe",
"text": "<$list filter=\"[!is[system]]\">\ntiddler: <$view field=\"title\" format=\"urlencoded\"/>.tid\n</$list>\n"
},
"$:/core/templates/static-tiddler": {
"title": "$:/core/templates/static-tiddler",
"text": "<a name=<<currentTiddler>>>\n<$transclude tiddler=\"$:/core/ui/ViewTemplate\"/>\n</a>"
},
"$:/core/templates/static.area": {
"title": "$:/core/templates/static.area",
"text": "<$reveal type=\"nomatch\" state=\"$:/isEncrypted\" text=\"yes\">\n{{{ [all[shadows+tiddlers]tag[$:/tags/RawStaticContent]!has[draft.of]] ||$:/core/templates/raw-static-tiddler}}}\n{{$:/core/templates/static.content||$:/core/templates/html-tiddler}}\n</$reveal>\n<$reveal type=\"match\" state=\"$:/isEncrypted\" text=\"yes\">\nThis file contains an encrypted ~TiddlyWiki. Enable ~JavaScript and enter the decryption password when prompted.\n</$reveal>\n"
},
"$:/core/templates/static.content": {
"title": "$:/core/templates/static.content",
"type": "text/vnd.tiddlywiki",
"text": "<!-- For Google, and people without JavaScript-->\nThis [[TiddlyWiki|http://tiddlywiki.com]] contains the following tiddlers:\n\n<ul>\n<$list filter=<<saveTiddlerFilter>>>\n<li><$view field=\"title\" format=\"text\"></$view></li>\n</$list>\n</ul>\n"
},
"$:/core/templates/static.template.css": {
"title": "$:/core/templates/static.template.css",
"text": "{{$:/boot/boot.css||$:/core/templates/plain-text-tiddler}}\n\n{{$:/core/ui/PageStylesheet||$:/core/templates/wikified-tiddler}}\n"
},
"$:/core/templates/static.template.html": {
"title": "$:/core/templates/static.template.html",
"type": "text/vnd.tiddlywiki-html",
"text": "\\define tv-wikilink-template() static/$uri_doubleencoded$.html\n\\define tv-config-toolbar-icons() no\n\\define tv-config-toolbar-text() no\n\\define tv-config-toolbar-class() tc-btn-invisible\n\\rules only filteredtranscludeinline transcludeinline\n<!doctype html>\n<html>\n<head>\n<meta http-equiv=\"Content-Type\" content=\"text/html;charset=utf-8\" />\n<meta name=\"generator\" content=\"TiddlyWiki\" />\n<meta name=\"tiddlywiki-version\" content=\"{{$:/core/templates/version}}\" />\n<meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\" />\n<meta name=\"apple-mobile-web-app-capable\" content=\"yes\" />\n<meta name=\"apple-mobile-web-app-status-bar-style\" content=\"black-translucent\" />\n<meta name=\"mobile-web-app-capable\" content=\"yes\"/>\n<meta name=\"format-detection\" content=\"telephone=no\">\n<link id=\"faviconLink\" rel=\"shortcut icon\" href=\"favicon.ico\">\n<title>{{$:/core/wiki/title}}</title>\n<div id=\"styleArea\">\n{{$:/boot/boot.css||$:/core/templates/css-tiddler}}\n</div>\n<style type=\"text/css\">\n{{$:/core/ui/PageStylesheet||$:/core/templates/wikified-tiddler}}\n</style>\n</head>\n<body class=\"tc-body\">\n{{$:/StaticBanner||$:/core/templates/html-tiddler}}\n{{$:/core/ui/PageTemplate||$:/core/templates/html-tiddler}}\n</body>\n</html>\n"
},
"$:/core/templates/static.tiddler.html": {
"title": "$:/core/templates/static.tiddler.html",
"text": "\\define tv-wikilink-template() $uri_doubleencoded$.html\n\\define tv-config-toolbar-icons() no\n\\define tv-config-toolbar-text() no\n\\define tv-config-toolbar-class() tc-btn-invisible\n`<!doctype html>\n<html>\n<head>\n<meta http-equiv=\"Content-Type\" content=\"text/html;charset=utf-8\" />\n<meta name=\"generator\" content=\"TiddlyWiki\" />\n<meta name=\"tiddlywiki-version\" content=\"`{{$:/core/templates/version}}`\" />\n<meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\" />\n<meta name=\"apple-mobile-web-app-capable\" content=\"yes\" />\n<meta name=\"apple-mobile-web-app-status-bar-style\" content=\"black-translucent\" />\n<meta name=\"mobile-web-app-capable\" content=\"yes\"/>\n<meta name=\"format-detection\" content=\"telephone=no\">\n<link id=\"faviconLink\" rel=\"shortcut icon\" href=\"favicon.ico\">\n<link rel=\"stylesheet\" href=\"static.css\">\n<title>`<$view field=\"caption\"><$view field=\"title\"/></$view>: {{$:/core/wiki/title}}`</title>\n</head>\n<body class=\"tc-body\">\n`{{$:/StaticBanner||$:/core/templates/html-tiddler}}`\n<section class=\"tc-story-river\">\n`<$importvariables filter=\"[[$:/core/ui/PageMacros]] [all[shadows+tiddlers]tag[$:/tags/Macro]!has[draft.of]]\">\n<$view tiddler=\"$:/core/ui/ViewTemplate\" format=\"htmlwikified\"/>\n</$importvariables>`\n</section>\n</body>\n</html>\n`"
},
"$:/core/templates/store.area.template.html": {
"title": "$:/core/templates/store.area.template.html",
"text": "<$reveal type=\"nomatch\" state=\"$:/isEncrypted\" text=\"yes\">\n`<div id=\"storeArea\" style=\"display:none;\">`\n<$list filter=<<saveTiddlerFilter>> template=\"$:/core/templates/html-div-tiddler\"/>\n`</div>`\n</$reveal>\n<$reveal type=\"match\" state=\"$:/isEncrypted\" text=\"yes\">\n`<!--~~ Encrypted tiddlers ~~-->`\n`<pre id=\"encryptedStoreArea\" type=\"text/plain\" style=\"display:none;\">`\n<$encrypt filter=<<saveTiddlerFilter>>/>\n`</pre>`\n</$reveal>"
},
"$:/core/templates/tid-tiddler": {
"title": "$:/core/templates/tid-tiddler",
"text": "<!--\n\nThis template is used for saving tiddlers in TiddlyWeb *.tid format\n\n--><$fields exclude='text bag' template='$name$: $value$\n'></$fields>`\n`<$view field=\"text\" format=\"text\" />"
},
"$:/core/templates/tiddler-metadata": {
"title": "$:/core/templates/tiddler-metadata",
"text": "<!--\n\nThis template is used for saving tiddler metadata *.meta files\n\n--><$fields exclude='text bag' template='$name$: $value$\n'></$fields>"
},
"$:/core/templates/tiddlywiki5.html": {
"title": "$:/core/templates/tiddlywiki5.html",
"text": "\\rules only filteredtranscludeinline transcludeinline\n<!doctype html>\n{{$:/core/templates/MOTW.html}}<html>\n<head>\n<meta http-equiv=\"X-UA-Compatible\" content=\"IE=edge\" />\t\t<!-- Force IE standards mode for Intranet and HTA - should be the first meta -->\n<meta http-equiv=\"Content-Type\" content=\"text/html;charset=utf-8\" />\n<meta name=\"application-name\" content=\"TiddlyWiki\" />\n<meta name=\"generator\" content=\"TiddlyWiki\" />\n<meta name=\"tiddlywiki-version\" content=\"{{$:/core/templates/version}}\" />\n<meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\" />\n<meta name=\"apple-mobile-web-app-capable\" content=\"yes\" />\n<meta name=\"apple-mobile-web-app-status-bar-style\" content=\"black-translucent\" />\n<meta name=\"mobile-web-app-capable\" content=\"yes\"/>\n<meta name=\"format-detection\" content=\"telephone=no\" />\n<meta name=\"copyright\" content=\"{{$:/core/copyright.txt}}\" />\n<link id=\"faviconLink\" rel=\"shortcut icon\" href=\"favicon.ico\">\n<title>{{$:/core/wiki/title}}</title>\n<!--~~ This is a Tiddlywiki file. The points of interest in the file are marked with this pattern ~~-->\n\n<!--~~ Raw markup ~~-->\n{{{ [all[shadows+tiddlers]tag[$:/core/wiki/rawmarkup]] [all[shadows+tiddlers]tag[$:/tags/RawMarkup]] ||$:/core/templates/plain-text-tiddler}}}\n</head>\n<body class=\"tc-body\">\n<!--~~ Static styles ~~-->\n<div id=\"styleArea\">\n{{$:/boot/boot.css||$:/core/templates/css-tiddler}}\n</div>\n<!--~~ Static content for Google and browsers without JavaScript ~~-->\n<noscript>\n<div id=\"splashArea\">\n{{$:/core/templates/static.area}}\n</div>\n</noscript>\n<!--~~ Ordinary tiddlers ~~-->\n{{$:/core/templates/store.area.template.html}}\n<!--~~ Library modules ~~-->\n<div id=\"libraryModules\" style=\"display:none;\">\n{{{ [is[system]type[application/javascript]library[yes]] ||$:/core/templates/javascript-tiddler}}}\n</div>\n<!--~~ Boot kernel prologue ~~-->\n<div id=\"bootKernelPrefix\" style=\"display:none;\">\n{{ $:/boot/bootprefix.js ||$:/core/templates/javascript-tiddler}}\n</div>\n<!--~~ Boot kernel ~~-->\n<div id=\"bootKernel\" style=\"display:none;\">\n{{ $:/boot/boot.js ||$:/core/templates/javascript-tiddler}}\n</div>\n</body>\n</html>\n"
},
"$:/core/templates/version": {
"title": "$:/core/templates/version",
"text": "<<version>>"
},
"$:/core/templates/wikified-tiddler": {
"title": "$:/core/templates/wikified-tiddler",
"text": "<$transclude />"
},
"$:/core/ui/AboveStory/tw2-plugin-check": {
"title": "$:/core/ui/AboveStory/tw2-plugin-check",
"tags": "$:/tags/AboveStory",
"text": "\\define lingo-base() $:/language/AboveStory/ClassicPlugin/\n<$list filter=\"[all[system+tiddlers]tag[systemConfig]limit[1]]\">\n\n<div class=\"tc-message-box\">\n\n<<lingo Warning>>\n\n<ul>\n\n<$list filter=\"[all[system+tiddlers]tag[systemConfig]limit[1]]\">\n\n<li>\n\n<$link><$view field=\"title\"/></$link>\n\n</li>\n\n</$list>\n\n</ul>\n\n</div>\n\n</$list>\n"
},
"$:/core/ui/AdvancedSearch/Filter": {
"title": "$:/core/ui/AdvancedSearch/Filter",
"tags": "$:/tags/AdvancedSearch",
"caption": "{{$:/language/Search/Filter/Caption}}",
"text": "\\define lingo-base() $:/language/Search/\n<<lingo Filter/Hint>>\n\n<div class=\"tc-search tc-advanced-search\">\n<$edit-text tiddler=\"$:/temp/advancedsearch\" type=\"search\" tag=\"input\"/>\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/AdvancedSearch/FilterButton]!has[draft.of]]\"><$transclude/></$list>\n</div>\n\n<$reveal state=\"$:/temp/advancedsearch\" type=\"nomatch\" text=\"\">\n<$set name=\"resultCount\" value=\"\"\"<$count filter={{$:/temp/advancedsearch}}/>\"\"\">\n<div class=\"tc-search-results\">\n<<lingo Filter/Matches>>\n<$list filter={{$:/temp/advancedsearch}} template=\"$:/core/ui/ListItemTemplate\"/>\n</div>\n</$set>\n</$reveal>\n"
},
"$:/core/ui/AdvancedSearch/Filter/FilterButtons/clear": {
"title": "$:/core/ui/AdvancedSearch/Filter/FilterButtons/clear",
"tags": "$:/tags/AdvancedSearch/FilterButton",
"text": "<$reveal state=\"$:/temp/advancedsearch\" type=\"nomatch\" text=\"\">\n<$button class=\"tc-btn-invisible\">\n<$action-setfield $tiddler=\"$:/temp/advancedsearch\" $field=\"text\" $value=\"\"/>\n{{$:/core/images/close-button}}\n</$button>\n</$reveal>\n"
},
"$:/core/ui/AdvancedSearch/Filter/FilterButtons/delete": {
"title": "$:/core/ui/AdvancedSearch/Filter/FilterButtons/delete",
"tags": "$:/tags/AdvancedSearch/FilterButton",
"text": "<$reveal state=\"$:/temp/advancedsearch\" type=\"nomatch\" text=\"\">\n<$button popup=<<qualify \"$:/state/filterDeleteDropdown\">> class=\"tc-btn-invisible\">\n{{$:/core/images/delete-button}}\n</$button>\n</$reveal>\n\n<$reveal state=<<qualify \"$:/state/filterDeleteDropdown\">> type=\"popup\" position=\"belowleft\" animate=\"yes\">\n<div class=\"tc-block-dropdown-wrapper\">\n<div class=\"tc-block-dropdown tc-edit-type-dropdown\">\n<div class=\"tc-dropdown-item-plain\">\n<$set name=\"resultCount\" value=\"\"\"<$count filter={{$:/temp/advancedsearch}}/>\"\"\">\nAre you sure you wish to delete <<resultCount>> tiddler(s)?\n</$set>\n</div>\n<div class=\"tc-dropdown-item-plain\">\n<$button class=\"tc-btn\">\n<$action-deletetiddler $filter={{$:/temp/advancedsearch}}/>\nDelete these tiddlers\n</$button>\n</div>\n</div>\n</div>\n</$reveal>\n"
},
"$:/core/ui/AdvancedSearch/Filter/FilterButtons/dropdown": {
"title": "$:/core/ui/AdvancedSearch/Filter/FilterButtons/dropdown",
"tags": "$:/tags/AdvancedSearch/FilterButton",
"text": "<span class=\"tc-popup-keep\">\n<$button popup=<<qualify \"$:/state/filterDropdown\">> class=\"tc-btn-invisible\">\n{{$:/core/images/down-arrow}}\n</$button>\n</span>\n\n<$reveal state=<<qualify \"$:/state/filterDropdown\">> type=\"popup\" position=\"belowleft\" animate=\"yes\">\n<$linkcatcher to=\"$:/temp/advancedsearch\">\n<div class=\"tc-block-dropdown-wrapper\">\n<div class=\"tc-block-dropdown tc-edit-type-dropdown\">\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/Filter]]\"><$link to={{!!filter}}><$transclude field=\"description\"/></$link>\n</$list>\n</div>\n</div>\n</$linkcatcher>\n</$reveal>\n"
},
"$:/core/ui/AdvancedSearch/Filter/FilterButtons/export": {
"title": "$:/core/ui/AdvancedSearch/Filter/FilterButtons/export",
"tags": "$:/tags/AdvancedSearch/FilterButton",
"text": "<$reveal state=\"$:/temp/advancedsearch\" type=\"nomatch\" text=\"\">\n<$macrocall $name=\"exportButton\" exportFilter={{$:/temp/advancedsearch}} lingoBase=\"$:/language/Buttons/ExportTiddlers/\"/>\n</$reveal>\n"
},
"$:/core/ui/AdvancedSearch/Shadows": {
"title": "$:/core/ui/AdvancedSearch/Shadows",
"tags": "$:/tags/AdvancedSearch",
"caption": "{{$:/language/Search/Shadows/Caption}}",
"text": "\\define lingo-base() $:/language/Search/\n<$linkcatcher to=\"$:/temp/advancedsearch\">\n\n<<lingo Shadows/Hint>>\n\n<div class=\"tc-search\">\n<$edit-text tiddler=\"$:/temp/advancedsearch\" type=\"search\" tag=\"input\"/>\n<$reveal state=\"$:/temp/advancedsearch\" type=\"nomatch\" text=\"\">\n<$button class=\"tc-btn-invisible\">\n<$action-setfield $tiddler=\"$:/temp/advancedsearch\" $field=\"text\" $value=\"\"/>\n{{$:/core/images/close-button}}\n</$button>\n</$reveal>\n</div>\n\n</$linkcatcher>\n\n<$reveal state=\"$:/temp/advancedsearch\" type=\"nomatch\" text=\"\">\n\n<$set name=\"resultCount\" value=\"\"\"<$count filter=\"[all[shadows]search{$:/temp/advancedsearch}] -[[$:/temp/advancedsearch]]\"/>\"\"\">\n\n<div class=\"tc-search-results\">\n\n<<lingo Shadows/Matches>>\n\n<$list filter=\"[all[shadows]search{$:/temp/advancedsearch}sort[title]limit[250]] -[[$:/temp/advancedsearch]]\" template=\"$:/core/ui/ListItemTemplate\"/>\n\n</div>\n\n</$set>\n\n</$reveal>\n\n<$reveal state=\"$:/temp/advancedsearch\" type=\"match\" text=\"\">\n\n</$reveal>\n"
},
"$:/core/ui/AdvancedSearch/Standard": {
"title": "$:/core/ui/AdvancedSearch/Standard",
"tags": "$:/tags/AdvancedSearch",
"caption": "{{$:/language/Search/Standard/Caption}}",
"text": "\\define lingo-base() $:/language/Search/\n<$linkcatcher to=\"$:/temp/advancedsearch\">\n\n<<lingo Standard/Hint>>\n\n<div class=\"tc-search\">\n<$edit-text tiddler=\"$:/temp/advancedsearch\" type=\"search\" tag=\"input\"/>\n<$reveal state=\"$:/temp/advancedsearch\" type=\"nomatch\" text=\"\">\n<$button class=\"tc-btn-invisible\">\n<$action-setfield $tiddler=\"$:/temp/advancedsearch\" $field=\"text\" $value=\"\"/>\n{{$:/core/images/close-button}}\n</$button>\n</$reveal>\n</div>\n\n</$linkcatcher>\n\n<$reveal state=\"$:/temp/advancedsearch\" type=\"nomatch\" text=\"\">\n<$set name=\"searchTiddler\" value=\"$:/temp/advancedsearch\">\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/SearchResults]!has[draft.of]butfirst[]limit[1]]\" emptyMessage=\"\"\"\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/SearchResults]!has[draft.of]]\">\n<$transclude/>\n</$list>\n\"\"\">\n<$macrocall $name=\"tabs\" tabsList=\"[all[shadows+tiddlers]tag[$:/tags/SearchResults]!has[draft.of]]\" default={{$:/config/SearchResults/Default}}/>\n</$list>\n</$set>\n</$reveal>\n"
},
"$:/core/ui/AdvancedSearch/System": {
"title": "$:/core/ui/AdvancedSearch/System",
"tags": "$:/tags/AdvancedSearch",
"caption": "{{$:/language/Search/System/Caption}}",
"text": "\\define lingo-base() $:/language/Search/\n<$linkcatcher to=\"$:/temp/advancedsearch\">\n\n<<lingo System/Hint>>\n\n<div class=\"tc-search\">\n<$edit-text tiddler=\"$:/temp/advancedsearch\" type=\"search\" tag=\"input\"/>\n<$reveal state=\"$:/temp/advancedsearch\" type=\"nomatch\" text=\"\">\n<$button class=\"tc-btn-invisible\">\n<$action-setfield $tiddler=\"$:/temp/advancedsearch\" $field=\"text\" $value=\"\"/>\n{{$:/core/images/close-button}}\n</$button>\n</$reveal>\n</div>\n\n</$linkcatcher>\n\n<$reveal state=\"$:/temp/advancedsearch\" type=\"nomatch\" text=\"\">\n\n<$set name=\"resultCount\" value=\"\"\"<$count filter=\"[is[system]search{$:/temp/advancedsearch}] -[[$:/temp/advancedsearch]]\"/>\"\"\">\n\n<div class=\"tc-search-results\">\n\n<<lingo System/Matches>>\n\n<$list filter=\"[is[system]search{$:/temp/advancedsearch}sort[title]limit[250]] -[[$:/temp/advancedsearch]]\" template=\"$:/core/ui/ListItemTemplate\"/>\n\n</div>\n\n</$set>\n\n</$reveal>\n\n<$reveal state=\"$:/temp/advancedsearch\" type=\"match\" text=\"\">\n\n</$reveal>\n"
},
"$:/AdvancedSearch": {
"title": "$:/AdvancedSearch",
"icon": "$:/core/images/advanced-search-button",
"color": "#bbb",
"text": "<div class=\"tc-advanced-search\">\n<<tabs \"[all[shadows+tiddlers]tag[$:/tags/AdvancedSearch]!has[draft.of]]\" \"$:/core/ui/AdvancedSearch/System\">>\n</div>\n"
},
"$:/core/ui/AlertTemplate": {
"title": "$:/core/ui/AlertTemplate",
"text": "<div class=\"tc-alert\">\n<div class=\"tc-alert-toolbar\">\n<$button class=\"tc-btn-invisible\"><$action-deletetiddler $tiddler=<<currentTiddler>>/>{{$:/core/images/delete-button}}</$button>\n</div>\n<div class=\"tc-alert-subtitle\">\n<$view field=\"component\"/> - <$view field=\"modified\" format=\"date\" template=\"0hh:0mm:0ss DD MM YYYY\"/> <$reveal type=\"nomatch\" state=\"!!count\" text=\"\"><span class=\"tc-alert-highlight\">({{$:/language/Count}}: <$view field=\"count\"/>)</span></$reveal>\n</div>\n<div class=\"tc-alert-body\">\n\n<$transclude/>\n\n</div>\n</div>\n"
},
"$:/core/ui/BinaryWarning": {
"title": "$:/core/ui/BinaryWarning",
"text": "\\define lingo-base() $:/language/BinaryWarning/\n<div class=\"tc-binary-warning\">\n\n<<lingo Prompt>>\n\n</div>\n"
},
"$:/core/ui/Components/tag-link": {
"title": "$:/core/ui/Components/tag-link",
"text": "<$link>\n<$set name=\"backgroundColor\" value={{!!color}}>\n<span style=<<tag-styles>> class=\"tc-tag-label\">\n<$view field=\"title\" format=\"text\"/>\n</span>\n</$set>\n</$link>"
},
"$:/core/ui/ControlPanel/Advanced": {
"title": "$:/core/ui/ControlPanel/Advanced",
"tags": "$:/tags/ControlPanel/Info",
"caption": "{{$:/language/ControlPanel/Advanced/Caption}}",
"text": "{{$:/language/ControlPanel/Advanced/Hint}}\n\n<div class=\"tc-control-panel\">\n<<tabs \"[all[shadows+tiddlers]tag[$:/tags/ControlPanel/Advanced]!has[draft.of]]\" \"$:/core/ui/ControlPanel/TiddlerFields\">>\n</div>\n"
},
"$:/core/ui/ControlPanel/Appearance": {
"title": "$:/core/ui/ControlPanel/Appearance",
"tags": "$:/tags/ControlPanel",
"caption": "{{$:/language/ControlPanel/Appearance/Caption}}",
"text": "{{$:/language/ControlPanel/Appearance/Hint}}\n\n<div class=\"tc-control-panel\">\n<<tabs \"[all[shadows+tiddlers]tag[$:/tags/ControlPanel/Appearance]!has[draft.of]]\" \"$:/core/ui/ControlPanel/Theme\">>\n</div>\n"
},
"$:/core/ui/ControlPanel/Basics": {
"title": "$:/core/ui/ControlPanel/Basics",
"tags": "$:/tags/ControlPanel/Info",
"caption": "{{$:/language/ControlPanel/Basics/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/Basics/\n\n\\define show-filter-count(filter)\n<$button class=\"tc-btn-invisible\">\n<$action-setfield $tiddler=\"$:/temp/advancedsearch\" $value=\"\"\"$filter$\"\"\"/>\n<$action-setfield $tiddler=\"$:/state/tab--1498284803\" $value=\"$:/core/ui/AdvancedSearch/Filter\"/>\n<$action-navigate $to=\"$:/AdvancedSearch\"/>\n''<$count filter=\"\"\"$filter$\"\"\"/>''\n{{$:/core/images/advanced-search-button}}\n</$button>\n\\end\n\n|<<lingo Version/Prompt>> |''<<version>>'' |\n|<$link to=\"$:/SiteTitle\"><<lingo Title/Prompt>></$link> |<$edit-text tiddler=\"$:/SiteTitle\" default=\"\" tag=\"input\"/> |\n|<$link to=\"$:/SiteSubtitle\"><<lingo Subtitle/Prompt>></$link> |<$edit-text tiddler=\"$:/SiteSubtitle\" default=\"\" tag=\"input\"/> |\n|<$link to=\"$:/status/UserName\"><<lingo Username/Prompt>></$link> |<$edit-text tiddler=\"$:/status/UserName\" default=\"\" tag=\"input\"/> |\n|<$link to=\"$:/config/AnimationDuration\"><<lingo AnimDuration/Prompt>></$link> |<$edit-text tiddler=\"$:/config/AnimationDuration\" default=\"\" tag=\"input\"/> |\n|<$link to=\"$:/DefaultTiddlers\"><<lingo DefaultTiddlers/Prompt>></$link> |<<lingo DefaultTiddlers/TopHint>><br> <$edit tag=\"textarea\" tiddler=\"$:/DefaultTiddlers\" class=\"tc-edit-texteditor\"/><br>//<<lingo DefaultTiddlers/BottomHint>>// |\n|<$link to=\"$:/config/NewJournal/Title\"><<lingo NewJournal/Title/Prompt>></$link> |<$edit-text tiddler=\"$:/config/NewJournal/Title\" default=\"\" tag=\"input\"/> |\n|<$link to=\"$:/config/NewJournal/Tags\"><<lingo NewJournal/Tags/Prompt>></$link> |<$edit-text tiddler=\"$:/config/NewJournal/Tags\" default=\"\" tag=\"input\"/> |\n|<<lingo Language/Prompt>> |{{$:/snippets/minilanguageswitcher}} |\n|<<lingo Tiddlers/Prompt>> |<<show-filter-count \"[!is[system]sort[title]]\">> |\n|<<lingo Tags/Prompt>> |<<show-filter-count \"[tags[]sort[title]]\">> |\n|<<lingo SystemTiddlers/Prompt>> |<<show-filter-count \"[is[system]sort[title]]\">> |\n|<<lingo ShadowTiddlers/Prompt>> |<<show-filter-count \"[all[shadows]sort[title]]\">> |\n|<<lingo OverriddenShadowTiddlers/Prompt>> |<<show-filter-count \"[is[tiddler]is[shadow]sort[title]]\">> |\n"
},
"$:/core/ui/ControlPanel/EditorTypes": {
"title": "$:/core/ui/ControlPanel/EditorTypes",
"tags": "$:/tags/ControlPanel/Advanced",
"caption": "{{$:/language/ControlPanel/EditorTypes/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/EditorTypes/\n\n<<lingo Hint>>\n\n<table>\n<tbody>\n<tr>\n<th><<lingo Type/Caption>></th>\n<th><<lingo Editor/Caption>></th>\n</tr>\n<$list filter=\"[all[shadows+tiddlers]prefix[$:/config/EditorTypeMappings/]sort[title]]\">\n<tr>\n<td>\n<$link>\n<$list filter=\"[all[current]removeprefix[$:/config/EditorTypeMappings/]]\">\n<$text text={{!!title}}/>\n</$list>\n</$link>\n</td>\n<td>\n<$view field=\"text\"/>\n</td>\n</tr>\n</$list>\n</tbody>\n</table>\n"
},
"$:/core/ui/ControlPanel/Info": {
"title": "$:/core/ui/ControlPanel/Info",
"tags": "$:/tags/ControlPanel",
"caption": "{{$:/language/ControlPanel/Info/Caption}}",
"text": "{{$:/language/ControlPanel/Info/Hint}}\n\n<div class=\"tc-control-panel\">\n<<tabs \"[all[shadows+tiddlers]tag[$:/tags/ControlPanel/Info]!has[draft.of]]\" \"$:/core/ui/ControlPanel/Basics\">>\n</div>\n"
},
"$:/core/ui/ControlPanel/KeyboardShortcuts": {
"title": "$:/core/ui/ControlPanel/KeyboardShortcuts",
"tags": "$:/tags/ControlPanel",
"caption": "{{$:/language/ControlPanel/KeyboardShortcuts/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/KeyboardShortcuts/\n\n\\define new-shortcut(title)\n<div class=\"tc-dropdown-item-plain\">\n<$edit-shortcut tiddler=\"$title$\" placeholder={{$:/language/ControlPanel/KeyboardShortcuts/Add/Prompt}} style=\"width:auto;\"/> <$button>\n<<lingo Add/Caption>>\n<$action-listops\n\t$tiddler=\"$(shortcutTitle)$\"\n\t$field=\"text\"\n\t$subfilter=\"[{$title$}]\"\n/>\n<$action-deletetiddler\n\t$tiddler=\"$title$\"\n/>\n</$button>\n</div>\n\\end\n\n\\define shortcut-list-item(caption)\n<td>\n</td>\n<td style=\"text-align:right;font-size:0.7em;\">\n<<lingo Platform/$caption$>>\n</td>\n<td>\n<div style=\"position:relative;\">\n<$button popup=<<qualify \"$:/state/dropdown/$(shortcutTitle)$\">> class=\"tc-btn-invisible\">\n{{$:/core/images/edit-button}}\n</$button>\n<$macrocall $name=\"displayshortcuts\" $output=\"text/html\" shortcuts={{$(shortcutTitle)$}} prefix=\"<kbd>\" separator=\"</kbd> <kbd>\" suffix=\"</kbd>\"/>\n\n<$reveal state=<<qualify \"$:/state/dropdown/$(shortcutTitle)$\">> type=\"popup\" position=\"below\" animate=\"yes\">\n<div class=\"tc-block-dropdown-wrapper\">\n<div class=\"tc-block-dropdown tc-edit-type-dropdown tc-popup-keep\">\n<$list filter=\"[list[$(shortcutTitle)$!!text]sort[title]]\" variable=\"shortcut\" emptyMessage=\"\"\"\n<div class=\"tc-dropdown-item-plain\">\n//<<lingo NoShortcuts/Caption>>//\n</div>\n\"\"\">\n<div class=\"tc-dropdown-item-plain\">\n<$button class=\"tc-btn-invisible\" tooltip=<<lingo Remove/Hint>>>\n<$action-listops\n\t$tiddler=\"$(shortcutTitle)$\"\n\t$field=\"text\"\n\t$subfilter=\"+[remove<shortcut>]\"\n/>\n×\n</$button>\n<kbd>\n<$macrocall $name=\"displayshortcuts\" $output=\"text/html\" shortcuts=<<shortcut>>/>\n</kbd>\n</div>\n</$list>\n<hr/>\n<$macrocall $name=\"new-shortcut\" title=<<qualify \"$:/state/new-shortcut/$(shortcutTitle)$\">>/>\n</div>\n</div>\n</$reveal>\n</div>\n</td>\n\\end\n\n\\define shortcut-list(caption,prefix)\n<tr>\n<$list filter=\"[all[tiddlers+shadows][$prefix$$(shortcutName)$]]\" variable=\"shortcutTitle\">\n<<shortcut-list-item \"$caption$\">>\n</$list>\n</tr>\n\\end\n\n\\define shortcut-editor()\n<<shortcut-list \"All\" \"$:/config/shortcuts/\">>\n<<shortcut-list \"Mac\" \"$:/config/shortcuts-mac/\">>\n<<shortcut-list \"NonMac\" \"$:/config/shortcuts-not-mac/\">>\n<<shortcut-list \"Linux\" \"$:/config/shortcuts-linux/\">>\n<<shortcut-list \"NonLinux\" \"$:/config/shortcuts-not-linux/\">>\n<<shortcut-list \"Windows\" \"$:/config/shortcuts-windows/\">>\n<<shortcut-list \"NonWindows\" \"$:/config/shortcuts-not-windows/\">>\n\\end\n\n\\define shortcut-preview()\n<$macrocall $name=\"displayshortcuts\" $output=\"text/html\" shortcuts={{$(shortcutPrefix)$$(shortcutName)$}} prefix=\"<kbd>\" separator=\"</kbd> <kbd>\" suffix=\"</kbd>\"/>\n\\end\n\n\\define shortcut-item-inner()\n<tr>\n<td>\n<$reveal type=\"nomatch\" state=<<dropdownStateTitle>> text=\"open\">\n<$button class=\"tc-btn-invisible\">\n<$action-setfield\n\t$tiddler=<<dropdownStateTitle>>\n\t$value=\"open\"\n/>\n{{$:/core/images/right-arrow}}\n</$button>\n</$reveal>\n<$reveal type=\"match\" state=<<dropdownStateTitle>> text=\"open\">\n<$button class=\"tc-btn-invisible\">\n<$action-setfield\n\t$tiddler=<<dropdownStateTitle>>\n\t$value=\"close\"\n/>\n{{$:/core/images/down-arrow}}\n</$button>\n</$reveal>\n''<$text text=<<shortcutName>>/>''\n</td>\n<td>\n<$transclude tiddler=\"$:/config/ShortcutInfo/$(shortcutName)$\"/>\n</td>\n<td>\n<$list filter=\"$:/config/shortcuts/ $:/config/shortcuts-mac/ 
$:/config/shortcuts-not-mac/ $:/config/shortcuts-linux/ $:/config/shortcuts-not-linux/ $:/config/shortcuts-windows/ $:/config/shortcuts-not-windows/\" variable=\"shortcutPrefix\">\n<<shortcut-preview>>\n</$list>\n</td>\n</tr>\n<$set name=\"dropdownState\" value={{$(dropdownStateTitle)$}}>\n<$list filter=\"[<dropdownState>prefix[open]]\" variable=\"listItem\">\n<<shortcut-editor>>\n</$list>\n</$set>\n\\end\n\n\\define shortcut-item()\n<$set name=\"dropdownStateTitle\" value=<<qualify \"$:/state/dropdown/keyboardshortcut/$(shortcutName)$\">>>\n<<shortcut-item-inner>>\n</$set>\n\\end\n\n<table>\n<tbody>\n<$list filter=\"[all[shadows+tiddlers]removeprefix[$:/config/ShortcutInfo/]]\" variable=\"shortcutName\">\n<<shortcut-item>>\n</$list>\n</tbody>\n</table>\n"
},
"$:/core/ui/ControlPanel/LoadedModules": {
"title": "$:/core/ui/ControlPanel/LoadedModules",
"tags": "$:/tags/ControlPanel/Advanced",
"caption": "{{$:/language/ControlPanel/LoadedModules/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/\n<<lingo LoadedModules/Hint>>\n\n{{$:/snippets/modules}}\n"
},
"$:/core/ui/ControlPanel/Modals/AddPlugins": {
"title": "$:/core/ui/ControlPanel/Modals/AddPlugins",
"subtitle": "{{$:/core/images/download-button}} {{$:/language/ControlPanel/Plugins/Add/Caption}}",
"text": "\\define install-plugin-button()\n<$button>\n<$action-sendmessage $message=\"tm-load-plugin-from-library\" url={{!!url}} title={{$(assetInfo)$!!original-title}}/>\n<$list filter=\"[<assetInfo>get[original-title]get[version]]\" variable=\"installedVersion\" emptyMessage=\"\"\"{{$:/language/ControlPanel/Plugins/Install/Caption}}\"\"\">\n{{$:/language/ControlPanel/Plugins/Reinstall/Caption}}\n</$list>\n</$button>\n\\end\n\n\\define popup-state-macro()\n$:/state/add-plugin-info/$(connectionTiddler)$/$(assetInfo)$\n\\end\n\n\\define display-plugin-info(type)\n<$set name=\"popup-state\" value=<<popup-state-macro>>>\n<div class=\"tc-plugin-info\">\n<div class=\"tc-plugin-info-chunk tc-small-icon\">\n<$reveal type=\"nomatch\" state=<<popup-state>> text=\"yes\">\n<$button class=\"tc-btn-invisible tc-btn-dropdown\" set=<<popup-state>> setTo=\"yes\">\n{{$:/core/images/right-arrow}}\n</$button>\n</$reveal>\n<$reveal type=\"match\" state=<<popup-state>> text=\"yes\">\n<$button class=\"tc-btn-invisible tc-btn-dropdown\" set=<<popup-state>> setTo=\"no\">\n{{$:/core/images/down-arrow}}\n</$button>\n</$reveal>\n</div>\n<div class=\"tc-plugin-info-chunk\">\n<$list filter=\"[<assetInfo>has[icon]]\" emptyMessage=\"\"\"<$transclude tiddler=\"$:/core/images/plugin-generic-$type$\"/>\"\"\">\n<img src={{$(assetInfo)$!!icon}}/>\n</$list>\n</div>\n<div class=\"tc-plugin-info-chunk\">\n<h1><$view tiddler=<<assetInfo>> field=\"description\"/></h1>\n<h2><$view tiddler=<<assetInfo>> field=\"original-title\"/></h2>\n<div><em><$view tiddler=<<assetInfo>> field=\"version\"/></em></div>\n</div>\n<div class=\"tc-plugin-info-chunk\">\n<<install-plugin-button>>\n</div>\n</div>\n<$reveal type=\"match\" text=\"yes\" state=<<popup-state>>>\n<div class=\"tc-plugin-info-dropdown\">\n<div class=\"tc-plugin-info-dropdown-message\">\n<$list filter=\"[<assetInfo>get[original-title]get[version]]\" variable=\"installedVersion\" emptyMessage=\"\"\"{{$:/language/ControlPanel/Plugins/NotInstalled/Hint}}\"\"\">\n<em>\n{{$:/language/ControlPanel/Plugins/AlreadyInstalled/Hint}}\n</em>\n</$list>\n</div>\n<div class=\"tc-plugin-info-dropdown-body\">\n<$transclude tiddler=<<assetInfo>> field=\"readme\" mode=\"block\"/>\n</div>\n</div>\n</$reveal>\n</$set>\n\\end\n\n\\define load-plugin-library-button()\n<$button class=\"tc-btn-big-green\">\n<$action-sendmessage $message=\"tm-load-plugin-library\" url={{!!url}} infoTitlePrefix=\"$:/temp/RemoteAssetInfo/\"/>\n{{$:/core/images/chevron-right}} {{$:/language/ControlPanel/Plugins/OpenPluginLibrary}}\n</$button>\n\\end\n\n\\define display-server-assets(type)\n{{$:/language/Search/Search}}: <$edit-text tiddler=\"\"\"$:/temp/RemoteAssetSearch/$(currentTiddler)$\"\"\" default=\"\" type=\"search\" tag=\"input\"/>\n<$reveal state=\"\"\"$:/temp/RemoteAssetSearch/$(currentTiddler)$\"\"\" type=\"nomatch\" text=\"\">\n<$button class=\"tc-btn-invisible\">\n<$action-setfield $tiddler=\"\"\"$:/temp/RemoteAssetSearch/$(currentTiddler)$\"\"\" $field=\"text\" $value=\"\"/>\n{{$:/core/images/close-button}}\n</$button>\n</$reveal>\n<div class=\"tc-plugin-library-listing\">\n<$list filter=\"[all[tiddlers+shadows]tag[$:/tags/RemoteAssetInfo]server-url{!!url}original-plugin-type[$type$]search{$:/temp/RemoteAssetSearch/$(currentTiddler)$}sort[description]]\" variable=\"assetInfo\">\n<<display-plugin-info \"$type$\">>\n</$list>\n</div>\n\\end\n\n\\define display-server-connection()\n<$list filter=\"[all[tiddlers+shadows]tag[$:/tags/ServerConnection]suffix{!!url}]\" variable=\"connectionTiddler\" 
emptyMessage=<<load-plugin-library-button>>>\n\n<<tabs \"[[$:/core/ui/ControlPanel/Plugins/Add/Plugins]] [[$:/core/ui/ControlPanel/Plugins/Add/Themes]] [[$:/core/ui/ControlPanel/Plugins/Add/Languages]]\" \"$:/core/ui/ControlPanel/Plugins/Add/Plugins\">>\n\n</$list>\n\\end\n\n\\define plugin-library-listing()\n<$list filter=\"[all[tiddlers+shadows]tag[$:/tags/PluginLibrary]]\">\n<div class=\"tc-plugin-library\">\n\n!! <$link><$transclude field=\"caption\"><$view field=\"title\"/></$transclude></$link>\n\n//<$view field=\"url\"/>//\n\n<$transclude/>\n\n<<display-server-connection>>\n</div>\n</$list>\n\\end\n\n<$importvariables filter=\"[[$:/core/ui/PageMacros]] [all[shadows+tiddlers]tag[$:/tags/Macro]!has[draft.of]]\">\n\n<div>\n<<plugin-library-listing>>\n</div>\n\n</$importvariables>\n"
},
"$:/core/ui/ControlPanel/Palette": {
"title": "$:/core/ui/ControlPanel/Palette",
"tags": "$:/tags/ControlPanel/Appearance",
"caption": "{{$:/language/ControlPanel/Palette/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/Palette/\n\n{{$:/snippets/paletteswitcher}}\n\n<$reveal type=\"nomatch\" state=\"$:/state/ShowPaletteEditor\" text=\"yes\">\n\n<$button set=\"$:/state/ShowPaletteEditor\" setTo=\"yes\"><<lingo ShowEditor/Caption>></$button>\n\n</$reveal>\n\n<$reveal type=\"match\" state=\"$:/state/ShowPaletteEditor\" text=\"yes\">\n\n<$button set=\"$:/state/ShowPaletteEditor\" setTo=\"no\"><<lingo HideEditor/Caption>></$button>\n{{$:/snippets/paletteeditor}}\n\n</$reveal>\n\n"
},
"$:/core/ui/ControlPanel/Parsing": {
"title": "$:/core/ui/ControlPanel/Parsing",
"tags": "$:/tags/ControlPanel/Advanced",
"caption": "{{$:/language/ControlPanel/Parsing/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/Parsing/\n\n\\define parsing-inner(typeCap)\n<li>\n<$checkbox tiddler=\"\"\"$:/config/WikiParserRules/$typeCap$/$(currentTiddler)$\"\"\" field=\"text\" checked=\"enable\" unchecked=\"disable\" default=\"enable\"> ''<$text text=<<currentTiddler>>/>'': </$checkbox>\n</li>\n\\end\n\n\\define parsing-outer(typeLower,typeCap)\n<ul>\n<$list filter=\"[wikiparserrules[$typeLower$]]\">\n<<parsing-inner typeCap:\"$typeCap$\">>\n</$list>\n</ul>\n\\end\n\n<<lingo Hint>>\n\n! <<lingo Pragma/Caption>>\n\n<<parsing-outer typeLower:\"pragma\" typeCap:\"Pragma\">>\n\n! <<lingo Inline/Caption>>\n\n<<parsing-outer typeLower:\"inline\" typeCap:\"Inline\">>\n\n! <<lingo Block/Caption>>\n\n<<parsing-outer typeLower:\"block\" typeCap:\"Block\">>\n"
},
"$:/core/ui/ControlPanel/Plugins/Add/Languages": {
"title": "$:/core/ui/ControlPanel/Plugins/Add/Languages",
"caption": "{{$:/language/ControlPanel/Plugins/Languages/Caption}} (<$count filter=\"[all[tiddlers+shadows]tag[$:/tags/RemoteAssetInfo]server-url{!!url}original-plugin-type[language]]\"/>)",
"text": "<<display-server-assets language>>\n"
},
"$:/core/ui/ControlPanel/Plugins/Add/Plugins": {
"title": "$:/core/ui/ControlPanel/Plugins/Add/Plugins",
"caption": "{{$:/language/ControlPanel/Plugins/Plugins/Caption}} (<$count filter=\"[all[tiddlers+shadows]tag[$:/tags/RemoteAssetInfo]server-url{!!url}original-plugin-type[plugin]]\"/>)",
"text": "<<display-server-assets plugin>>\n"
},
"$:/core/ui/ControlPanel/Plugins/Add/Themes": {
"title": "$:/core/ui/ControlPanel/Plugins/Add/Themes",
"caption": "{{$:/language/ControlPanel/Plugins/Themes/Caption}} (<$count filter=\"[all[tiddlers+shadows]tag[$:/tags/RemoteAssetInfo]server-url{!!url}original-plugin-type[theme]]\"/>)",
"text": "<<display-server-assets theme>>\n"
},
"$:/core/ui/ControlPanel/Plugins/AddPlugins": {
"title": "$:/core/ui/ControlPanel/Plugins/AddPlugins",
"text": "\\define lingo-base() $:/language/ControlPanel/Plugins/\n\n<$button message=\"tm-modal\" param=\"$:/core/ui/ControlPanel/Modals/AddPlugins\" tooltip={{$:/language/ControlPanel/Plugins/Add/Hint}} class=\"tc-btn-big-green\" style=\"background:blue;\">\n{{$:/core/images/download-button}} <<lingo Add/Caption>>\n</$button>\n"
},
"$:/core/ui/ControlPanel/Plugins/Installed/Languages": {
"title": "$:/core/ui/ControlPanel/Plugins/Installed/Languages",
"caption": "{{$:/language/ControlPanel/Plugins/Languages/Caption}} (<$count filter=\"[!has[draft.of]plugin-type[language]]\"/>)",
"text": "<<plugin-table language>>\n"
},
"$:/core/ui/ControlPanel/Plugins/Installed/Plugins": {
"title": "$:/core/ui/ControlPanel/Plugins/Installed/Plugins",
"caption": "{{$:/language/ControlPanel/Plugins/Plugins/Caption}} (<$count filter=\"[!has[draft.of]plugin-type[plugin]]\"/>)",
"text": "<<plugin-table plugin>>\n"
},
"$:/core/ui/ControlPanel/Plugins/Installed/Themes": {
"title": "$:/core/ui/ControlPanel/Plugins/Installed/Themes",
"caption": "{{$:/language/ControlPanel/Plugins/Themes/Caption}} (<$count filter=\"[!has[draft.of]plugin-type[theme]]\"/>)",
"text": "<<plugin-table theme>>\n"
},
"$:/core/ui/ControlPanel/Plugins": {
"title": "$:/core/ui/ControlPanel/Plugins",
"tags": "$:/tags/ControlPanel",
"caption": "{{$:/language/ControlPanel/Plugins/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/Plugins/\n\n\\define popup-state-macro()\n$(qualified-state)$-$(currentTiddler)$\n\\end\n\n\\define tabs-state-macro()\n$(popup-state)$-$(pluginInfoType)$\n\\end\n\n\\define plugin-icon-title()\n$(currentTiddler)$/icon\n\\end\n\n\\define plugin-disable-title()\n$:/config/Plugins/Disabled/$(currentTiddler)$\n\\end\n\n\\define plugin-table-body(type,disabledMessage)\n<div class=\"tc-plugin-info-chunk tc-small-icon\">\n<$reveal type=\"nomatch\" state=<<popup-state>> text=\"yes\">\n<$button class=\"tc-btn-invisible tc-btn-dropdown\" set=<<popup-state>> setTo=\"yes\">\n{{$:/core/images/right-arrow}}\n</$button>\n</$reveal>\n<$reveal type=\"match\" state=<<popup-state>> text=\"yes\">\n<$button class=\"tc-btn-invisible tc-btn-dropdown\" set=<<popup-state>> setTo=\"no\">\n{{$:/core/images/down-arrow}}\n</$button>\n</$reveal>\n</div>\n<div class=\"tc-plugin-info-chunk\">\n<$transclude tiddler=<<currentTiddler>> subtiddler=<<plugin-icon-title>>>\n<$transclude tiddler=\"$:/core/images/plugin-generic-$type$\"/>\n</$transclude>\n</div>\n<div class=\"tc-plugin-info-chunk\">\n<h1>\n''<$view field=\"description\"><$view field=\"title\"/></$view>'' $disabledMessage$\n</h1>\n<h2>\n<$view field=\"title\"/>\n</h2>\n<h2>\n<div><em><$view field=\"version\"/></em></div>\n</h2>\n</div>\n\\end\n\n\\define plugin-table(type)\n<$set name=\"qualified-state\" value=<<qualify \"$:/state/plugin-info\">>>\n<$list filter=\"[!has[draft.of]plugin-type[$type$]sort[description]]\" emptyMessage=<<lingo \"Empty/Hint\">>>\n<$set name=\"popup-state\" value=<<popup-state-macro>>>\n<$reveal type=\"nomatch\" state=<<plugin-disable-title>> text=\"yes\">\n<$link to={{!!title}} class=\"tc-plugin-info\">\n<<plugin-table-body type:\"$type$\">>\n</$link>\n</$reveal>\n<$reveal type=\"match\" state=<<plugin-disable-title>> text=\"yes\">\n<$link to={{!!title}} class=\"tc-plugin-info tc-plugin-info-disabled\">\n<<plugin-table-body type:\"$type$\" disabledMessage:\"<$macrocall $name='lingo' title='Disabled/Status'/>\">>\n</$link>\n</$reveal>\n<$reveal type=\"match\" text=\"yes\" state=<<popup-state>>>\n<div class=\"tc-plugin-info-dropdown\">\n<div class=\"tc-plugin-info-dropdown-body\">\n<$list filter=\"[all[current]] -[[$:/core]]\">\n<div style=\"float:right;\">\n<$reveal type=\"nomatch\" state=<<plugin-disable-title>> text=\"yes\">\n<$button set=<<plugin-disable-title>> setTo=\"yes\" tooltip={{$:/language/ControlPanel/Plugins/Disable/Hint}} aria-label={{$:/language/ControlPanel/Plugins/Disable/Caption}}>\n<<lingo Disable/Caption>>\n</$button>\n</$reveal>\n<$reveal type=\"match\" state=<<plugin-disable-title>> text=\"yes\">\n<$button set=<<plugin-disable-title>> setTo=\"no\" tooltip={{$:/language/ControlPanel/Plugins/Enable/Hint}} aria-label={{$:/language/ControlPanel/Plugins/Enable/Caption}}>\n<<lingo Enable/Caption>>\n</$button>\n</$reveal>\n</div>\n</$list>\n<$reveal type=\"nomatch\" text=\"\" state=\"!!list\">\n<$macrocall $name=\"tabs\" state=<<tabs-state-macro>> tabsList={{!!list}} default=\"readme\" template=\"$:/core/ui/PluginInfo\"/>\n</$reveal>\n<$reveal type=\"match\" text=\"\" state=\"!!list\">\n<<lingo NoInformation/Hint>>\n</$reveal>\n</div>\n</div>\n</$reveal>\n</$set>\n</$list>\n</$set>\n\\end\n\n{{$:/core/ui/ControlPanel/Plugins/AddPlugins}}\n\n<<lingo Installed/Hint>>\n\n<<tabs \"[[$:/core/ui/ControlPanel/Plugins/Installed/Plugins]] [[$:/core/ui/ControlPanel/Plugins/Installed/Themes]] [[$:/core/ui/ControlPanel/Plugins/Installed/Languages]]\" 
\"$:/core/ui/ControlPanel/Plugins/Installed/Plugins\">>\n"
},
"$:/core/ui/ControlPanel/Saving": {
"title": "$:/core/ui/ControlPanel/Saving",
"tags": "$:/tags/ControlPanel",
"caption": "{{$:/language/ControlPanel/Saving/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/Saving/\n\\define backupURL()\nhttp://$(userName)$.tiddlyspot.com/backup/\n\\end\n\\define backupLink()\n<$reveal type=\"nomatch\" state=\"$:/UploadName\" text=\"\">\n<$set name=\"userName\" value={{$:/UploadName}}>\n<$reveal type=\"match\" state=\"$:/UploadURL\" text=\"\">\n<<backupURL>>\n</$reveal>\n<$reveal type=\"nomatch\" state=\"$:/UploadURL\" text=\"\">\n<$macrocall $name=resolvePath source={{$:/UploadBackupDir}} root={{$:/UploadURL}}>>\n</$reveal>\n</$set>\n</$reveal>\n\\end\n! <<lingo TiddlySpot/Heading>>\n\n<<lingo TiddlySpot/Description>>\n\n|<<lingo TiddlySpot/UserName>> |<$edit-text tiddler=\"$:/UploadName\" default=\"\" tag=\"input\"/> |\n|<<lingo TiddlySpot/Password>> |<$password name=\"upload\"/> |\n|<<lingo TiddlySpot/Backups>> |<<backupLink>> |\n\n''<<lingo TiddlySpot/Advanced/Heading>>''\n\n|<<lingo TiddlySpot/ServerURL>> |<$edit-text tiddler=\"$:/UploadURL\" default=\"\" tag=\"input\"/> |\n|<<lingo TiddlySpot/Filename>> |<$edit-text tiddler=\"$:/UploadFilename\" default=\"index.html\" tag=\"input\"/> |\n|<<lingo TiddlySpot/UploadDir>> |<$edit-text tiddler=\"$:/UploadDir\" default=\".\" tag=\"input\"/> |\n|<<lingo TiddlySpot/BackupDir>> |<$edit-text tiddler=\"$:/UploadBackupDir\" default=\".\" tag=\"input\"/> |\n\n<<lingo TiddlySpot/Hint>>"
},
"$:/core/ui/ControlPanel/Settings/AutoSave": {
"title": "$:/core/ui/ControlPanel/Settings/AutoSave",
"tags": "$:/tags/ControlPanel/Settings",
"caption": "{{$:/language/ControlPanel/Settings/AutoSave/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/Settings/AutoSave/\n\n<$link to=\"$:/config/AutoSave\"><<lingo Hint>></$link>\n\n<$radio tiddler=\"$:/config/AutoSave\" value=\"yes\"> <<lingo Enabled/Description>> </$radio>\n\n<$radio tiddler=\"$:/config/AutoSave\" value=\"no\"> <<lingo Disabled/Description>> </$radio>\n"
},
"$:/core/buttonstyles/Borderless": {
"title": "$:/core/buttonstyles/Borderless",
"tags": "$:/tags/ToolbarButtonStyle",
"caption": "{{$:/language/ControlPanel/Settings/ToolbarButtonStyle/Styles/Borderless}}",
"text": "tc-btn-invisible"
},
"$:/core/buttonstyles/Boxed": {
"title": "$:/core/buttonstyles/Boxed",
"tags": "$:/tags/ToolbarButtonStyle",
"caption": "{{$:/language/ControlPanel/Settings/ToolbarButtonStyle/Styles/Boxed}}",
"text": "tc-btn-boxed"
},
"$:/core/buttonstyles/Rounded": {
"title": "$:/core/buttonstyles/Rounded",
"tags": "$:/tags/ToolbarButtonStyle",
"caption": "{{$:/language/ControlPanel/Settings/ToolbarButtonStyle/Styles/Rounded}}",
"text": "tc-btn-rounded"
},
"$:/core/ui/ControlPanel/Settings/CamelCase": {
"title": "$:/core/ui/ControlPanel/Settings/CamelCase",
"tags": "$:/tags/ControlPanel/Settings",
"caption": "{{$:/language/ControlPanel/Settings/CamelCase/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/Settings/CamelCase/\n<<lingo Hint>>\n\n<$checkbox tiddler=\"$:/config/WikiParserRules/Inline/wikilink\" field=\"text\" checked=\"enable\" unchecked=\"disable\" default=\"enable\"> <$link to=\"$:/config/WikiParserRules/Inline/wikilink\"><<lingo Description>></$link> </$checkbox>\n"
},
"$:/core/ui/ControlPanel/Settings/DefaultSidebarTab": {
"caption": "{{$:/language/ControlPanel/Settings/DefaultSidebarTab/Caption}}",
"tags": "$:/tags/ControlPanel/Settings",
"title": "$:/core/ui/ControlPanel/Settings/DefaultSidebarTab",
"text": "\\define lingo-base() $:/language/ControlPanel/Settings/DefaultSidebarTab/\n\n<$link to=\"$:/config/DefaultSidebarTab\"><<lingo Hint>></$link>\n\n<$select tiddler=\"$:/config/DefaultSidebarTab\">\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/SideBar]!has[draft.of]]\">\n<option value=<<currentTiddler>>><$transclude field=\"caption\"><$text text=<<currentTiddler>>/></$transclude></option>\n</$list>\n</$select>\n"
},
"$:/core/ui/ControlPanel/Settings/EditorToolbar": {
"title": "$:/core/ui/ControlPanel/Settings/EditorToolbar",
"tags": "$:/tags/ControlPanel/Settings",
"caption": "{{$:/language/ControlPanel/Settings/EditorToolbar/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/Settings/EditorToolbar/\n<<lingo Hint>>\n\n<$checkbox tiddler=\"$:/config/TextEditor/EnableToolbar\" field=\"text\" checked=\"yes\" unchecked=\"no\" default=\"yes\"> <$link to=\"$:/config/TextEditor/EnableToolbar\"><<lingo Description>></$link> </$checkbox>\n\n"
},
"$:/core/ui/ControlPanel/Settings/LinkToBehaviour": {
"title": "$:/core/ui/ControlPanel/Settings/LinkToBehaviour",
"tags": "$:/tags/ControlPanel/Settings",
"caption": "{{$:/language/ControlPanel/Settings/LinkToBehaviour/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/Settings/LinkToBehaviour/\n\n<$link to=\"$:/config/Navigation/openLinkFromInsideRiver\"><<lingo \"InsideRiver/Hint\">></$link>\n\n<$select tiddler=\"$:/config/Navigation/openLinkFromInsideRiver\">\n <option value=\"above\"><<lingo \"OpenAbove\">></option>\n <option value=\"below\"><<lingo \"OpenBelow\">></option>\n <option value=\"top\"><<lingo \"OpenAtTop\">></option>\n <option value=\"bottom\"><<lingo \"OpenAtBottom\">></option>\n</$select>\n\n<$link to=\"$:/config/Navigation/openLinkFromOutsideRiver\"><<lingo \"OutsideRiver/Hint\">></$link>\n\n<$select tiddler=\"$:/config/Navigation/openLinkFromOutsideRiver\">\n <option value=\"top\"><<lingo \"OpenAtTop\">></option>\n <option value=\"bottom\"><<lingo \"OpenAtBottom\">></option>\n</$select>\n"
},
"$:/core/ui/ControlPanel/Settings/MissingLinks": {
"title": "$:/core/ui/ControlPanel/Settings/MissingLinks",
"tags": "$:/tags/ControlPanel/Settings",
"caption": "{{$:/language/ControlPanel/Settings/MissingLinks/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/Settings/MissingLinks/\n<<lingo Hint>>\n\n<$checkbox tiddler=\"$:/config/MissingLinks\" field=\"text\" checked=\"yes\" unchecked=\"no\" default=\"yes\"> <$link to=\"$:/config/MissingLinks\"><<lingo Description>></$link> </$checkbox>\n\n"
},
"$:/core/ui/ControlPanel/Settings/NavigationAddressBar": {
"title": "$:/core/ui/ControlPanel/Settings/NavigationAddressBar",
"tags": "$:/tags/ControlPanel/Settings",
"caption": "{{$:/language/ControlPanel/Settings/NavigationAddressBar/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/Settings/NavigationAddressBar/\n\n<$link to=\"$:/config/Navigation/UpdateAddressBar\"><<lingo Hint>></$link>\n\n<$radio tiddler=\"$:/config/Navigation/UpdateAddressBar\" value=\"permaview\"> <<lingo Permaview/Description>> </$radio>\n\n<$radio tiddler=\"$:/config/Navigation/UpdateAddressBar\" value=\"permalink\"> <<lingo Permalink/Description>> </$radio>\n\n<$radio tiddler=\"$:/config/Navigation/UpdateAddressBar\" value=\"no\"> <<lingo No/Description>> </$radio>\n"
},
"$:/core/ui/ControlPanel/Settings/NavigationHistory": {
"title": "$:/core/ui/ControlPanel/Settings/NavigationHistory",
"tags": "$:/tags/ControlPanel/Settings",
"caption": "{{$:/language/ControlPanel/Settings/NavigationHistory/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/Settings/NavigationHistory/\n<$link to=\"$:/config/Navigation/UpdateHistory\"><<lingo Hint>></$link>\n\n<$radio tiddler=\"$:/config/Navigation/UpdateHistory\" value=\"yes\"> <<lingo Yes/Description>> </$radio>\n\n<$radio tiddler=\"$:/config/Navigation/UpdateHistory\" value=\"no\"> <<lingo No/Description>> </$radio>\n"
},
"$:/core/ui/ControlPanel/Settings/PerformanceInstrumentation": {
"title": "$:/core/ui/ControlPanel/Settings/PerformanceInstrumentation",
"tags": "$:/tags/ControlPanel/Settings",
"caption": "{{$:/language/ControlPanel/Settings/PerformanceInstrumentation/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/Settings/PerformanceInstrumentation/\n<<lingo Hint>>\n\n<$checkbox tiddler=\"$:/config/Performance/Instrumentation\" field=\"text\" checked=\"yes\" unchecked=\"no\" default=\"no\"> <$link to=\"$:/config/Performance/Instrumentation\"><<lingo Description>></$link> </$checkbox>\n"
},
"$:/core/ui/ControlPanel/Settings/TitleLinks": {
"title": "$:/core/ui/ControlPanel/Settings/TitleLinks",
"tags": "$:/tags/ControlPanel/Settings",
"caption": "{{$:/language/ControlPanel/Settings/TitleLinks/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/Settings/TitleLinks/\n<$link to=\"$:/config/Tiddlers/TitleLinks\"><<lingo Hint>></$link>\n\n<$radio tiddler=\"$:/config/Tiddlers/TitleLinks\" value=\"yes\"> <<lingo Yes/Description>> </$radio>\n\n<$radio tiddler=\"$:/config/Tiddlers/TitleLinks\" value=\"no\"> <<lingo No/Description>> </$radio>\n"
},
"$:/core/ui/ControlPanel/Settings/ToolbarButtons": {
"title": "$:/core/ui/ControlPanel/Settings/ToolbarButtons",
"tags": "$:/tags/ControlPanel/Settings",
"caption": "{{$:/language/ControlPanel/Settings/ToolbarButtons/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/Settings/ToolbarButtons/\n<<lingo Hint>>\n\n<$checkbox tiddler=\"$:/config/Toolbar/Icons\" field=\"text\" checked=\"yes\" unchecked=\"no\" default=\"yes\"> <$link to=\"$:/config/Toolbar/Icons\"><<lingo Icons/Description>></$link> </$checkbox>\n\n<$checkbox tiddler=\"$:/config/Toolbar/Text\" field=\"text\" checked=\"yes\" unchecked=\"no\" default=\"no\"> <$link to=\"$:/config/Toolbar/Text\"><<lingo Text/Description>></$link> </$checkbox>\n"
},
"$:/core/ui/ControlPanel/Settings/ToolbarButtonStyle": {
"title": "$:/core/ui/ControlPanel/Settings/ToolbarButtonStyle",
"tags": "$:/tags/ControlPanel/Settings",
"caption": "{{$:/language/ControlPanel/Settings/ToolbarButtonStyle/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/Settings/ToolbarButtonStyle/\n<$link to=\"$:/config/Toolbar/ButtonClass\"><<lingo \"Hint\">></$link>\n\n<$select tiddler=\"$:/config/Toolbar/ButtonClass\">\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/ToolbarButtonStyle]]\">\n<option value={{!!text}}>{{!!caption}}</option>\n</$list>\n</$select>\n"
},
"$:/core/ui/ControlPanel/Settings": {
"title": "$:/core/ui/ControlPanel/Settings",
"tags": "$:/tags/ControlPanel",
"caption": "{{$:/language/ControlPanel/Settings/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/Settings/\n\n<<lingo Hint>>\n\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/ControlPanel/Settings]]\">\n\n<div style=\"border-top:1px solid #eee;\">\n\n!! <$link><$transclude field=\"caption\"/></$link>\n\n<$transclude/>\n\n</div>\n\n</$list>\n"
},
"$:/core/ui/ControlPanel/StoryView": {
"title": "$:/core/ui/ControlPanel/StoryView",
"tags": "$:/tags/ControlPanel/Appearance",
"caption": "{{$:/language/ControlPanel/StoryView/Caption}}",
"text": "{{$:/snippets/viewswitcher}}\n"
},
"$:/core/ui/ControlPanel/Theme": {
"title": "$:/core/ui/ControlPanel/Theme",
"tags": "$:/tags/ControlPanel/Appearance",
"caption": "{{$:/language/ControlPanel/Theme/Caption}}",
"text": "{{$:/snippets/themeswitcher}}\n"
},
"$:/core/ui/ControlPanel/TiddlerFields": {
"title": "$:/core/ui/ControlPanel/TiddlerFields",
"tags": "$:/tags/ControlPanel/Advanced",
"caption": "{{$:/language/ControlPanel/TiddlerFields/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/\n\n<<lingo TiddlerFields/Hint>>\n\n{{$:/snippets/allfields}}"
},
"$:/core/ui/ControlPanel/Toolbars/EditorToolbar": {
"title": "$:/core/ui/ControlPanel/Toolbars/EditorToolbar",
"tags": "$:/tags/ControlPanel/Toolbars",
"caption": "{{$:/language/ControlPanel/Toolbars/EditorToolbar/Caption}}",
"text": "\\define lingo-base() $:/language/TiddlerInfo/\n\n\\define config-title()\n$:/config/EditorToolbarButtons/Visibility/$(listItem)$\n\\end\n\n\\define toolbar-button()\n<$checkbox tiddler=<<config-title>> field=\"text\" checked=\"show\" unchecked=\"hide\" default=\"show\"> <$transclude tiddler={{$(listItem)$!!icon}}/> <$transclude tiddler=<<listItem>> field=\"caption\"/> -- <i class=\"tc-muted\"><$transclude tiddler=<<listItem>> field=\"description\"/></i></$checkbox>\n\\end\n\n{{$:/language/ControlPanel/Toolbars/EditorToolbar/Hint}}\n\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/EditorToolbar]!has[draft.of]]\" variable=\"listItem\">\n\n<<toolbar-button>>\n\n</$list>\n"
},
"$:/core/ui/ControlPanel/Toolbars/EditToolbar": {
"title": "$:/core/ui/ControlPanel/Toolbars/EditToolbar",
"tags": "$:/tags/ControlPanel/Toolbars",
"caption": "{{$:/language/ControlPanel/Toolbars/EditToolbar/Caption}}",
"text": "\\define lingo-base() $:/language/TiddlerInfo/\n\\define config-title()\n$:/config/EditToolbarButtons/Visibility/$(listItem)$\n\\end\n\n{{$:/language/ControlPanel/Toolbars/EditToolbar/Hint}}\n\n<$set name=\"tv-config-toolbar-icons\" value=\"yes\">\n\n<$set name=\"tv-config-toolbar-text\" value=\"yes\">\n\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/EditToolbar]!has[draft.of]]\" variable=\"listItem\">\n\n<$checkbox tiddler=<<config-title>> field=\"text\" checked=\"show\" unchecked=\"hide\" default=\"show\"/> <$transclude tiddler=<<listItem>> field=\"caption\"/> <i class=\"tc-muted\">-- <$transclude tiddler=<<listItem>> field=\"description\"/></i>\n\n</$list>\n\n</$set>\n\n</$set>\n"
},
"$:/core/ui/ControlPanel/Toolbars/PageControls": {
"title": "$:/core/ui/ControlPanel/Toolbars/PageControls",
"tags": "$:/tags/ControlPanel/Toolbars",
"caption": "{{$:/language/ControlPanel/Toolbars/PageControls/Caption}}",
"text": "\\define lingo-base() $:/language/TiddlerInfo/\n\\define config-title()\n$:/config/PageControlButtons/Visibility/$(listItem)$\n\\end\n\n{{$:/language/ControlPanel/Toolbars/PageControls/Hint}}\n\n<$set name=\"tv-config-toolbar-icons\" value=\"yes\">\n\n<$set name=\"tv-config-toolbar-text\" value=\"yes\">\n\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/PageControls]!has[draft.of]]\" variable=\"listItem\">\n\n<$checkbox tiddler=<<config-title>> field=\"text\" checked=\"show\" unchecked=\"hide\" default=\"show\"/> <$transclude tiddler=<<listItem>> field=\"caption\"/> <i class=\"tc-muted\">-- <$transclude tiddler=<<listItem>> field=\"description\"/></i>\n\n</$list>\n\n</$set>\n\n</$set>\n"
},
"$:/core/ui/ControlPanel/Toolbars/ViewToolbar": {
"title": "$:/core/ui/ControlPanel/Toolbars/ViewToolbar",
"tags": "$:/tags/ControlPanel/Toolbars",
"caption": "{{$:/language/ControlPanel/Toolbars/ViewToolbar/Caption}}",
"text": "\\define lingo-base() $:/language/TiddlerInfo/\n\\define config-title()\n$:/config/ViewToolbarButtons/Visibility/$(listItem)$\n\\end\n\n{{$:/language/ControlPanel/Toolbars/ViewToolbar/Hint}}\n\n<$set name=\"tv-config-toolbar-icons\" value=\"yes\">\n\n<$set name=\"tv-config-toolbar-text\" value=\"yes\">\n\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/ViewToolbar]!has[draft.of]]\" variable=\"listItem\">\n\n<$checkbox tiddler=<<config-title>> field=\"text\" checked=\"show\" unchecked=\"hide\" default=\"show\"/> <$transclude tiddler=<<listItem>> field=\"caption\"/> <i class=\"tc-muted\">-- <$transclude tiddler=<<listItem>> field=\"description\"/></i>\n\n</$list>\n\n</$set>\n\n</$set>\n"
},
"$:/core/ui/ControlPanel/Toolbars": {
"title": "$:/core/ui/ControlPanel/Toolbars",
"tags": "$:/tags/ControlPanel/Appearance",
"caption": "{{$:/language/ControlPanel/Toolbars/Caption}}",
"text": "{{$:/language/ControlPanel/Toolbars/Hint}}\n\n<div class=\"tc-control-panel\">\n<<tabs \"[all[shadows+tiddlers]tag[$:/tags/ControlPanel/Toolbars]!has[draft.of]]\" \"$:/core/ui/ControlPanel/Toolbars/ViewToolbar\" \"$:/state/tabs/controlpanel/toolbars\" \"tc-vertical\">>\n</div>\n"
},
"$:/ControlPanel": {
"title": "$:/ControlPanel",
"icon": "$:/core/images/options-button",
"color": "#bbb",
"text": "<div class=\"tc-control-panel\">\n<<tabs \"[all[shadows+tiddlers]tag[$:/tags/ControlPanel]!has[draft.of]]\" \"$:/core/ui/ControlPanel/Info\">>\n</div>\n"
},
"$:/core/ui/DefaultSearchResultList": {
"title": "$:/core/ui/DefaultSearchResultList",
"tags": "$:/tags/SearchResults",
"caption": "{{$:/language/Search/DefaultResults/Caption}}",
"text": "\\define searchResultList()\n//<small>{{$:/language/Search/Matches/Title}}</small>//\n\n<$list filter=\"[!is[system]search:title{$(searchTiddler)$}sort[title]limit[250]]\" template=\"$:/core/ui/ListItemTemplate\"/>\n\n//<small>{{$:/language/Search/Matches/All}}</small>//\n\n<$list filter=\"[!is[system]search{$(searchTiddler)$}sort[title]limit[250]]\" template=\"$:/core/ui/ListItemTemplate\"/>\n\n\\end\n<<searchResultList>>\n"
},
"$:/core/ui/EditorToolbar/bold": {
"title": "$:/core/ui/EditorToolbar/bold",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/bold",
"caption": "{{$:/language/Buttons/Bold/Caption}}",
"description": "{{$:/language/Buttons/Bold/Hint}}",
"condition": "[<targetTiddler>!has[type]] [<targetTiddler>type[text/vnd.tiddlywiki]]",
"shortcuts": "((bold))",
"text": "<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"wrap-selection\"\n\tprefix=\"''\"\n\tsuffix=\"''\"\n/>\n"
},
"$:/core/ui/EditorToolbar/clear-dropdown": {
"title": "$:/core/ui/EditorToolbar/clear-dropdown",
"text": "''{{$:/language/Buttons/Clear/Hint}}''\n\n<div class=\"tc-colour-chooser\">\n\n<$macrocall $name=\"colour-picker\" actions=\"\"\"\n\n<$action-sendmessage\n\t$message=\"tm-edit-bitmap-operation\"\n\t$param=\"clear\"\n\tcolour=<<colour-picker-value>>\n/>\n\n<$action-deletetiddler\n\t$tiddler=<<dropdown-state>>\n/>\n\n\"\"\"/>\n\n</div>\n"
},
"$:/core/ui/EditorToolbar/clear": {
"title": "$:/core/ui/EditorToolbar/clear",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/erase",
"caption": "{{$:/language/Buttons/Clear/Caption}}",
"description": "{{$:/language/Buttons/Clear/Hint}}",
"condition": "[<targetTiddler>is[image]]",
"dropdown": "$:/core/ui/EditorToolbar/clear-dropdown",
"text": ""
},
"$:/core/ui/EditorToolbar/editor-height-dropdown": {
"title": "$:/core/ui/EditorToolbar/editor-height-dropdown",
"text": "\\define lingo-base() $:/language/Buttons/EditorHeight/\n''<<lingo Hint>>''\n\n<$radio tiddler=\"$:/config/TextEditor/EditorHeight/Mode\" value=\"auto\"> {{$:/core/images/auto-height}} <<lingo Caption/Auto>></$radio>\n\n<$radio tiddler=\"$:/config/TextEditor/EditorHeight/Mode\" value=\"fixed\"> {{$:/core/images/fixed-height}} <<lingo Caption/Fixed>> <$edit-text tag=\"input\" tiddler=\"$:/config/TextEditor/EditorHeight/Height\" default=\"100px\"/></$radio>\n"
},
"$:/core/ui/EditorToolbar/editor-height": {
"title": "$:/core/ui/EditorToolbar/editor-height",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/fixed-height",
"custom-icon": "yes",
"caption": "{{$:/language/Buttons/EditorHeight/Caption}}",
"description": "{{$:/language/Buttons/EditorHeight/Hint}}",
"condition": "[<targetTiddler>!is[image]]",
"dropdown": "$:/core/ui/EditorToolbar/editor-height-dropdown",
"text": "<$reveal tag=\"span\" state=\"$:/config/TextEditor/EditorHeight/Mode\" type=\"match\" text=\"fixed\">\n{{$:/core/images/fixed-height}}\n</$reveal>\n<$reveal tag=\"span\" state=\"$:/config/TextEditor/EditorHeight/Mode\" type=\"match\" text=\"auto\">\n{{$:/core/images/auto-height}}\n</$reveal>\n"
},
"$:/core/ui/EditorToolbar/excise-dropdown": {
"title": "$:/core/ui/EditorToolbar/excise-dropdown",
"text": "\\define lingo-base() $:/language/Buttons/Excise/\n\n\\define body(config-title)\n''<<lingo Hint>>''\n\n<<lingo Caption/NewTitle>> <$edit-text tag=\"input\" tiddler=\"$config-title$/new-title\" default=\"\" focus=\"true\"/>\n\n<$set name=\"new-title\" value={{$config-title$/new-title}}>\n<$list filter=\"\"\"[<new-title>is[tiddler]]\"\"\">\n<div class=\"tc-error\">\n<<lingo Caption/TiddlerExists>>\n</div>\n</$list>\n</$set>\n\n<$checkbox tiddler=\"\"\"$config-title$/tagnew\"\"\" field=\"text\" checked=\"yes\" unchecked=\"no\" default=\"false\"> <<lingo Caption/Tag>></$checkbox>\n\n<<lingo Caption/Replace>> <$select tiddler=\"\"\"$config-title$/type\"\"\" default=\"transclude\">\n<option value=\"link\"><<lingo Caption/Replace/Link>></option>\n<option value=\"transclude\"><<lingo Caption/Replace/Transclusion>></option>\n<option value=\"macro\"><<lingo Caption/Replace/Macro>></option>\n</$select>\n\n<$reveal state=\"\"\"$config-title$/type\"\"\" type=\"match\" text=\"macro\">\n<<lingo Caption/MacroName>> <$edit-text tag=\"input\" tiddler=\"\"\"$config-title$/macro-title\"\"\" default=\"translink\"/>\n</$reveal>\n\n<$button>\n<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"excise\"\n\ttitle={{$config-title$/new-title}}\n\ttype={{$config-title$/type}}\n\tmacro={{$config-title$/macro-title}}\n\ttagnew={{$config-title$/tagnew}}\n/>\n<$action-deletetiddler\n\t$tiddler=<<qualify \"$:/state/Excise/NewTitle\">>\n/>\n<$action-deletetiddler\n\t$tiddler=<<dropdown-state>>\n/>\n<<lingo Caption/Excise>>\n</$button>\n\\end\n\n<$macrocall $name=\"body\" config-title=<<qualify \"$:/state/Excise/\">>/>\n"
},
"$:/core/ui/EditorToolbar/excise": {
"title": "$:/core/ui/EditorToolbar/excise",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/excise",
"caption": "{{$:/language/Buttons/Excise/Caption}}",
"description": "{{$:/language/Buttons/Excise/Hint}}",
"condition": "[<targetTiddler>!is[image]]",
"shortcuts": "((excise))",
"dropdown": "$:/core/ui/EditorToolbar/excise-dropdown",
"text": ""
},
"$:/core/ui/EditorToolbar/heading-1": {
"title": "$:/core/ui/EditorToolbar/heading-1",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/heading-1",
"caption": "{{$:/language/Buttons/Heading1/Caption}}",
"description": "{{$:/language/Buttons/Heading1/Hint}}",
"condition": "[<targetTiddler>!has[type]] [<targetTiddler>type[text/vnd.tiddlywiki]]",
"button-classes": "tc-text-editor-toolbar-item-start-group",
"shortcuts": "((heading-1))",
"text": "<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"prefix-lines\"\n\tcharacter=\"!\"\n\tcount=\"1\"\n/>\n"
},
"$:/core/ui/EditorToolbar/heading-2": {
"title": "$:/core/ui/EditorToolbar/heading-2",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/heading-2",
"caption": "{{$:/language/Buttons/Heading2/Caption}}",
"description": "{{$:/language/Buttons/Heading2/Hint}}",
"condition": "[<targetTiddler>!has[type]] [<targetTiddler>type[text/vnd.tiddlywiki]]",
"shortcuts": "((heading-2))",
"text": "<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"prefix-lines\"\n\tcharacter=\"!\"\n\tcount=\"2\"\n/>\n"
},
"$:/core/ui/EditorToolbar/heading-3": {
"title": "$:/core/ui/EditorToolbar/heading-3",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/heading-3",
"caption": "{{$:/language/Buttons/Heading3/Caption}}",
"description": "{{$:/language/Buttons/Heading3/Hint}}",
"condition": "[<targetTiddler>!has[type]] [<targetTiddler>type[text/vnd.tiddlywiki]]",
"shortcuts": "((heading-3))",
"text": "<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"prefix-lines\"\n\tcharacter=\"!\"\n\tcount=\"3\"\n/>\n"
},
"$:/core/ui/EditorToolbar/heading-4": {
"title": "$:/core/ui/EditorToolbar/heading-4",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/heading-4",
"caption": "{{$:/language/Buttons/Heading4/Caption}}",
"description": "{{$:/language/Buttons/Heading4/Hint}}",
"condition": "[<targetTiddler>!has[type]] [<targetTiddler>type[text/vnd.tiddlywiki]]",
"shortcuts": "((heading-4))",
"text": "<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"prefix-lines\"\n\tcharacter=\"!\"\n\tcount=\"4\"\n/>\n"
},
"$:/core/ui/EditorToolbar/heading-5": {
"title": "$:/core/ui/EditorToolbar/heading-5",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/heading-5",
"caption": "{{$:/language/Buttons/Heading5/Caption}}",
"description": "{{$:/language/Buttons/Heading5/Hint}}",
"condition": "[<targetTiddler>!has[type]] [<targetTiddler>type[text/vnd.tiddlywiki]]",
"shortcuts": "((heading-5))",
"text": "<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"prefix-lines\"\n\tcharacter=\"!\"\n\tcount=\"5\"\n/>\n"
},
"$:/core/ui/EditorToolbar/heading-6": {
"title": "$:/core/ui/EditorToolbar/heading-6",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/heading-6",
"caption": "{{$:/language/Buttons/Heading6/Caption}}",
"description": "{{$:/language/Buttons/Heading6/Hint}}",
"condition": "[<targetTiddler>!has[type]] [<targetTiddler>type[text/vnd.tiddlywiki]]",
"shortcuts": "((heading-6))",
"text": "<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"prefix-lines\"\n\tcharacter=\"!\"\n\tcount=\"6\"\n/>\n"
},
"$:/core/ui/EditorToolbar/italic": {
"title": "$:/core/ui/EditorToolbar/italic",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/italic",
"caption": "{{$:/language/Buttons/Italic/Caption}}",
"description": "{{$:/language/Buttons/Italic/Hint}}",
"condition": "[<targetTiddler>!has[type]] [<targetTiddler>type[text/vnd.tiddlywiki]]",
"shortcuts": "((italic))",
"text": "<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"wrap-selection\"\n\tprefix=\"//\"\n\tsuffix=\"//\"\n/>\n"
},
"$:/core/ui/EditorToolbar/line-width-dropdown": {
"title": "$:/core/ui/EditorToolbar/line-width-dropdown",
"text": "\\define lingo-base() $:/language/Buttons/LineWidth/\n\n\\define toolbar-line-width-inner()\n<$button tag=\"a\" tooltip=\"\"\"$(line-width)$\"\"\">\n\n<$action-setfield\n\t$tiddler=\"$:/config/BitmapEditor/LineWidth\"\n\t$value=\"$(line-width)$\"\n/>\n\n<$action-deletetiddler\n\t$tiddler=<<dropdown-state>>\n/>\n\n<div style=\"display: inline-block; margin: 4px calc(80px - $(line-width)$); background-color: #000; width: calc(100px + $(line-width)$ * 2); height: $(line-width)$; border-radius: 120px; vertical-align: middle;\"/>\n\n<span style=\"margin-left: 8px;\">\n\n<$text text=\"\"\"$(line-width)$\"\"\"/>\n\n<$reveal state=\"$:/config/BitmapEditor/LineWidth\" type=\"match\" text=\"\"\"$(line-width)$\"\"\" tag=\"span\">\n\n<$entity entity=\" \"/>\n\n<$entity entity=\"✓\"/>\n\n</$reveal>\n\n</span>\n\n</$button>\n\\end\n\n''<<lingo Hint>>''\n\n<$list filter={{$:/config/BitmapEditor/LineWidths}} variable=\"line-width\">\n\n<<toolbar-line-width-inner>>\n\n</$list>\n"
},
"$:/core/ui/EditorToolbar/line-width": {
"title": "$:/core/ui/EditorToolbar/line-width",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/line-width",
"caption": "{{$:/language/Buttons/LineWidth/Caption}}",
"description": "{{$:/language/Buttons/LineWidth/Hint}}",
"condition": "[<targetTiddler>is[image]]",
"dropdown": "$:/core/ui/EditorToolbar/line-width-dropdown",
"text": "<$text text={{$:/config/BitmapEditor/LineWidth}}/>"
},
"$:/core/ui/EditorToolbar/link-dropdown": {
"title": "$:/core/ui/EditorToolbar/link-dropdown",
"text": "\\define lingo-base() $:/language/Buttons/Link/\n\n\\define link-actions()\n<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"make-link\"\n\ttext={{$(linkTiddler)$}}\n/>\n\n<$action-deletetiddler\n\t$tiddler=<<dropdown-state>>\n/>\n\n<$action-deletetiddler\n\t$tiddler=<<searchTiddler>>\n/>\n\n<$action-deletetiddler\n\t$tiddler=<<linkTiddler>>\n/>\n\\end\n\n\\define body(config-title)\n''<<lingo Hint>>''\n\n<$vars searchTiddler=\"\"\"$config-title$/search\"\"\" linkTiddler=\"\"\"$config-title$/link\"\"\">\n\n<$edit-text tiddler=<<searchTiddler>> type=\"search\" tag=\"input\" focus=\"true\" placeholder={{$:/language/Search/Search}} default=\"\"/>\n<$reveal tag=\"span\" state=<<searchTiddler>> type=\"nomatch\" text=\"\">\n<$button class=\"tc-btn-invisible\" style=\"width: auto; display: inline-block; background-colour: inherit;\">\n<$action-setfield $tiddler=<<searchTiddler>> text=\"\" />\n{{$:/core/images/close-button}}\n</$button>\n</$reveal>\n\n<$reveal tag=\"div\" state=<<searchTiddler>> type=\"nomatch\" text=\"\">\n\n<$linkcatcher actions=<<link-actions>> to=<<linkTiddler>>>\n\n{{$:/core/ui/SearchResults}}\n\n</$linkcatcher>\n\n</$reveal>\n\n</$vars>\n\n\\end\n\n<$macrocall $name=\"body\" config-title=<<qualify \"$:/state/Link/\">>/>\n"
},
"$:/core/ui/EditorToolbar/link": {
"title": "$:/core/ui/EditorToolbar/link",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/link",
"caption": "{{$:/language/Buttons/Link/Caption}}",
"description": "{{$:/language/Buttons/Link/Hint}}",
"condition": "[<targetTiddler>!has[type]] [<targetTiddler>type[text/vnd.tiddlywiki]]",
"button-classes": "tc-text-editor-toolbar-item-start-group",
"shortcuts": "((link))",
"dropdown": "$:/core/ui/EditorToolbar/link-dropdown",
"text": ""
},
"$:/core/ui/EditorToolbar/list-bullet": {
"title": "$:/core/ui/EditorToolbar/list-bullet",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/list-bullet",
"caption": "{{$:/language/Buttons/ListBullet/Caption}}",
"description": "{{$:/language/Buttons/ListBullet/Hint}}",
"condition": "[<targetTiddler>!has[type]] [<targetTiddler>type[text/vnd.tiddlywiki]]",
"shortcuts": "((list-bullet))",
"text": "<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"prefix-lines\"\n\tcharacter=\"*\"\n\tcount=\"1\"\n/>\n"
},
"$:/core/ui/EditorToolbar/list-number": {
"title": "$:/core/ui/EditorToolbar/list-number",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/list-number",
"caption": "{{$:/language/Buttons/ListNumber/Caption}}",
"description": "{{$:/language/Buttons/ListNumber/Hint}}",
"condition": "[<targetTiddler>!has[type]] [<targetTiddler>type[text/vnd.tiddlywiki]]",
"shortcuts": "((list-number))",
"text": "<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"prefix-lines\"\n\tcharacter=\"#\"\n\tcount=\"1\"\n/>\n"
},
"$:/core/ui/EditorToolbar/mono-block": {
"title": "$:/core/ui/EditorToolbar/mono-block",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/mono-block",
"caption": "{{$:/language/Buttons/MonoBlock/Caption}}",
"description": "{{$:/language/Buttons/MonoBlock/Hint}}",
"condition": "[<targetTiddler>!has[type]] [<targetTiddler>type[text/vnd.tiddlywiki]]",
"button-classes": "tc-text-editor-toolbar-item-start-group",
"shortcuts": "((mono-block))",
"text": "<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"wrap-lines\"\n\tprefix=\"\n```\"\n\tsuffix=\"```\"\n/>\n"
},
"$:/core/ui/EditorToolbar/mono-line": {
"title": "$:/core/ui/EditorToolbar/mono-line",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/mono-line",
"caption": "{{$:/language/Buttons/MonoLine/Caption}}",
"description": "{{$:/language/Buttons/MonoLine/Hint}}",
"condition": "[<targetTiddler>!has[type]] [<targetTiddler>type[text/vnd.tiddlywiki]]",
"shortcuts": "((mono-line))",
"text": "<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"wrap-selection\"\n\tprefix=\"`\"\n\tsuffix=\"`\"\n/>\n"
},
"$:/core/ui/EditorToolbar/more-dropdown": {
"title": "$:/core/ui/EditorToolbar/more-dropdown",
"text": "\\define config-title()\n$:/config/EditorToolbarButtons/Visibility/$(toolbarItem)$\n\\end\n\n\\define conditional-button()\n<$list filter={{$(toolbarItem)$!!condition}} variable=\"condition\">\n<$transclude tiddler=\"$:/core/ui/EditTemplate/body/toolbar/button\" mode=\"inline\"/> <$transclude tiddler=<<toolbarItem>> field=\"description\"/>\n</$list>\n\\end\n\n<div class=\"tc-text-editor-toolbar-more\">\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/EditorToolbar]!has[draft.of]] -[[$:/core/ui/EditorToolbar/more]]\">\n<$reveal type=\"match\" state=<<config-visibility-title>> text=\"hide\" tag=\"div\">\n<<conditional-button>>\n</$reveal>\n</$list>\n</div>\n"
},
"$:/core/ui/EditorToolbar/more": {
"title": "$:/core/ui/EditorToolbar/more",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/down-arrow",
"caption": "{{$:/language/Buttons/More/Caption}}",
"description": "{{$:/language/Buttons/More/Hint}}",
"condition": "[<targetTiddler>]",
"dropdown": "$:/core/ui/EditorToolbar/more-dropdown",
"text": ""
},
"$:/core/ui/EditorToolbar/opacity-dropdown": {
"title": "$:/core/ui/EditorToolbar/opacity-dropdown",
"text": "\\define lingo-base() $:/language/Buttons/Opacity/\n\n\\define toolbar-opacity-inner()\n<$button tag=\"a\" tooltip=\"\"\"$(opacity)$\"\"\">\n\n<$action-setfield\n\t$tiddler=\"$:/config/BitmapEditor/Opacity\"\n\t$value=\"$(opacity)$\"\n/>\n\n<$action-deletetiddler\n\t$tiddler=<<dropdown-state>>\n/>\n\n<div style=\"display: inline-block; vertical-align: middle; background-color: $(current-paint-colour)$; opacity: $(opacity)$; width: 1em; height: 1em; border-radius: 50%;\"/>\n\n<span style=\"margin-left: 8px;\">\n\n<$text text=\"\"\"$(opacity)$\"\"\"/>\n\n<$reveal state=\"$:/config/BitmapEditor/Opacity\" type=\"match\" text=\"\"\"$(opacity)$\"\"\" tag=\"span\">\n\n<$entity entity=\" \"/>\n\n<$entity entity=\"✓\"/>\n\n</$reveal>\n\n</span>\n\n</$button>\n\\end\n\n\\define toolbar-opacity()\n''<<lingo Hint>>''\n\n<$list filter={{$:/config/BitmapEditor/Opacities}} variable=\"opacity\">\n\n<<toolbar-opacity-inner>>\n\n</$list>\n\\end\n\n<$set name=\"current-paint-colour\" value={{$:/config/BitmapEditor/Colour}}>\n\n<$set name=\"current-opacity\" value={{$:/config/BitmapEditor/Opacity}}>\n\n<<toolbar-opacity>>\n\n</$set>\n\n</$set>\n"
},
"$:/core/ui/EditorToolbar/opacity": {
"title": "$:/core/ui/EditorToolbar/opacity",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/opacity",
"caption": "{{$:/language/Buttons/Opacity/Caption}}",
"description": "{{$:/language/Buttons/Opacity/Hint}}",
"condition": "[<targetTiddler>is[image]]",
"dropdown": "$:/core/ui/EditorToolbar/opacity-dropdown",
"text": "<$text text={{$:/config/BitmapEditor/Opacity}}/>\n"
},
"$:/core/ui/EditorToolbar/paint-dropdown": {
"title": "$:/core/ui/EditorToolbar/paint-dropdown",
"text": "''{{$:/language/Buttons/Paint/Hint}}''\n\n<$macrocall $name=\"colour-picker\" actions=\"\"\"\n\n<$action-setfield\n\t$tiddler=\"$:/config/BitmapEditor/Colour\"\n\t$value=<<colour-picker-value>>\n/>\n\n<$action-deletetiddler\n\t$tiddler=<<dropdown-state>>\n/>\n\n\"\"\"/>\n"
},
"$:/core/ui/EditorToolbar/paint": {
"title": "$:/core/ui/EditorToolbar/paint",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/paint",
"caption": "{{$:/language/Buttons/Paint/Caption}}",
"description": "{{$:/language/Buttons/Paint/Hint}}",
"condition": "[<targetTiddler>is[image]]",
"dropdown": "$:/core/ui/EditorToolbar/paint-dropdown",
"text": "\\define toolbar-paint()\n<div style=\"display: inline-block; vertical-align: middle; background-color: $(colour-picker-value)$; width: 1em; height: 1em; border-radius: 50%;\"/>\n\\end\n<$set name=\"colour-picker-value\" value={{$:/config/BitmapEditor/Colour}}>\n<<toolbar-paint>>\n</$set>\n"
},
"$:/core/ui/EditorToolbar/picture-dropdown": {
"title": "$:/core/ui/EditorToolbar/picture-dropdown",
"text": "\\define replacement-text()\n[img[$(imageTitle)$]]\n\\end\n\n''{{$:/language/Buttons/Picture/Hint}}''\n\n<$macrocall $name=\"image-picker\" actions=\"\"\"\n\n<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"replace-selection\"\n\ttext=<<replacement-text>>\n/>\n\n<$action-deletetiddler\n\t$tiddler=<<dropdown-state>>\n/>\n\n\"\"\"/>\n"
},
"$:/core/ui/EditorToolbar/picture": {
"title": "$:/core/ui/EditorToolbar/picture",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/picture",
"caption": "{{$:/language/Buttons/Picture/Caption}}",
"description": "{{$:/language/Buttons/Picture/Hint}}",
"condition": "[<targetTiddler>!has[type]] [<targetTiddler>type[text/vnd.tiddlywiki]]",
"shortcuts": "((picture))",
"dropdown": "$:/core/ui/EditorToolbar/picture-dropdown",
"text": ""
},
"$:/core/ui/EditorToolbar/preview-type-dropdown": {
"title": "$:/core/ui/EditorToolbar/preview-type-dropdown",
"text": "\\define preview-type-button()\n<$button tag=\"a\">\n\n<$action-setfield $tiddler=\"$:/state/editpreviewtype\" $value=\"$(previewType)$\"/>\n\n<$action-deletetiddler\n\t$tiddler=<<dropdown-state>>\n/>\n\n<$transclude tiddler=<<previewType>> field=\"caption\" mode=\"inline\">\n\n<$view tiddler=<<previewType>> field=\"title\" mode=\"inline\"/>\n\n</$transclude> \n\n<$reveal tag=\"span\" state=\"$:/state/editpreviewtype\" type=\"match\" text=<<previewType>> default=\"$:/core/ui/EditTemplate/body/preview/output\">\n\n<$entity entity=\" \"/>\n\n<$entity entity=\"✓\"/>\n\n</$reveal>\n\n</$button>\n\\end\n\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/EditPreview]!has[draft.of]]\" variable=\"previewType\">\n\n<<preview-type-button>>\n\n</$list>\n"
},
"$:/core/ui/EditorToolbar/preview-type": {
"title": "$:/core/ui/EditorToolbar/preview-type",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/chevron-down",
"caption": "{{$:/language/Buttons/PreviewType/Caption}}",
"description": "{{$:/language/Buttons/PreviewType/Hint}}",
"condition": "[all[shadows+tiddlers]tag[$:/tags/EditPreview]!has[draft.of]butfirst[]limit[1]]",
"button-classes": "tc-text-editor-toolbar-item-adjunct",
"dropdown": "$:/core/ui/EditorToolbar/preview-type-dropdown"
},
"$:/core/ui/EditorToolbar/preview": {
"title": "$:/core/ui/EditorToolbar/preview",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/preview-open",
"custom-icon": "yes",
"caption": "{{$:/language/Buttons/Preview/Caption}}",
"description": "{{$:/language/Buttons/Preview/Hint}}",
"condition": "[<targetTiddler>]",
"button-classes": "tc-text-editor-toolbar-item-start-group",
"shortcuts": "((preview))",
"text": "<$reveal state=\"$:/state/showeditpreview\" type=\"match\" text=\"yes\" tag=\"span\">\n{{$:/core/images/preview-open}}\n<$action-setfield $tiddler=\"$:/state/showeditpreview\" $value=\"no\"/>\n</$reveal>\n<$reveal state=\"$:/state/showeditpreview\" type=\"nomatch\" text=\"yes\" tag=\"span\">\n{{$:/core/images/preview-closed}}\n<$action-setfield $tiddler=\"$:/state/showeditpreview\" $value=\"yes\"/>\n</$reveal>\n"
},
"$:/core/ui/EditorToolbar/quote": {
"title": "$:/core/ui/EditorToolbar/quote",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/quote",
"caption": "{{$:/language/Buttons/Quote/Caption}}",
"description": "{{$:/language/Buttons/Quote/Hint}}",
"condition": "[<targetTiddler>!has[type]] [<targetTiddler>type[text/vnd.tiddlywiki]]",
"shortcuts": "((quote))",
"text": "<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"wrap-lines\"\n\tprefix=\"\n<<<\"\n\tsuffix=\"<<<\"\n/>\n"
},
"$:/core/ui/EditorToolbar/size-dropdown": {
"title": "$:/core/ui/EditorToolbar/size-dropdown",
"text": "\\define lingo-base() $:/language/Buttons/Size/\n\n\\define toolbar-button-size-preset(config-title)\n<$set name=\"width\" filter=\"$(sizePair)$ +[first[]]\">\n\n<$set name=\"height\" filter=\"$(sizePair)$ +[last[]]\">\n\n<$button tag=\"a\">\n\n<$action-setfield\n\t$tiddler=\"\"\"$config-title$/new-width\"\"\"\n\t$value=<<width>>\n/>\n\n<$action-setfield\n\t$tiddler=\"\"\"$config-title$/new-height\"\"\"\n\t$value=<<height>>\n/>\n\n<$action-deletetiddler\n\t$tiddler=\"\"\"$config-title$/presets-popup\"\"\"\n/>\n\n<$text text=<<width>>/> × <$text text=<<height>>/>\n\n</$button>\n\n</$set>\n\n</$set>\n\\end\n\n\\define toolbar-button-size(config-title)\n''{{$:/language/Buttons/Size/Hint}}''\n\n<<lingo Caption/Width>> <$edit-text tag=\"input\" tiddler=\"\"\"$config-title$/new-width\"\"\" default=<<tv-bitmap-editor-width>> focus=\"true\" size=\"8\"/> <<lingo Caption/Height>> <$edit-text tag=\"input\" tiddler=\"\"\"$config-title$/new-height\"\"\" default=<<tv-bitmap-editor-height>> size=\"8\"/> <$button popup=\"\"\"$config-title$/presets-popup\"\"\" class=\"tc-btn-invisible tc-popup-keep\" style=\"width: auto; display: inline-block; background-colour: inherit;\" selectedClass=\"tc-selected\">\n{{$:/core/images/down-arrow}}\n</$button>\n\n<$reveal tag=\"span\" state=\"\"\"$config-title$/presets-popup\"\"\" type=\"popup\" position=\"belowleft\" animate=\"yes\">\n\n<div class=\"tc-drop-down tc-popup-keep\">\n\n<$list filter={{$:/config/BitmapEditor/ImageSizes}} variable=\"sizePair\">\n\n<$macrocall $name=\"toolbar-button-size-preset\" config-title=\"$config-title$\"/>\n\n</$list>\n\n</div>\n\n</$reveal>\n\n<$button>\n<$action-sendmessage\n\t$message=\"tm-edit-bitmap-operation\"\n\t$param=\"resize\"\n\twidth={{$config-title$/new-width}}\n\theight={{$config-title$/new-height}}\n/>\n<$action-deletetiddler\n\t$tiddler=\"\"\"$config-title$/new-width\"\"\"\n/>\n<$action-deletetiddler\n\t$tiddler=\"\"\"$config-title$/new-height\"\"\"\n/>\n<$action-deletetiddler\n\t$tiddler=<<dropdown-state>>\n/>\n<<lingo Caption/Resize>>\n</$button>\n\\end\n\n<$macrocall $name=\"toolbar-button-size\" config-title=<<qualify \"$:/state/Size/\">>/>\n"
},
"$:/core/ui/EditorToolbar/size": {
"title": "$:/core/ui/EditorToolbar/size",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/size",
"caption": "{{$:/language/Buttons/Size/Caption}}",
"description": "{{$:/language/Buttons/Size/Hint}}",
"condition": "[<targetTiddler>is[image]]",
"dropdown": "$:/core/ui/EditorToolbar/size-dropdown",
"text": ""
},
"$:/core/ui/EditorToolbar/stamp-dropdown": {
"title": "$:/core/ui/EditorToolbar/stamp-dropdown",
"text": "\\define toolbar-button-stamp-inner()\n<$button tag=\"a\">\n\n<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"replace-selection\"\n\ttext={{$(snippetTitle)$}}\n/>\n\n<$action-deletetiddler\n\t$tiddler=<<dropdown-state>>\n/>\n\n<$view tiddler=<<snippetTitle>> field=\"caption\" mode=\"inline\">\n\n<$view tiddler=<<snippetTitle>> field=\"title\" mode=\"inline\"/>\n\n</$view>\n\n</$button>\n\\end\n\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/TextEditor/Snippet]!has[draft.of]sort[caption]]\" variable=\"snippetTitle\">\n\n<<toolbar-button-stamp-inner>>\n\n</$list>\n\n----\n\n<$button tag=\"a\">\n\n<$action-sendmessage\n\t$message=\"tm-new-tiddler\"\n\ttags=\"$:/tags/TextEditor/Snippet\"\n\tcaption={{$:/language/Buttons/Stamp/New/Title}}\n\ttext={{$:/language/Buttons/Stamp/New/Text}}\n/>\n\n<$action-deletetiddler\n\t$tiddler=<<dropdown-state>>\n/>\n\n<em>\n\n<$text text={{$:/language/Buttons/Stamp/Caption/New}}/>\n\n</em>\n\n</$button>\n"
},
"$:/core/ui/EditorToolbar/stamp": {
"title": "$:/core/ui/EditorToolbar/stamp",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/stamp",
"caption": "{{$:/language/Buttons/Stamp/Caption}}",
"description": "{{$:/language/Buttons/Stamp/Hint}}",
"condition": "[<targetTiddler>!is[image]]",
"shortcuts": "((stamp))",
"dropdown": "$:/core/ui/EditorToolbar/stamp-dropdown",
"text": ""
},
"$:/core/ui/EditorToolbar/strikethrough": {
"title": "$:/core/ui/EditorToolbar/strikethrough",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/strikethrough",
"caption": "{{$:/language/Buttons/Strikethrough/Caption}}",
"description": "{{$:/language/Buttons/Strikethrough/Hint}}",
"condition": "[<targetTiddler>!has[type]] [<targetTiddler>type[text/vnd.tiddlywiki]]",
"shortcuts": "((strikethrough))",
"text": "<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"wrap-selection\"\n\tprefix=\"~~\"\n\tsuffix=\"~~\"\n/>\n"
},
"$:/core/ui/EditorToolbar/subscript": {
"title": "$:/core/ui/EditorToolbar/subscript",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/subscript",
"caption": "{{$:/language/Buttons/Subscript/Caption}}",
"description": "{{$:/language/Buttons/Subscript/Hint}}",
"condition": "[<targetTiddler>!has[type]] [<targetTiddler>type[text/vnd.tiddlywiki]]",
"shortcuts": "((subscript))",
"text": "<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"wrap-selection\"\n\tprefix=\",,\"\n\tsuffix=\",,\"\n/>\n"
},
"$:/core/ui/EditorToolbar/superscript": {
"title": "$:/core/ui/EditorToolbar/superscript",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/superscript",
"caption": "{{$:/language/Buttons/Superscript/Caption}}",
"description": "{{$:/language/Buttons/Superscript/Hint}}",
"condition": "[<targetTiddler>!has[type]] [<targetTiddler>type[text/vnd.tiddlywiki]]",
"shortcuts": "((superscript))",
"text": "<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"wrap-selection\"\n\tprefix=\"^^\"\n\tsuffix=\"^^\"\n/>\n"
},
"$:/core/ui/EditorToolbar/underline": {
"title": "$:/core/ui/EditorToolbar/underline",
"tags": "$:/tags/EditorToolbar",
"icon": "$:/core/images/underline",
"caption": "{{$:/language/Buttons/Underline/Caption}}",
"description": "{{$:/language/Buttons/Underline/Hint}}",
"condition": "[<targetTiddler>!has[type]] [<targetTiddler>type[text/vnd.tiddlywiki]]",
"shortcuts": "((underline))",
"text": "<$action-sendmessage\n\t$message=\"tm-edit-text-operation\"\n\t$param=\"wrap-selection\"\n\tprefix=\"__\"\n\tsuffix=\"__\"\n/>\n"
},
"$:/core/ui/EditTemplate/body/editor": {
"title": "$:/core/ui/EditTemplate/body/editor",
"text": "<$edit\n\n field=\"text\"\n class=\"tc-edit-texteditor\"\n placeholder={{$:/language/EditTemplate/Body/Placeholder}}\n\n><$set\n\n name=\"targetTiddler\"\n value=<<currentTiddler>>\n\n><$list\n\n filter=\"[all[shadows+tiddlers]tag[$:/tags/EditorToolbar]!has[draft.of]]\"\n\n><$reveal\n\n type=\"nomatch\"\n state=<<config-visibility-title>>\n text=\"hide\"\n class=\"tc-text-editor-toolbar-item-wrapper\"\n\n><$transclude\n\n tiddler=\"$:/core/ui/EditTemplate/body/toolbar/button\"\n mode=\"inline\"\n\n/></$reveal></$list></$set></$edit>\n"
},
"$:/core/ui/EditTemplate/body/toolbar/button": {
"title": "$:/core/ui/EditTemplate/body/toolbar/button",
"text": "\\define toolbar-button-icon()\n<$list\n\n filter=\"[all[current]!has[custom-icon]]\"\n variable=\"no-custom-icon\"\n\n><$transclude\n\n tiddler={{!!icon}}\n\n/></$list>\n\\end\n\n\\define toolbar-button-tooltip()\n{{!!description}}<$macrocall $name=\"displayshortcuts\" $output=\"text/plain\" shortcuts={{!!shortcuts}} prefix=\"` - [\" separator=\"] [\" suffix=\"]`\"/>\n\\end\n\n\\define toolbar-button()\n<$list\n\n filter={{!!condition}}\n variable=\"list-condition\"\n\n><$wikify\n\n name=\"tooltip-text\"\n text=<<toolbar-button-tooltip>>\n mode=\"inline\"\n output=\"text\"\n\n><$list\n\n filter=\"[all[current]!has[dropdown]]\"\n variable=\"no-dropdown\"\n\n><$button\n\n class=\"tc-btn-invisible $(buttonClasses)$\"\n tooltip=<<tooltip-text>>\n\n><span\n\n data-tw-keyboard-shortcut={{!!shortcuts}}\n\n/><<toolbar-button-icon>><$transclude\n\n tiddler=<<currentTiddler>>\n field=\"text\"\n\n/></$button></$list><$list\n\n filter=\"[all[current]has[dropdown]]\"\n variable=\"dropdown\"\n\n><$set\n\n name=\"dropdown-state\"\n value=<<qualify \"$:/state/EditorToolbarDropdown\">>\n\n><$button\n\n popup=<<dropdown-state>>\n class=\"tc-popup-keep tc-btn-invisible $(buttonClasses)$\"\n selectedClass=\"tc-selected\"\n tooltip=<<tooltip-text>>\n\n><span\n\n data-tw-keyboard-shortcut={{!!shortcuts}}\n\n/><<toolbar-button-icon>><$transclude\n\n tiddler=<<currentTiddler>>\n field=\"text\"\n\n/></$button><$reveal\n\n state=<<dropdown-state>>\n type=\"popup\"\n position=\"below\"\n animate=\"yes\"\n tag=\"span\"\n\n><div\n\n class=\"tc-drop-down tc-popup-keep\"\n\n><$transclude\n\n tiddler={{!!dropdown}}\n mode=\"block\"\n\n/></div></$reveal></$set></$list></$wikify></$list>\n\\end\n\n\\define toolbar-button-outer()\n<$set\n\n name=\"buttonClasses\"\n value={{!!button-classes}}\n\n><<toolbar-button>></$set>\n\\end\n\n<<toolbar-button-outer>>"
},
"$:/core/ui/EditTemplate/body": {
"title": "$:/core/ui/EditTemplate/body",
"tags": "$:/tags/EditTemplate",
"text": "\\define lingo-base() $:/language/EditTemplate/Body/\n\\define config-visibility-title()\n$:/config/EditorToolbarButtons/Visibility/$(currentTiddler)$\n\\end\n<$list filter=\"[is[current]has[_canonical_uri]]\">\n\n<div class=\"tc-message-box\">\n\n<<lingo External/Hint>>\n\n<a href={{!!_canonical_uri}}><$text text={{!!_canonical_uri}}/></a>\n\n<$edit-text field=\"_canonical_uri\" class=\"tc-edit-fields\"></$edit-text>\n\n</div>\n\n</$list>\n\n<$list filter=\"[is[current]!has[_canonical_uri]]\">\n\n<$reveal state=\"$:/state/showeditpreview\" type=\"match\" text=\"yes\">\n\n<div class=\"tc-tiddler-preview\">\n\n<$transclude tiddler=\"$:/core/ui/EditTemplate/body/editor\" mode=\"inline\"/>\n\n<div class=\"tc-tiddler-preview-preview\">\n\n<$transclude tiddler={{$:/state/editpreviewtype}} mode=\"inline\">\n\n<$transclude tiddler=\"$:/core/ui/EditTemplate/body/preview/output\" mode=\"inline\"/>\n\n</$transclude>\n\n</div>\n\n</div>\n\n</$reveal>\n\n<$reveal state=\"$:/state/showeditpreview\" type=\"nomatch\" text=\"yes\">\n\n<$transclude tiddler=\"$:/core/ui/EditTemplate/body/editor\" mode=\"inline\"/>\n\n</$reveal>\n\n</$list>\n"
},
"$:/core/ui/EditTemplate/controls": {
"title": "$:/core/ui/EditTemplate/controls",
"tags": "$:/tags/EditTemplate",
"text": "\\define config-title()\n$:/config/EditToolbarButtons/Visibility/$(listItem)$\n\\end\n<div class=\"tc-tiddler-title tc-tiddler-edit-title\">\n<$view field=\"title\"/>\n<span class=\"tc-tiddler-controls tc-titlebar\"><$list filter=\"[all[shadows+tiddlers]tag[$:/tags/EditToolbar]!has[draft.of]]\" variable=\"listItem\"><$reveal type=\"nomatch\" state=<<config-title>> text=\"hide\"><$transclude tiddler=<<listItem>>/></$reveal></$list></span>\n<div style=\"clear: both;\"></div>\n</div>\n"
},
"$:/core/ui/EditTemplate/fields": {
"title": "$:/core/ui/EditTemplate/fields",
"tags": "$:/tags/EditTemplate",
"text": "\\define lingo-base() $:/language/EditTemplate/\n\\define config-title()\n$:/config/EditTemplateFields/Visibility/$(currentField)$\n\\end\n\n\\define config-filter()\n[[hide]] -[title{$(config-title)$}]\n\\end\n\n\\define new-field-inner()\n<$reveal type=\"nomatch\" text=\"\" default=<<name>>>\n<$button>\n<$action-sendmessage $message=\"tm-add-field\" $name=<<name>> $value=<<value>>/>\n<$action-deletetiddler $tiddler=\"$:/temp/newfieldname\"/>\n<$action-deletetiddler $tiddler=\"$:/temp/newfieldvalue\"/>\n<<lingo Fields/Add/Button>>\n</$button>\n</$reveal>\n<$reveal type=\"match\" text=\"\" default=<<name>>>\n<$button>\n<<lingo Fields/Add/Button>>\n</$button>\n</$reveal>\n\\end\n\n\\define new-field()\n<$set name=\"name\" value={{$:/temp/newfieldname}}>\n<$set name=\"value\" value={{$:/temp/newfieldvalue}}>\n<<new-field-inner>>\n</$set>\n</$set>\n\\end\n\n<div class=\"tc-edit-fields\">\n<table class=\"tc-edit-fields\">\n<tbody>\n<$list filter=\"[all[current]fields[]] +[sort[title]]\" variable=\"currentField\">\n<$list filter=<<config-filter>> variable=\"temp\">\n<tr class=\"tc-edit-field\">\n<td class=\"tc-edit-field-name\">\n<$text text=<<currentField>>/>:</td>\n<td class=\"tc-edit-field-value\">\n<$edit-text tiddler=<<currentTiddler>> field=<<currentField>> placeholder={{$:/language/EditTemplate/Fields/Add/Value/Placeholder}}/>\n</td>\n<td class=\"tc-edit-field-remove\">\n<$button class=\"tc-btn-invisible\" tooltip={{$:/language/EditTemplate/Field/Remove/Hint}} aria-label={{$:/language/EditTemplate/Field/Remove/Caption}}>\n<$action-deletefield $field=<<currentField>>/>\n{{$:/core/images/delete-button}}\n</$button>\n</td>\n</tr>\n</$list>\n</$list>\n</tbody>\n</table>\n</div>\n\n<$fieldmangler>\n<div class=\"tc-edit-field-add\">\n<em class=\"tc-edit\">\n<<lingo Fields/Add/Prompt>>\n</em>\n<span class=\"tc-edit-field-add-name\">\n<$edit-text tiddler=\"$:/temp/newfieldname\" tag=\"input\" default=\"\" placeholder={{$:/language/EditTemplate/Fields/Add/Name/Placeholder}} focusPopup=<<qualify \"$:/state/popup/field-dropdown\">> class=\"tc-edit-texteditor tc-popup-handle\"/>\n</span>\n<$button popup=<<qualify \"$:/state/popup/field-dropdown\">> class=\"tc-btn-invisible tc-btn-dropdown\" tooltip={{$:/language/EditTemplate/Field/Dropdown/Hint}} aria-label={{$:/language/EditTemplate/Field/Dropdown/Caption}}>{{$:/core/images/down-arrow}}</$button>\n<$reveal state=<<qualify \"$:/state/popup/field-dropdown\">> type=\"nomatch\" text=\"\" default=\"\">\n<div class=\"tc-block-dropdown tc-edit-type-dropdown\">\n<$linkcatcher to=\"$:/temp/newfieldname\">\n<div class=\"tc-dropdown-item\">\n<<lingo Fields/Add/Dropdown/User>>\n</div>\n<$list filter=\"[!is[shadow]!is[system]fields[]sort[]] -created -creator -draft.of -draft.title -modified -modifier -tags -text -title -type\" variable=\"currentField\">\n<$link to=<<currentField>>>\n<<currentField>>\n</$link>\n</$list>\n<div class=\"tc-dropdown-item\">\n<<lingo Fields/Add/Dropdown/System>>\n</div>\n<$list filter=\"[fields[]sort[]] -[!is[shadow]!is[system]fields[]]\" variable=\"currentField\">\n<$link to=<<currentField>>>\n<<currentField>>\n</$link>\n</$list>\n</$linkcatcher>\n</div>\n</$reveal>\n<span class=\"tc-edit-field-add-value\">\n<$edit-text tiddler=\"$:/temp/newfieldvalue\" tag=\"input\" default=\"\" placeholder={{$:/language/EditTemplate/Fields/Add/Value/Placeholder}} class=\"tc-edit-texteditor\"/>\n</span>\n<span class=\"tc-edit-field-add-button\">\n<$macrocall $name=\"new-field\"/>\n</span>\n</div>\n</$fieldmangler>\n\n"
},
"$:/core/ui/EditTemplate/body/preview/output": {
"title": "$:/core/ui/EditTemplate/body/preview/output",
"tags": "$:/tags/EditPreview",
"caption": "{{$:/language/EditTemplate/Body/Preview/Type/Output}}",
"text": "<$set name=\"tv-tiddler-preview\" value=\"yes\">\n\n<$transclude />\n\n</$set>\n"
},
"$:/core/ui/EditTemplate/shadow": {
"title": "$:/core/ui/EditTemplate/shadow",
"tags": "$:/tags/EditTemplate",
"text": "\\define lingo-base() $:/language/EditTemplate/Shadow/\n\\define pluginLinkBody()\n<$link to=\"\"\"$(pluginTitle)$\"\"\">\n<$text text=\"\"\"$(pluginTitle)$\"\"\"/>\n</$link>\n\\end\n<$list filter=\"[all[current]get[draft.of]is[shadow]!is[tiddler]]\">\n\n<$list filter=\"[all[current]shadowsource[]]\" variable=\"pluginTitle\">\n\n<$set name=\"pluginLink\" value=<<pluginLinkBody>>>\n<div class=\"tc-message-box\">\n\n<<lingo Warning>>\n\n</div>\n</$set>\n</$list>\n\n</$list>\n\n<$list filter=\"[all[current]get[draft.of]is[shadow]is[tiddler]]\">\n\n<$list filter=\"[all[current]shadowsource[]]\" variable=\"pluginTitle\">\n\n<$set name=\"pluginLink\" value=<<pluginLinkBody>>>\n<div class=\"tc-message-box\">\n\n<<lingo OverriddenWarning>>\n\n</div>\n</$set>\n</$list>\n\n</$list>"
},
"$:/core/ui/EditTemplate/tags": {
"title": "$:/core/ui/EditTemplate/tags",
"tags": "$:/tags/EditTemplate",
"text": "\\define lingo-base() $:/language/EditTemplate/\n\\define tag-styles()\nbackground-color:$(backgroundColor)$;\nfill:$(foregroundColor)$;\ncolor:$(foregroundColor)$;\n\\end\n\\define tag-body-inner(colour,fallbackTarget,colourA,colourB)\n<$vars foregroundColor=<<contrastcolour target:\"\"\"$colour$\"\"\" fallbackTarget:\"\"\"$fallbackTarget$\"\"\" colourA:\"\"\"$colourA$\"\"\" colourB:\"\"\"$colourB$\"\"\">> backgroundColor=\"\"\"$colour$\"\"\">\n<span style=<<tag-styles>> class=\"tc-tag-label\">\n<$view field=\"title\" format=\"text\" />\n<$button message=\"tm-remove-tag\" param={{!!title}} class=\"tc-btn-invisible tc-remove-tag-button\">×</$button>\n</span>\n</$vars>\n\\end\n\\define tag-body(colour,palette)\n<$macrocall $name=\"tag-body-inner\" colour=\"\"\"$colour$\"\"\" fallbackTarget={{$palette$##tag-background}} colourA={{$palette$##foreground}} colourB={{$palette$##background}}/>\n\\end\n<div class=\"tc-edit-tags\">\n<$fieldmangler>\n<$list filter=\"[all[current]tags[]sort[title]]\" storyview=\"pop\">\n<$macrocall $name=\"tag-body\" colour={{!!color}} palette={{$:/palette}}/>\n</$list>\n\n<div class=\"tc-edit-add-tag\">\n<span class=\"tc-add-tag-name\">\n<$edit-text tiddler=\"$:/temp/NewTagName\" tag=\"input\" default=\"\" placeholder={{$:/language/EditTemplate/Tags/Add/Placeholder}} focusPopup=<<qualify \"$:/state/popup/tags-auto-complete\">> class=\"tc-edit-texteditor tc-popup-handle\"/>\n</span> <$button popup=<<qualify \"$:/state/popup/tags-auto-complete\">> class=\"tc-btn-invisible tc-btn-dropdown\" tooltip={{$:/language/EditTemplate/Tags/Dropdown/Hint}} aria-label={{$:/language/EditTemplate/Tags/Dropdown/Caption}}>{{$:/core/images/down-arrow}}</$button> <span class=\"tc-add-tag-button\">\n<$button message=\"tm-add-tag\" param={{$:/temp/NewTagName}} set=\"$:/temp/NewTagName\" setTo=\"\" class=\"\">\n<<lingo Tags/Add/Button>>\n</$button>\n</span>\n</div>\n\n<div class=\"tc-block-dropdown-wrapper\">\n<$reveal state=<<qualify \"$:/state/popup/tags-auto-complete\">> type=\"nomatch\" text=\"\" default=\"\">\n<div class=\"tc-block-dropdown\">\n<$linkcatcher set=\"$:/temp/NewTagName\" setTo=\"\" message=\"tm-add-tag\">\n<$list filter=\"[tags[]!is[system]search:title{$:/temp/NewTagName}sort[]]\">\n{{||$:/core/ui/Components/tag-link}}\n</$list>\n<hr>\n<$list filter=\"[tags[]is[system]search:title{$:/temp/NewTagName}sort[]]\">\n{{||$:/core/ui/Components/tag-link}}\n</$list>\n</$linkcatcher>\n</div>\n</$reveal>\n</div>\n</$fieldmangler>\n</div>"
},
"$:/core/ui/EditTemplate/title": {
"title": "$:/core/ui/EditTemplate/title",
"tags": "$:/tags/EditTemplate",
"text": "<$vars pattern=\"\"\"[\\|\\[\\]{}]\"\"\" bad-chars=\"\"\"`| [ ] { }`\"\"\">\n\n<$list filter=\"[is[current]regexp:draft.title<pattern>]\" variable=\"listItem\">\n\n<div class=\"tc-message-box\">\n\n{{$:/language/EditTemplate/Title/BadCharacterWarning}}\n\n</div>\n\n</$list>\n\n</$vars>\n\n<$edit-text field=\"draft.title\" class=\"tc-titlebar tc-edit-texteditor\" focus=\"true\"/>\n"
},
"$:/core/ui/EditTemplate/type": {
"title": "$:/core/ui/EditTemplate/type",
"tags": "$:/tags/EditTemplate",
"text": "\\define lingo-base() $:/language/EditTemplate/\n<div class=\"tc-type-selector\"><$fieldmangler>\n<em class=\"tc-edit\"><<lingo Type/Prompt>></em> <$edit-text field=\"type\" tag=\"input\" default=\"\" placeholder={{$:/language/EditTemplate/Type/Placeholder}} focusPopup=<<qualify \"$:/state/popup/type-dropdown\">> class=\"tc-edit-typeeditor tc-popup-handle\"/> <$button popup=<<qualify \"$:/state/popup/type-dropdown\">> class=\"tc-btn-invisible tc-btn-dropdown\" tooltip={{$:/language/EditTemplate/Type/Dropdown/Hint}} aria-label={{$:/language/EditTemplate/Type/Dropdown/Caption}}>{{$:/core/images/down-arrow}}</$button> <$button message=\"tm-remove-field\" param=\"type\" class=\"tc-btn-invisible tc-btn-icon\" tooltip={{$:/language/EditTemplate/Type/Delete/Hint}} aria-label={{$:/language/EditTemplate/Type/Delete/Caption}}>{{$:/core/images/delete-button}}</$button>\n</$fieldmangler></div>\n\n<div class=\"tc-block-dropdown-wrapper\">\n<$reveal state=<<qualify \"$:/state/popup/type-dropdown\">> type=\"nomatch\" text=\"\" default=\"\">\n<div class=\"tc-block-dropdown tc-edit-type-dropdown\">\n<$linkcatcher to=\"!!type\">\n<$list filter='[all[shadows+tiddlers]prefix[$:/language/Docs/Types/]each[group]sort[group]]'>\n<div class=\"tc-dropdown-item\">\n<$text text={{!!group}}/>\n</div>\n<$list filter=\"[all[shadows+tiddlers]prefix[$:/language/Docs/Types/]group{!!group}] +[sort[description]]\"><$link to={{!!name}}><$view field=\"description\"/> (<$view field=\"name\"/>)</$link>\n</$list>\n</$list>\n</$linkcatcher>\n</div>\n</$reveal>\n</div>"
},
"$:/core/ui/EditTemplate": {
"title": "$:/core/ui/EditTemplate",
"text": "\\define frame-classes()\ntc-tiddler-frame tc-tiddler-edit-frame $(missingTiddlerClass)$ $(shadowTiddlerClass)$ $(systemTiddlerClass)$\n\\end\n<div class=<<frame-classes>>>\n<$set name=\"storyTiddler\" value=<<currentTiddler>>>\n<$keyboard key=\"((cancel-edit-tiddler))\" message=\"tm-cancel-tiddler\">\n<$keyboard key=\"((save-tiddler))\" message=\"tm-save-tiddler\">\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/EditTemplate]!has[draft.of]]\" variable=\"listItem\">\n<$transclude tiddler=<<listItem>>/>\n</$list>\n</$keyboard>\n</$keyboard>\n</$set>\n</div>\n"
},
"$:/core/ui/Buttons/cancel": {
"title": "$:/core/ui/Buttons/cancel",
"tags": "$:/tags/EditToolbar",
"caption": "{{$:/core/images/cancel-button}} {{$:/language/Buttons/Cancel/Caption}}",
"description": "{{$:/language/Buttons/Cancel/Hint}}",
"text": "<$button message=\"tm-cancel-tiddler\" tooltip={{$:/language/Buttons/Cancel/Hint}} aria-label={{$:/language/Buttons/Cancel/Caption}} class=<<tv-config-toolbar-class>>>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/cancel-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/Cancel/Caption}}/></span>\n</$list>\n</$button>"
},
"$:/core/ui/Buttons/delete": {
"title": "$:/core/ui/Buttons/delete",
"tags": "$:/tags/EditToolbar $:/tags/ViewToolbar",
"caption": "{{$:/core/images/delete-button}} {{$:/language/Buttons/Delete/Caption}}",
"description": "{{$:/language/Buttons/Delete/Hint}}",
"text": "<$button message=\"tm-delete-tiddler\" tooltip={{$:/language/Buttons/Delete/Hint}} aria-label={{$:/language/Buttons/Delete/Caption}} class=<<tv-config-toolbar-class>>>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/delete-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/Delete/Caption}}/></span>\n</$list>\n</$button>"
},
"$:/core/ui/Buttons/save": {
"title": "$:/core/ui/Buttons/save",
"tags": "$:/tags/EditToolbar",
"caption": "{{$:/core/images/done-button}} {{$:/language/Buttons/Save/Caption}}",
"description": "{{$:/language/Buttons/Save/Hint}}",
"text": "<$fieldmangler><$button tooltip={{$:/language/Buttons/Save/Hint}} aria-label={{$:/language/Buttons/Save/Caption}} class=<<tv-config-toolbar-class>>>\n<$action-sendmessage $message=\"tm-add-tag\" $param={{$:/temp/NewTagName}}/>\n<$action-deletetiddler $tiddler=\"$:/temp/NewTagName\"/>\n<$action-sendmessage $message=\"tm-add-field\" $name={{$:/temp/newfieldname}} $value={{$:/temp/newfieldvalue}}/>\n<$action-deletetiddler $tiddler=\"$:/temp/newfieldname\"/>\n<$action-deletetiddler $tiddler=\"$:/temp/newfieldvalue\"/>\n<$action-sendmessage $message=\"tm-save-tiddler\"/>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/done-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/Save/Caption}}/></span>\n</$list>\n</$button>\n</$fieldmangler>\n"
},
"$:/core/Filters/AllTags": {
"title": "$:/core/Filters/AllTags",
"tags": "$:/tags/Filter",
"filter": "[tags[]!is[system]sort[title]]",
"description": "{{$:/language/Filters/AllTags}}",
"text": ""
},
"$:/core/Filters/AllTiddlers": {
"title": "$:/core/Filters/AllTiddlers",
"tags": "$:/tags/Filter",
"filter": "[!is[system]sort[title]]",
"description": "{{$:/language/Filters/AllTiddlers}}",
"text": ""
},
"$:/core/Filters/Drafts": {
"title": "$:/core/Filters/Drafts",
"tags": "$:/tags/Filter",
"filter": "[has[draft.of]sort[title]]",
"description": "{{$:/language/Filters/Drafts}}",
"text": ""
},
"$:/core/Filters/Missing": {
"title": "$:/core/Filters/Missing",
"tags": "$:/tags/Filter",
"filter": "[all[missing]sort[title]]",
"description": "{{$:/language/Filters/Missing}}",
"text": ""
},
"$:/core/Filters/Orphans": {
"title": "$:/core/Filters/Orphans",
"tags": "$:/tags/Filter",
"filter": "[all[orphans]sort[title]]",
"description": "{{$:/language/Filters/Orphans}}",
"text": ""
},
"$:/core/Filters/OverriddenShadowTiddlers": {
"title": "$:/core/Filters/OverriddenShadowTiddlers",
"tags": "$:/tags/Filter",
"filter": "[is[shadow]]",
"description": "{{$:/language/Filters/OverriddenShadowTiddlers}}",
"text": ""
},
"$:/core/Filters/RecentSystemTiddlers": {
"title": "$:/core/Filters/RecentSystemTiddlers",
"tags": "$:/tags/Filter",
"filter": "[has[modified]!sort[modified]limit[50]]",
"description": "{{$:/language/Filters/RecentSystemTiddlers}}",
"text": ""
},
"$:/core/Filters/RecentTiddlers": {
"title": "$:/core/Filters/RecentTiddlers",
"tags": "$:/tags/Filter",
"filter": "[!is[system]has[modified]!sort[modified]limit[50]]",
"description": "{{$:/language/Filters/RecentTiddlers}}",
"text": ""
},
"$:/core/Filters/ShadowTiddlers": {
"title": "$:/core/Filters/ShadowTiddlers",
"tags": "$:/tags/Filter",
"filter": "[all[shadows]sort[title]]",
"description": "{{$:/language/Filters/ShadowTiddlers}}",
"text": ""
},
"$:/core/Filters/SystemTags": {
"title": "$:/core/Filters/SystemTags",
"tags": "$:/tags/Filter",
"filter": "[all[shadows+tiddlers]tags[]is[system]sort[title]]",
"description": "{{$:/language/Filters/SystemTags}}",
"text": ""
},
"$:/core/Filters/SystemTiddlers": {
"title": "$:/core/Filters/SystemTiddlers",
"tags": "$:/tags/Filter",
"filter": "[is[system]sort[title]]",
"description": "{{$:/language/Filters/SystemTiddlers}}",
"text": ""
},
"$:/core/Filters/TypedTiddlers": {
"title": "$:/core/Filters/TypedTiddlers",
"tags": "$:/tags/Filter",
"filter": "[!is[system]has[type]each[type]sort[type]] -[type[text/vnd.tiddlywiki]]",
"description": "{{$:/language/Filters/TypedTiddlers}}",
"text": ""
},
"$:/core/ui/ImportListing": {
"title": "$:/core/ui/ImportListing",
"text": "\\define lingo-base() $:/language/Import/\n\\define messageField()\nmessage-$(payloadTiddler)$\n\\end\n\\define selectionField()\nselection-$(payloadTiddler)$\n\\end\n\\define previewPopupState()\n$(currentTiddler)$!!popup-$(payloadTiddler)$\n\\end\n<table>\n<tbody>\n<tr>\n<th>\n<<lingo Listing/Select/Caption>>\n</th>\n<th>\n<<lingo Listing/Title/Caption>>\n</th>\n<th>\n<<lingo Listing/Status/Caption>>\n</th>\n</tr>\n<$list filter=\"[all[current]plugintiddlers[]sort[title]]\" variable=\"payloadTiddler\">\n<tr>\n<td>\n<$checkbox field=<<selectionField>> checked=\"checked\" unchecked=\"unchecked\" default=\"checked\"/>\n</td>\n<td>\n<$reveal type=\"nomatch\" state=<<previewPopupState>> text=\"yes\">\n<$button class=\"tc-btn-invisible tc-btn-dropdown\" set=<<previewPopupState>> setTo=\"yes\">\n{{$:/core/images/right-arrow}} <$text text=<<payloadTiddler>>/>\n</$button>\n</$reveal>\n<$reveal type=\"match\" state=<<previewPopupState>> text=\"yes\">\n<$button class=\"tc-btn-invisible tc-btn-dropdown\" set=<<previewPopupState>> setTo=\"no\">\n{{$:/core/images/down-arrow}} <$text text=<<payloadTiddler>>/>\n</$button>\n</$reveal>\n</td>\n<td>\n<$view field=<<messageField>>/>\n</td>\n</tr>\n<tr>\n<td colspan=\"3\">\n<$reveal type=\"match\" text=\"yes\" state=<<previewPopupState>>>\n<$transclude subtiddler=<<payloadTiddler>> mode=\"block\"/>\n</$reveal>\n</td>\n</tr>\n</$list>\n</tbody>\n</table>\n"
},
"$:/core/ui/ListItemTemplate": {
"title": "$:/core/ui/ListItemTemplate",
"text": "<div class=\"tc-menu-list-item\">\n<$link to={{!!title}}>\n<$view field=\"title\"/>\n</$link>\n</div>"
},
"$:/core/ui/MissingTemplate": {
"title": "$:/core/ui/MissingTemplate",
"text": "<div class=\"tc-tiddler-missing\">\n<$button popup=<<qualify \"$:/state/popup/missing\">> class=\"tc-btn-invisible tc-missing-tiddler-label\">\n<$view field=\"title\" format=\"text\" />\n</$button>\n<$reveal state=<<qualify \"$:/state/popup/missing\">> type=\"popup\" position=\"below\" animate=\"yes\">\n<div class=\"tc-drop-down\">\n<$transclude tiddler=\"$:/core/ui/ListItemTemplate\"/>\n<hr>\n<$list filter=\"[all[current]backlinks[]sort[title]]\" template=\"$:/core/ui/ListItemTemplate\"/>\n</div>\n</$reveal>\n</div>\n"
},
"$:/core/ui/MoreSideBar/All": {
"title": "$:/core/ui/MoreSideBar/All",
"tags": "$:/tags/MoreSideBar",
"caption": "{{$:/language/SideBar/All/Caption}}",
"text": "<$list filter={{$:/core/Filters/AllTiddlers!!filter}} template=\"$:/core/ui/ListItemTemplate\"/>\n"
},
"$:/core/ui/MoreSideBar/Drafts": {
"title": "$:/core/ui/MoreSideBar/Drafts",
"tags": "$:/tags/MoreSideBar",
"caption": "{{$:/language/SideBar/Drafts/Caption}}",
"text": "<$list filter={{$:/core/Filters/Drafts!!filter}} template=\"$:/core/ui/ListItemTemplate\"/>\n"
},
"$:/core/ui/MoreSideBar/Missing": {
"title": "$:/core/ui/MoreSideBar/Missing",
"tags": "$:/tags/MoreSideBar",
"caption": "{{$:/language/SideBar/Missing/Caption}}",
"text": "<$list filter={{$:/core/Filters/Missing!!filter}} template=\"$:/core/ui/MissingTemplate\"/>\n"
},
"$:/core/ui/MoreSideBar/Orphans": {
"title": "$:/core/ui/MoreSideBar/Orphans",
"tags": "$:/tags/MoreSideBar",
"caption": "{{$:/language/SideBar/Orphans/Caption}}",
"text": "<$list filter={{$:/core/Filters/Orphans!!filter}} template=\"$:/core/ui/ListItemTemplate\"/>\n"
},
"$:/core/ui/MoreSideBar/Recent": {
"title": "$:/core/ui/MoreSideBar/Recent",
"tags": "$:/tags/MoreSideBar",
"caption": "{{$:/language/SideBar/Recent/Caption}}",
"text": "<$macrocall $name=\"timeline\" format={{$:/language/RecentChanges/DateFormat}}/>\n"
},
"$:/core/ui/MoreSideBar/Shadows": {
"title": "$:/core/ui/MoreSideBar/Shadows",
"tags": "$:/tags/MoreSideBar",
"caption": "{{$:/language/SideBar/Shadows/Caption}}",
"text": "<$list filter={{$:/core/Filters/ShadowTiddlers!!filter}} template=\"$:/core/ui/ListItemTemplate\"/>\n"
},
"$:/core/ui/MoreSideBar/System": {
"title": "$:/core/ui/MoreSideBar/System",
"tags": "$:/tags/MoreSideBar",
"caption": "{{$:/language/SideBar/System/Caption}}",
"text": "<$list filter={{$:/core/Filters/SystemTiddlers!!filter}} template=\"$:/core/ui/ListItemTemplate\"/>\n"
},
"$:/core/ui/MoreSideBar/Tags": {
"title": "$:/core/ui/MoreSideBar/Tags",
"tags": "$:/tags/MoreSideBar",
"caption": "{{$:/language/SideBar/Tags/Caption}}",
"text": "<$set name=\"tv-config-toolbar-icons\" value=\"yes\">\n\n<$set name=\"tv-config-toolbar-text\" value=\"yes\">\n\n<$set name=\"tv-config-toolbar-class\" value=\"\">\n\n{{$:/core/ui/Buttons/tag-manager}}\n\n</$set>\n\n</$set>\n\n</$set>\n\n<$list filter={{$:/core/Filters/AllTags!!filter}}>\n\n<$transclude tiddler=\"$:/core/ui/TagTemplate\"/>\n\n</$list>\n\n<hr class=\"tc-untagged-separator\">\n\n{{$:/core/ui/UntaggedTemplate}}\n"
},
"$:/core/ui/MoreSideBar/Types": {
"title": "$:/core/ui/MoreSideBar/Types",
"tags": "$:/tags/MoreSideBar",
"caption": "{{$:/language/SideBar/Types/Caption}}",
"text": "<$list filter={{$:/core/Filters/TypedTiddlers!!filter}}>\n<div class=\"tc-menu-list-item\">\n<$view field=\"type\"/>\n<$list filter=\"[type{!!type}!is[system]sort[title]]\">\n<div class=\"tc-menu-list-subitem\">\n<$link to={{!!title}}><$view field=\"title\"/></$link>\n</div>\n</$list>\n</div>\n</$list>\n"
},
"$:/core/ui/Buttons/advanced-search": {
"title": "$:/core/ui/Buttons/advanced-search",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/advanced-search-button}} {{$:/language/Buttons/AdvancedSearch/Caption}}",
"description": "{{$:/language/Buttons/AdvancedSearch/Hint}}",
"text": "\\define control-panel-button(class)\n<$button to=\"$:/AdvancedSearch\" tooltip={{$:/language/Buttons/AdvancedSearch/Hint}} aria-label={{$:/language/Buttons/AdvancedSearch/Caption}} class=\"\"\"$(tv-config-toolbar-class)$ $class$\"\"\">\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/advanced-search-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/AdvancedSearch/Caption}}/></span>\n</$list>\n</$button>\n\\end\n\n<$list filter=\"[list[$:/StoryList]] +[field:title[$:/AdvancedSearch]]\" emptyMessage=<<control-panel-button>>>\n<<control-panel-button \"tc-selected\">>\n</$list>\n"
},
"$:/core/ui/Buttons/close-all": {
"title": "$:/core/ui/Buttons/close-all",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/close-all-button}} {{$:/language/Buttons/CloseAll/Caption}}",
"description": "{{$:/language/Buttons/CloseAll/Hint}}",
"text": "<$button message=\"tm-close-all-tiddlers\" tooltip={{$:/language/Buttons/CloseAll/Hint}} aria-label={{$:/language/Buttons/CloseAll/Caption}} class=<<tv-config-toolbar-class>>>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/close-all-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/CloseAll/Caption}}/></span>\n</$list>\n</$button>"
},
"$:/core/ui/Buttons/control-panel": {
"title": "$:/core/ui/Buttons/control-panel",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/options-button}} {{$:/language/Buttons/ControlPanel/Caption}}",
"description": "{{$:/language/Buttons/ControlPanel/Hint}}",
"text": "\\define control-panel-button(class)\n<$button to=\"$:/ControlPanel\" tooltip={{$:/language/Buttons/ControlPanel/Hint}} aria-label={{$:/language/Buttons/ControlPanel/Caption}} class=\"\"\"$(tv-config-toolbar-class)$ $class$\"\"\">\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/options-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/ControlPanel/Caption}}/></span>\n</$list>\n</$button>\n\\end\n\n<$list filter=\"[list[$:/StoryList]] +[field:title[$:/ControlPanel]]\" emptyMessage=<<control-panel-button>>>\n<<control-panel-button \"tc-selected\">>\n</$list>\n"
},
"$:/core/ui/Buttons/encryption": {
"title": "$:/core/ui/Buttons/encryption",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/locked-padlock}} {{$:/language/Buttons/Encryption/Caption}}",
"description": "{{$:/language/Buttons/Encryption/Hint}}",
"text": "<$reveal type=\"match\" state=\"$:/isEncrypted\" text=\"yes\">\n<$button message=\"tm-clear-password\" tooltip={{$:/language/Buttons/Encryption/ClearPassword/Hint}} aria-label={{$:/language/Buttons/Encryption/ClearPassword/Caption}} class=<<tv-config-toolbar-class>>>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/locked-padlock}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/Encryption/ClearPassword/Caption}}/></span>\n</$list>\n</$button>\n</$reveal>\n<$reveal type=\"nomatch\" state=\"$:/isEncrypted\" text=\"yes\">\n<$button message=\"tm-set-password\" tooltip={{$:/language/Buttons/Encryption/SetPassword/Hint}} aria-label={{$:/language/Buttons/Encryption/SetPassword/Caption}} class=<<tv-config-toolbar-class>>>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/unlocked-padlock}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/Encryption/SetPassword/Caption}}/></span>\n</$list>\n</$button>\n</$reveal>"
},
"$:/core/ui/Buttons/export-page": {
"title": "$:/core/ui/Buttons/export-page",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/export-button}} {{$:/language/Buttons/ExportPage/Caption}}",
"description": "{{$:/language/Buttons/ExportPage/Hint}}",
"text": "<$macrocall $name=\"exportButton\" exportFilter=\"[!is[system]sort[title]]\" lingoBase=\"$:/language/Buttons/ExportPage/\"/>"
},
"$:/core/ui/Buttons/fold-all": {
"title": "$:/core/ui/Buttons/fold-all",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/fold-all-button}} {{$:/language/Buttons/FoldAll/Caption}}",
"description": "{{$:/language/Buttons/FoldAll/Hint}}",
"text": "<$button tooltip={{$:/language/Buttons/FoldAll/Hint}} aria-label={{$:/language/Buttons/FoldAll/Caption}} class=<<tv-config-toolbar-class>>>\n<$action-sendmessage $message=\"tm-fold-all-tiddlers\" $param=<<currentTiddler>> foldedStatePrefix=\"$:/state/folded/\"/>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\" variable=\"listItem\">\n{{$:/core/images/fold-all-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/FoldAll/Caption}}/></span>\n</$list>\n</$button>"
},
"$:/core/ui/Buttons/full-screen": {
"title": "$:/core/ui/Buttons/full-screen",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/full-screen-button}} {{$:/language/Buttons/FullScreen/Caption}}",
"description": "{{$:/language/Buttons/FullScreen/Hint}}",
"text": "<$button message=\"tm-full-screen\" tooltip={{$:/language/Buttons/FullScreen/Hint}} aria-label={{$:/language/Buttons/FullScreen/Caption}} class=<<tv-config-toolbar-class>>>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/full-screen-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/FullScreen/Caption}}/></span>\n</$list>\n</$button>"
},
"$:/core/ui/Buttons/home": {
"title": "$:/core/ui/Buttons/home",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/home-button}} {{$:/language/Buttons/Home/Caption}}",
"description": "{{$:/language/Buttons/Home/Hint}}",
"text": "<$button message=\"tm-home\" tooltip={{$:/language/Buttons/Home/Hint}} aria-label={{$:/language/Buttons/Home/Caption}} class=<<tv-config-toolbar-class>>>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/home-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/Home/Caption}}/></span>\n</$list>\n</$button>"
},
"$:/core/ui/Buttons/import": {
"title": "$:/core/ui/Buttons/import",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/import-button}} {{$:/language/Buttons/Import/Caption}}",
"description": "{{$:/language/Buttons/Import/Hint}}",
"text": "<div class=\"tc-file-input-wrapper\">\n<$button tooltip={{$:/language/Buttons/Import/Hint}} aria-label={{$:/language/Buttons/Import/Caption}} class=<<tv-config-toolbar-class>>>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/import-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/Import/Caption}}/></span>\n</$list>\n</$button>\n<$browse tooltip={{$:/language/Buttons/Import/Hint}}/>\n</div>"
},
"$:/core/ui/Buttons/language": {
"title": "$:/core/ui/Buttons/language",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/globe}} {{$:/language/Buttons/Language/Caption}}",
"description": "{{$:/language/Buttons/Language/Hint}}",
"text": "\\define flag-title()\n$(languagePluginTitle)$/icon\n\\end\n<span class=\"tc-popup-keep\">\n<$button popup=<<qualify \"$:/state/popup/language\">> tooltip={{$:/language/Buttons/Language/Hint}} aria-label={{$:/language/Buttons/Language/Caption}} class=<<tv-config-toolbar-class>> selectedClass=\"tc-selected\">\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n<span class=\"tc-image-button\">\n<$set name=\"languagePluginTitle\" value={{$:/language}}>\n<$image source=<<flag-title>>/>\n</$set>\n</span>\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/Language/Caption}}/></span>\n</$list>\n</$button>\n</span>\n<$reveal state=<<qualify \"$:/state/popup/language\">> type=\"popup\" position=\"below\" animate=\"yes\">\n<div class=\"tc-drop-down tc-drop-down-language-chooser\">\n<$linkcatcher to=\"$:/language\">\n<$list filter=\"[[$:/languages/en-GB]] [plugin-type[language]sort[description]]\">\n<$link>\n<span class=\"tc-drop-down-bullet\">\n<$reveal type=\"match\" state=\"$:/language\" text=<<currentTiddler>>>\n•\n</$reveal>\n<$reveal type=\"nomatch\" state=\"$:/language\" text=<<currentTiddler>>>\n \n</$reveal>\n</span>\n<span class=\"tc-image-button\">\n<$set name=\"languagePluginTitle\" value=<<currentTiddler>>>\n<$transclude subtiddler=<<flag-title>>>\n<$list filter=\"[all[current]field:title[$:/languages/en-GB]]\">\n<$transclude tiddler=\"$:/languages/en-GB/icon\"/>\n</$list>\n</$transclude>\n</$set>\n</span>\n<$view field=\"description\">\n<$view field=\"name\">\n<$view field=\"title\"/>\n</$view>\n</$view>\n</$link>\n</$list>\n</$linkcatcher>\n</div>\n</$reveal>"
},
"$:/core/ui/Buttons/more-page-actions": {
"title": "$:/core/ui/Buttons/more-page-actions",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/down-arrow}} {{$:/language/Buttons/More/Caption}}",
"description": "{{$:/language/Buttons/More/Hint}}",
"text": "\\define config-title()\n$:/config/PageControlButtons/Visibility/$(listItem)$\n\\end\n<$button popup=<<qualify \"$:/state/popup/more\">> tooltip={{$:/language/Buttons/More/Hint}} aria-label={{$:/language/Buttons/More/Caption}} class=<<tv-config-toolbar-class>> selectedClass=\"tc-selected\">\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/down-arrow}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/More/Caption}}/></span>\n</$list>\n</$button><$reveal state=<<qualify \"$:/state/popup/more\">> type=\"popup\" position=\"below\" animate=\"yes\">\n\n<div class=\"tc-drop-down\">\n\n<$set name=\"tv-config-toolbar-icons\" value=\"yes\">\n\n<$set name=\"tv-config-toolbar-text\" value=\"yes\">\n\n<$set name=\"tv-config-toolbar-class\" value=\"tc-btn-invisible\">\n\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/PageControls]!has[draft.of]] -[[$:/core/ui/Buttons/more-page-actions]]\" variable=\"listItem\">\n\n<$reveal type=\"match\" state=<<config-title>> text=\"hide\">\n\n<$transclude tiddler=<<listItem>> mode=\"inline\"/>\n\n</$reveal>\n\n</$list>\n\n</$set>\n\n</$set>\n\n</$set>\n\n</div>\n\n</$reveal>"
},
"$:/core/ui/Buttons/new-image": {
"title": "$:/core/ui/Buttons/new-image",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/new-image-button}} {{$:/language/Buttons/NewImage/Caption}}",
"description": "{{$:/language/Buttons/NewImage/Hint}}",
"text": "<$button tooltip={{$:/language/Buttons/NewImage/Hint}} aria-label={{$:/language/Buttons/NewImage/Caption}} class=<<tv-config-toolbar-class>>>\n<$action-sendmessage $message=\"tm-new-tiddler\" type=\"image/jpeg\"/>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/new-image-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/NewImage/Caption}}/></span>\n</$list>\n</$button>\n"
},
"$:/core/ui/Buttons/new-journal": {
"title": "$:/core/ui/Buttons/new-journal",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/new-journal-button}} {{$:/language/Buttons/NewJournal/Caption}}",
"description": "{{$:/language/Buttons/NewJournal/Hint}}",
"text": "\\define journalButton()\n<$button tooltip={{$:/language/Buttons/NewJournal/Hint}} aria-label={{$:/language/Buttons/NewJournal/Caption}} class=<<tv-config-toolbar-class>>>\n<$action-sendmessage $message=\"tm-new-tiddler\" title=<<now \"$(journalTitleTemplate)$\">> tags=\"$(journalTags)$\"/>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/new-journal-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/NewJournal/Caption}}/></span>\n</$list>\n</$button>\n\\end\n<$set name=\"journalTitleTemplate\" value={{$:/config/NewJournal/Title}}>\n<$set name=\"journalTags\" value={{$:/config/NewJournal/Tags}}>\n<<journalButton>>\n</$set></$set>"
},
"$:/core/ui/Buttons/new-tiddler": {
"title": "$:/core/ui/Buttons/new-tiddler",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/new-button}} {{$:/language/Buttons/NewTiddler/Caption}}",
"description": "{{$:/language/Buttons/NewTiddler/Hint}}",
"text": "<$button message=\"tm-new-tiddler\" tooltip={{$:/language/Buttons/NewTiddler/Hint}} aria-label={{$:/language/Buttons/NewTiddler/Caption}} class=<<tv-config-toolbar-class>>>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/new-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/NewTiddler/Caption}}/></span>\n</$list>\n</$button>"
},
"$:/core/ui/Buttons/palette": {
"title": "$:/core/ui/Buttons/palette",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/palette}} {{$:/language/Buttons/Palette/Caption}}",
"description": "{{$:/language/Buttons/Palette/Hint}}",
"text": "<span class=\"tc-popup-keep\">\n<$button popup=<<qualify \"$:/state/popup/palette\">> tooltip={{$:/language/Buttons/Palette/Hint}} aria-label={{$:/language/Buttons/Palette/Caption}} class=<<tv-config-toolbar-class>> selectedClass=\"tc-selected\">\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/palette}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/Palette/Caption}}/></span>\n</$list>\n</$button>\n</span>\n<$reveal state=<<qualify \"$:/state/popup/palette\">> type=\"popup\" position=\"below\" animate=\"yes\">\n<div class=\"tc-drop-down\" style=\"font-size:0.7em;\">\n{{$:/snippets/paletteswitcher}}\n</div>\n</$reveal>"
},
"$:/core/ui/Buttons/refresh": {
"title": "$:/core/ui/Buttons/refresh",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/refresh-button}} {{$:/language/Buttons/Refresh/Caption}}",
"description": "{{$:/language/Buttons/Refresh/Hint}}",
"text": "<$button message=\"tm-browser-refresh\" tooltip={{$:/language/Buttons/Refresh/Hint}} aria-label={{$:/language/Buttons/Refresh/Caption}} class=<<tv-config-toolbar-class>>>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/refresh-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/Refresh/Caption}}/></span>\n</$list>\n</$button>"
},
"$:/core/ui/Buttons/save-wiki": {
"title": "$:/core/ui/Buttons/save-wiki",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/save-button}} {{$:/language/Buttons/SaveWiki/Caption}}",
"description": "{{$:/language/Buttons/SaveWiki/Hint}}",
"text": "<$button message=\"tm-save-wiki\" param={{$:/config/SaveWikiButton/Template}} tooltip={{$:/language/Buttons/SaveWiki/Hint}} aria-label={{$:/language/Buttons/SaveWiki/Caption}} class=<<tv-config-toolbar-class>>>\n<span class=\"tc-dirty-indicator\">\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/save-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/SaveWiki/Caption}}/></span>\n</$list>\n</span>\n</$button>"
},
"$:/core/ui/Buttons/storyview": {
"title": "$:/core/ui/Buttons/storyview",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/storyview-classic}} {{$:/language/Buttons/StoryView/Caption}}",
"description": "{{$:/language/Buttons/StoryView/Hint}}",
"text": "\\define icon()\n$:/core/images/storyview-$(storyview)$\n\\end\n<span class=\"tc-popup-keep\">\n<$button popup=<<qualify \"$:/state/popup/storyview\">> tooltip={{$:/language/Buttons/StoryView/Hint}} aria-label={{$:/language/Buttons/StoryView/Caption}} class=<<tv-config-toolbar-class>> selectedClass=\"tc-selected\">\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n<$set name=\"storyview\" value={{$:/view}}>\n<$transclude tiddler=<<icon>>/>\n</$set>\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/StoryView/Caption}}/></span>\n</$list>\n</$button>\n</span>\n<$reveal state=<<qualify \"$:/state/popup/storyview\">> type=\"popup\" position=\"below\" animate=\"yes\">\n<div class=\"tc-drop-down\">\n<$linkcatcher to=\"$:/view\">\n<$list filter=\"[storyviews[]]\" variable=\"storyview\">\n<$link to=<<storyview>>>\n<span class=\"tc-drop-down-bullet\">\n<$reveal type=\"match\" state=\"$:/view\" text=<<storyview>>>\n•\n</$reveal>\n<$reveal type=\"nomatch\" state=\"$:/view\" text=<<storyview>>>\n \n</$reveal>\n</span>\n<$transclude tiddler=<<icon>>/>\n<$text text=<<storyview>>/></$link>\n</$list>\n</$linkcatcher>\n</div>\n</$reveal>"
},
"$:/core/ui/Buttons/tag-manager": {
"title": "$:/core/ui/Buttons/tag-manager",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/tag-button}} {{$:/language/Buttons/TagManager/Caption}}",
"description": "{{$:/language/Buttons/TagManager/Hint}}",
"text": "\\define control-panel-button(class)\n<$button to=\"$:/TagManager\" tooltip={{$:/language/Buttons/TagManager/Hint}} aria-label={{$:/language/Buttons/TagManager/Caption}} class=\"\"\"$(tv-config-toolbar-class)$ $class$\"\"\">\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/tag-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/TagManager/Caption}}/></span>\n</$list>\n</$button>\n\\end\n\n<$list filter=\"[list[$:/StoryList]] +[field:title[$:/TagManager]]\" emptyMessage=<<control-panel-button>>>\n<<control-panel-button \"tc-selected\">>\n</$list>\n"
},
"$:/core/ui/Buttons/theme": {
"title": "$:/core/ui/Buttons/theme",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/theme-button}} {{$:/language/Buttons/Theme/Caption}}",
"description": "{{$:/language/Buttons/Theme/Hint}}",
"text": "<span class=\"tc-popup-keep\">\n<$button popup=<<qualify \"$:/state/popup/theme\">> tooltip={{$:/language/Buttons/Theme/Hint}} aria-label={{$:/language/Buttons/Theme/Caption}} class=<<tv-config-toolbar-class>> selectedClass=\"tc-selected\">\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/theme-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/Theme/Caption}}/></span>\n</$list>\n</$button>\n</span>\n<$reveal state=<<qualify \"$:/state/popup/theme\">> type=\"popup\" position=\"below\" animate=\"yes\">\n<div class=\"tc-drop-down\">\n<$linkcatcher to=\"$:/theme\">\n<$list filter=\"[plugin-type[theme]sort[title]]\" variable=\"themeTitle\">\n<$link to=<<themeTitle>>>\n<span class=\"tc-drop-down-bullet\">\n<$reveal type=\"match\" state=\"$:/theme\" text=<<themeTitle>>>\n•\n</$reveal>\n<$reveal type=\"nomatch\" state=\"$:/theme\" text=<<themeTitle>>>\n \n</$reveal>\n</span>\n<$view tiddler=<<themeTitle>> field=\"name\"/>\n</$link>\n</$list>\n</$linkcatcher>\n</div>\n</$reveal>"
},
"$:/core/ui/Buttons/unfold-all": {
"title": "$:/core/ui/Buttons/unfold-all",
"tags": "$:/tags/PageControls",
"caption": "{{$:/core/images/unfold-all-button}} {{$:/language/Buttons/UnfoldAll/Caption}}",
"description": "{{$:/language/Buttons/UnfoldAll/Hint}}",
"text": "<$button tooltip={{$:/language/Buttons/UnfoldAll/Hint}} aria-label={{$:/language/Buttons/UnfoldAll/Caption}} class=<<tv-config-toolbar-class>>>\n<$action-sendmessage $message=\"tm-unfold-all-tiddlers\" $param=<<currentTiddler>> foldedStatePrefix=\"$:/state/folded/\"/>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\" variable=\"listItem\">\n{{$:/core/images/unfold-all-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/UnfoldAll/Caption}}/></span>\n</$list>\n</$button>"
},
"$:/core/ui/PageTemplate/pagecontrols": {
"title": "$:/core/ui/PageTemplate/pagecontrols",
"text": "\\define config-title()\n$:/config/PageControlButtons/Visibility/$(listItem)$\n\\end\n<div class=\"tc-page-controls\">\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/PageControls]!has[draft.of]]\" variable=\"listItem\">\n<$reveal type=\"nomatch\" state=<<config-title>> text=\"hide\">\n<$transclude tiddler=<<listItem>> mode=\"inline\"/>\n</$reveal>\n</$list>\n</div>\n\n"
},
"$:/core/ui/PageStylesheet": {
"title": "$:/core/ui/PageStylesheet",
"text": "<$importvariables filter=\"[[$:/core/ui/PageMacros]] [all[shadows+tiddlers]tag[$:/tags/Macro]!has[draft.of]]\">\n\n<$set name=\"currentTiddler\" value={{$:/language}}>\n\n<$set name=\"languageTitle\" value={{!!name}}>\n\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/Stylesheet]!has[draft.of]]\">\n<$transclude mode=\"block\"/>\n</$list>\n\n</$set>\n\n</$set>\n\n</$importvariables>\n"
},
"$:/core/ui/PageTemplate/alerts": {
"title": "$:/core/ui/PageTemplate/alerts",
"tags": "$:/tags/PageTemplate",
"text": "<div class=\"tc-alerts\">\n\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/Alert]!has[draft.of]]\" template=\"$:/core/ui/AlertTemplate\" storyview=\"pop\"/>\n\n</div>\n"
},
"$:/core/ui/PageTemplate/pluginreloadwarning": {
"title": "$:/core/ui/PageTemplate/pluginreloadwarning",
"tags": "$:/tags/PageTemplate",
"text": "\\define lingo-base() $:/language/\n\n<$list filter=\"[has[plugin-type]haschanged[]!plugin-type[import]limit[1]]\">\n\n<$reveal type=\"nomatch\" state=\"$:/temp/HidePluginWarning\" text=\"yes\">\n\n<div class=\"tc-plugin-reload-warning\">\n\n<$set name=\"tv-config-toolbar-class\" value=\"\">\n\n<<lingo PluginReloadWarning>> <$button set=\"$:/temp/HidePluginWarning\" setTo=\"yes\" class=\"tc-btn-invisible\">{{$:/core/images/close-button}}</$button>\n\n</$set>\n\n</div>\n\n</$reveal>\n\n</$list>\n"
},
"$:/core/ui/PageTemplate/sidebar": {
"title": "$:/core/ui/PageTemplate/sidebar",
"tags": "$:/tags/PageTemplate",
"text": "<$scrollable fallthrough=\"no\" class=\"tc-sidebar-scrollable\">\n\n<div class=\"tc-sidebar-header\">\n\n<$reveal state=\"$:/state/sidebar\" type=\"match\" text=\"yes\" default=\"yes\" retain=\"yes\" animate=\"yes\">\n\n<h1 class=\"tc-site-title\">\n\n<$transclude tiddler=\"$:/SiteTitle\" mode=\"inline\"/>\n\n</h1>\n\n<div class=\"tc-site-subtitle\">\n\n<$transclude tiddler=\"$:/SiteSubtitle\" mode=\"inline\"/>\n\n</div>\n\n{{||$:/core/ui/PageTemplate/pagecontrols}}\n\n<$transclude tiddler=\"$:/core/ui/SideBarLists\" mode=\"inline\"/>\n\n</$reveal>\n\n</div>\n\n</$scrollable>"
},
"$:/core/ui/PageTemplate/story": {
"title": "$:/core/ui/PageTemplate/story",
"tags": "$:/tags/PageTemplate",
"text": "<section class=\"tc-story-river\">\n\n<section class=\"story-backdrop\">\n\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/AboveStory]!has[draft.of]]\">\n\n<$transclude/>\n\n</$list>\n\n</section>\n\n<$list filter=\"[list[$:/StoryList]]\" history=\"$:/HistoryList\" template=\"$:/core/ui/ViewTemplate\" editTemplate=\"$:/core/ui/EditTemplate\" storyview={{$:/view}} emptyMessage={{$:/config/EmptyStoryMessage}}/>\n\n<section class=\"story-frontdrop\">\n\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/BelowStory]!has[draft.of]]\">\n\n<$transclude/>\n\n</$list>\n\n</section>\n\n</section>\n"
},
"$:/core/ui/PageTemplate/topleftbar": {
"title": "$:/core/ui/PageTemplate/topleftbar",
"tags": "$:/tags/PageTemplate",
"text": "<span class=\"tc-topbar tc-topbar-left\">\n\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/TopLeftBar]!has[draft.of]]\" variable=\"listItem\">\n\n<$transclude tiddler=<<listItem>> mode=\"inline\"/>\n\n</$list>\n\n</span>\n"
},
"$:/core/ui/PageTemplate/toprightbar": {
"title": "$:/core/ui/PageTemplate/toprightbar",
"tags": "$:/tags/PageTemplate",
"text": "<span class=\"tc-topbar tc-topbar-right\">\n\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/TopRightBar]!has[draft.of]]\" variable=\"listItem\">\n\n<$transclude tiddler=<<listItem>> mode=\"inline\"/>\n\n</$list>\n\n</span>\n"
},
"$:/core/ui/PageTemplate": {
"title": "$:/core/ui/PageTemplate",
"text": "\\define containerClasses()\ntc-page-container tc-page-view-$(themeTitle)$ tc-language-$(languageTitle)$\n\\end\n\n<$importvariables filter=\"[[$:/core/ui/PageMacros]] [all[shadows+tiddlers]tag[$:/tags/Macro]!has[draft.of]]\">\n\n<$set name=\"tv-config-toolbar-icons\" value={{$:/config/Toolbar/Icons}}>\n\n<$set name=\"tv-config-toolbar-text\" value={{$:/config/Toolbar/Text}}>\n\n<$set name=\"tv-config-toolbar-class\" value={{$:/config/Toolbar/ButtonClass}}>\n\n<$set name=\"themeTitle\" value={{$:/view}}>\n\n<$set name=\"currentTiddler\" value={{$:/language}}>\n\n<$set name=\"languageTitle\" value={{!!name}}>\n\n<$set name=\"currentTiddler\" value=\"\">\n\n<div class=<<containerClasses>>>\n\n<$navigator story=\"$:/StoryList\" history=\"$:/HistoryList\" openLinkFromInsideRiver={{$:/config/Navigation/openLinkFromInsideRiver}} openLinkFromOutsideRiver={{$:/config/Navigation/openLinkFromOutsideRiver}}>\n\n<$dropzone>\n\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/PageTemplate]!has[draft.of]]\" variable=\"listItem\">\n\n<$transclude tiddler=<<listItem>>/>\n\n</$list>\n\n</$dropzone>\n\n</$navigator>\n\n</div>\n\n</$set>\n\n</$set>\n\n</$set>\n\n</$set>\n\n</$set>\n\n</$set>\n\n</$set>\n\n</$importvariables>\n"
},
"$:/core/ui/PluginInfo": {
"title": "$:/core/ui/PluginInfo",
"text": "\\define localised-info-tiddler-title()\n$(currentTiddler)$/$(languageTitle)$/$(currentTab)$\n\\end\n\\define info-tiddler-title()\n$(currentTiddler)$/$(currentTab)$\n\\end\n<$transclude tiddler=<<localised-info-tiddler-title>> mode=\"block\">\n<$transclude tiddler=<<currentTiddler>> subtiddler=<<localised-info-tiddler-title>> mode=\"block\">\n<$transclude tiddler=<<currentTiddler>> subtiddler=<<info-tiddler-title>> mode=\"block\">\n{{$:/language/ControlPanel/Plugin/NoInfoFound/Hint}}\n</$transclude>\n</$transclude>\n</$transclude>\n"
},
"$:/core/ui/SearchResults": {
"title": "$:/core/ui/SearchResults",
"text": "<div class=\"tc-search-results\">\n\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/SearchResults]!has[draft.of]butfirst[]limit[1]]\" emptyMessage=\"\"\"\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/SearchResults]!has[draft.of]]\">\n<$transclude mode=\"block\"/>\n</$list>\n\"\"\">\n\n<$macrocall $name=\"tabs\" tabsList=\"[all[shadows+tiddlers]tag[$:/tags/SearchResults]!has[draft.of]]\" default={{$:/config/SearchResults/Default}}/>\n\n</$list>\n\n</div>\n"
},
"$:/core/ui/SideBar/More": {
"title": "$:/core/ui/SideBar/More",
"tags": "$:/tags/SideBar",
"caption": "{{$:/language/SideBar/More/Caption}}",
"text": "<div class=\"tc-more-sidebar\">\n<<tabs \"[all[shadows+tiddlers]tag[$:/tags/MoreSideBar]!has[draft.of]]\" \"$:/core/ui/MoreSideBar/Tags\" \"$:/state/tab/moresidebar\" \"tc-vertical\">>\n</div>\n"
},
"$:/core/ui/SideBar/Open": {
"title": "$:/core/ui/SideBar/Open",
"tags": "$:/tags/SideBar",
"caption": "{{$:/language/SideBar/Open/Caption}}",
"text": "\\define lingo-base() $:/language/CloseAll/\n<$list filter=\"[list[$:/StoryList]]\" history=\"$:/HistoryList\" storyview=\"pop\">\n\n<$button message=\"tm-close-tiddler\" tooltip={{$:/language/Buttons/Close/Hint}} aria-label={{$:/language/Buttons/Close/Caption}} class=\"tc-btn-invisible tc-btn-mini\">×</$button> <$link to={{!!title}}><$view field=\"title\"/></$link>\n\n</$list>\n\n<$button message=\"tm-close-all-tiddlers\" class=\"tc-btn-invisible tc-btn-mini\"><<lingo Button>></$button>\n"
},
"$:/core/ui/SideBar/Recent": {
"title": "$:/core/ui/SideBar/Recent",
"tags": "$:/tags/SideBar",
"caption": "{{$:/language/SideBar/Recent/Caption}}",
"text": "<$macrocall $name=\"timeline\" format={{$:/language/RecentChanges/DateFormat}}/>\n"
},
"$:/core/ui/SideBar/Tools": {
"title": "$:/core/ui/SideBar/Tools",
"tags": "$:/tags/SideBar",
"caption": "{{$:/language/SideBar/Tools/Caption}}",
"text": "\\define lingo-base() $:/language/ControlPanel/\n\\define config-title()\n$:/config/PageControlButtons/Visibility/$(listItem)$\n\\end\n\n<<lingo Basics/Version/Prompt>> <<version>>\n\n<$set name=\"tv-config-toolbar-icons\" value=\"yes\">\n\n<$set name=\"tv-config-toolbar-text\" value=\"yes\">\n\n<$set name=\"tv-config-toolbar-class\" value=\"\">\n\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/PageControls]!has[draft.of]]\" variable=\"listItem\">\n\n<div style=\"position:relative;\">\n\n<$checkbox tiddler=<<config-title>> field=\"text\" checked=\"show\" unchecked=\"hide\" default=\"show\"/> <$transclude tiddler=<<listItem>>/> <i class=\"tc-muted\"><$transclude tiddler=<<listItem>> field=\"description\"/></i>\n\n</div>\n\n</$list>\n\n</$set>\n\n</$set>\n\n</$set>\n"
},
"$:/core/ui/SideBarLists": {
"title": "$:/core/ui/SideBarLists",
"text": "<div class=\"tc-sidebar-lists\">\n\n<$set name=\"searchTiddler\" value=\"$:/temp/search\">\n<div class=\"tc-search\">\n<$edit-text tiddler=\"$:/temp/search\" type=\"search\" tag=\"input\" focus={{$:/config/Search/AutoFocus}} focusPopup=<<qualify \"$:/state/popup/search-dropdown\">> class=\"tc-popup-handle\"/>\n<$reveal state=\"$:/temp/search\" type=\"nomatch\" text=\"\">\n<$button tooltip={{$:/language/Buttons/AdvancedSearch/Hint}} aria-label={{$:/language/Buttons/AdvancedSearch/Caption}} class=\"tc-btn-invisible\">\n<$action-setfield $tiddler=\"$:/temp/advancedsearch\" text={{$:/temp/search}}/>\n<$action-setfield $tiddler=\"$:/temp/search\" text=\"\"/>\n<$action-navigate $to=\"$:/AdvancedSearch\"/>\n{{$:/core/images/advanced-search-button}}\n</$button>\n<$button class=\"tc-btn-invisible\">\n<$action-setfield $tiddler=\"$:/temp/search\" text=\"\" />\n{{$:/core/images/close-button}}\n</$button>\n<$button popup=<<qualify \"$:/state/popup/search-dropdown\">> class=\"tc-btn-invisible\">\n<$set name=\"resultCount\" value=\"\"\"<$count filter=\"[!is[system]search{$(searchTiddler)$}]\"/>\"\"\">\n{{$:/core/images/down-arrow}} {{$:/language/Search/Matches}}\n</$set>\n</$button>\n</$reveal>\n<$reveal state=\"$:/temp/search\" type=\"match\" text=\"\">\n<$button to=\"$:/AdvancedSearch\" tooltip={{$:/language/Buttons/AdvancedSearch/Hint}} aria-label={{$:/language/Buttons/AdvancedSearch/Caption}} class=\"tc-btn-invisible\">\n{{$:/core/images/advanced-search-button}}\n</$button>\n</$reveal>\n</div>\n\n<$reveal tag=\"div\" class=\"tc-block-dropdown-wrapper\" state=\"$:/temp/search\" type=\"nomatch\" text=\"\">\n\n<$reveal tag=\"div\" class=\"tc-block-dropdown tc-search-drop-down tc-popup-handle\" state=<<qualify \"$:/state/popup/search-dropdown\">> type=\"nomatch\" text=\"\" default=\"\">\n\n{{$:/core/ui/SearchResults}}\n\n</$reveal>\n\n</$reveal>\n\n</$set>\n\n<$macrocall $name=\"tabs\" tabsList=\"[all[shadows+tiddlers]tag[$:/tags/SideBar]!has[draft.of]]\" default={{$:/config/DefaultSidebarTab}} state=\"$:/state/tab/sidebar\" />\n\n</div>\n"
},
"$:/TagManager": {
"title": "$:/TagManager",
"icon": "$:/core/images/tag-button",
"color": "#bbb",
"text": "\\define lingo-base() $:/language/TagManager/\n\\define iconEditorTab(type)\n<$list filter=\"[all[shadows+tiddlers]is[image]] [all[shadows+tiddlers]tag[$:/tags/Image]] -[type[application/pdf]] +[sort[title]] +[$type$is[system]]\">\n<$link to={{!!title}}>\n<$transclude/> <$view field=\"title\"/>\n</$link>\n</$list>\n\\end\n\\define iconEditor(title)\n<div class=\"tc-drop-down-wrapper\">\n<$button popup=<<qualify \"$:/state/popup/icon/$title$\">> class=\"tc-btn-invisible tc-btn-dropdown\">{{$:/core/images/down-arrow}}</$button>\n<$reveal state=<<qualify \"$:/state/popup/icon/$title$\">> type=\"popup\" position=\"belowleft\" text=\"\" default=\"\">\n<div class=\"tc-drop-down\">\n<$linkcatcher to=\"$title$!!icon\">\n<<iconEditorTab type:\"!\">>\n<hr/>\n<<iconEditorTab type:\"\">>\n</$linkcatcher>\n</div>\n</$reveal>\n</div>\n\\end\n\\define qualifyTitle(title)\n$title$$(currentTiddler)$\n\\end\n\\define toggleButton(state)\n<$reveal state=\"$state$\" type=\"match\" text=\"closed\" default=\"closed\">\n<$button set=\"$state$\" setTo=\"open\" class=\"tc-btn-invisible tc-btn-dropdown\" selectedClass=\"tc-selected\">\n{{$:/core/images/info-button}}\n</$button>\n</$reveal>\n<$reveal state=\"$state$\" type=\"match\" text=\"open\" default=\"closed\">\n<$button set=\"$state$\" setTo=\"closed\" class=\"tc-btn-invisible tc-btn-dropdown\" selectedClass=\"tc-selected\">\n{{$:/core/images/info-button}}\n</$button>\n</$reveal>\n\\end\n<table class=\"tc-tag-manager-table\">\n<tbody>\n<tr>\n<th><<lingo Colour/Heading>></th>\n<th class=\"tc-tag-manager-tag\"><<lingo Tag/Heading>></th>\n<th><<lingo Count/Heading>></th>\n<th><<lingo Icon/Heading>></th>\n<th><<lingo Info/Heading>></th>\n</tr>\n<$list filter=\"[tags[]!is[system]sort[title]]\">\n<tr>\n<td><$edit-text field=\"color\" tag=\"input\" type=\"color\"/></td>\n<td><$transclude tiddler=\"$:/core/ui/TagTemplate\"/></td>\n<td><$count filter=\"[all[current]tagging[]]\"/></td>\n<td>\n<$macrocall $name=\"iconEditor\" title={{!!title}}/>\n</td>\n<td>\n<$macrocall $name=\"toggleButton\" state=<<qualifyTitle \"$:/state/tag-manager/\">> /> \n</td>\n</tr>\n<tr>\n<td></td>\n<td colspan=\"4\">\n<$reveal state=<<qualifyTitle \"$:/state/tag-manager/\">> type=\"match\" text=\"open\" default=\"\">\n<table>\n<tbody>\n<tr><td><<lingo Colour/Heading>></td><td><$edit-text field=\"color\" tag=\"input\" type=\"text\" size=\"9\"/></td></tr>\n<tr><td><<lingo Icon/Heading>></td><td><$edit-text field=\"icon\" tag=\"input\" size=\"45\"/></td></tr>\n</tbody>\n</table>\n</$reveal>\n</td>\n</tr>\n</$list>\n<tr>\n<td></td>\n<td>\n{{$:/core/ui/UntaggedTemplate}}\n</td>\n<td>\n<small class=\"tc-menu-list-count\"><$count filter=\"[untagged[]!is[system]] -[tags[]]\"/></small>\n</td>\n<td></td>\n<td></td>\n</tr>\n</tbody>\n</table>\n"
},
"$:/core/ui/TagTemplate": {
"title": "$:/core/ui/TagTemplate",
"text": "\\define tag-styles()\nbackground-color:$(backgroundColor)$;\nfill:$(foregroundColor)$;\ncolor:$(foregroundColor)$;\n\\end\n\n\\define tag-body-inner(colour,fallbackTarget,colourA,colourB)\n<$vars foregroundColor=<<contrastcolour target:\"\"\"$colour$\"\"\" fallbackTarget:\"\"\"$fallbackTarget$\"\"\" colourA:\"\"\"$colourA$\"\"\" colourB:\"\"\"$colourB$\"\"\">> backgroundColor=\"\"\"$colour$\"\"\">\n<$button popup=<<qualify \"$:/state/popup/tag\">> class=\"tc-btn-invisible tc-tag-label\" style=<<tag-styles>>>\n<$transclude tiddler={{!!icon}}/> <$view field=\"title\" format=\"text\" />\n</$button>\n<$reveal state=<<qualify \"$:/state/popup/tag\">> type=\"popup\" position=\"below\" animate=\"yes\" class=\"tc-drop-down\"><$transclude tiddler=\"$:/core/ui/ListItemTemplate\"/>\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/TagDropdown]!has[draft.of]]\" variable=\"listItem\"> \n<$transclude tiddler=<<listItem>>/> \n</$list> \n<hr>\n<$list filter=\"[all[current]tagging[]]\" template=\"$:/core/ui/ListItemTemplate\"/>\n</$reveal>\n</$vars>\n\\end\n\n\\define tag-body(colour,palette)\n<span class=\"tc-tag-list-item\">\n<$macrocall $name=\"tag-body-inner\" colour=\"\"\"$colour$\"\"\" fallbackTarget={{$palette$##tag-background}} colourA={{$palette$##foreground}} colourB={{$palette$##background}}/>\n</span>\n\\end\n\n<$macrocall $name=\"tag-body\" colour={{!!color}} palette={{$:/palette}}/>\n"
},
"$:/core/ui/TiddlerFields": {
"title": "$:/core/ui/TiddlerFields",
"text": "<table class=\"tc-view-field-table\">\n<tbody>\n<$list filter=\"[all[current]fields[]sort[title]] -text\" template=\"$:/core/ui/TiddlerFieldTemplate\" variable=\"listItem\"/>\n</tbody>\n</table>\n"
},
"$:/core/ui/TiddlerFieldTemplate": {
"title": "$:/core/ui/TiddlerFieldTemplate",
"text": "<tr class=\"tc-view-field\">\n<td class=\"tc-view-field-name\">\n<$text text=<<listItem>>/>\n</td>\n<td class=\"tc-view-field-value\">\n<$view field=<<listItem>>/>\n</td>\n</tr>"
},
"$:/core/ui/TiddlerInfo/Advanced/PluginInfo": {
"title": "$:/core/ui/TiddlerInfo/Advanced/PluginInfo",
"tags": "$:/tags/TiddlerInfo/Advanced",
"text": "\\define lingo-base() $:/language/TiddlerInfo/Advanced/PluginInfo/\n<$list filter=\"[all[current]has[plugin-type]]\">\n\n! <<lingo Heading>>\n\n<<lingo Hint>>\n<ul>\n<$list filter=\"[all[current]plugintiddlers[]sort[title]]\" emptyMessage=<<lingo Empty/Hint>>>\n<li>\n<$link to={{!!title}}>\n<$view field=\"title\"/>\n</$link>\n</li>\n</$list>\n</ul>\n\n</$list>\n"
},
"$:/core/ui/TiddlerInfo/Advanced/ShadowInfo": {
"title": "$:/core/ui/TiddlerInfo/Advanced/ShadowInfo",
"tags": "$:/tags/TiddlerInfo/Advanced",
"text": "\\define lingo-base() $:/language/TiddlerInfo/Advanced/ShadowInfo/\n<$set name=\"infoTiddler\" value=<<currentTiddler>>>\n\n''<<lingo Heading>>''\n\n<$list filter=\"[all[current]!is[shadow]]\">\n\n<<lingo NotShadow/Hint>>\n\n</$list>\n\n<$list filter=\"[all[current]is[shadow]]\">\n\n<<lingo Shadow/Hint>>\n\n<$list filter=\"[all[current]shadowsource[]]\">\n\n<$set name=\"pluginTiddler\" value=<<currentTiddler>>>\n<<lingo Shadow/Source>>\n</$set>\n\n</$list>\n\n<$list filter=\"[all[current]is[shadow]is[tiddler]]\">\n\n<<lingo OverriddenShadow/Hint>>\n\n</$list>\n\n\n</$list>\n</$set>\n"
},
"$:/core/ui/TiddlerInfo/Advanced": {
"title": "$:/core/ui/TiddlerInfo/Advanced",
"tags": "$:/tags/TiddlerInfo",
"caption": "{{$:/language/TiddlerInfo/Advanced/Caption}}",
"text": "<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/TiddlerInfo/Advanced]!has[draft.of]]\" variable=\"listItem\">\n<$transclude tiddler=<<listItem>>/>\n\n</$list>\n"
},
"$:/core/ui/TiddlerInfo/Fields": {
"title": "$:/core/ui/TiddlerInfo/Fields",
"tags": "$:/tags/TiddlerInfo",
"caption": "{{$:/language/TiddlerInfo/Fields/Caption}}",
"text": "<$transclude tiddler=\"$:/core/ui/TiddlerFields\"/>\n"
},
"$:/core/ui/TiddlerInfo/List": {
"title": "$:/core/ui/TiddlerInfo/List",
"tags": "$:/tags/TiddlerInfo",
"caption": "{{$:/language/TiddlerInfo/List/Caption}}",
"text": "\\define lingo-base() $:/language/TiddlerInfo/\n<$list filter=\"[list{!!title}]\" emptyMessage=<<lingo List/Empty>> template=\"$:/core/ui/ListItemTemplate\"/>\n"
},
"$:/core/ui/TiddlerInfo/Listed": {
"title": "$:/core/ui/TiddlerInfo/Listed",
"tags": "$:/tags/TiddlerInfo",
"caption": "{{$:/language/TiddlerInfo/Listed/Caption}}",
"text": "\\define lingo-base() $:/language/TiddlerInfo/\n<$list filter=\"[all[current]listed[]!is[system]]\" emptyMessage=<<lingo Listed/Empty>> template=\"$:/core/ui/ListItemTemplate\"/>\n"
},
"$:/core/ui/TiddlerInfo/References": {
"title": "$:/core/ui/TiddlerInfo/References",
"tags": "$:/tags/TiddlerInfo",
"caption": "{{$:/language/TiddlerInfo/References/Caption}}",
"text": "\\define lingo-base() $:/language/TiddlerInfo/\n<$list filter=\"[all[current]backlinks[]sort[title]]\" emptyMessage=<<lingo References/Empty>> template=\"$:/core/ui/ListItemTemplate\">\n</$list>\n"
},
"$:/core/ui/TiddlerInfo/Tagging": {
"title": "$:/core/ui/TiddlerInfo/Tagging",
"tags": "$:/tags/TiddlerInfo",
"caption": "{{$:/language/TiddlerInfo/Tagging/Caption}}",
"text": "\\define lingo-base() $:/language/TiddlerInfo/\n<$list filter=\"[all[current]tagging[]]\" emptyMessage=<<lingo Tagging/Empty>> template=\"$:/core/ui/ListItemTemplate\"/>\n"
},
"$:/core/ui/TiddlerInfo/Tools": {
"title": "$:/core/ui/TiddlerInfo/Tools",
"tags": "$:/tags/TiddlerInfo",
"caption": "{{$:/language/TiddlerInfo/Tools/Caption}}",
"text": "\\define lingo-base() $:/language/TiddlerInfo/\n\\define config-title()\n$:/config/ViewToolbarButtons/Visibility/$(listItem)$\n\\end\n<$set name=\"tv-config-toolbar-icons\" value=\"yes\">\n\n<$set name=\"tv-config-toolbar-text\" value=\"yes\">\n\n<$set name=\"tv-config-toolbar-class\" value=\"\">\n\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/ViewToolbar]!has[draft.of]]\" variable=\"listItem\">\n\n<$checkbox tiddler=<<config-title>> field=\"text\" checked=\"show\" unchecked=\"hide\" default=\"show\"/> <$transclude tiddler=<<listItem>>/> <i class=\"tc-muted\"><$transclude tiddler=<<listItem>> field=\"description\"/></i>\n\n</$list>\n\n</$set>\n\n</$set>\n\n</$set>\n"
},
"$:/core/ui/TiddlerInfo": {
"title": "$:/core/ui/TiddlerInfo",
"text": "<$macrocall $name=\"tabs\" tabsList=\"[all[shadows+tiddlers]tag[$:/tags/TiddlerInfo]!has[draft.of]]\" default={{$:/config/TiddlerInfo/Default}}/>"
},
"$:/core/ui/TopBar/menu": {
"title": "$:/core/ui/TopBar/menu",
"tags": "$:/tags/TopRightBar",
"text": "<$reveal state=\"$:/state/sidebar\" type=\"nomatch\" text=\"no\">\n<$button set=\"$:/state/sidebar\" setTo=\"no\" tooltip={{$:/language/Buttons/HideSideBar/Hint}} aria-label={{$:/language/Buttons/HideSideBar/Caption}} class=\"tc-btn-invisible\">{{$:/core/images/chevron-right}}</$button>\n</$reveal>\n<$reveal state=\"$:/state/sidebar\" type=\"match\" text=\"no\">\n<$button set=\"$:/state/sidebar\" setTo=\"yes\" tooltip={{$:/language/Buttons/ShowSideBar/Hint}} aria-label={{$:/language/Buttons/ShowSideBar/Caption}} class=\"tc-btn-invisible\">{{$:/core/images/chevron-left}}</$button>\n</$reveal>\n"
},
"$:/core/ui/UntaggedTemplate": {
"title": "$:/core/ui/UntaggedTemplate",
"text": "\\define lingo-base() $:/language/SideBar/\n<$button popup=<<qualify \"$:/state/popup/tag\">> class=\"tc-btn-invisible tc-untagged-label tc-tag-label\">\n<<lingo Tags/Untagged/Caption>>\n</$button>\n<$reveal state=<<qualify \"$:/state/popup/tag\">> type=\"popup\" position=\"below\">\n<div class=\"tc-drop-down\">\n<$list filter=\"[untagged[]!is[system]] -[tags[]] +[sort[title]]\" template=\"$:/core/ui/ListItemTemplate\"/>\n</div>\n</$reveal>\n"
},
"$:/core/ui/ViewTemplate/body": {
"title": "$:/core/ui/ViewTemplate/body",
"tags": "$:/tags/ViewTemplate",
"text": "<$reveal tag=\"div\" class=\"tc-tiddler-body\" type=\"nomatch\" state=<<folded-state>> text=\"hide\" retain=\"yes\" animate=\"yes\">\n\n<$list filter=\"[all[current]!has[plugin-type]!field:hide-body[yes]]\">\n\n<$transclude>\n\n<$transclude tiddler=\"$:/language/MissingTiddler/Hint\"/>\n\n</$transclude>\n\n</$list>\n\n</$reveal>\n"
},
"$:/core/ui/ViewTemplate/classic": {
"title": "$:/core/ui/ViewTemplate/classic",
"tags": "$:/tags/ViewTemplate $:/tags/EditTemplate",
"text": "\\define lingo-base() $:/language/ClassicWarning/\n<$list filter=\"[all[current]type[text/x-tiddlywiki]]\">\n<div class=\"tc-message-box\">\n\n<<lingo Hint>>\n\n<$button set=\"!!type\" setTo=\"text/vnd.tiddlywiki\"><<lingo Upgrade/Caption>></$button>\n\n</div>\n</$list>\n"
},
"$:/core/ui/ViewTemplate/import": {
"title": "$:/core/ui/ViewTemplate/import",
"tags": "$:/tags/ViewTemplate",
"text": "\\define lingo-base() $:/language/Import/\n\n<$list filter=\"[all[current]field:plugin-type[import]]\">\n\n<div class=\"tc-import\">\n\n<<lingo Listing/Hint>>\n\n<$button message=\"tm-delete-tiddler\" param=<<currentTiddler>>><<lingo Listing/Cancel/Caption>></$button>\n<$button message=\"tm-perform-import\" param=<<currentTiddler>>><<lingo Listing/Import/Caption>></$button>\n\n{{||$:/core/ui/ImportListing}}\n\n<$button message=\"tm-delete-tiddler\" param=<<currentTiddler>>><<lingo Listing/Cancel/Caption>></$button>\n<$button message=\"tm-perform-import\" param=<<currentTiddler>>><<lingo Listing/Import/Caption>></$button>\n\n</div>\n\n</$list>\n"
},
"$:/core/ui/ViewTemplate/plugin": {
"title": "$:/core/ui/ViewTemplate/plugin",
"tags": "$:/tags/ViewTemplate",
"text": "<$list filter=\"[all[current]has[plugin-type]] -[all[current]field:plugin-type[import]]\">\n\n{{||$:/core/ui/TiddlerInfo/Advanced/PluginInfo}}\n\n</$list>\n"
},
"$:/core/ui/ViewTemplate/subtitle": {
"title": "$:/core/ui/ViewTemplate/subtitle",
"tags": "$:/tags/ViewTemplate",
"text": "<$reveal type=\"nomatch\" state=<<folded-state>> text=\"hide\" tag=\"div\" retain=\"yes\" animate=\"yes\">\n<div class=\"tc-subtitle\">\n<$link to={{!!modifier}}>\n<$view field=\"modifier\"/>\n</$link> <$view field=\"modified\" format=\"date\" template={{$:/language/Tiddler/DateFormat}}/>\n</div>\n</$reveal>\n"
},
"$:/core/ui/ViewTemplate/tags": {
"title": "$:/core/ui/ViewTemplate/tags",
"tags": "$:/tags/ViewTemplate",
"text": "<$reveal type=\"nomatch\" state=<<folded-state>> text=\"hide\" tag=\"div\" retain=\"yes\" animate=\"yes\">\n<div class=\"tc-tags-wrapper\"><$list filter=\"[all[current]tags[]sort[title]]\" template=\"$:/core/ui/TagTemplate\" storyview=\"pop\"/></div>\n</$reveal>"
},
"$:/core/ui/ViewTemplate/title": {
"title": "$:/core/ui/ViewTemplate/title",
"tags": "$:/tags/ViewTemplate",
"text": "\\define title-styles()\nfill:$(foregroundColor)$;\n\\end\n\\define config-title()\n$:/config/ViewToolbarButtons/Visibility/$(listItem)$\n\\end\n<div class=\"tc-tiddler-title\">\n<div class=\"tc-titlebar\">\n<span class=\"tc-tiddler-controls\">\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/ViewToolbar]!has[draft.of]]\" variable=\"listItem\"><$reveal type=\"nomatch\" state=<<config-title>> text=\"hide\"><$transclude tiddler=<<listItem>>/></$reveal></$list>\n</span>\n<$set name=\"tv-wikilinks\" value={{$:/config/Tiddlers/TitleLinks}}>\n<$link>\n<$set name=\"foregroundColor\" value={{!!color}}>\n<span class=\"tc-tiddler-title-icon\" style=<<title-styles>>>\n<$transclude tiddler={{!!icon}}/>\n</span>\n</$set>\n<$list filter=\"[all[current]removeprefix[$:/]]\">\n<h2 class=\"tc-title\" title={{$:/language/SystemTiddler/Tooltip}}>\n<span class=\"tc-system-title-prefix\">$:/</span><$text text=<<currentTiddler>>/>\n</h2>\n</$list>\n<$list filter=\"[all[current]!prefix[$:/]]\">\n<h2 class=\"tc-title\">\n<$view field=\"title\"/>\n</h2>\n</$list>\n</$link>\n</$set>\n</div>\n\n<$reveal type=\"nomatch\" text=\"\" default=\"\" state=<<tiddlerInfoState>> class=\"tc-tiddler-info tc-popup-handle\" animate=\"yes\" retain=\"yes\">\n\n<$transclude tiddler=\"$:/core/ui/TiddlerInfo\"/>\n\n</$reveal>\n</div>"
},
"$:/core/ui/ViewTemplate/unfold": {
"title": "$:/core/ui/ViewTemplate/unfold",
"tags": "$:/tags/ViewTemplate",
"text": "<$reveal tag=\"div\" type=\"nomatch\" state=\"$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/fold-bar\" text=\"hide\">\n<$reveal tag=\"div\" type=\"nomatch\" state=<<folded-state>> text=\"hide\" default=\"show\" retain=\"yes\" animate=\"yes\">\n<$button tooltip={{$:/language/Buttons/Fold/Hint}} aria-label={{$:/language/Buttons/Fold/Caption}} class=\"tc-fold-banner\">\n<$action-sendmessage $message=\"tm-fold-tiddler\" $param=<<currentTiddler>> foldedState=<<folded-state>>/>\n{{$:/core/images/chevron-up}}\n</$button>\n</$reveal>\n<$reveal tag=\"div\" type=\"nomatch\" state=<<folded-state>> text=\"show\" default=\"show\" retain=\"yes\" animate=\"yes\">\n<$button tooltip={{$:/language/Buttons/Unfold/Hint}} aria-label={{$:/language/Buttons/Unfold/Caption}} class=\"tc-unfold-banner\">\n<$action-sendmessage $message=\"tm-fold-tiddler\" $param=<<currentTiddler>> foldedState=<<folded-state>>/>\n{{$:/core/images/chevron-down}}\n</$button>\n</$reveal>\n</$reveal>\n"
},
"$:/core/ui/ViewTemplate": {
"title": "$:/core/ui/ViewTemplate",
"text": "\\define frame-classes()\ntc-tiddler-frame tc-tiddler-view-frame $(missingTiddlerClass)$ $(shadowTiddlerClass)$ $(systemTiddlerClass)$ $(tiddlerTagClasses)$\n\\end\n\\define folded-state()\n$:/state/folded/$(currentTiddler)$\n\\end\n<$set name=\"storyTiddler\" value=<<currentTiddler>>><$set name=\"tiddlerInfoState\" value=<<qualify \"$:/state/popup/tiddler-info\">>><$tiddler tiddler=<<currentTiddler>>><div class=<<frame-classes>>><$list filter=\"[all[shadows+tiddlers]tag[$:/tags/ViewTemplate]!has[draft.of]]\" variable=\"listItem\"><$transclude tiddler=<<listItem>>/></$list>\n</div>\n</$tiddler></$set></$set>\n"
},
"$:/core/ui/Buttons/clone": {
"title": "$:/core/ui/Buttons/clone",
"tags": "$:/tags/ViewToolbar",
"caption": "{{$:/core/images/clone-button}} {{$:/language/Buttons/Clone/Caption}}",
"description": "{{$:/language/Buttons/Clone/Hint}}",
"text": "<$button message=\"tm-new-tiddler\" param=<<currentTiddler>> tooltip={{$:/language/Buttons/Clone/Hint}} aria-label={{$:/language/Buttons/Clone/Caption}} class=<<tv-config-toolbar-class>>>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/clone-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/Clone/Caption}}/></span>\n</$list>\n</$button>"
},
"$:/core/ui/Buttons/close-others": {
"title": "$:/core/ui/Buttons/close-others",
"tags": "$:/tags/ViewToolbar",
"caption": "{{$:/core/images/close-others-button}} {{$:/language/Buttons/CloseOthers/Caption}}",
"description": "{{$:/language/Buttons/CloseOthers/Hint}}",
"text": "<$button message=\"tm-close-other-tiddlers\" param=<<currentTiddler>> tooltip={{$:/language/Buttons/CloseOthers/Hint}} aria-label={{$:/language/Buttons/CloseOthers/Caption}} class=<<tv-config-toolbar-class>>>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/close-others-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/CloseOthers/Caption}}/></span>\n</$list>\n</$button>"
},
"$:/core/ui/Buttons/close": {
"title": "$:/core/ui/Buttons/close",
"tags": "$:/tags/ViewToolbar",
"caption": "{{$:/core/images/close-button}} {{$:/language/Buttons/Close/Caption}}",
"description": "{{$:/language/Buttons/Close/Hint}}",
"text": "<$button message=\"tm-close-tiddler\" tooltip={{$:/language/Buttons/Close/Hint}} aria-label={{$:/language/Buttons/Close/Caption}} class=<<tv-config-toolbar-class>>>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/close-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/Close/Caption}}/></span>\n</$list>\n</$button>"
},
"$:/core/ui/Buttons/edit": {
"title": "$:/core/ui/Buttons/edit",
"tags": "$:/tags/ViewToolbar",
"caption": "{{$:/core/images/edit-button}} {{$:/language/Buttons/Edit/Caption}}",
"description": "{{$:/language/Buttons/Edit/Hint}}",
"text": "<$button message=\"tm-edit-tiddler\" tooltip={{$:/language/Buttons/Edit/Hint}} aria-label={{$:/language/Buttons/Edit/Caption}} class=<<tv-config-toolbar-class>>>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/edit-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/Edit/Caption}}/></span>\n</$list>\n</$button>"
},
"$:/core/ui/Buttons/export-tiddler": {
"title": "$:/core/ui/Buttons/export-tiddler",
"tags": "$:/tags/ViewToolbar",
"caption": "{{$:/core/images/export-button}} {{$:/language/Buttons/ExportTiddler/Caption}}",
"description": "{{$:/language/Buttons/ExportTiddler/Hint}}",
"text": "\\define makeExportFilter()\n[[$(currentTiddler)$]]\n\\end\n<$macrocall $name=\"exportButton\" exportFilter=<<makeExportFilter>> lingoBase=\"$:/language/Buttons/ExportTiddler/\" baseFilename=<<currentTiddler>>/>"
},
"$:/core/ui/Buttons/fold-bar": {
"title": "$:/core/ui/Buttons/fold-bar",
"tags": "$:/tags/ViewToolbar",
"caption": "{{$:/language/Buttons/Fold/FoldBar/Caption}}",
"description": "{{$:/language/Buttons/Fold/FoldBar/Hint}}",
"text": "<!-- This dummy toolbar button is here to allow visibility of the fold-bar to be controlled as if it were a toolbar button -->"
},
"$:/core/ui/Buttons/fold-others": {
"title": "$:/core/ui/Buttons/fold-others",
"tags": "$:/tags/ViewToolbar",
"caption": "{{$:/core/images/fold-others-button}} {{$:/language/Buttons/FoldOthers/Caption}}",
"description": "{{$:/language/Buttons/FoldOthers/Hint}}",
"text": "<$button tooltip={{$:/language/Buttons/FoldOthers/Hint}} aria-label={{$:/language/Buttons/FoldOthers/Caption}} class=<<tv-config-toolbar-class>>>\n<$action-sendmessage $message=\"tm-fold-other-tiddlers\" $param=<<currentTiddler>> foldedStatePrefix=\"$:/state/folded/\"/>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\" variable=\"listItem\">\n{{$:/core/images/fold-others-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/FoldOthers/Caption}}/></span>\n</$list>\n</$button>"
},
"$:/core/ui/Buttons/fold": {
"title": "$:/core/ui/Buttons/fold",
"tags": "$:/tags/ViewToolbar",
"caption": "{{$:/core/images/fold-button}} {{$:/language/Buttons/Fold/Caption}}",
"description": "{{$:/language/Buttons/Fold/Hint}}",
"text": "<$reveal type=\"nomatch\" state=<<folded-state>> text=\"hide\" default=\"show\"><$button tooltip={{$:/language/Buttons/Fold/Hint}} aria-label={{$:/language/Buttons/Fold/Caption}} class=<<tv-config-toolbar-class>>>\n<$action-sendmessage $message=\"tm-fold-tiddler\" $param=<<currentTiddler>> foldedState=<<folded-state>>/>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\" variable=\"listItem\">\n{{$:/core/images/fold-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\">\n<$text text={{$:/language/Buttons/Fold/Caption}}/>\n</span>\n</$list>\n</$button></$reveal><$reveal type=\"match\" state=<<folded-state>> text=\"hide\" default=\"show\"><$button tooltip={{$:/language/Buttons/Unfold/Hint}} aria-label={{$:/language/Buttons/Unfold/Caption}} class=<<tv-config-toolbar-class>>>\n<$action-sendmessage $message=\"tm-fold-tiddler\" $param=<<currentTiddler>> foldedState=<<folded-state>>/>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\" variable=\"listItem\">\n{{$:/core/images/unfold-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\">\n<$text text={{$:/language/Buttons/Unfold/Caption}}/>\n</span>\n</$list>\n</$button></$reveal>"
},
"$:/core/ui/Buttons/info": {
"title": "$:/core/ui/Buttons/info",
"tags": "$:/tags/ViewToolbar",
"caption": "{{$:/core/images/info-button}} {{$:/language/Buttons/Info/Caption}}",
"description": "{{$:/language/Buttons/Info/Hint}}",
"text": "<$button popup=<<tiddlerInfoState>> tooltip={{$:/language/Buttons/Info/Hint}} aria-label={{$:/language/Buttons/Info/Caption}} class=<<tv-config-toolbar-class>> selectedClass=\"tc-selected\">\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/info-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/Info/Caption}}/></span>\n</$list>\n</$button>"
},
"$:/core/ui/Buttons/more-tiddler-actions": {
"title": "$:/core/ui/Buttons/more-tiddler-actions",
"tags": "$:/tags/ViewToolbar",
"caption": "{{$:/core/images/down-arrow}} {{$:/language/Buttons/More/Caption}}",
"description": "{{$:/language/Buttons/More/Hint}}",
"text": "\\define config-title()\n$:/config/ViewToolbarButtons/Visibility/$(listItem)$\n\\end\n<$button popup=<<qualify \"$:/state/popup/more\">> tooltip={{$:/language/Buttons/More/Hint}} aria-label={{$:/language/Buttons/More/Caption}} class=<<tv-config-toolbar-class>> selectedClass=\"tc-selected\">\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/down-arrow}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/More/Caption}}/></span>\n</$list>\n</$button><$reveal state=<<qualify \"$:/state/popup/more\">> type=\"popup\" position=\"below\" animate=\"yes\">\n\n<div class=\"tc-drop-down\">\n\n<$set name=\"tv-config-toolbar-icons\" value=\"yes\">\n\n<$set name=\"tv-config-toolbar-text\" value=\"yes\">\n\n<$set name=\"tv-config-toolbar-class\" value=\"tc-btn-invisible\">\n\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/ViewToolbar]!has[draft.of]] -[[$:/core/ui/Buttons/more-tiddler-actions]]\" variable=\"listItem\">\n\n<$reveal type=\"match\" state=<<config-title>> text=\"hide\">\n\n<$transclude tiddler=<<listItem>> mode=\"inline\"/>\n\n</$reveal>\n\n</$list>\n\n</$set>\n\n</$set>\n\n</$set>\n\n</div>\n\n</$reveal>"
},
"$:/core/ui/Buttons/new-here": {
"title": "$:/core/ui/Buttons/new-here",
"tags": "$:/tags/ViewToolbar",
"caption": "{{$:/core/images/new-here-button}} {{$:/language/Buttons/NewHere/Caption}}",
"description": "{{$:/language/Buttons/NewHere/Hint}}",
"text": "\\define newHereButtonTags()\n[[$(currentTiddler)$]]\n\\end\n\\define newHereButton()\n<$button tooltip={{$:/language/Buttons/NewHere/Hint}} aria-label={{$:/language/Buttons/NewHere/Caption}} class=<<tv-config-toolbar-class>>>\n<$action-sendmessage $message=\"tm-new-tiddler\" tags=<<newHereButtonTags>>/>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/new-here-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/NewHere/Caption}}/></span>\n</$list>\n</$button>\n\\end\n<<newHereButton>>"
},
"$:/core/ui/Buttons/new-journal-here": {
"title": "$:/core/ui/Buttons/new-journal-here",
"tags": "$:/tags/ViewToolbar",
"caption": "{{$:/core/images/new-journal-button}} {{$:/language/Buttons/NewJournalHere/Caption}}",
"description": "{{$:/language/Buttons/NewJournalHere/Hint}}",
"text": "\\define journalButtonTags()\n[[$(currentTiddlerTag)$]] $(journalTags)$\n\\end\n\\define journalButton()\n<$button tooltip={{$:/language/Buttons/NewJournalHere/Hint}} aria-label={{$:/language/Buttons/NewJournalHere/Caption}} class=<<tv-config-toolbar-class>>>\n<$action-sendmessage $message=\"tm-new-tiddler\" title=<<now \"$(journalTitleTemplate)$\">> tags=<<journalButtonTags>>/>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/new-journal-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/NewJournalHere/Caption}}/></span>\n</$list>\n</$button>\n\\end\n<$set name=\"journalTitleTemplate\" value={{$:/config/NewJournal/Title}}>\n<$set name=\"journalTags\" value={{$:/config/NewJournal/Tags}}>\n<$set name=\"currentTiddlerTag\" value=<<currentTiddler>>>\n<<journalButton>>\n</$set></$set></$set>"
},
"$:/core/ui/Buttons/open-window": {
"title": "$:/core/ui/Buttons/open-window",
"tags": "$:/tags/ViewToolbar",
"caption": "{{$:/core/images/open-window}} {{$:/language/Buttons/OpenWindow/Caption}}",
"description": "{{$:/language/Buttons/OpenWindow/Hint}}",
"text": "<$button message=\"tm-open-window\" tooltip={{$:/language/Buttons/OpenWindow/Hint}} aria-label={{$:/language/Buttons/OpenWindow/Caption}} class=<<tv-config-toolbar-class>>>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/open-window}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/OpenWindow/Caption}}/></span>\n</$list>\n</$button>"
},
"$:/core/ui/Buttons/permalink": {
"title": "$:/core/ui/Buttons/permalink",
"tags": "$:/tags/ViewToolbar",
"caption": "{{$:/core/images/permalink-button}} {{$:/language/Buttons/Permalink/Caption}}",
"description": "{{$:/language/Buttons/Permalink/Hint}}",
"text": "<$button message=\"tm-permalink\" tooltip={{$:/language/Buttons/Permalink/Hint}} aria-label={{$:/language/Buttons/Permalink/Caption}} class=<<tv-config-toolbar-class>>>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/permalink-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/Permalink/Caption}}/></span>\n</$list>\n</$button>"
},
"$:/core/ui/Buttons/permaview": {
"title": "$:/core/ui/Buttons/permaview",
"tags": "$:/tags/ViewToolbar $:/tags/PageControls",
"caption": "{{$:/core/images/permaview-button}} {{$:/language/Buttons/Permaview/Caption}}",
"description": "{{$:/language/Buttons/Permaview/Hint}}",
"text": "<$button message=\"tm-permaview\" tooltip={{$:/language/Buttons/Permaview/Hint}} aria-label={{$:/language/Buttons/Permaview/Caption}} class=<<tv-config-toolbar-class>>>\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/permaview-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$:/language/Buttons/Permaview/Caption}}/></span>\n</$list>\n</$button>"
},
"$:/temp/advancedsearch": {
"title": "$:/temp/advancedsearch",
"text": ""
},
"$:/snippets/allfields": {
"title": "$:/snippets/allfields",
"text": "\\define renderfield(title)\n<tr class=\"tc-view-field\"><td class=\"tc-view-field-name\">''$title$'':</td><td class=\"tc-view-field-value\">//{{$:/language/Docs/Fields/$title$}}//</td></tr>\n\\end\n<table class=\"tc-view-field-table\"><tbody><$list filter=\"[fields[]sort[title]]\" variable=\"listItem\"><$macrocall $name=\"renderfield\" title=<<listItem>>/></$list>\n</tbody></table>\n"
},
"$:/config/AnimationDuration": {
"title": "$:/config/AnimationDuration",
"text": "400"
},
"$:/config/AutoSave": {
"title": "$:/config/AutoSave",
"text": "yes"
},
"$:/config/BitmapEditor/Colour": {
"title": "$:/config/BitmapEditor/Colour",
"text": "#444"
},
"$:/config/BitmapEditor/ImageSizes": {
"title": "$:/config/BitmapEditor/ImageSizes",
"text": "[[62px 100px]] [[100px 62px]] [[124px 200px]] [[200px 124px]] [[248px 400px]] [[371px 600px]] [[400px 248px]] [[556px 900px]] [[600px 371px]] [[742px 1200px]] [[900px 556px]] [[1200px 742px]]"
},
"$:/config/BitmapEditor/LineWidth": {
"title": "$:/config/BitmapEditor/LineWidth",
"text": "3px"
},
"$:/config/BitmapEditor/LineWidths": {
"title": "$:/config/BitmapEditor/LineWidths",
"text": "0.25px 0.5px 1px 2px 3px 4px 6px 8px 10px 16px 20px 28px 40px 56px 80px"
},
"$:/config/BitmapEditor/Opacities": {
"title": "$:/config/BitmapEditor/Opacities",
"text": "0.01 0.025 0.05 0.075 0.1 0.15 0.2 0.3 0.4 0.5 0.6 0.7 0.8 0.9 1.0"
},
"$:/config/BitmapEditor/Opacity": {
"title": "$:/config/BitmapEditor/Opacity",
"text": "1.0"
},
"$:/config/DefaultSidebarTab": {
"title": "$:/config/DefaultSidebarTab",
"text": "$:/core/ui/SideBar/Open"
},
"$:/config/Drafts/TypingTimeout": {
"title": "$:/config/Drafts/TypingTimeout",
"text": "400"
},
"$:/config/EditorToolbarButtons/Visibility/$:/core/ui/EditorToolbar/heading-4": {
"title": "$:/config/EditorToolbarButtons/Visibility/$:/core/ui/EditorToolbar/heading-4",
"text": "hide"
},
"$:/config/EditorToolbarButtons/Visibility/$:/core/ui/EditorToolbar/heading-5": {
"title": "$:/config/EditorToolbarButtons/Visibility/$:/core/ui/EditorToolbar/heading-5",
"text": "hide"
},
"$:/config/EditorToolbarButtons/Visibility/$:/core/ui/EditorToolbar/heading-6": {
"title": "$:/config/EditorToolbarButtons/Visibility/$:/core/ui/EditorToolbar/heading-6",
"text": "hide"
},
"$:/config/EditorTypeMappings/image/gif": {
"title": "$:/config/EditorTypeMappings/image/gif",
"text": "bitmap"
},
"$:/config/EditorTypeMappings/image/jpeg": {
"title": "$:/config/EditorTypeMappings/image/jpeg",
"text": "bitmap"
},
"$:/config/EditorTypeMappings/image/jpg": {
"title": "$:/config/EditorTypeMappings/image/jpg",
"text": "bitmap"
},
"$:/config/EditorTypeMappings/image/png": {
"title": "$:/config/EditorTypeMappings/image/png",
"text": "bitmap"
},
"$:/config/EditorTypeMappings/image/x-icon": {
"title": "$:/config/EditorTypeMappings/image/x-icon",
"text": "bitmap"
},
"$:/config/EditorTypeMappings/text/vnd.tiddlywiki": {
"title": "$:/config/EditorTypeMappings/text/vnd.tiddlywiki",
"text": "text"
},
"$:/config/EditTemplateFields/Visibility/title": {
"title": "$:/config/EditTemplateFields/Visibility/title",
"text": "hide"
},
"$:/config/EditTemplateFields/Visibility/tags": {
"title": "$:/config/EditTemplateFields/Visibility/tags",
"text": "hide"
},
"$:/config/EditTemplateFields/Visibility/text": {
"title": "$:/config/EditTemplateFields/Visibility/text",
"text": "hide"
},
"$:/config/EditTemplateFields/Visibility/creator": {
"title": "$:/config/EditTemplateFields/Visibility/creator",
"text": "hide"
},
"$:/config/EditTemplateFields/Visibility/created": {
"title": "$:/config/EditTemplateFields/Visibility/created",
"text": "hide"
},
"$:/config/EditTemplateFields/Visibility/modified": {
"title": "$:/config/EditTemplateFields/Visibility/modified",
"text": "hide"
},
"$:/config/EditTemplateFields/Visibility/modifier": {
"title": "$:/config/EditTemplateFields/Visibility/modifier",
"text": "hide"
},
"$:/config/EditTemplateFields/Visibility/type": {
"title": "$:/config/EditTemplateFields/Visibility/type",
"text": "hide"
},
"$:/config/EditTemplateFields/Visibility/draft.title": {
"title": "$:/config/EditTemplateFields/Visibility/draft.title",
"text": "hide"
},
"$:/config/EditTemplateFields/Visibility/draft.of": {
"title": "$:/config/EditTemplateFields/Visibility/draft.of",
"text": "hide"
},
"$:/config/EditTemplateFields/Visibility/revision": {
"title": "$:/config/EditTemplateFields/Visibility/revision",
"text": "hide"
},
"$:/config/EditTemplateFields/Visibility/bag": {
"title": "$:/config/EditTemplateFields/Visibility/bag",
"text": "hide"
},
"$:/config/MissingLinks": {
"title": "$:/config/MissingLinks",
"text": "yes"
},
"$:/config/Navigation/UpdateAddressBar": {
"title": "$:/config/Navigation/UpdateAddressBar",
"text": "no"
},
"$:/config/Navigation/UpdateHistory": {
"title": "$:/config/Navigation/UpdateHistory",
"text": "no"
},
"$:/config/OfficialPluginLibrary": {
"title": "$:/config/OfficialPluginLibrary",
"tags": "$:/tags/PluginLibrary",
"url": "http://tiddlywiki.com/library/v5.1.13/index.html",
"caption": "{{$:/language/OfficialPluginLibrary}}",
"text": "{{$:/language/OfficialPluginLibrary/Hint}}\n"
},
"$:/config/Navigation/openLinkFromInsideRiver": {
"title": "$:/config/Navigation/openLinkFromInsideRiver",
"text": "below"
},
"$:/config/Navigation/openLinkFromOutsideRiver": {
"title": "$:/config/Navigation/openLinkFromOutsideRiver",
"text": "top"
},
"$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/advanced-search": {
"title": "$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/advanced-search",
"text": "hide"
},
"$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/close-all": {
"title": "$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/close-all",
"text": "hide"
},
"$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/encryption": {
"title": "$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/encryption",
"text": "hide"
},
"$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/export-page": {
"title": "$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/export-page",
"text": "hide"
},
"$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/fold-all": {
"title": "$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/fold-all",
"text": "hide"
},
"$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/full-screen": {
"title": "$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/full-screen",
"text": "hide"
},
"$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/home": {
"title": "$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/home",
"text": "hide"
},
"$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/refresh": {
"title": "$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/refresh",
"text": "hide"
},
"$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/import": {
"title": "$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/import",
"text": "hide"
},
"$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/language": {
"title": "$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/language",
"text": "hide"
},
"$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/tag-manager": {
"title": "$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/tag-manager",
"text": "hide"
},
"$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/more-page-actions": {
"title": "$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/more-page-actions",
"text": "hide"
},
"$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/new-journal": {
"title": "$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/new-journal",
"text": "hide"
},
"$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/new-image": {
"title": "$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/new-image",
"text": "hide"
},
"$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/palette": {
"title": "$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/palette",
"text": "hide"
},
"$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/permaview": {
"title": "$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/permaview",
"text": "hide"
},
"$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/storyview": {
"title": "$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/storyview",
"text": "hide"
},
"$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/theme": {
"title": "$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/theme",
"text": "hide"
},
"$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/unfold-all": {
"title": "$:/config/PageControlButtons/Visibility/$:/core/ui/Buttons/unfold-all",
"text": "hide"
},
"$:/config/Performance/Instrumentation": {
"title": "$:/config/Performance/Instrumentation",
"text": "no"
},
"$:/config/SaverFilter": {
"title": "$:/config/SaverFilter",
"text": "[all[]] -[[$:/HistoryList]] -[[$:/StoryList]] -[[$:/Import]] -[[$:/isEncrypted]] -[[$:/UploadName]] -[prefix[$:/state/]] -[prefix[$:/temp/]]"
},
"$:/config/SaveWikiButton/Template": {
"title": "$:/config/SaveWikiButton/Template",
"text": "$:/core/save/all"
},
"$:/config/Search/AutoFocus": {
"title": "$:/config/Search/AutoFocus",
"text": "true"
},
"$:/config/SearchResults/Default": {
"title": "$:/config/SearchResults/Default",
"text": "$:/core/ui/DefaultSearchResultList"
},
"$:/config/ShortcutInfo/bold": {
"title": "$:/config/ShortcutInfo/bold",
"text": "{{$:/language/Buttons/Bold/Hint}}"
},
"$:/config/ShortcutInfo/cancel-edit-tiddler": {
"title": "$:/config/ShortcutInfo/cancel-edit-tiddler",
"text": "{{$:/language/Buttons/Cancel/Hint}}"
},
"$:/config/ShortcutInfo/excise": {
"title": "$:/config/ShortcutInfo/excise",
"text": "{{$:/language/Buttons/Excise/Hint}}"
},
"$:/config/ShortcutInfo/heading-1": {
"title": "$:/config/ShortcutInfo/heading-1",
"text": "{{$:/language/Buttons/Heading1/Hint}}"
},
"$:/config/ShortcutInfo/heading-2": {
"title": "$:/config/ShortcutInfo/heading-2",
"text": "{{$:/language/Buttons/Heading2/Hint}}"
},
"$:/config/ShortcutInfo/heading-3": {
"title": "$:/config/ShortcutInfo/heading-3",
"text": "{{$:/language/Buttons/Heading3/Hint}}"
},
"$:/config/ShortcutInfo/heading-4": {
"title": "$:/config/ShortcutInfo/heading-4",
"text": "{{$:/language/Buttons/Heading4/Hint}}"
},
"$:/config/ShortcutInfo/heading-5": {
"title": "$:/config/ShortcutInfo/heading-5",
"text": "{{$:/language/Buttons/Heading5/Hint}}"
},
"$:/config/ShortcutInfo/heading-6": {
"title": "$:/config/ShortcutInfo/heading-6",
"text": "{{$:/language/Buttons/Heading6/Hint}}"
},
"$:/config/ShortcutInfo/italic": {
"title": "$:/config/ShortcutInfo/italic",
"text": "{{$:/language/Buttons/Italic/Hint}}"
},
"$:/config/ShortcutInfo/link": {
"title": "$:/config/ShortcutInfo/link",
"text": "{{$:/language/Buttons/Link/Hint}}"
},
"$:/config/ShortcutInfo/list-bullet": {
"title": "$:/config/ShortcutInfo/list-bullet",
"text": "{{$:/language/Buttons/ListBullet/Hint}}"
},
"$:/config/ShortcutInfo/list-number": {
"title": "$:/config/ShortcutInfo/list-number",
"text": "{{$:/language/Buttons/ListNumber/Hint}}"
},
"$:/config/ShortcutInfo/mono-block": {
"title": "$:/config/ShortcutInfo/mono-block",
"text": "{{$:/language/Buttons/MonoBlock/Hint}}"
},
"$:/config/ShortcutInfo/mono-line": {
"title": "$:/config/ShortcutInfo/mono-line",
"text": "{{$:/language/Buttons/MonoLine/Hint}}"
},
"$:/config/ShortcutInfo/picture": {
"title": "$:/config/ShortcutInfo/picture",
"text": "{{$:/language/Buttons/Picture/Hint}}"
},
"$:/config/ShortcutInfo/preview": {
"title": "$:/config/ShortcutInfo/preview",
"text": "{{$:/language/Buttons/Preview/Hint}}"
},
"$:/config/ShortcutInfo/quote": {
"title": "$:/config/ShortcutInfo/quote",
"text": "{{$:/language/Buttons/Quote/Hint}}"
},
"$:/config/ShortcutInfo/save-tiddler": {
"title": "$:/config/ShortcutInfo/save-tiddler",
"text": "{{$:/language/Buttons/Save/Hint}}"
},
"$:/config/ShortcutInfo/stamp": {
"title": "$:/config/ShortcutInfo/stamp",
"text": "{{$:/language/Buttons/Stamp/Hint}}"
},
"$:/config/ShortcutInfo/strikethrough": {
"title": "$:/config/ShortcutInfo/strikethrough",
"text": "{{$:/language/Buttons/Strikethrough/Hint}}"
},
"$:/config/ShortcutInfo/subscript": {
"title": "$:/config/ShortcutInfo/subscript",
"text": "{{$:/language/Buttons/Subscript/Hint}}"
},
"$:/config/ShortcutInfo/superscript": {
"title": "$:/config/ShortcutInfo/superscript",
"text": "{{$:/language/Buttons/Superscript/Hint}}"
},
"$:/config/ShortcutInfo/underline": {
"title": "$:/config/ShortcutInfo/underline",
"text": "{{$:/language/Buttons/Underline/Hint}}"
},
"$:/config/shortcuts-mac/bold": {
"title": "$:/config/shortcuts-mac/bold",
"text": "meta-B"
},
"$:/config/shortcuts-mac/italic": {
"title": "$:/config/shortcuts-mac/italic",
"text": "meta-I"
},
"$:/config/shortcuts-mac/underline": {
"title": "$:/config/shortcuts-mac/underline",
"text": "meta-U"
},
"$:/config/shortcuts-not-mac/bold": {
"title": "$:/config/shortcuts-not-mac/bold",
"text": "ctrl-B"
},
"$:/config/shortcuts-not-mac/italic": {
"title": "$:/config/shortcuts-not-mac/italic",
"text": "ctrl-I"
},
"$:/config/shortcuts-not-mac/underline": {
"title": "$:/config/shortcuts-not-mac/underline",
"text": "ctrl-U"
},
"$:/config/shortcuts/cancel-edit-tiddler": {
"title": "$:/config/shortcuts/cancel-edit-tiddler",
"text": "escape"
},
"$:/config/shortcuts/excise": {
"title": "$:/config/shortcuts/excise",
"text": "ctrl-E"
},
"$:/config/shortcuts/heading-1": {
"title": "$:/config/shortcuts/heading-1",
"text": "ctrl-1"
},
"$:/config/shortcuts/heading-2": {
"title": "$:/config/shortcuts/heading-2",
"text": "ctrl-2"
},
"$:/config/shortcuts/heading-3": {
"title": "$:/config/shortcuts/heading-3",
"text": "ctrl-3"
},
"$:/config/shortcuts/heading-4": {
"title": "$:/config/shortcuts/heading-4",
"text": "ctrl-4"
},
"$:/config/shortcuts/heading-5": {
"title": "$:/config/shortcuts/heading-5",
"text": "ctrl-5"
},
"$:/config/shortcuts/heading-6": {
"title": "$:/config/shortcuts/heading-6",
"text": "ctrl-6"
},
"$:/config/shortcuts/link": {
"title": "$:/config/shortcuts/link",
"text": "ctrl-L"
},
"$:/config/shortcuts/list-bullet": {
"title": "$:/config/shortcuts/list-bullet",
"text": "ctrl-shift-L"
},
"$:/config/shortcuts/list-number": {
"title": "$:/config/shortcuts/list-number",
"text": "ctrl-shift-N"
},
"$:/config/shortcuts/mono-block": {
"title": "$:/config/shortcuts/mono-block",
"text": "ctrl-shift-M"
},
"$:/config/shortcuts/mono-line": {
"title": "$:/config/shortcuts/mono-line",
"text": "ctrl-M"
},
"$:/config/shortcuts/picture": {
"title": "$:/config/shortcuts/picture",
"text": "ctrl-shift-I"
},
"$:/config/shortcuts/preview": {
"title": "$:/config/shortcuts/preview",
"text": "alt-P"
},
"$:/config/shortcuts/quote": {
"title": "$:/config/shortcuts/quote",
"text": "ctrl-Q"
},
"$:/config/shortcuts/save-tiddler": {
"title": "$:/config/shortcuts/save-tiddler",
"text": "ctrl+enter"
},
"$:/config/shortcuts/stamp": {
"title": "$:/config/shortcuts/stamp",
"text": "ctrl-S"
},
"$:/config/shortcuts/strikethrough": {
"title": "$:/config/shortcuts/strikethrough",
"text": "ctrl-T"
},
"$:/config/shortcuts/subscript": {
"title": "$:/config/shortcuts/subscript",
"text": "ctrl-shift-B"
},
"$:/config/shortcuts/superscript": {
"title": "$:/config/shortcuts/superscript",
"text": "ctrl-shift-P"
},
"$:/config/SyncFilter": {
"title": "$:/config/SyncFilter",
"text": "[is[tiddler]] -[[$:/HistoryList]] -[[$:/Import]] -[[$:/isEncrypted]] -[prefix[$:/status/]] -[prefix[$:/state/]] -[prefix[$:/temp/]]"
},
"$:/config/TextEditor/EditorHeight/Height": {
"title": "$:/config/TextEditor/EditorHeight/Height",
"text": "400px"
},
"$:/config/TextEditor/EditorHeight/Mode": {
"title": "$:/config/TextEditor/EditorHeight/Mode",
"text": "auto"
},
"$:/config/TiddlerInfo/Default": {
"title": "$:/config/TiddlerInfo/Default",
"text": "$:/core/ui/TiddlerInfo/Fields"
},
"$:/config/Tiddlers/TitleLinks": {
"title": "$:/config/Tiddlers/TitleLinks",
"text": "no"
},
"$:/config/Toolbar/ButtonClass": {
"title": "$:/config/Toolbar/ButtonClass",
"text": "tc-btn-invisible"
},
"$:/config/Toolbar/Icons": {
"title": "$:/config/Toolbar/Icons",
"text": "yes"
},
"$:/config/Toolbar/Text": {
"title": "$:/config/Toolbar/Text",
"text": "no"
},
"$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/clone": {
"title": "$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/clone",
"text": "hide"
},
"$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/close-others": {
"title": "$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/close-others",
"text": "hide"
},
"$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/export-tiddler": {
"title": "$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/export-tiddler",
"text": "hide"
},
"$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/info": {
"title": "$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/info",
"text": "hide"
},
"$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/more-tiddler-actions": {
"title": "$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/more-tiddler-actions",
"text": "show"
},
"$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/new-here": {
"title": "$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/new-here",
"text": "hide"
},
"$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/new-journal-here": {
"title": "$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/new-journal-here",
"text": "hide"
},
"$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/open-window": {
"title": "$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/open-window",
"text": "hide"
},
"$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/permalink": {
"title": "$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/permalink",
"text": "hide"
},
"$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/permaview": {
"title": "$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/permaview",
"text": "hide"
},
"$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/delete": {
"title": "$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/delete",
"text": "hide"
},
"$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/fold": {
"title": "$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/fold",
"text": "hide"
},
"$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/fold-bar": {
"title": "$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/fold-bar",
"text": "hide"
},
"$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/fold-others": {
"title": "$:/config/ViewToolbarButtons/Visibility/$:/core/ui/Buttons/fold-others",
"text": "hide"
},
"$:/config/WikiParserRules/Inline/wikilink": {
"title": "$:/config/WikiParserRules/Inline/wikilink",
"text": "enable"
},
"$:/snippets/currpalettepreview": {
"title": "$:/snippets/currpalettepreview",
"text": "\\define swatchStyle()\nbackground-color: $(swatchColour)$;\n\\end\n\\define swatch(colour)\n<$set name=\"swatchColour\" value={{##$colour$}}>\n<div class=\"tc-swatch\" style=<<swatchStyle>>/>\n</$set>\n\\end\n<div class=\"tc-swatches-horiz\">\n<<swatch foreground>>\n<<swatch background>>\n<<swatch muted-foreground>>\n<<swatch primary>>\n<<swatch page-background>>\n<<swatch tab-background>>\n<<swatch tiddler-info-background>>\n</div>\n"
},
"$:/DefaultTiddlers": {
"title": "$:/DefaultTiddlers",
"text": "GettingStarted\n"
},
"$:/snippets/download-wiki-button": {
"title": "$:/snippets/download-wiki-button",
"text": "\\define lingo-base() $:/language/ControlPanel/Tools/Download/\n<$button class=\"tc-btn-big-green\">\n<$action-sendmessage $message=\"tm-download-file\" $param=\"$:/core/save/all\" filename=\"index.html\"/>\n<<lingo Full/Caption>> {{$:/core/images/save-button}}\n</$button>"
},
"$:/language": {
"title": "$:/language",
"text": "$:/languages/en-GB"
},
"$:/snippets/languageswitcher": {
"title": "$:/snippets/languageswitcher",
"text": "{{$:/language/ControlPanel/Basics/Language/Prompt}} <$select tiddler=\"$:/language\">\n<$list filter=\"[[$:/languages/en-GB]] [plugin-type[language]sort[description]]\">\n<option value=<<currentTiddler>>><$view field=\"description\"><$view field=\"name\"><$view field=\"title\"/></$view></$view></option>\n</$list>\n</$select>"
},
"$:/core/macros/colour-picker": {
"title": "$:/core/macros/colour-picker",
"tags": "$:/tags/Macro",
"text": "\\define colour-picker-update-recent()\n<$action-listops\n\t$tiddler=\"$:/config/ColourPicker/Recent\"\n\t$subfilter=\"$(colour-picker-value)$ [list[$:/config/ColourPicker/Recent]remove[$(colour-picker-value)$]] +[limit[8]]\"\n/>\n\\end\n\n\\define colour-picker-inner(actions)\n<$button tag=\"a\" tooltip=\"\"\"$(colour-picker-value)$\"\"\">\n\n$(colour-picker-update-recent)$\n\n$actions$\n\n<div style=\"background-color: $(colour-picker-value)$; width: 100%; height: 100%; border-radius: 50%;\"/>\n\n</$button>\n\\end\n\n\\define colour-picker-recent-inner(actions)\n<$set name=\"colour-picker-value\" value=\"$(recentColour)$\">\n<$macrocall $name=\"colour-picker-inner\" actions=\"\"\"$actions$\"\"\"/>\n</$set>\n\\end\n\n\\define colour-picker-recent(actions)\n{{$:/language/ColourPicker/Recent}} <$list filter=\"[list[$:/config/ColourPicker/Recent]]\" variable=\"recentColour\">\n<$macrocall $name=\"colour-picker-recent-inner\" actions=\"\"\"$actions$\"\"\"/></$list>\n\\end\n\n\\define colour-picker(actions)\n<div class=\"tc-colour-chooser\">\n\n<$macrocall $name=\"colour-picker-recent\" actions=\"\"\"$actions$\"\"\"/>\n\n---\n\n<$list filter=\"LightPink Pink Crimson LavenderBlush PaleVioletRed HotPink DeepPink MediumVioletRed Orchid Thistle Plum Violet Magenta Fuchsia DarkMagenta Purple MediumOrchid DarkViolet DarkOrchid Indigo BlueViolet MediumPurple MediumSlateBlue SlateBlue DarkSlateBlue Lavender GhostWhite Blue MediumBlue MidnightBlue DarkBlue Navy RoyalBlue CornflowerBlue LightSteelBlue LightSlateGrey SlateGrey DodgerBlue AliceBlue SteelBlue LightSkyBlue SkyBlue DeepSkyBlue LightBlue PowderBlue CadetBlue Azure LightCyan PaleTurquoise Cyan Aqua DarkTurquoise DarkSlateGrey DarkCyan Teal MediumTurquoise LightSeaGreen Turquoise Aquamarine MediumAquamarine MediumSpringGreen MintCream SpringGreen MediumSeaGreen SeaGreen Honeydew LightGreen PaleGreen DarkSeaGreen LimeGreen Lime ForestGreen Green DarkGreen Chartreuse LawnGreen GreenYellow DarkOliveGreen YellowGreen OliveDrab Beige LightGoldenrodYellow Ivory LightYellow Yellow Olive DarkKhaki LemonChiffon PaleGoldenrod Khaki Gold Cornsilk Goldenrod DarkGoldenrod FloralWhite OldLace Wheat Moccasin Orange PapayaWhip BlanchedAlmond NavajoWhite AntiqueWhite Tan BurlyWood Bisque DarkOrange Linen Peru PeachPuff SandyBrown Chocolate SaddleBrown Seashell Sienna LightSalmon Coral OrangeRed DarkSalmon Tomato MistyRose Salmon Snow LightCoral RosyBrown IndianRed Red Brown FireBrick DarkRed Maroon White WhiteSmoke Gainsboro LightGrey Silver DarkGrey Grey DimGrey Black\" variable=\"colour-picker-value\">\n<$macrocall $name=\"colour-picker-inner\" actions=\"\"\"$actions$\"\"\"/>\n</$list>\n\n---\n\n<$edit-text tiddler=\"$:/config/ColourPicker/New\" tag=\"input\" default=\"\" placeholder=\"\"/> \n<$edit-text tiddler=\"$:/config/ColourPicker/New\" type=\"color\" tag=\"input\"/>\n<$set name=\"colour-picker-value\" value={{$:/config/ColourPicker/New}}>\n<$macrocall $name=\"colour-picker-inner\" actions=\"\"\"$actions$\"\"\"/>\n</$set>\n\n</div>\n\n\\end\n"
},
"$:/core/macros/CSS": {
"title": "$:/core/macros/CSS",
"tags": "$:/tags/Macro",
"text": "\\define colour(name)\n<$transclude tiddler={{$:/palette}} index=\"$name$\"><$transclude tiddler=\"$:/palettes/Vanilla\" index=\"$name$\"/></$transclude>\n\\end\n\n\\define color(name)\n<<colour $name$>>\n\\end\n\n\\define box-shadow(shadow)\n``\n -webkit-box-shadow: $shadow$;\n -moz-box-shadow: $shadow$;\n box-shadow: $shadow$;\n``\n\\end\n\n\\define filter(filter)\n``\n -webkit-filter: $filter$;\n -moz-filter: $filter$;\n filter: $filter$;\n``\n\\end\n\n\\define transition(transition)\n``\n -webkit-transition: $transition$;\n -moz-transition: $transition$;\n transition: $transition$;\n``\n\\end\n\n\\define transform-origin(origin)\n``\n -webkit-transform-origin: $origin$;\n -moz-transform-origin: $origin$;\n transform-origin: $origin$;\n``\n\\end\n\n\\define background-linear-gradient(gradient)\n``\nbackground-image: linear-gradient($gradient$);\nbackground-image: -o-linear-gradient($gradient$);\nbackground-image: -moz-linear-gradient($gradient$);\nbackground-image: -webkit-linear-gradient($gradient$);\nbackground-image: -ms-linear-gradient($gradient$);\n``\n\\end\n\n\\define datauri(title)\n<$macrocall $name=\"makedatauri\" type={{$title$!!type}} text={{$title$}}/>\n\\end\n\n\\define if-sidebar(text)\n<$reveal state=\"$:/state/sidebar\" type=\"match\" text=\"yes\" default=\"yes\">$text$</$reveal>\n\\end\n\n\\define if-no-sidebar(text)\n<$reveal state=\"$:/state/sidebar\" type=\"nomatch\" text=\"yes\" default=\"yes\">$text$</$reveal>\n\\end\n"
},
"$:/core/macros/export": {
"title": "$:/core/macros/export",
"tags": "$:/tags/Macro",
"text": "\\define exportButtonFilename(baseFilename)\n$baseFilename$$(extension)$\n\\end\n\n\\define exportButton(exportFilter:\"[!is[system]sort[title]]\",lingoBase,baseFilename:\"tiddlers\")\n<span class=\"tc-popup-keep\">\n<$button popup=<<qualify \"$:/state/popup/export\">> tooltip={{$lingoBase$Hint}} aria-label={{$lingoBase$Caption}} class=<<tv-config-toolbar-class>> selectedClass=\"tc-selected\">\n<$list filter=\"[<tv-config-toolbar-icons>prefix[yes]]\">\n{{$:/core/images/export-button}}\n</$list>\n<$list filter=\"[<tv-config-toolbar-text>prefix[yes]]\">\n<span class=\"tc-btn-text\"><$text text={{$lingoBase$Caption}}/></span>\n</$list>\n</$button>\n</span>\n<$reveal state=<<qualify \"$:/state/popup/export\">> type=\"popup\" position=\"below\" animate=\"yes\">\n<div class=\"tc-drop-down\">\n<$list filter=\"[all[shadows+tiddlers]tag[$:/tags/Exporter]]\">\n<$set name=\"extension\" value={{!!extension}}>\n<$button class=\"tc-btn-invisible\">\n<$action-sendmessage $message=\"tm-download-file\" $param=<<currentTiddler>> exportFilter=\"\"\"$exportFilter$\"\"\" filename=<<exportButtonFilename \"\"\"$baseFilename$\"\"\">>/>\n<$action-deletetiddler $tiddler=<<qualify \"$:/state/popup/export\">>/>\n<$transclude field=\"description\"/>\n</$button>\n</$set>\n</$list>\n</div>\n</$reveal>\n\\end\n"
},
"$:/core/macros/image-picker": {
"title": "$:/core/macros/image-picker",
"tags": "$:/tags/Macro",
"text": "\\define image-picker-inner(actions)\n<$button tag=\"a\" tooltip=\"\"\"$(imageTitle)$\"\"\">\n\n$actions$\n\n<$transclude tiddler=<<imageTitle>>/>\n\n</$button>\n\\end\n\n\\define image-picker(actions,subfilter:\"\")\n<div class=\"tc-image-chooser\">\n\n<$list filter=\"[all[shadows+tiddlers]is[image]$subfilter$!has[draft.of]] -[type[application/pdf]] +[sort[title]]\" variable=\"imageTitle\">\n\n<$macrocall $name=\"image-picker-inner\" actions=\"\"\"$actions$\"\"\"/>\n\n</$list>\n\n</div>\n\n\\end\n\n"
},
"$:/core/macros/lingo": {
"title": "$:/core/macros/lingo",
"tags": "$:/tags/Macro",
"text": "\\define lingo-base()\n$:/language/\n\\end\n\n\\define lingo(title)\n{{$(lingo-base)$$title$}}\n\\end\n"
},
"$:/core/macros/list": {
"title": "$:/core/macros/list",
"tags": "$:/tags/Macro",
"text": "\\define list-links(filter,type:\"ul\",subtype:\"li\",class:\"\")\n<$type$ class=\"$class$\">\n<$list filter=\"$filter$\">\n<$subtype$>\n<$link to={{!!title}}>\n<$transclude field=\"caption\">\n<$view field=\"title\"/>\n</$transclude>\n</$link>\n</$subtype$>\n</$list>\n</$type$>\n\\end\n"
},
"$:/core/macros/tabs": {
"title": "$:/core/macros/tabs",
"tags": "$:/tags/Macro",
"text": "\\define tabs(tabsList,default,state:\"$:/state/tab\",class,template)\n<div class=\"tc-tab-set $class$\">\n<div class=\"tc-tab-buttons $class$\">\n<$list filter=\"$tabsList$\" variable=\"currentTab\"><$set name=\"save-currentTiddler\" value=<<currentTiddler>>><$tiddler tiddler=<<currentTab>>><$button set=<<qualify \"$state$\">> setTo=<<currentTab>> default=\"$default$\" selectedClass=\"tc-tab-selected\" tooltip={{!!tooltip}}>\n<$tiddler tiddler=<<save-currentTiddler>>>\n<$set name=\"tv-wikilinks\" value=\"no\">\n<$transclude tiddler=<<currentTab>> field=\"caption\">\n<$macrocall $name=\"currentTab\" $type=\"text/plain\" $output=\"text/plain\"/>\n</$transclude>\n</$set></$tiddler></$button></$tiddler></$set></$list>\n</div>\n<div class=\"tc-tab-divider $class$\"/>\n<div class=\"tc-tab-content $class$\">\n<$list filter=\"$tabsList$\" variable=\"currentTab\">\n\n<$reveal type=\"match\" state=<<qualify \"$state$\">> text=<<currentTab>> default=\"$default$\">\n\n<$transclude tiddler=\"$template$\" mode=\"block\">\n\n<$transclude tiddler=<<currentTab>> mode=\"block\"/>\n\n</$transclude>\n\n</$reveal>\n\n</$list>\n</div>\n</div>\n\\end\n"
},
"$:/core/macros/tag": {
"title": "$:/core/macros/tag",
"tags": "$:/tags/Macro",
"text": "\\define tag(tag)\n{{$tag$||$:/core/ui/TagTemplate}}\n\\end\n"
},
"$:/core/macros/thumbnails": {
"title": "$:/core/macros/thumbnails",
"tags": "$:/tags/Macro",
"text": "\\define thumbnail(link,icon,color,background-color,image,caption,width:\"280\",height:\"157\")\n<$link to=\"\"\"$link$\"\"\"><div class=\"tc-thumbnail-wrapper\">\n<div class=\"tc-thumbnail-image\" style=\"width:$width$px;height:$height$px;\"><$reveal type=\"nomatch\" text=\"\" default=\"\"\"$image$\"\"\" tag=\"div\" style=\"width:$width$px;height:$height$px;\">\n[img[$image$]]\n</$reveal><$reveal type=\"match\" text=\"\" default=\"\"\"$image$\"\"\" tag=\"div\" class=\"tc-thumbnail-background\" style=\"width:$width$px;height:$height$px;background-color:$background-color$;\"></$reveal></div><div class=\"tc-thumbnail-icon\" style=\"fill:$color$;color:$color$;\">\n$icon$\n</div><div class=\"tc-thumbnail-caption\">\n$caption$\n</div>\n</div></$link>\n\\end\n\n\\define thumbnail-right(link,icon,color,background-color,image,caption,width:\"280\",height:\"157\")\n<div class=\"tc-thumbnail-right-wrapper\"><<thumbnail \"\"\"$link$\"\"\" \"\"\"$icon$\"\"\" \"\"\"$color$\"\"\" \"\"\"$background-color$\"\"\" \"\"\"$image$\"\"\" \"\"\"$caption$\"\"\" \"\"\"$width$\"\"\" \"\"\"$height$\"\"\">></div>\n\\end\n\n\\define list-thumbnails(filter,width:\"280\",height:\"157\")\n<$list filter=\"\"\"$filter$\"\"\"><$macrocall $name=\"thumbnail\" link={{!!link}} icon={{!!icon}} color={{!!color}} background-color={{!!background-color}} image={{!!image}} caption={{!!caption}} width=\"\"\"$width$\"\"\" height=\"\"\"$height$\"\"\"/></$list>\n\\end\n"
},
"$:/core/macros/timeline": {
"created": "20141212105914482",
"modified": "20141212110330815",
"tags": "$:/tags/Macro",
"title": "$:/core/macros/timeline",
"type": "text/vnd.tiddlywiki",
"text": "\\define timeline-title()\n<!-- Override this macro with a global macro \n of the same name if you need to change \n how titles are displayed on the timeline \n -->\n<$view field=\"title\"/>\n\\end\n\\define timeline(limit:\"100\",format:\"DDth MMM YYYY\",subfilter:\"\",dateField:\"modified\")\n<div class=\"tc-timeline\">\n<$list filter=\"[!is[system]$subfilter$has[$dateField$]!sort[$dateField$]limit[$limit$]eachday[$dateField$]]\">\n<div class=\"tc-menu-list-item\">\n<$view field=\"$dateField$\" format=\"date\" template=\"$format$\"/>\n<$list filter=\"[sameday:$dateField${!!$dateField$}!is[system]$subfilter$!sort[$dateField$]]\">\n<div class=\"tc-menu-list-subitem\">\n<$link to={{!!title}}>\n<<timeline-title>>\n</$link>\n</div>\n</$list>\n</div>\n</$list>\n</div>\n\\end\n"
},
"$:/core/macros/toc": {
"title": "$:/core/macros/toc",
"tags": "$:/tags/Macro",
"text": "\\define toc-caption()\n<$set name=\"tv-wikilinks\" value=\"no\">\n<$transclude field=\"caption\">\n<$view field=\"title\"/>\n</$transclude>\n</$set>\n\\end\n\n\\define toc-body(rootTag,tag,sort:\"\",itemClassFilter)\n<ol class=\"tc-toc\">\n<$list filter=\"\"\"[all[shadows+tiddlers]tag[$tag$]!has[draft.of]$sort$]\"\"\">\n<$set name=\"toc-item-class\" filter=\"\"\"$itemClassFilter$\"\"\" value=\"toc-item-selected\" emptyValue=\"toc-item\">\n<li class=<<toc-item-class>>>\n<$list filter=\"[all[current]toc-link[no]]\" emptyMessage=\"<$link><$view field='caption'><$view field='title'/></$view></$link>\">\n<<toc-caption>>\n</$list>\n<$list filter=\"\"\"[all[current]] -[[$rootTag$]]\"\"\">\n<$macrocall $name=\"toc-body\" rootTag=\"\"\"$rootTag$\"\"\" tag=<<currentTiddler>> sort=\"\"\"$sort$\"\"\" itemClassFilter=\"\"\"$itemClassFilter$\"\"\"/>\n</$list>\n</li>\n</$set>\n</$list>\n</ol>\n\\end\n\n\\define toc(tag,sort:\"\",itemClassFilter)\n<<toc-body rootTag:\"\"\"$tag$\"\"\" tag:\"\"\"$tag$\"\"\" sort:\"\"\"$sort$\"\"\" itemClassFilter:\"\"\"itemClassFilter\"\"\">>\n\\end\n\n\\define toc-linked-expandable-body(tag,sort:\"\",itemClassFilter)\n<$set name=\"toc-state\" value=<<qualify \"\"\"$:/state/toc/$tag$-$(currentTiddler)$\"\"\">>>\n<$set name=\"toc-item-class\" filter=\"\"\"$itemClassFilter$\"\"\" value=\"toc-item-selected\" emptyValue=\"toc-item\">\n<li class=<<toc-item-class>>>\n<$link>\n<$reveal type=\"nomatch\" state=<<toc-state>> text=\"open\">\n<$button set=<<toc-state>> setTo=\"open\" class=\"tc-btn-invisible\">\n{{$:/core/images/right-arrow}}\n</$button>\n</$reveal>\n<$reveal type=\"match\" state=<<toc-state>> text=\"open\">\n<$button set=<<toc-state>> setTo=\"close\" class=\"tc-btn-invisible\">\n{{$:/core/images/down-arrow}}\n</$button>\n</$reveal>\n<<toc-caption>>\n</$link>\n<$reveal type=\"match\" state=<<toc-state>> text=\"open\">\n<$macrocall $name=\"toc-expandable\" tag=<<currentTiddler>> sort=\"\"\"$sort$\"\"\" itemClassFilter=\"\"\"$itemClassFilter$\"\"\"/>\n</$reveal>\n</li>\n</$set>\n</$set>\n\\end\n\n\\define toc-unlinked-expandable-body(tag,sort:\"\",itemClassFilter)\n<$set name=\"toc-state\" value=<<qualify \"\"\"$:/state/toc/$tag$-$(currentTiddler)$\"\"\">>>\n<$set name=\"toc-item-class\" filter=\"\"\"$itemClassFilter$\"\"\" value=\"toc-item-selected\" emptyValue=\"toc-item\">\n<li class=<<toc-item-class>>>\n<$reveal type=\"nomatch\" state=<<toc-state>> text=\"open\">\n<$button set=<<toc-state>> setTo=\"open\" class=\"tc-btn-invisible\">\n{{$:/core/images/right-arrow}}\n<<toc-caption>>\n</$button>\n</$reveal>\n<$reveal type=\"match\" state=<<toc-state>> text=\"open\">\n<$button set=<<toc-state>> setTo=\"close\" class=\"tc-btn-invisible\">\n{{$:/core/images/down-arrow}}\n<<toc-caption>>\n</$button>\n</$reveal>\n<$reveal type=\"match\" state=<<toc-state>> text=\"open\">\n<$macrocall $name=\"toc-expandable\" tag=<<currentTiddler>> sort=\"\"\"$sort$\"\"\" itemClassFilter=\"\"\"$itemClassFilter$\"\"\"/>\n</$reveal>\n</li>\n</$set>\n</$set>\n\\end\n\n\\define toc-expandable-empty-message()\n<<toc-linked-expandable-body tag:\"\"\"$(tag)$\"\"\" sort:\"\"\"$(sort)$\"\"\" itemClassFilter:\"\"\"$(itemClassFilter)$\"\"\">>\n\\end\n\n\\define toc-expandable(tag,sort:\"\",itemClassFilter)\n<$vars tag=\"\"\"$tag$\"\"\" sort=\"\"\"$sort$\"\"\" itemClassFilter=\"\"\"$itemClassFilter$\"\"\">\n<ol class=\"tc-toc toc-expandable\">\n<$list filter=\"[all[shadows+tiddlers]tag[$tag$]!has[draft.of]$sort$]\">\n<$list filter=\"[all[current]toc-link[no]]\" 
emptyMessage=<<toc-expandable-empty-message>>>\n<<toc-unlinked-expandable-body tag:\"\"\"$tag$\"\"\" sort:\"\"\"$sort$\"\"\" itemClassFilter:\"\"\"itemClassFilter\"\"\">>\n</$list>\n</$list>\n</ol>\n</$vars>\n\\end\n\n\\define toc-linked-selective-expandable-body(tag,sort:\"\",itemClassFilter)\n<$set name=\"toc-state\" value=<<qualify \"\"\"$:/state/toc/$tag$-$(currentTiddler)$\"\"\">>>\n<$set name=\"toc-item-class\" filter=\"\"\"$itemClassFilter$\"\"\" value=\"toc-item-selected\" emptyValue=\"toc-item\">\n<li class=<<toc-item-class>>>\n<$link>\n<$list filter=\"[all[current]tagging[]limit[1]]\" variable=\"ignore\" emptyMessage=\"<$button class='tc-btn-invisible'>{{$:/core/images/blank}}</$button>\">\n<$reveal type=\"nomatch\" state=<<toc-state>> text=\"open\">\n<$button set=<<toc-state>> setTo=\"open\" class=\"tc-btn-invisible\">\n{{$:/core/images/right-arrow}}\n</$button>\n</$reveal>\n<$reveal type=\"match\" state=<<toc-state>> text=\"open\">\n<$button set=<<toc-state>> setTo=\"close\" class=\"tc-btn-invisible\">\n{{$:/core/images/down-arrow}}\n</$button>\n</$reveal>\n</$list>\n<<toc-caption>>\n</$link>\n<$reveal type=\"match\" state=<<toc-state>> text=\"open\">\n<$macrocall $name=\"toc-selective-expandable\" tag=<<currentTiddler>> sort=\"\"\"$sort$\"\"\" itemClassFilter=\"\"\"$itemClassFilter$\"\"\"/>\n</$reveal>\n</li>\n</$set>\n</$set>\n\\end\n\n\\define toc-unlinked-selective-expandable-body(tag,sort:\"\",itemClassFilter)\n<$set name=\"toc-state\" value=<<qualify \"\"\"$:/state/toc/$tag$-$(currentTiddler)$\"\"\">>>\n<$set name=\"toc-item-class\" filter=\"\"\"$itemClassFilter$\"\"\" value=\"toc-item-selected\" emptyValue=\"toc-item\">\n<li class=<<toc-item-class>>>\n<$list filter=\"[all[current]tagging[]limit[1]]\" variable=\"ignore\" emptyMessage=\"<$button class='tc-btn-invisible'>{{$:/core/images/blank}}</$button> <$view field='caption'><$view field='title'/></$view>\">\n<$reveal type=\"nomatch\" state=<<toc-state>> text=\"open\">\n<$button set=<<toc-state>> setTo=\"open\" class=\"tc-btn-invisible\">\n{{$:/core/images/right-arrow}}\n<<toc-caption>>\n</$button>\n</$reveal>\n<$reveal type=\"match\" state=<<toc-state>> text=\"open\">\n<$button set=<<toc-state>> setTo=\"close\" class=\"tc-btn-invisible\">\n{{$:/core/images/down-arrow}}\n<<toc-caption>>\n</$button>\n</$reveal>\n</$list>\n<$reveal type=\"match\" state=<<toc-state>> text=\"open\">\n<$macrocall $name=\"\"\"toc-selective-expandable\"\"\" tag=<<currentTiddler>> sort=\"\"\"$sort$\"\"\" itemClassFilter=\"\"\"$itemClassFilter$\"\"\"/>\n</$reveal>\n</li>\n</$set>\n</$set>\n\\end\n\n\\define toc-selective-expandable-empty-message()\n<<toc-linked-selective-expandable-body tag:\"\"\"$(tag)$\"\"\" sort:\"\"\"$(sort)$\"\"\" itemClassFilter:\"\"\"$(itemClassFilter)$\"\"\">>\n\\end\n\n\\define toc-selective-expandable(tag,sort:\"\",itemClassFilter)\n<$vars tag=\"\"\"$tag$\"\"\" sort=\"\"\"$sort$\"\"\" itemClassFilter=\"\"\"$itemClassFilter$\"\"\">\n<ol class=\"tc-toc toc-selective-expandable\">\n<$list filter=\"[all[shadows+tiddlers]tag[$tag$]!has[draft.of]$sort$]\">\n<$list filter=\"[all[current]toc-link[no]]\" variable=\"ignore\" emptyMessage=<<toc-selective-expandable-empty-message>>>\n<<toc-unlinked-selective-expandable-body tag:\"\"\"$tag$\"\"\" sort:\"\"\"$sort$\"\"\" itemClassFilter:\"\"\"$itemClassFilter$\"\"\">>\n</$list>\n</$list>\n</ol>\n</$vars>\n\\end\n\n\\define toc-tabbed-selected-item-filter(selectedTiddler)\n[all[current]field:title{$selectedTiddler$}]\n\\end\n\n\\define 
toc-tabbed-external-nav(tag,sort:\"\",selectedTiddler:\"$:/temp/toc/selectedTiddler\",unselectedText,missingText,template:\"\")\n<$tiddler tiddler={{$selectedTiddler$}}>\n<div class=\"tc-tabbed-table-of-contents\">\n<$linkcatcher to=\"$selectedTiddler$\">\n<div class=\"tc-table-of-contents\">\n<$macrocall $name=\"toc-selective-expandable\" tag=\"\"\"$tag$\"\"\" sort=\"\"\"$sort$\"\"\" itemClassFilter=<<toc-tabbed-selected-item-filter selectedTiddler:\"\"\"$selectedTiddler$\"\"\">>/>\n</div>\n</$linkcatcher>\n<div class=\"tc-tabbed-table-of-contents-content\">\n<$reveal state=\"\"\"$selectedTiddler$\"\"\" type=\"nomatch\" text=\"\">\n<$transclude mode=\"block\" tiddler=\"$template$\">\n<h1><<toc-caption>></h1>\n<$transclude mode=\"block\">$missingText$</$transclude>\n</$transclude>\n</$reveal>\n<$reveal state=\"\"\"$selectedTiddler$\"\"\" type=\"match\" text=\"\">\n$unselectedText$\n</$reveal>\n</div>\n</div>\n</$tiddler>\n\\end\n\n\\define toc-tabbed-internal-nav(tag,sort:\"\",selectedTiddler:\"$:/temp/toc/selectedTiddler\",unselectedText,missingText,template:\"\")\n<$linkcatcher to=\"\"\"$selectedTiddler$\"\"\">\n<$macrocall $name=\"toc-tabbed-external-nav\" tag=\"\"\"$tag$\"\"\" sort=\"\"\"$sort$\"\"\" selectedTiddler=\"\"\"$selectedTiddler$\"\"\" unselectedText=\"\"\"$unselectedText$\"\"\" missingText=\"\"\"$missingText$\"\"\" template=\"\"\"$template$\"\"\"/>\n</$linkcatcher>\n\\end\n\n"
},
"$:/core/macros/translink": {
"title": "$:/core/macros/translink",
"tags": "$:/tags/Macro",
"text": "\\define translink(title,mode:\"block\")\n<div style=\"border:1px solid #ccc; padding: 0.5em; background: black; foreground; white;\">\n<$link to=\"\"\"$title$\"\"\">\n<$text text=\"\"\"$title$\"\"\"/>\n</$link>\n<div style=\"border:1px solid #ccc; padding: 0.5em; background: white; foreground; black;\">\n<$transclude tiddler=\"\"\"$title$\"\"\" mode=\"$mode$\">\n\"<$text text=\"\"\"$title$\"\"\"/>\" is missing\n</$transclude>\n</div>\n</div>\n\\end\n"
},
"$:/snippets/minilanguageswitcher": {
"title": "$:/snippets/minilanguageswitcher",
"text": "<$select tiddler=\"$:/language\">\n<$list filter=\"[[$:/languages/en-GB]] [plugin-type[language]sort[title]]\">\n<option value=<<currentTiddler>>><$view field=\"description\"><$view field=\"name\"><$view field=\"title\"/></$view></$view></option>\n</$list>\n</$select>"
},
"$:/snippets/minithemeswitcher": {
"title": "$:/snippets/minithemeswitcher",
"text": "\\define lingo-base() $:/language/ControlPanel/Theme/\n<<lingo Prompt>> <$select tiddler=\"$:/theme\">\n<$list filter=\"[plugin-type[theme]sort[title]]\">\n<option value=<<currentTiddler>>><$view field=\"name\"><$view field=\"title\"/></$view></option>\n</$list>\n</$select>"
},
"$:/snippets/modules": {
"title": "$:/snippets/modules",
"text": "\\define describeModuleType(type)\n{{$:/language/Docs/ModuleTypes/$type$}}\n\\end\n<$list filter=\"[moduletypes[]]\">\n\n!! <$macrocall $name=\"currentTiddler\" $type=\"text/plain\" $output=\"text/plain\"/>\n\n<$macrocall $name=\"describeModuleType\" type=<<currentTiddler>>/>\n\n<ul><$list filter=\"[all[current]modules[]]\"><li><$link><<currentTiddler>></$link>\n</li>\n</$list>\n</ul>\n</$list>\n"
},
"$:/palette": {
"title": "$:/palette",
"text": "$:/palettes/Vanilla"
},
"$:/snippets/paletteeditor": {
"title": "$:/snippets/paletteeditor",
"text": "\\define lingo-base() $:/language/ControlPanel/Palette/Editor/\n\\define describePaletteColour(colour)\n<$transclude tiddler=\"$:/language/Docs/PaletteColours/$colour$\"><$text text=\"$colour$\"/></$transclude>\n\\end\n<$set name=\"currentTiddler\" value={{$:/palette}}>\n\n<<lingo Prompt>> <$link to={{$:/palette}}><$macrocall $name=\"currentTiddler\" $output=\"text/plain\"/></$link>\n\n<$list filter=\"[all[current]is[shadow]is[tiddler]]\" variable=\"listItem\">\n<<lingo Prompt/Modified>>\n<$button message=\"tm-delete-tiddler\" param={{$:/palette}}><<lingo Reset/Caption>></$button>\n</$list>\n\n<$list filter=\"[all[current]is[shadow]!is[tiddler]]\" variable=\"listItem\">\n<<lingo Clone/Prompt>>\n</$list>\n\n<$button message=\"tm-new-tiddler\" param={{$:/palette}}><<lingo Clone/Caption>></$button>\n\n<table>\n<tbody>\n<$list filter=\"[all[current]indexes[]]\" variable=\"colourName\">\n<tr>\n<td>\n''<$macrocall $name=\"describePaletteColour\" colour=<<colourName>>/>''<br/>\n<$macrocall $name=\"colourName\" $output=\"text/plain\"/>\n</td>\n<td>\n<$edit-text index=<<colourName>> tag=\"input\"/>\n<br>\n<$edit-text index=<<colourName>> type=\"color\" tag=\"input\"/>\n</td>\n</tr>\n</$list>\n</tbody>\n</table>\n</$set>\n"
},
"$:/snippets/palettepreview": {
"title": "$:/snippets/palettepreview",
"text": "<$set name=\"currentTiddler\" value={{$:/palette}}>\n<$transclude tiddler=\"$:/snippets/currpalettepreview\"/>\n</$set>\n"
},
"$:/snippets/paletteswitcher": {
"title": "$:/snippets/paletteswitcher",
"text": "\\define lingo-base() $:/language/ControlPanel/Palette/\n<div class=\"tc-prompt\">\n<<lingo Prompt>> <$view tiddler={{$:/palette}} field=\"name\"/>\n</div>\n\n<$linkcatcher to=\"$:/palette\">\n<div class=\"tc-chooser\"><$list filter=\"[all[shadows+tiddlers]tag[$:/tags/Palette]sort[description]]\"><div class=\"tc-chooser-item\"><$link to={{!!title}}><div><$reveal state=\"$:/palette\" type=\"match\" text={{!!title}}>•</$reveal><$reveal state=\"$:/palette\" type=\"nomatch\" text={{!!title}}> </$reveal> ''<$view field=\"name\" format=\"text\"/>'' - <$view field=\"description\" format=\"text\"/></div><$transclude tiddler=\"$:/snippets/currpalettepreview\"/></$link></div>\n</$list>\n</div>\n</$linkcatcher>"
},
"$:/temp/search": {
"title": "$:/temp/search",
"text": ""
},
"$:/tags/AdvancedSearch": {
"title": "$:/tags/AdvancedSearch",
"list": "[[$:/core/ui/AdvancedSearch/Standard]] [[$:/core/ui/AdvancedSearch/System]] [[$:/core/ui/AdvancedSearch/Shadows]] [[$:/core/ui/AdvancedSearch/Filter]]"
},
"$:/tags/AdvancedSearch/FilterButton": {
"title": "$:/tags/AdvancedSearch/FilterButton",
"list": "$:/core/ui/AdvancedSearch/Filter/FilterButtons/dropdown $:/core/ui/AdvancedSearch/Filter/FilterButtons/clear $:/core/ui/AdvancedSearch/Filter/FilterButtons/export $:/core/ui/AdvancedSearch/Filter/FilterButtons/delete"
},
"$:/tags/ControlPanel": {
"title": "$:/tags/ControlPanel",
"list": "$:/core/ui/ControlPanel/Info $:/core/ui/ControlPanel/Appearance $:/core/ui/ControlPanel/Settings $:/core/ui/ControlPanel/Saving $:/core/ui/ControlPanel/Plugins $:/core/ui/ControlPanel/Tools $:/core/ui/ControlPanel/Internals"
},
"$:/tags/ControlPanel/Info": {
"title": "$:/tags/ControlPanel/Info",
"list": "$:/core/ui/ControlPanel/Basics $:/core/ui/ControlPanel/Advanced"
},
"$:/tags/ControlPanel/Plugins": {
"title": "$:/tags/ControlPanel/Plugins",
"list": "[[$:/core/ui/ControlPanel/Plugins/Installed]] [[$:/core/ui/ControlPanel/Plugins/Add]]"
},
"$:/tags/EditorToolbar": {
"title": "$:/tags/EditorToolbar",
"list": "$:/core/ui/EditorToolbar/paint $:/core/ui/EditorToolbar/opacity $:/core/ui/EditorToolbar/line-width $:/core/ui/EditorToolbar/clear $:/core/ui/EditorToolbar/bold $:/core/ui/EditorToolbar/italic $:/core/ui/EditorToolbar/strikethrough $:/core/ui/EditorToolbar/underline $:/core/ui/EditorToolbar/superscript $:/core/ui/EditorToolbar/subscript $:/core/ui/EditorToolbar/mono-line $:/core/ui/EditorToolbar/mono-block $:/core/ui/EditorToolbar/quote $:/core/ui/EditorToolbar/list-bullet $:/core/ui/EditorToolbar/list-number $:/core/ui/EditorToolbar/heading-1 $:/core/ui/EditorToolbar/heading-2 $:/core/ui/EditorToolbar/heading-3 $:/core/ui/EditorToolbar/heading-4 $:/core/ui/EditorToolbar/heading-5 $:/core/ui/EditorToolbar/heading-6 $:/core/ui/EditorToolbar/link $:/core/ui/EditorToolbar/excise $:/core/ui/EditorToolbar/picture $:/core/ui/EditorToolbar/stamp $:/core/ui/EditorToolbar/size $:/core/ui/EditorToolbar/editor-height $:/core/ui/EditorToolbar/more $:/core/ui/EditorToolbar/preview $:/core/ui/EditorToolbar/preview-type"
},
"$:/tags/EditTemplate": {
"title": "$:/tags/EditTemplate",
"list": "[[$:/core/ui/EditTemplate/controls]] [[$:/core/ui/EditTemplate/title]] [[$:/core/ui/EditTemplate/tags]] [[$:/core/ui/EditTemplate/shadow]] [[$:/core/ui/ViewTemplate/classic]] [[$:/core/ui/EditTemplate/body]] [[$:/core/ui/EditTemplate/type]] [[$:/core/ui/EditTemplate/fields]]"
},
"$:/tags/EditToolbar": {
"title": "$:/tags/EditToolbar",
"list": "[[$:/core/ui/Buttons/delete]] [[$:/core/ui/Buttons/cancel]] [[$:/core/ui/Buttons/save]]"
},
"$:/tags/MoreSideBar": {
"title": "$:/tags/MoreSideBar",
"list": "[[$:/core/ui/MoreSideBar/All]] [[$:/core/ui/MoreSideBar/Recent]] [[$:/core/ui/MoreSideBar/Tags]] [[$:/core/ui/MoreSideBar/Missing]] [[$:/core/ui/MoreSideBar/Drafts]] [[$:/core/ui/MoreSideBar/Orphans]] [[$:/core/ui/MoreSideBar/Types]] [[$:/core/ui/MoreSideBar/System]] [[$:/core/ui/MoreSideBar/Shadows]]",
"text": ""
},
"$:/tags/PageControls": {
"title": "$:/tags/PageControls",
"list": "[[$:/core/ui/Buttons/home]] [[$:/core/ui/Buttons/close-all]] [[$:/core/ui/Buttons/fold-all]] [[$:/core/ui/Buttons/unfold-all]] [[$:/core/ui/Buttons/permaview]] [[$:/core/ui/Buttons/new-tiddler]] [[$:/core/ui/Buttons/new-journal]] [[$:/core/ui/Buttons/new-image]] [[$:/core/ui/Buttons/import]] [[$:/core/ui/Buttons/export-page]] [[$:/core/ui/Buttons/control-panel]] [[$:/core/ui/Buttons/advanced-search]] [[$:/core/ui/Buttons/tag-manager]] [[$:/core/ui/Buttons/language]] [[$:/core/ui/Buttons/palette]] [[$:/core/ui/Buttons/theme]] [[$:/core/ui/Buttons/storyview]] [[$:/core/ui/Buttons/encryption]] [[$:/core/ui/Buttons/full-screen]] [[$:/core/ui/Buttons/save-wiki]] [[$:/core/ui/Buttons/refresh]] [[$:/core/ui/Buttons/more-page-actions]]"
},
"$:/tags/PageTemplate": {
"title": "$:/tags/PageTemplate",
"list": "[[$:/core/ui/PageTemplate/topleftbar]] [[$:/core/ui/PageTemplate/toprightbar]] [[$:/core/ui/PageTemplate/sidebar]] [[$:/core/ui/PageTemplate/story]] [[$:/core/ui/PageTemplate/alerts]]",
"text": ""
},
"$:/tags/SideBar": {
"title": "$:/tags/SideBar",
"list": "[[$:/core/ui/SideBar/Open]] [[$:/core/ui/SideBar/Recent]] [[$:/core/ui/SideBar/Tools]] [[$:/core/ui/SideBar/More]]",
"text": ""
},
"$:/tags/TiddlerInfo": {
"title": "$:/tags/TiddlerInfo",
"list": "[[$:/core/ui/TiddlerInfo/Tools]] [[$:/core/ui/TiddlerInfo/References]] [[$:/core/ui/TiddlerInfo/Tagging]] [[$:/core/ui/TiddlerInfo/List]] [[$:/core/ui/TiddlerInfo/Listed]] [[$:/core/ui/TiddlerInfo/Fields]]",
"text": ""
},
"$:/tags/TiddlerInfo/Advanced": {
"title": "$:/tags/TiddlerInfo/Advanced",
"list": "[[$:/core/ui/TiddlerInfo/Advanced/ShadowInfo]] [[$:/core/ui/TiddlerInfo/Advanced/PluginInfo]]"
},
"$:/tags/ViewTemplate": {
"title": "$:/tags/ViewTemplate",
"list": "[[$:/core/ui/ViewTemplate/title]] [[$:/core/ui/ViewTemplate/unfold]] [[$:/core/ui/ViewTemplate/subtitle]] [[$:/core/ui/ViewTemplate/tags]] [[$:/core/ui/ViewTemplate/classic]] [[$:/core/ui/ViewTemplate/body]]"
},
"$:/tags/ViewToolbar": {
"title": "$:/tags/ViewToolbar",
"list": "[[$:/core/ui/Buttons/more-tiddler-actions]] [[$:/core/ui/Buttons/info]] [[$:/core/ui/Buttons/new-here]] [[$:/core/ui/Buttons/new-journal-here]] [[$:/core/ui/Buttons/clone]] [[$:/core/ui/Buttons/export-tiddler]] [[$:/core/ui/Buttons/edit]] [[$:/core/ui/Buttons/delete]] [[$:/core/ui/Buttons/permalink]] [[$:/core/ui/Buttons/permaview]] [[$:/core/ui/Buttons/open-window]] [[$:/core/ui/Buttons/close-others]] [[$:/core/ui/Buttons/close]] [[$:/core/ui/Buttons/fold-others]] [[$:/core/ui/Buttons/fold]]"
},
"$:/snippets/themeswitcher": {
"title": "$:/snippets/themeswitcher",
"text": "\\define lingo-base() $:/language/ControlPanel/Theme/\n<<lingo Prompt>> <$view tiddler={{$:/theme}} field=\"name\"/>\n\n<$linkcatcher to=\"$:/theme\">\n<$list filter=\"[plugin-type[theme]sort[title]]\"><div><$reveal state=\"$:/theme\" type=\"match\" text={{!!title}}>•</$reveal><$reveal state=\"$:/theme\" type=\"nomatch\" text={{!!title}}> </$reveal> <$link to={{!!title}}>''<$view field=\"name\" format=\"text\"/>'' <$view field=\"description\" format=\"text\"/></$link></div>\n</$list>\n</$linkcatcher>"
},
"$:/core/wiki/title": {
"title": "$:/core/wiki/title",
"type": "text/vnd.tiddlywiki",
"text": "{{$:/SiteTitle}} --- {{$:/SiteSubtitle}}"
},
"$:/view": {
"title": "$:/view",
"text": "classic"
},
"$:/snippets/viewswitcher": {
"title": "$:/snippets/viewswitcher",
"text": "\\define lingo-base() $:/language/ControlPanel/StoryView/\n<<lingo Prompt>> <$select tiddler=\"$:/view\">\n<$list filter=\"[storyviews[]]\">\n<option><$view field=\"title\"/></option>\n</$list>\n</$select>"
}
}
}
<div class="tc-more-sidebar">
<<tabs "[all[shadows+tiddlers]tag[$:/tags/MoreSideBar]!has[draft.of]]" "$:/core/ui/MoreSideBar/Tags" "$:/state/tab/moresidebar" "tc-vertical">>
</div>
<$macrocall $name="timeline" format={{$:/language/RecentChanges/DateFormat}}/>
\define lingo-base() $:/language/ControlPanel/
\define config-title()
$:/config/PageControlButtons/Visibility/$(listItem)$
\end
<<lingo Basics/Version/Prompt>> <<version>>
<$set name="tv-config-toolbar-icons" value="yes">
<$set name="tv-config-toolbar-text" value="yes">
<$set name="tv-config-toolbar-class" value="">
<$list filter="[all[shadows+tiddlers]tag[$:/tags/PageControls]!has[draft.of]]" variable="listItem">
<div style="position:relative;">
<$checkbox tiddler=<<config-title>> field="text" checked="show" unchecked="hide" default="show"/> <$transclude tiddler=<<listItem>>/> <i class="tc-muted"><$transclude tiddler=<<listItem>> field="description"/></i>
</div>
</$list>
</$set>
</$set>
</$set>
\define title-styles()
fill:$(foregroundColor)$;
\end
\define config-title()
$:/config/ViewToolbarButtons/Visibility/$(listItem)$
\end
<div class="tc-tiddler-title">
<div class="tc-titlebar">
<span class="tc-tiddler-controls">
<$list filter="[all[shadows+tiddlers]tag[$:/tags/ViewToolbar]!has[draft.of]]" variable="listItem"><$reveal type="nomatch" state=<<config-title>> text="hide"><$transclude tiddler=<<listItem>>/></$reveal></$list>
</span>
<$set name="tv-wikilinks" value={{$:/config/Tiddlers/TitleLinks}}>
<$link>
<$set name="foregroundColor" value={{!!color}}>
<span class="tc-tiddler-title-icon" style=<<title-styles>>>
<$transclude tiddler={{!!icon}}/>
</span>
</$set>
<$list filter="[all[current]removeprefix[$:/]]">
<h2 class="tc-title" title={{$:/language/SystemTiddler/Tooltip}}>
<span class="tc-system-title-prefix">$:/</span><$text text=<<currentTiddler>>/>
</h2>
</$list>
<$list filter="[all[current]!prefix[$:/]]">
<h2 class="tc-title">
<$transclude field="caption"><$view field="title"/></$transclude>
</h2>
</$list>
</$link>
</$set>
</div>
<$reveal type="nomatch" text="" default="" state=<<tiddlerInfoState>> class="tc-tiddler-info tc-popup-handle" animate="yes" retain="yes">
<$transclude tiddler="$:/core/ui/TiddlerInfo"/>
</$reveal>
</div>
{
"tiddlers": {
"$:/plugins/tiddlywiki/highlight/highlight.js": {
"type": "application/javascript",
"title": "$:/plugins/tiddlywiki/highlight/highlight.js",
"module-type": "library",
"text": "var hljs = require(\"$:/plugins/tiddlywiki/highlight/highlight.js\");\n!function(e){\"undefined\"!=typeof exports?e(exports):(window.hljs=e({}),\"function\"==typeof define&&define.amd&&define(\"hljs\",[],function(){return window.hljs}))}(function(e){function n(e){return e.replace(/&/gm,\"&\").replace(/</gm,\"<\").replace(/>/gm,\">\")}function t(e){return e.nodeName.toLowerCase()}function r(e,n){var t=e&&e.exec(n);return t&&0==t.index}function a(e){return/^(no-?highlight|plain|text)$/i.test(e)}function i(e){var n,t,r,i=e.className+\" \";if(i+=e.parentNode?e.parentNode.className:\"\",t=/\\blang(?:uage)?-([\\w-]+)\\b/i.exec(i))return w(t[1])?t[1]:\"no-highlight\";for(i=i.split(/\\s+/),n=0,r=i.length;r>n;n++)if(w(i[n])||a(i[n]))return i[n]}function o(e,n){var t,r={};for(t in e)r[t]=e[t];if(n)for(t in n)r[t]=n[t];return r}function u(e){var n=[];return function r(e,a){for(var i=e.firstChild;i;i=i.nextSibling)3==i.nodeType?a+=i.nodeValue.length:1==i.nodeType&&(n.push({event:\"start\",offset:a,node:i}),a=r(i,a),t(i).match(/br|hr|img|input/)||n.push({event:\"stop\",offset:a,node:i}));return a}(e,0),n}function c(e,r,a){function i(){return e.length&&r.length?e[0].offset!=r[0].offset?e[0].offset<r[0].offset?e:r:\"start\"==r[0].event?e:r:e.length?e:r}function o(e){function r(e){return\" \"+e.nodeName+'=\"'+n(e.value)+'\"'}f+=\"<\"+t(e)+Array.prototype.map.call(e.attributes,r).join(\"\")+\">\"}function u(e){f+=\"</\"+t(e)+\">\"}function c(e){(\"start\"==e.event?o:u)(e.node)}for(var s=0,f=\"\",l=[];e.length||r.length;){var g=i();if(f+=n(a.substr(s,g[0].offset-s)),s=g[0].offset,g==e){l.reverse().forEach(u);do c(g.splice(0,1)[0]),g=i();while(g==e&&g.length&&g[0].offset==s);l.reverse().forEach(o)}else\"start\"==g[0].event?l.push(g[0].node):l.pop(),c(g.splice(0,1)[0])}return f+n(a.substr(s))}function s(e){function n(e){return e&&e.source||e}function t(t,r){return new RegExp(n(t),\"m\"+(e.cI?\"i\":\"\")+(r?\"g\":\"\"))}function r(a,i){if(!a.compiled){if(a.compiled=!0,a.k=a.k||a.bK,a.k){var u={},c=function(n,t){e.cI&&(t=t.toLowerCase()),t.split(\" \").forEach(function(e){var t=e.split(\"|\");u[t[0]]=[n,t[1]?Number(t[1]):1]})};\"string\"==typeof a.k?c(\"keyword\",a.k):Object.keys(a.k).forEach(function(e){c(e,a.k[e])}),a.k=u}a.lR=t(a.l||/\\b\\w+\\b/,!0),i&&(a.bK&&(a.b=\"\\\\b(\"+a.bK.split(\" \").join(\"|\")+\")\\\\b\"),a.b||(a.b=/\\B|\\b/),a.bR=t(a.b),a.e||a.eW||(a.e=/\\B|\\b/),a.e&&(a.eR=t(a.e)),a.tE=n(a.e)||\"\",a.eW&&i.tE&&(a.tE+=(a.e?\"|\":\"\")+i.tE)),a.i&&(a.iR=t(a.i)),void 0===a.r&&(a.r=1),a.c||(a.c=[]);var s=[];a.c.forEach(function(e){e.v?e.v.forEach(function(n){s.push(o(e,n))}):s.push(\"self\"==e?a:e)}),a.c=s,a.c.forEach(function(e){r(e,a)}),a.starts&&r(a.starts,i);var f=a.c.map(function(e){return e.bK?\"\\\\.?(\"+e.b+\")\\\\.?\":e.b}).concat([a.tE,a.i]).map(n).filter(Boolean);a.t=f.length?t(f.join(\"|\"),!0):{exec:function(){return null}}}}r(e)}function f(e,t,a,i){function o(e,n){for(var t=0;t<n.c.length;t++)if(r(n.c[t].bR,e))return n.c[t]}function u(e,n){if(r(e.eR,n)){for(;e.endsParent&&e.parent;)e=e.parent;return e}return e.eW?u(e.parent,n):void 0}function c(e,n){return!a&&r(n.iR,e)}function g(e,n){var t=N.cI?n[0].toLowerCase():n[0];return e.k.hasOwnProperty(t)&&e.k[t]}function h(e,n,t,r){var a=r?\"\":E.classPrefix,i='<span class=\"'+a,o=t?\"\":\"</span>\";return i+=e+'\">',i+n+o}function p(){if(!L.k)return n(y);var e=\"\",t=0;L.lR.lastIndex=0;for(var r=L.lR.exec(y);r;){e+=n(y.substr(t,r.index-t));var a=g(L,r);a?(B+=a[1],e+=h(a[0],n(r[0]))):e+=n(r[0]),t=L.lR.lastIndex,r=L.lR.exec(y)}return 
e+n(y.substr(t))}function d(){var e=\"string\"==typeof L.sL;if(e&&!x[L.sL])return n(y);var t=e?f(L.sL,y,!0,M[L.sL]):l(y,L.sL.length?L.sL:void 0);return L.r>0&&(B+=t.r),e&&(M[L.sL]=t.top),h(t.language,t.value,!1,!0)}function b(){return void 0!==L.sL?d():p()}function v(e,t){var r=e.cN?h(e.cN,\"\",!0):\"\";e.rB?(k+=r,y=\"\"):e.eB?(k+=n(t)+r,y=\"\"):(k+=r,y=t),L=Object.create(e,{parent:{value:L}})}function m(e,t){if(y+=e,void 0===t)return k+=b(),0;var r=o(t,L);if(r)return k+=b(),v(r,t),r.rB?0:t.length;var a=u(L,t);if(a){var i=L;i.rE||i.eE||(y+=t),k+=b();do L.cN&&(k+=\"</span>\"),B+=L.r,L=L.parent;while(L!=a.parent);return i.eE&&(k+=n(t)),y=\"\",a.starts&&v(a.starts,\"\"),i.rE?0:t.length}if(c(t,L))throw new Error('Illegal lexeme \"'+t+'\" for mode \"'+(L.cN||\"<unnamed>\")+'\"');return y+=t,t.length||1}var N=w(e);if(!N)throw new Error('Unknown language: \"'+e+'\"');s(N);var R,L=i||N,M={},k=\"\";for(R=L;R!=N;R=R.parent)R.cN&&(k=h(R.cN,\"\",!0)+k);var y=\"\",B=0;try{for(var C,j,I=0;;){if(L.t.lastIndex=I,C=L.t.exec(t),!C)break;j=m(t.substr(I,C.index-I),C[0]),I=C.index+j}for(m(t.substr(I)),R=L;R.parent;R=R.parent)R.cN&&(k+=\"</span>\");return{r:B,value:k,language:e,top:L}}catch(O){if(-1!=O.message.indexOf(\"Illegal\"))return{r:0,value:n(t)};throw O}}function l(e,t){t=t||E.languages||Object.keys(x);var r={r:0,value:n(e)},a=r;return t.forEach(function(n){if(w(n)){var t=f(n,e,!1);t.language=n,t.r>a.r&&(a=t),t.r>r.r&&(a=r,r=t)}}),a.language&&(r.second_best=a),r}function g(e){return E.tabReplace&&(e=e.replace(/^((<[^>]+>|\\t)+)/gm,function(e,n){return n.replace(/\\t/g,E.tabReplace)})),E.useBR&&(e=e.replace(/\\n/g,\"<br>\")),e}function h(e,n,t){var r=n?R[n]:t,a=[e.trim()];return e.match(/\\bhljs\\b/)||a.push(\"hljs\"),-1===e.indexOf(r)&&a.push(r),a.join(\" \").trim()}function p(e){var n=i(e);if(!a(n)){var t;E.useBR?(t=document.createElementNS(\"http://www.w3.org/1999/xhtml\",\"div\"),t.innerHTML=e.innerHTML.replace(/\\n/g,\"\").replace(/<br[ \\/]*>/g,\"\\n\")):t=e;var r=t.textContent,o=n?f(n,r,!0):l(r),s=u(t);if(s.length){var p=document.createElementNS(\"http://www.w3.org/1999/xhtml\",\"div\");p.innerHTML=o.value,o.value=c(s,u(p),r)}o.value=g(o.value),e.innerHTML=o.value,e.className=h(e.className,n,o.language),e.result={language:o.language,re:o.r},o.second_best&&(e.second_best={language:o.second_best.language,re:o.second_best.r})}}function d(e){E=o(E,e)}function b(){if(!b.called){b.called=!0;var e=document.querySelectorAll(\"pre code\");Array.prototype.forEach.call(e,p)}}function v(){addEventListener(\"DOMContentLoaded\",b,!1),addEventListener(\"load\",b,!1)}function m(n,t){var r=x[n]=t(e);r.aliases&&r.aliases.forEach(function(e){R[e]=n})}function N(){return Object.keys(x)}function w(e){return e=e.toLowerCase(),x[e]||x[R[e]]}var E={classPrefix:\"hljs-\",tabReplace:null,useBR:!1,languages:void 0},x={},R={};return 
e.highlight=f,e.highlightAuto=l,e.fixMarkup=g,e.highlightBlock=p,e.configure=d,e.initHighlighting=b,e.initHighlightingOnLoad=v,e.registerLanguage=m,e.listLanguages=N,e.getLanguage=w,e.inherit=o,e.IR=\"[a-zA-Z]\\\\w*\",e.UIR=\"[a-zA-Z_]\\\\w*\",e.NR=\"\\\\b\\\\d+(\\\\.\\\\d+)?\",e.CNR=\"(\\\\b0[xX][a-fA-F0-9]+|(\\\\b\\\\d+(\\\\.\\\\d*)?|\\\\.\\\\d+)([eE][-+]?\\\\d+)?)\",e.BNR=\"\\\\b(0b[01]+)\",e.RSR=\"!|!=|!==|%|%=|&|&&|&=|\\\\*|\\\\*=|\\\\+|\\\\+=|,|-|-=|/=|/|:|;|<<|<<=|<=|<|===|==|=|>>>=|>>=|>=|>>>|>>|>|\\\\?|\\\\[|\\\\{|\\\\(|\\\\^|\\\\^=|\\\\||\\\\|=|\\\\|\\\\||~\",e.BE={b:\"\\\\\\\\[\\\\s\\\\S]\",r:0},e.ASM={cN:\"string\",b:\"'\",e:\"'\",i:\"\\\\n\",c:[e.BE]},e.QSM={cN:\"string\",b:'\"',e:'\"',i:\"\\\\n\",c:[e.BE]},e.PWM={b:/\\b(a|an|the|are|I|I'm|isn't|don't|doesn't|won't|but|just|should|pretty|simply|enough|gonna|going|wtf|so|such)\\b/},e.C=function(n,t,r){var a=e.inherit({cN:\"comment\",b:n,e:t,c:[]},r||{});return a.c.push(e.PWM),a.c.push({cN:\"doctag\",b:\"(?:TODO|FIXME|NOTE|BUG|XXX):\",r:0}),a},e.CLCM=e.C(\"//\",\"$\"),e.CBCM=e.C(\"/\\\\*\",\"\\\\*/\"),e.HCM=e.C(\"#\",\"$\"),e.NM={cN:\"number\",b:e.NR,r:0},e.CNM={cN:\"number\",b:e.CNR,r:0},e.BNM={cN:\"number\",b:e.BNR,r:0},e.CSSNM={cN:\"number\",b:e.NR+\"(%|em|ex|ch|rem|vw|vh|vmin|vmax|cm|mm|in|pt|pc|px|deg|grad|rad|turn|s|ms|Hz|kHz|dpi|dpcm|dppx)?\",r:0},e.RM={cN:\"regexp\",b:/\\//,e:/\\/[gimuy]*/,i:/\\n/,c:[e.BE,{b:/\\[/,e:/\\]/,r:0,c:[e.BE]}]},e.TM={cN:\"title\",b:e.IR,r:0},e.UTM={cN:\"title\",b:e.UIR,r:0},e});hljs.registerLanguage(\"markdown\",function(e){return{aliases:[\"md\",\"mkdown\",\"mkd\"],c:[{cN:\"header\",v:[{b:\"^#{1,6}\",e:\"$\"},{b:\"^.+?\\\\n[=-]{2,}$\"}]},{b:\"<\",e:\">\",sL:\"xml\",r:0},{cN:\"bullet\",b:\"^([*+-]|(\\\\d+\\\\.))\\\\s+\"},{cN:\"strong\",b:\"[*_]{2}.+?[*_]{2}\"},{cN:\"emphasis\",v:[{b:\"\\\\*.+?\\\\*\"},{b:\"_.+?_\",r:0}]},{cN:\"blockquote\",b:\"^>\\\\s+\",e:\"$\"},{cN:\"code\",v:[{b:\"`.+?`\"},{b:\"^( {4}|\t)\",e:\"$\",r:0}]},{cN:\"horizontal_rule\",b:\"^[-\\\\*]{3,}\",e:\"$\"},{b:\"\\\\[.+?\\\\][\\\\(\\\\[].*?[\\\\)\\\\]]\",rB:!0,c:[{cN:\"link_label\",b:\"\\\\[\",e:\"\\\\]\",eB:!0,rE:!0,r:0},{cN:\"link_url\",b:\"\\\\]\\\\(\",e:\"\\\\)\",eB:!0,eE:!0},{cN:\"link_reference\",b:\"\\\\]\\\\[\",e:\"\\\\]\",eB:!0,eE:!0}],r:10},{b:\"^\\\\[.+\\\\]:\",rB:!0,c:[{cN:\"link_reference\",b:\"\\\\[\",e:\"\\\\]:\",eB:!0,eE:!0,starts:{cN:\"link_url\",e:\"$\"}}]}]}});hljs.registerLanguage(\"ruby\",function(e){var c=\"[a-zA-Z_]\\\\w*[!?=]?|[-+~]\\\\@|<<|>>|=~|===?|<=>|[<>]=?|\\\\*\\\\*|[-/+%^&*~`|]|\\\\[\\\\]=?\",r=\"and false then defined module in return redo if BEGIN retry end for true self when next until do begin unless END rescue nil else break undef not super class case require yield alias while ensure elsif or include attr_reader attr_writer attr_accessor\",b={cN:\"doctag\",b:\"@[A-Za-z]+\"},a={cN:\"value\",b:\"#<\",e:\">\"},n=[e.C(\"#\",\"$\",{c:[b]}),e.C(\"^\\\\=begin\",\"^\\\\=end\",{c:[b],r:10}),e.C(\"^__END__\",\"\\\\n$\")],s={cN:\"subst\",b:\"#\\\\{\",e:\"}\",k:r},t={cN:\"string\",c:[e.BE,s],v:[{b:/'/,e:/'/},{b:/\"/,e:/\"/},{b:/`/,e:/`/},{b:\"%[qQwWx]?\\\\(\",e:\"\\\\)\"},{b:\"%[qQwWx]?\\\\[\",e:\"\\\\]\"},{b:\"%[qQwWx]?{\",e:\"}\"},{b:\"%[qQwWx]?<\",e:\">\"},{b:\"%[qQwWx]?/\",e:\"/\"},{b:\"%[qQwWx]?%\",e:\"%\"},{b:\"%[qQwWx]?-\",e:\"-\"},{b:\"%[qQwWx]?\\\\|\",e:\"\\\\|\"},{b:/\\B\\?(\\\\\\d{1,3}|\\\\x[A-Fa-f0-9]{1,2}|\\\\u[A-Fa-f0-9]{4}|\\\\?\\S)\\b/}]},i={cN:\"params\",b:\"\\\\(\",e:\"\\\\)\",k:r},d=[t,a,{cN:\"class\",bK:\"class 
module\",e:\"$|;\",i:/=/,c:[e.inherit(e.TM,{b:\"[A-Za-z_]\\\\w*(::\\\\w+)*(\\\\?|\\\\!)?\"}),{cN:\"inheritance\",b:\"<\\\\s*\",c:[{cN:\"parent\",b:\"(\"+e.IR+\"::)?\"+e.IR}]}].concat(n)},{cN:\"function\",bK:\"def\",e:\"$|;\",c:[e.inherit(e.TM,{b:c}),i].concat(n)},{cN:\"constant\",b:\"(::)?(\\\\b[A-Z]\\\\w*(::)?)+\",r:0},{cN:\"symbol\",b:e.UIR+\"(\\\\!|\\\\?)?:\",r:0},{cN:\"symbol\",b:\":\",c:[t,{b:c}],r:0},{cN:\"number\",b:\"(\\\\b0[0-7_]+)|(\\\\b0x[0-9a-fA-F_]+)|(\\\\b[1-9][0-9_]*(\\\\.[0-9_]+)?)|[0_]\\\\b\",r:0},{cN:\"variable\",b:\"(\\\\$\\\\W)|((\\\\$|\\\\@\\\\@?)(\\\\w+))\"},{b:\"(\"+e.RSR+\")\\\\s*\",c:[a,{cN:\"regexp\",c:[e.BE,s],i:/\\n/,v:[{b:\"/\",e:\"/[a-z]*\"},{b:\"%r{\",e:\"}[a-z]*\"},{b:\"%r\\\\(\",e:\"\\\\)[a-z]*\"},{b:\"%r!\",e:\"![a-z]*\"},{b:\"%r\\\\[\",e:\"\\\\][a-z]*\"}]}].concat(n),r:0}].concat(n);s.c=d,i.c=d;var o=\"[>?]>\",l=\"[\\\\w#]+\\\\(\\\\w+\\\\):\\\\d+:\\\\d+>\",u=\"(\\\\w+-)?\\\\d+\\\\.\\\\d+\\\\.\\\\d(p\\\\d+)?[^>]+>\",N=[{b:/^\\s*=>/,cN:\"status\",starts:{e:\"$\",c:d}},{cN:\"prompt\",b:\"^(\"+o+\"|\"+l+\"|\"+u+\")\",starts:{e:\"$\",c:d}}];return{aliases:[\"rb\",\"gemspec\",\"podspec\",\"thor\",\"irb\"],k:r,c:n.concat(N).concat(d)}});hljs.registerLanguage(\"makefile\",function(e){var a={cN:\"variable\",b:/\\$\\(/,e:/\\)/,c:[e.BE]};return{aliases:[\"mk\",\"mak\"],c:[e.HCM,{b:/^\\w+\\s*\\W*=/,rB:!0,r:0,starts:{cN:\"constant\",e:/\\s*\\W*=/,eE:!0,starts:{e:/$/,r:0,c:[a]}}},{cN:\"title\",b:/^[\\w]+:\\s*$/},{cN:\"phony\",b:/^\\.PHONY:/,e:/$/,k:\".PHONY\",l:/[\\.\\w]+/},{b:/^\\t+/,e:/$/,r:0,c:[e.QSM,a]}]}});hljs.registerLanguage(\"json\",function(e){var t={literal:\"true false null\"},i=[e.QSM,e.CNM],l={cN:\"value\",e:\",\",eW:!0,eE:!0,c:i,k:t},c={b:\"{\",e:\"}\",c:[{cN:\"attribute\",b:'\\\\s*\"',e:'\"\\\\s*:\\\\s*',eB:!0,eE:!0,c:[e.BE],i:\"\\\\n\",starts:l}],i:\"\\\\S\"},n={b:\"\\\\[\",e:\"\\\\]\",c:[e.inherit(l,{cN:null})],i:\"\\\\S\"};return i.splice(i.length,0,c,n),{c:i,k:t,i:\"\\\\S\"}});hljs.registerLanguage(\"xml\",function(t){var s=\"[A-Za-z0-9\\\\._:-]+\",c={b:/<\\?(php)?(?!\\w)/,e:/\\?>/,sL:\"php\"},e={eW:!0,i:/</,r:0,c:[c,{cN:\"attribute\",b:s,r:0},{b:\"=\",r:0,c:[{cN:\"value\",c:[c],v:[{b:/\"/,e:/\"/},{b:/'/,e:/'/},{b:/[^\\s\\/>]+/}]}]}]};return{aliases:[\"html\",\"xhtml\",\"rss\",\"atom\",\"xsl\",\"plist\"],cI:!0,c:[{cN:\"doctype\",b:\"<!DOCTYPE\",e:\">\",r:10,c:[{b:\"\\\\[\",e:\"\\\\]\"}]},t.C(\"<!--\",\"-->\",{r:10}),{cN:\"cdata\",b:\"<\\\\!\\\\[CDATA\\\\[\",e:\"\\\\]\\\\]>\",r:10},{cN:\"tag\",b:\"<style(?=\\\\s|>|$)\",e:\">\",k:{title:\"style\"},c:[e],starts:{e:\"</style>\",rE:!0,sL:\"css\"}},{cN:\"tag\",b:\"<script(?=\\\\s|>|$)\",e:\">\",k:{title:\"script\"},c:[e],starts:{e:\"</script>\",rE:!0,sL:[\"actionscript\",\"javascript\",\"handlebars\"]}},c,{cN:\"pi\",b:/<\\?\\w+/,e:/\\?>/,r:10},{cN:\"tag\",b:\"</?\",e:\"/?>\",c:[{cN:\"title\",b:/[^ \\/><\\n\\t]+/,r:0},e]}]}});hljs.registerLanguage(\"css\",function(e){var c=\"[a-zA-Z-][a-zA-Z0-9_-]*\",a={cN:\"function\",b:c+\"\\\\(\",rB:!0,eE:!0,e:\"\\\\(\"},r={cN:\"rule\",b:/[A-Z\\_\\.\\-]+\\s*:/,rB:!0,e:\";\",eW:!0,c:[{cN:\"attribute\",b:/\\S/,e:\":\",eE:!0,starts:{cN:\"value\",eW:!0,eE:!0,c:[a,e.CSSNM,e.QSM,e.ASM,e.CBCM,{cN:\"hexcolor\",b:\"#[0-9A-Fa-f]+\"},{cN:\"important\",b:\"!important\"}]}}]};return{cI:!0,i:/[=\\/|'\\$]/,c:[e.CBCM,r,{cN:\"id\",b:/\\#[A-Za-z0-9_-]+/},{cN:\"class\",b:/\\.[A-Za-z0-9_-]+/},{cN:\"attr_selector\",b:/\\[/,e:/\\]/,i:\"$\"},{cN:\"pseudo\",b:/:(:)?[a-zA-Z0-9\\_\\-\\+\\(\\)\"']+/},{cN:\"at_rule\",b:\"@(font-face|page)\",l:\"[a-z-]+\",k:\"font-face 
page\"},{cN:\"at_rule\",b:\"@\",e:\"[{;]\",c:[{cN:\"keyword\",b:/\\S+/},{b:/\\s/,eW:!0,eE:!0,r:0,c:[a,e.ASM,e.QSM,e.CSSNM]}]},{cN:\"tag\",b:c,r:0},{cN:\"rules\",b:\"{\",e:\"}\",i:/\\S/,c:[e.CBCM,r]}]}});hljs.registerLanguage(\"perl\",function(e){var t=\"getpwent getservent quotemeta msgrcv scalar kill dbmclose undef lc ma syswrite tr send umask sysopen shmwrite vec qx utime local oct semctl localtime readpipe do return format read sprintf dbmopen pop getpgrp not getpwnam rewinddir qqfileno qw endprotoent wait sethostent bless s|0 opendir continue each sleep endgrent shutdown dump chomp connect getsockname die socketpair close flock exists index shmgetsub for endpwent redo lstat msgctl setpgrp abs exit select print ref gethostbyaddr unshift fcntl syscall goto getnetbyaddr join gmtime symlink semget splice x|0 getpeername recv log setsockopt cos last reverse gethostbyname getgrnam study formline endhostent times chop length gethostent getnetent pack getprotoent getservbyname rand mkdir pos chmod y|0 substr endnetent printf next open msgsnd readdir use unlink getsockopt getpriority rindex wantarray hex system getservbyport endservent int chr untie rmdir prototype tell listen fork shmread ucfirst setprotoent else sysseek link getgrgid shmctl waitpid unpack getnetbyname reset chdir grep split require caller lcfirst until warn while values shift telldir getpwuid my getprotobynumber delete and sort uc defined srand accept package seekdir getprotobyname semop our rename seek if q|0 chroot sysread setpwent no crypt getc chown sqrt write setnetent setpriority foreach tie sin msgget map stat getlogin unless elsif truncate exec keys glob tied closedirioctl socket readlink eval xor readline binmode setservent eof ord bind alarm pipe atan2 getgrent exp time push setgrent gt lt or ne m|0 break given say state when\",r={cN:\"subst\",b:\"[$@]\\\\{\",e:\"\\\\}\",k:t},s={b:\"->{\",e:\"}\"},n={cN:\"variable\",v:[{b:/\\$\\d/},{b:/[\\$%@](\\^\\w\\b|#\\w+(::\\w+)*|{\\w+}|\\w+(::\\w*)*)/},{b:/[\\$%@][^\\s\\w{]/,r:0}]},o=[e.BE,r,n],i=[n,e.HCM,e.C(\"^\\\\=\\\\w\",\"\\\\=cut\",{eW:!0}),s,{cN:\"string\",c:o,v:[{b:\"q[qwxr]?\\\\s*\\\\(\",e:\"\\\\)\",r:5},{b:\"q[qwxr]?\\\\s*\\\\[\",e:\"\\\\]\",r:5},{b:\"q[qwxr]?\\\\s*\\\\{\",e:\"\\\\}\",r:5},{b:\"q[qwxr]?\\\\s*\\\\|\",e:\"\\\\|\",r:5},{b:\"q[qwxr]?\\\\s*\\\\<\",e:\"\\\\>\",r:5},{b:\"qw\\\\s+q\",e:\"q\",r:5},{b:\"'\",e:\"'\",c:[e.BE]},{b:'\"',e:'\"'},{b:\"`\",e:\"`\",c:[e.BE]},{b:\"{\\\\w+}\",c:[],r:0},{b:\"-?\\\\w+\\\\s*\\\\=\\\\>\",c:[],r:0}]},{cN:\"number\",b:\"(\\\\b0[0-7_]+)|(\\\\b0x[0-9a-fA-F_]+)|(\\\\b[1-9][0-9_]*(\\\\.[0-9_]+)?)|[0_]\\\\b\",r:0},{b:\"(\\\\/\\\\/|\"+e.RSR+\"|\\\\b(split|return|print|reverse|grep)\\\\b)\\\\s*\",k:\"split return print reverse grep\",r:0,c:[e.HCM,{cN:\"regexp\",b:\"(s|tr|y)/(\\\\\\\\.|[^/])*/(\\\\\\\\.|[^/])*/[a-z]*\",r:10},{cN:\"regexp\",b:\"(m|qr)?/\",e:\"/[a-z]*\",c:[e.BE],r:0}]},{cN:\"sub\",bK:\"sub\",e:\"(\\\\s*\\\\(.*?\\\\))?[;{]\",r:5},{cN:\"operator\",b:\"-\\\\w\\\\b\",r:0},{b:\"^__DATA__$\",e:\"^__END__$\",sL:\"mojolicious\",c:[{b:\"^@@.*\",e:\"$\",cN:\"comment\"}]}];return r.c=i,s.c=i,{aliases:[\"pl\"],k:t,c:i}});hljs.registerLanguage(\"cs\",function(e){var r=\"abstract as base bool break byte case catch char checked const continue decimal dynamic default delegate do double else enum event explicit extern false finally fixed float for foreach goto if implicit in int interface internal is lock long null when object operator out override params private protected public readonly ref sbyte sealed short sizeof stackalloc static 
string struct switch this true try typeof uint ulong unchecked unsafe ushort using virtual volatile void while async protected public private internal ascending descending from get group into join let orderby partial select set value var where yield\",t=e.IR+\"(<\"+e.IR+\">)?\";return{aliases:[\"csharp\"],k:r,i:/::/,c:[e.C(\"///\",\"$\",{rB:!0,c:[{cN:\"xmlDocTag\",v:[{b:\"///\",r:0},{b:\"<!--|-->\"},{b:\"</?\",e:\">\"}]}]}),e.CLCM,e.CBCM,{cN:\"preprocessor\",b:\"#\",e:\"$\",k:\"if else elif endif define undef warning error line region endregion pragma checksum\"},{cN:\"string\",b:'@\"',e:'\"',c:[{b:'\"\"'}]},e.ASM,e.QSM,e.CNM,{bK:\"class interface\",e:/[{;=]/,i:/[^\\s:]/,c:[e.TM,e.CLCM,e.CBCM]},{bK:\"namespace\",e:/[{;=]/,i:/[^\\s:]/,c:[{cN:\"title\",b:\"[a-zA-Z](\\\\.?\\\\w)*\",r:0},e.CLCM,e.CBCM]},{bK:\"new return throw await\",r:0},{cN:\"function\",b:\"(\"+t+\"\\\\s+)+\"+e.IR+\"\\\\s*\\\\(\",rB:!0,e:/[{;=]/,eE:!0,k:r,c:[{b:e.IR+\"\\\\s*\\\\(\",rB:!0,c:[e.TM],r:0},{cN:\"params\",b:/\\(/,e:/\\)/,eB:!0,eE:!0,k:r,r:0,c:[e.ASM,e.QSM,e.CNM,e.CBCM]},e.CLCM,e.CBCM]}]}});hljs.registerLanguage(\"apache\",function(e){var r={cN:\"number\",b:\"[\\\\$%]\\\\d+\"};return{aliases:[\"apacheconf\"],cI:!0,c:[e.HCM,{cN:\"tag\",b:\"</?\",e:\">\"},{cN:\"keyword\",b:/\\w+/,r:0,k:{common:\"order deny allow setenv rewriterule rewriteengine rewritecond documentroot sethandler errordocument loadmodule options header listen serverroot servername\"},starts:{e:/$/,r:0,k:{literal:\"on off all\"},c:[{cN:\"sqbracket\",b:\"\\\\s\\\\[\",e:\"\\\\]$\"},{cN:\"cbracket\",b:\"[\\\\$%]\\\\{\",e:\"\\\\}\",c:[\"self\",r]},r,e.QSM]}}],i:/\\S/}});hljs.registerLanguage(\"http\",function(t){return{aliases:[\"https\"],i:\"\\\\S\",c:[{cN:\"status\",b:\"^HTTP/[0-9\\\\.]+\",e:\"$\",c:[{cN:\"number\",b:\"\\\\b\\\\d{3}\\\\b\"}]},{cN:\"request\",b:\"^[A-Z]+ (.*?) HTTP/[0-9\\\\.]+$\",rB:!0,e:\"$\",c:[{cN:\"string\",b:\" \",e:\" \",eB:!0,eE:!0}]},{cN:\"attribute\",b:\"^\\\\w\",e:\": \",eE:!0,i:\"\\\\n|\\\\s|=\",starts:{cN:\"string\",e:\"$\"}},{b:\"\\\\n\\\\n\",starts:{sL:[],eW:!0}}]}});hljs.registerLanguage(\"objectivec\",function(e){var t={cN:\"built_in\",b:\"(AV|CA|CF|CG|CI|MK|MP|NS|UI)\\\\w+\"},i={keyword:\"int float while char export sizeof typedef const struct for union unsigned long volatile static bool mutable if do return goto void enum else break extern asm case short default double register explicit signed typename this switch continue wchar_t inline readonly assign readwrite self @synchronized id typeof nonatomic super unichar IBOutlet IBAction strong weak copy in out inout bycopy byref oneway __strong __weak __block __autoreleasing @private @protected @public @try @property @end @throw @catch @finally @autoreleasepool @synthesize @dynamic @selector @optional @required\",literal:\"false true FALSE TRUE nil YES NO NULL\",built_in:\"BOOL dispatch_once_t dispatch_queue_t dispatch_sync dispatch_async dispatch_once\"},o=/[a-zA-Z@][a-zA-Z0-9_]*/,n=\"@interface @class @protocol @implementation\";return{aliases:[\"mm\",\"objc\",\"obj-c\"],k:i,l:o,i:\"</\",c:[t,e.CLCM,e.CBCM,e.CNM,e.QSM,{cN:\"string\",v:[{b:'@\"',e:'\"',i:\"\\\\n\",c:[e.BE]},{b:\"'\",e:\"[^\\\\\\\\]'\",i:\"[^\\\\\\\\][^']\"}]},{cN:\"preprocessor\",b:\"#\",e:\"$\",c:[{cN:\"title\",v:[{b:'\"',e:'\"'},{b:\"<\",e:\">\"}]}]},{cN:\"class\",b:\"(\"+n.split(\" \").join(\"|\")+\")\\\\b\",e:\"({|$)\",eE:!0,k:n,l:o,c:[e.UTM]},{cN:\"variable\",b:\"\\\\.\"+e.UIR,r:0}]}});hljs.registerLanguage(\"python\",function(e){var r={cN:\"prompt\",b:/^(>>>|\\.\\.\\.) 
/},b={cN:\"string\",c:[e.BE],v:[{b:/(u|b)?r?'''/,e:/'''/,c:[r],r:10},{b:/(u|b)?r?\"\"\"/,e:/\"\"\"/,c:[r],r:10},{b:/(u|r|ur)'/,e:/'/,r:10},{b:/(u|r|ur)\"/,e:/\"/,r:10},{b:/(b|br)'/,e:/'/},{b:/(b|br)\"/,e:/\"/},e.ASM,e.QSM]},a={cN:\"number\",r:0,v:[{b:e.BNR+\"[lLjJ]?\"},{b:\"\\\\b(0o[0-7]+)[lLjJ]?\"},{b:e.CNR+\"[lLjJ]?\"}]},l={cN:\"params\",b:/\\(/,e:/\\)/,c:[\"self\",r,a,b]};return{aliases:[\"py\",\"gyp\"],k:{keyword:\"and elif is global as in if from raise for except finally print import pass return exec else break not with class assert yield try while continue del or def lambda async await nonlocal|10 None True False\",built_in:\"Ellipsis NotImplemented\"},i:/(<\\/|->|\\?)/,c:[r,a,b,e.HCM,{v:[{cN:\"function\",bK:\"def\",r:10},{cN:\"class\",bK:\"class\"}],e:/:/,i:/[${=;\\n,]/,c:[e.UTM,l]},{cN:\"decorator\",b:/^[\\t ]*@/,e:/$/},{b:/\\b(print|exec)\\(/}]}});hljs.registerLanguage(\"java\",function(e){var a=e.UIR+\"(<\"+e.UIR+\">)?\",t=\"false synchronized int abstract float private char boolean static null if const for true while long strictfp finally protected import native final void enum else break transient catch instanceof byte super volatile case assert short package default double public try this switch continue throws protected public private\",c=\"\\\\b(0[bB]([01]+[01_]+[01]+|[01]+)|0[xX]([a-fA-F0-9]+[a-fA-F0-9_]+[a-fA-F0-9]+|[a-fA-F0-9]+)|(([\\\\d]+[\\\\d_]+[\\\\d]+|[\\\\d]+)(\\\\.([\\\\d]+[\\\\d_]+[\\\\d]+|[\\\\d]+))?|\\\\.([\\\\d]+[\\\\d_]+[\\\\d]+|[\\\\d]+))([eE][-+]?\\\\d+)?)[lLfF]?\",r={cN:\"number\",b:c,r:0};return{aliases:[\"jsp\"],k:t,i:/<\\/|#/,c:[e.C(\"/\\\\*\\\\*\",\"\\\\*/\",{r:0,c:[{cN:\"doctag\",b:\"@[A-Za-z]+\"}]}),e.CLCM,e.CBCM,e.ASM,e.QSM,{cN:\"class\",bK:\"class interface\",e:/[{;=]/,eE:!0,k:\"class interface\",i:/[:\"\\[\\]]/,c:[{bK:\"extends implements\"},e.UTM]},{bK:\"new throw return else\",r:0},{cN:\"function\",b:\"(\"+a+\"\\\\s+)+\"+e.UIR+\"\\\\s*\\\\(\",rB:!0,e:/[{;=]/,eE:!0,k:t,c:[{b:e.UIR+\"\\\\s*\\\\(\",rB:!0,r:0,c:[e.UTM]},{cN:\"params\",b:/\\(/,e:/\\)/,k:t,r:0,c:[e.ASM,e.QSM,e.CNM,e.CBCM]},e.CLCM,e.CBCM]},r,{cN:\"annotation\",b:\"@[A-Za-z]+\"}]}});hljs.registerLanguage(\"bash\",function(e){var t={cN:\"variable\",v:[{b:/\\$[\\w\\d#@][\\w\\d_]*/},{b:/\\$\\{(.*?)}/}]},s={cN:\"string\",b:/\"/,e:/\"/,c:[e.BE,t,{cN:\"variable\",b:/\\$\\(/,e:/\\)/,c:[e.BE]}]},a={cN:\"string\",b:/'/,e:/'/};return{aliases:[\"sh\",\"zsh\"],l:/-?[a-z\\.]+/,k:{keyword:\"if then else elif fi for while in do done case esac function\",literal:\"true false\",built_in:\"break cd continue eval exec exit export getopts hash pwd readonly return shift test times trap umask unset alias bind builtin caller command declare echo enable help let local logout mapfile printf read readarray source type typeset ulimit unalias set shopt autoload bg bindkey bye cap chdir clone comparguments compcall compctl compdescribe compfiles compgroups compquote comptags comptry compvalues dirs disable disown echotc echoti emulate fc fg float functions getcap getln history integer jobs kill limit log noglob popd print pushd pushln rehash sched setcap setopt stat suspend ttyctl unfunction unhash unlimit unsetopt vared wait whence where which zcompile zformat zftp zle zmodload zparseopts zprof zpty zregexparse zsocket zstyle ztcp\",operator:\"-ne -eq -lt -gt -f -d -e -s -l -a\"},c:[{cN:\"shebang\",b:/^#![^\\n]+sh\\s*$/,r:10},{cN:\"function\",b:/\\w[\\w\\d_]*\\s*\\(\\s*\\)\\s*\\{/,rB:!0,c:[e.inherit(e.TM,{b:/\\w[\\w\\d_]*/})],r:0},e.HCM,e.NM,s,a,t]}});hljs.registerLanguage(\"sql\",function(e){var 
t=e.C(\"--\",\"$\");return{cI:!0,i:/[<>{}*]/,c:[{cN:\"operator\",bK:\"begin end start commit rollback savepoint lock alter create drop rename call delete do handler insert load replace select truncate update set show pragma grant merge describe use explain help declare prepare execute deallocate release unlock purge reset change stop analyze cache flush optimize repair kill install uninstall checksum restore check backup revoke\",e:/;/,eW:!0,k:{keyword:\"abort abs absolute acc acce accep accept access accessed accessible account acos action activate add addtime admin administer advanced advise aes_decrypt aes_encrypt after agent aggregate ali alia alias allocate allow alter always analyze ancillary and any anydata anydataset anyschema anytype apply archive archived archivelog are as asc ascii asin assembly assertion associate asynchronous at atan atn2 attr attri attrib attribu attribut attribute attributes audit authenticated authentication authid authors auto autoallocate autodblink autoextend automatic availability avg backup badfile basicfile before begin beginning benchmark between bfile bfile_base big bigfile bin binary_double binary_float binlog bit_and bit_count bit_length bit_or bit_xor bitmap blob_base block blocksize body both bound buffer_cache buffer_pool build bulk by byte byteordermark bytes c cache caching call calling cancel capacity cascade cascaded case cast catalog category ceil ceiling chain change changed char_base char_length character_length characters characterset charindex charset charsetform charsetid check checksum checksum_agg child choose chr chunk class cleanup clear client clob clob_base clone close cluster_id cluster_probability cluster_set clustering coalesce coercibility col collate collation collect colu colum column column_value columns columns_updated comment commit compact compatibility compiled complete composite_limit compound compress compute concat concat_ws concurrent confirm conn connec connect connect_by_iscycle connect_by_isleaf connect_by_root connect_time connection consider consistent constant constraint constraints constructor container content contents context contributors controlfile conv convert convert_tz corr corr_k corr_s corresponding corruption cos cost count count_big counted covar_pop covar_samp cpu_per_call cpu_per_session crc32 create creation critical cross cube cume_dist curdate current current_date current_time current_timestamp current_user cursor curtime customdatum cycle d data database databases datafile datafiles datalength date_add date_cache date_format date_sub dateadd datediff datefromparts datename datepart datetime2fromparts day day_to_second dayname dayofmonth dayofweek dayofyear days db_role_change dbtimezone ddl deallocate declare decode decompose decrement decrypt deduplicate def defa defau defaul default defaults deferred defi defin define degrees delayed delegate delete delete_all delimited demand dense_rank depth dequeue des_decrypt des_encrypt des_key_file desc descr descri describ describe descriptor deterministic diagnostics difference dimension direct_load directory disable disable_all disallow disassociate discardfile disconnect diskgroup distinct distinctrow distribute distributed div do document domain dotnet double downgrade drop dumpfile duplicate duration e each edition editionable editions element ellipsis else elsif elt empty enable enable_all enclosed encode encoding encrypt end end-exec endian enforced engine engines enqueue enterprise entityescaping eomonth error errors escaped evalname 
evaluate event eventdata events except exception exceptions exchange exclude excluding execu execut execute exempt exists exit exp expire explain export export_set extended extent external external_1 external_2 externally extract f failed failed_login_attempts failover failure far fast feature_set feature_value fetch field fields file file_name_convert filesystem_like_logging final finish first first_value fixed flash_cache flashback floor flush following follows for forall force form forma format found found_rows freelist freelists freepools fresh from from_base64 from_days ftp full function g general generated get get_format get_lock getdate getutcdate global global_name globally go goto grant grants greatest group group_concat group_id grouping grouping_id groups gtid_subtract guarantee guard handler hash hashkeys having hea head headi headin heading heap help hex hierarchy high high_priority hosts hour http i id ident_current ident_incr ident_seed identified identity idle_time if ifnull ignore iif ilike ilm immediate import in include including increment index indexes indexing indextype indicator indices inet6_aton inet6_ntoa inet_aton inet_ntoa infile initial initialized initially initrans inmemory inner innodb input insert install instance instantiable instr interface interleaved intersect into invalidate invisible is is_free_lock is_ipv4 is_ipv4_compat is_not is_not_null is_used_lock isdate isnull isolation iterate java join json json_exists k keep keep_duplicates key keys kill l language large last last_day last_insert_id last_value lax lcase lead leading least leaves left len lenght length less level levels library like like2 like4 likec limit lines link list listagg little ln load load_file lob lobs local localtime localtimestamp locate locator lock locked log log10 log2 logfile logfiles logging logical logical_reads_per_call logoff logon logs long loop low low_priority lower lpad lrtrim ltrim m main make_set makedate maketime managed management manual map mapping mask master master_pos_wait match matched materialized max maxextents maximize maxinstances maxlen maxlogfiles maxloghistory maxlogmembers maxsize maxtrans md5 measures median medium member memcompress memory merge microsecond mid migration min minextents minimum mining minus minute minvalue missing mod mode model modification modify module monitoring month months mount move movement multiset mutex n name name_const names nan national native natural nav nchar nclob nested never new newline next nextval no no_write_to_binlog noarchivelog noaudit nobadfile nocheck nocompress nocopy nocycle nodelay nodiscardfile noentityescaping noguarantee nokeep nologfile nomapping nomaxvalue nominimize nominvalue nomonitoring none noneditionable nonschema noorder nopr nopro noprom nopromp noprompt norely noresetlogs noreverse normal norowdependencies noschemacheck noswitch not nothing notice notrim novalidate now nowait nth_value nullif nulls num numb numbe nvarchar nvarchar2 object ocicoll ocidate ocidatetime ociduration ociinterval ociloblocator ocinumber ociref ocirefcursor ocirowid ocistring ocitype oct octet_length of off offline offset oid oidindex old on online only opaque open operations operator optimal optimize option optionally or oracle oracle_date oradata ord ordaudio orddicom orddoc order ordimage ordinality ordvideo organization orlany orlvary out outer outfile outline output over overflow overriding p package pad parallel parallel_enable parameters parent parse partial partition partitions pascal passing password 
password_grace_time password_lock_time password_reuse_max password_reuse_time password_verify_function patch path patindex pctincrease pctthreshold pctused pctversion percent percent_rank percentile_cont percentile_disc performance period period_add period_diff permanent physical pi pipe pipelined pivot pluggable plugin policy position post_transaction pow power pragma prebuilt precedes preceding precision prediction prediction_cost prediction_details prediction_probability prediction_set prepare present preserve prior priority private private_sga privileges procedural procedure procedure_analyze processlist profiles project prompt protection public publishingservername purge quarter query quick quiesce quota quotename radians raise rand range rank raw read reads readsize rebuild record records recover recovery recursive recycle redo reduced ref reference referenced references referencing refresh regexp_like register regr_avgx regr_avgy regr_count regr_intercept regr_r2 regr_slope regr_sxx regr_sxy reject rekey relational relative relaylog release release_lock relies_on relocate rely rem remainder rename repair repeat replace replicate replication required reset resetlogs resize resource respect restore restricted result result_cache resumable resume retention return returning returns reuse reverse revoke right rlike role roles rollback rolling rollup round row row_count rowdependencies rowid rownum rows rtrim rules safe salt sample save savepoint sb1 sb2 sb4 scan schema schemacheck scn scope scroll sdo_georaster sdo_topo_geometry search sec_to_time second section securefile security seed segment select self sequence sequential serializable server servererror session session_user sessions_per_user set sets settings sha sha1 sha2 share shared shared_pool short show shrink shutdown si_averagecolor si_colorhistogram si_featurelist si_positionalcolor si_stillimage si_texture siblings sid sign sin size size_t sizes skip slave sleep smalldatetimefromparts smallfile snapshot some soname sort soundex source space sparse spfile split sql sql_big_result sql_buffer_result sql_cache sql_calc_found_rows sql_small_result sql_variant_property sqlcode sqldata sqlerror sqlname sqlstate sqrt square standalone standby start starting startup statement static statistics stats_binomial_test stats_crosstab stats_ks_test stats_mode stats_mw_test stats_one_way_anova stats_t_test_ stats_t_test_indep stats_t_test_one stats_t_test_paired stats_wsr_test status std stddev stddev_pop stddev_samp stdev stop storage store stored str str_to_date straight_join strcmp strict string struct stuff style subdate subpartition subpartitions substitutable substr substring subtime subtring_index subtype success sum suspend switch switchoffset switchover sync synchronous synonym sys sys_xmlagg sysasm sysaux sysdate sysdatetimeoffset sysdba sysoper system system_user sysutcdatetime t table tables tablespace tan tdo template temporary terminated tertiary_weights test than then thread through tier ties time time_format time_zone timediff timefromparts timeout timestamp timestampadd timestampdiff timezone_abbr timezone_minute timezone_region to to_base64 to_date to_days to_seconds todatetimeoffset trace tracking transaction transactional translate translation treat trigger trigger_nestlevel triggers trim truncate try_cast try_convert try_parse type ub1 ub2 ub4 ucase unarchived unbounded uncompress under undo unhex unicode uniform uninstall union unique unix_timestamp unknown unlimited unlock unpivot unrecoverable unsafe unsigned until 
untrusted unusable unused update updated upgrade upped upper upsert url urowid usable usage use use_stored_outlines user user_data user_resources users using utc_date utc_timestamp uuid uuid_short validate validate_password_strength validation valist value values var var_samp varcharc vari varia variab variabl variable variables variance varp varraw varrawc varray verify version versions view virtual visible void wait wallet warning warnings week weekday weekofyear wellformed when whene whenev wheneve whenever where while whitespace with within without work wrapped xdb xml xmlagg xmlattributes xmlcast xmlcolattval xmlelement xmlexists xmlforest xmlindex xmlnamespaces xmlpi xmlquery xmlroot xmlschema xmlserialize xmltable xmltype xor year year_to_month years yearweek\",literal:\"true false null\",built_in:\"array bigint binary bit blob boolean char character date dec decimal float int int8 integer interval number numeric real record serial serial8 smallint text varchar varying void\"},c:[{cN:\"string\",b:\"'\",e:\"'\",c:[e.BE,{b:\"''\"}]},{cN:\"string\",b:'\"',e:'\"',c:[e.BE,{b:'\"\"'}]},{cN:\"string\",b:\"`\",e:\"`\",c:[e.BE]},e.CNM,e.CBCM,t]},e.CBCM,t]}});hljs.registerLanguage(\"nginx\",function(e){var r={cN:\"variable\",v:[{b:/\\$\\d+/},{b:/\\$\\{/,e:/}/},{b:\"[\\\\$\\\\@]\"+e.UIR}]},b={eW:!0,l:\"[a-z/_]+\",k:{built_in:\"on off yes no true false none blocked debug info notice warn error crit select break last permanent redirect kqueue rtsig epoll poll /dev/poll\"},r:0,i:\"=>\",c:[e.HCM,{cN:\"string\",c:[e.BE,r],v:[{b:/\"/,e:/\"/},{b:/'/,e:/'/}]},{cN:\"url\",b:\"([a-z]+):/\",e:\"\\\\s\",eW:!0,eE:!0,c:[r]},{cN:\"regexp\",c:[e.BE,r],v:[{b:\"\\\\s\\\\^\",e:\"\\\\s|{|;\",rE:!0},{b:\"~\\\\*?\\\\s+\",e:\"\\\\s|{|;\",rE:!0},{b:\"\\\\*(\\\\.[a-z\\\\-]+)+\"},{b:\"([a-z\\\\-]+\\\\.)+\\\\*\"}]},{cN:\"number\",b:\"\\\\b\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}(:\\\\d{1,5})?\\\\b\"},{cN:\"number\",b:\"\\\\b\\\\d+[kKmMgGdshdwy]*\\\\b\",r:0},r]};return{aliases:[\"nginxconf\"],c:[e.HCM,{b:e.UIR+\"\\\\s\",e:\";|{\",rB:!0,c:[{cN:\"title\",b:e.UIR,starts:b}],r:0}],i:\"[^\\\\s\\\\}]\"}});hljs.registerLanguage(\"cpp\",function(t){var e={cN:\"keyword\",b:\"\\\\b[a-z\\\\d_]*_t\\\\b\"},r={cN:\"string\",v:[t.inherit(t.QSM,{b:'((u8?|U)|L)?\"'}),{b:'(u8?|U)?R\"',e:'\"',c:[t.BE]},{b:\"'\\\\\\\\?.\",e:\"'\",i:\".\"}]},s={cN:\"number\",v:[{b:\"\\\\b(\\\\d+(\\\\.\\\\d*)?|\\\\.\\\\d+)(u|U|l|L|ul|UL|f|F)\"},{b:t.CNR}]},i={cN:\"preprocessor\",b:\"#\",e:\"$\",k:\"if else elif endif define undef warning error line pragma ifdef ifndef\",c:[{b:/\\\\\\n/,r:0},{bK:\"include\",e:\"$\",c:[r,{cN:\"string\",b:\"<\",e:\">\",i:\"\\\\n\"}]},r,s,t.CLCM,t.CBCM]},a=t.IR+\"\\\\s*\\\\(\",c={keyword:\"int float while private char catch export virtual operator sizeof dynamic_cast|10 typedef const_cast|10 const struct for static_cast|10 union namespace unsigned long volatile static protected bool template mutable if public friend do goto auto void enum else break extern using class asm case typeid short reinterpret_cast|10 default double register explicit signed typename try this switch continue inline delete alignof constexpr decltype noexcept static_assert thread_local restrict _Bool complex _Complex _Imaginary atomic_bool atomic_char atomic_schar atomic_uchar atomic_short atomic_ushort atomic_int atomic_uint atomic_long atomic_ulong atomic_llong atomic_ullong\",built_in:\"std string cin cout cerr clog stdin stdout stderr stringstream istringstream ostringstream auto_ptr deque list queue stack vector map set bitset multiset 
multimap unordered_set unordered_map unordered_multiset unordered_multimap array shared_ptr abort abs acos asin atan2 atan calloc ceil cosh cos exit exp fabs floor fmod fprintf fputs free frexp fscanf isalnum isalpha iscntrl isdigit isgraph islower isprint ispunct isspace isupper isxdigit tolower toupper labs ldexp log10 log malloc realloc memchr memcmp memcpy memset modf pow printf putchar puts scanf sinh sin snprintf sprintf sqrt sscanf strcat strchr strcmp strcpy strcspn strlen strncat strncmp strncpy strpbrk strrchr strspn strstr tanh tan vfprintf vprintf vsprintf\",literal:\"true false nullptr NULL\"};return{aliases:[\"c\",\"cc\",\"h\",\"c++\",\"h++\",\"hpp\"],k:c,i:\"</\",c:[e,t.CLCM,t.CBCM,s,r,i,{b:\"\\\\b(deque|list|queue|stack|vector|map|set|bitset|multiset|multimap|unordered_map|unordered_set|unordered_multiset|unordered_multimap|array)\\\\s*<\",e:\">\",k:c,c:[\"self\",e]},{b:t.IR+\"::\",k:c},{bK:\"new throw return else\",r:0},{cN:\"function\",b:\"(\"+t.IR+\"[\\\\*&\\\\s]+)+\"+a,rB:!0,e:/[{;=]/,eE:!0,k:c,i:/[^\\w\\s\\*&]/,c:[{b:a,rB:!0,c:[t.TM],r:0},{cN:\"params\",b:/\\(/,e:/\\)/,k:c,r:0,c:[t.CLCM,t.CBCM,r,s]},t.CLCM,t.CBCM,i]}]}});hljs.registerLanguage(\"php\",function(e){var c={cN:\"variable\",b:\"\\\\$+[a-zA-Z_-ÿ][a-zA-Z0-9_-ÿ]*\"},a={cN:\"preprocessor\",b:/<\\?(php)?|\\?>/},i={cN:\"string\",c:[e.BE,a],v:[{b:'b\"',e:'\"'},{b:\"b'\",e:\"'\"},e.inherit(e.ASM,{i:null}),e.inherit(e.QSM,{i:null})]},t={v:[e.BNM,e.CNM]};return{aliases:[\"php3\",\"php4\",\"php5\",\"php6\"],cI:!0,k:\"and include_once list abstract global private echo interface as static endswitch array null if endwhile or const for endforeach self var while isset public protected exit foreach throw elseif include __FILE__ empty require_once do xor return parent clone use __CLASS__ __LINE__ else break print eval new catch __METHOD__ case exception default die require __FUNCTION__ enddeclare final try switch continue endfor endif declare unset true false trait goto instanceof insteadof __DIR__ __NAMESPACE__ yield finally\",c:[e.CLCM,e.HCM,e.C(\"/\\\\*\",\"\\\\*/\",{c:[{cN:\"doctag\",b:\"@[A-Za-z]+\"},a]}),e.C(\"__halt_compiler.+?;\",!1,{eW:!0,k:\"__halt_compiler\",l:e.UIR}),{cN:\"string\",b:/<<<['\"]?\\w+['\"]?$/,e:/^\\w+;?$/,c:[e.BE,{cN:\"subst\",v:[{b:/\\$\\w+/},{b:/\\{\\$/,e:/\\}/}]}]},a,c,{b:/(::|->)+[a-zA-Z_\\x7f-\\xff][a-zA-Z0-9_\\x7f-\\xff]*/},{cN:\"function\",bK:\"function\",e:/[;{]/,eE:!0,i:\"\\\\$|\\\\[|%\",c:[e.UTM,{cN:\"params\",b:\"\\\\(\",e:\"\\\\)\",c:[\"self\",c,e.CBCM,i,t]}]},{cN:\"class\",bK:\"class interface\",e:\"{\",eE:!0,i:/[:\\(\\$\"]/,c:[{bK:\"extends implements\"},e.UTM]},{bK:\"namespace\",e:\";\",i:/[\\.']/,c:[e.UTM]},{bK:\"use\",e:\";\",c:[e.UTM]},{b:\"=>\"},i,t]}});hljs.registerLanguage(\"coffeescript\",function(e){var c={keyword:\"in if for while finally new do return else break catch instanceof throw try this switch continue typeof delete debugger super then unless until loop of by when and or is isnt not\",literal:\"true false null undefined yes no on off\",built_in:\"npm require console print module global window document\"},n=\"[A-Za-z$_][0-9A-Za-z$_]*\",r={cN:\"subst\",b:/#\\{/,e:/}/,k:c},t=[e.BNM,e.inherit(e.CNM,{starts:{e:\"(\\\\s*/)?\",r:0}}),{cN:\"string\",v:[{b:/'''/,e:/'''/,c:[e.BE]},{b:/'/,e:/'/,c:[e.BE]},{b:/\"\"\"/,e:/\"\"\"/,c:[e.BE,r]},{b:/\"/,e:/\"/,c:[e.BE,r]}]},{cN:\"regexp\",v:[{b:\"///\",e:\"///\",c:[r,e.HCM]},{b:\"//[gim]*\",r:0},{b:/\\/(?![ *])(\\\\\\/|.)*?\\/[gim]*(?=\\W|$)/}]},{cN:\"property\",b:\"@\"+n},{b:\"`\",e:\"`\",eB:!0,eE:!0,sL:\"javascript\"}];r.c=t;var 
s=e.inherit(e.TM,{b:n}),i=\"(\\\\(.*\\\\))?\\\\s*\\\\B[-=]>\",o={cN:\"params\",b:\"\\\\([^\\\\(]\",rB:!0,c:[{b:/\\(/,e:/\\)/,k:c,c:[\"self\"].concat(t)}]};return{aliases:[\"coffee\",\"cson\",\"iced\"],k:c,i:/\\/\\*/,c:t.concat([e.C(\"###\",\"###\"),e.HCM,{cN:\"function\",b:\"^\\\\s*\"+n+\"\\\\s*=\\\\s*\"+i,e:\"[-=]>\",rB:!0,c:[s,o]},{b:/[:\\(,=]\\s*/,r:0,c:[{cN:\"function\",b:i,e:\"[-=]>\",rB:!0,c:[o]}]},{cN:\"class\",bK:\"class\",e:\"$\",i:/[:=\"\\[\\]]/,c:[{bK:\"extends\",eW:!0,i:/[:=\"\\[\\]]/,c:[s]},s]},{cN:\"attribute\",b:n+\":\",e:\":\",rB:!0,rE:!0,r:0}])}});hljs.registerLanguage(\"javascript\",function(e){return{aliases:[\"js\"],k:{keyword:\"in of if for while finally var new function do return void else break catch instanceof with throw case default try this switch continue typeof delete let yield const export super debugger as async await\",literal:\"true false null undefined NaN Infinity\",built_in:\"eval isFinite isNaN parseFloat parseInt decodeURI decodeURIComponent encodeURI encodeURIComponent escape unescape Object Function Boolean Error EvalError InternalError RangeError ReferenceError StopIteration SyntaxError TypeError URIError Number Math Date String RegExp Array Float32Array Float64Array Int16Array Int32Array Int8Array Uint16Array Uint32Array Uint8Array Uint8ClampedArray ArrayBuffer DataView JSON Intl arguments require module console window document Symbol Set Map WeakSet WeakMap Proxy Reflect Promise\"},c:[{cN:\"pi\",r:10,b:/^\\s*['\"]use (strict|asm)['\"]/},e.ASM,e.QSM,{cN:\"string\",b:\"`\",e:\"`\",c:[e.BE,{cN:\"subst\",b:\"\\\\$\\\\{\",e:\"\\\\}\"}]},e.CLCM,e.CBCM,{cN:\"number\",v:[{b:\"\\\\b(0[bB][01]+)\"},{b:\"\\\\b(0[oO][0-7]+)\"},{b:e.CNR}],r:0},{b:\"(\"+e.RSR+\"|\\\\b(case|return|throw)\\\\b)\\\\s*\",k:\"return throw case\",c:[e.CLCM,e.CBCM,e.RM,{b:/</,e:/>\\s*[);\\]]/,r:0,sL:\"xml\"}],r:0},{cN:\"function\",bK:\"function\",e:/\\{/,eE:!0,c:[e.inherit(e.TM,{b:/[A-Za-z$_][0-9A-Za-z$_]*/}),{cN:\"params\",b:/\\(/,e:/\\)/,eB:!0,eE:!0,c:[e.CLCM,e.CBCM]}],i:/\\[|%/},{b:/\\$[(.]/},{b:\"\\\\.\"+e.IR,r:0},{bK:\"import\",e:\"[;$]\",k:\"import from as\",c:[e.ASM,e.QSM]},{cN:\"class\",bK:\"class\",e:/[{;=]/,eE:!0,i:/[:\"\\[\\]]/,c:[{bK:\"extends\"},e.UTM]}],i:/#/}});hljs.registerLanguage(\"ini\",function(e){var c={cN:\"string\",c:[e.BE],v:[{b:\"'''\",e:\"'''\",r:10},{b:'\"\"\"',e:'\"\"\"',r:10},{b:'\"',e:'\"'},{b:\"'\",e:\"'\"}]};return{aliases:[\"toml\"],cI:!0,i:/\\S/,c:[e.C(\";\",\"$\"),e.HCM,{cN:\"title\",b:/^\\s*\\[+/,e:/\\]+/},{cN:\"setting\",b:/^[a-z0-9\\[\\]_-]+\\s*=\\s*/,e:\"$\",c:[{cN:\"value\",eW:!0,k:\"on off true false yes no\",c:[{cN:\"variable\",v:[{b:/\\$[\\w\\d\"][\\w\\d_]*/},{b:/\\$\\{(.*?)}/}]},c,{cN:\"number\",b:/([\\+\\-]+)?[\\d]+_[\\d_]+/},e.NM],r:0}]}]}});hljs.registerLanguage(\"diff\",function(e){return{aliases:[\"patch\"],c:[{cN:\"chunk\",r:10,v:[{b:/^@@ +\\-\\d+,\\d+ +\\+\\d+,\\d+ +@@$/},{b:/^\\*\\*\\* +\\d+,\\d+ +\\*\\*\\*\\*$/},{b:/^\\-\\-\\- +\\d+,\\d+ +\\-\\-\\-\\-$/}]},{cN:\"header\",v:[{b:/Index: /,e:/$/},{b:/=====/,e:/=====$/},{b:/^\\-\\-\\-/,e:/$/},{b:/^\\*{3} /,e:/$/},{b:/^\\+\\+\\+/,e:/$/},{b:/\\*{5}/,e:/\\*{5}$/}]},{cN:\"addition\",b:\"^\\\\+\",e:\"$\"},{cN:\"deletion\",b:\"^\\\\-\",e:\"$\"},{cN:\"change\",b:\"^\\\\!\",e:\"$\"}]}});\nexports.hljs = hljs;\n"
},
"$:/plugins/tiddlywiki/highlight/highlight.css": {
"type": "text/css",
"title": "$:/plugins/tiddlywiki/highlight/highlight.css",
"tags": "[[$:/tags/Stylesheet]]",
"text": "/*\n\nOriginal style from softwaremaniacs.org (c) Ivan Sagalaev <Maniac@SoftwareManiacs.Org>\n\n*/\n\n.hljs {\n display: block;\n overflow-x: auto;\n padding: 0.5em;\n background: #f0f0f0;\n -webkit-text-size-adjust: none;\n}\n\n.hljs,\n.hljs-subst,\n.hljs-tag .hljs-title,\n.nginx .hljs-title {\n color: black;\n}\n\n.hljs-string,\n.hljs-title,\n.hljs-constant,\n.hljs-parent,\n.hljs-tag .hljs-value,\n.hljs-rule .hljs-value,\n.hljs-preprocessor,\n.hljs-pragma,\n.hljs-name,\n.haml .hljs-symbol,\n.ruby .hljs-symbol,\n.ruby .hljs-symbol .hljs-string,\n.hljs-template_tag,\n.django .hljs-variable,\n.smalltalk .hljs-class,\n.hljs-addition,\n.hljs-flow,\n.hljs-stream,\n.bash .hljs-variable,\n.pf .hljs-variable,\n.apache .hljs-tag,\n.apache .hljs-cbracket,\n.tex .hljs-command,\n.tex .hljs-special,\n.erlang_repl .hljs-function_or_atom,\n.asciidoc .hljs-header,\n.markdown .hljs-header,\n.coffeescript .hljs-attribute,\n.tp .hljs-variable {\n color: #800;\n}\n\n.smartquote,\n.hljs-comment,\n.hljs-annotation,\n.diff .hljs-header,\n.hljs-chunk,\n.asciidoc .hljs-blockquote,\n.markdown .hljs-blockquote {\n color: #888;\n}\n\n.hljs-number,\n.hljs-date,\n.hljs-regexp,\n.hljs-literal,\n.hljs-hexcolor,\n.smalltalk .hljs-symbol,\n.smalltalk .hljs-char,\n.go .hljs-constant,\n.hljs-change,\n.lasso .hljs-variable,\n.makefile .hljs-variable,\n.asciidoc .hljs-bullet,\n.markdown .hljs-bullet,\n.asciidoc .hljs-link_url,\n.markdown .hljs-link_url {\n color: #080;\n}\n\n.hljs-label,\n.ruby .hljs-string,\n.hljs-decorator,\n.hljs-filter .hljs-argument,\n.hljs-localvars,\n.hljs-array,\n.hljs-attr_selector,\n.hljs-important,\n.hljs-pseudo,\n.hljs-pi,\n.haml .hljs-bullet,\n.hljs-doctype,\n.hljs-deletion,\n.hljs-envvar,\n.hljs-shebang,\n.apache .hljs-sqbracket,\n.nginx .hljs-built_in,\n.tex .hljs-formula,\n.erlang_repl .hljs-reserved,\n.hljs-prompt,\n.asciidoc .hljs-link_label,\n.markdown .hljs-link_label,\n.vhdl .hljs-attribute,\n.clojure .hljs-attribute,\n.asciidoc .hljs-attribute,\n.lasso .hljs-attribute,\n.coffeescript .hljs-property,\n.hljs-phony {\n color: #88f;\n}\n\n.hljs-keyword,\n.hljs-id,\n.hljs-title,\n.hljs-built_in,\n.css .hljs-tag,\n.hljs-doctag,\n.smalltalk .hljs-class,\n.hljs-winutils,\n.bash .hljs-variable,\n.pf .hljs-variable,\n.apache .hljs-tag,\n.hljs-type,\n.hljs-typename,\n.tex .hljs-command,\n.asciidoc .hljs-strong,\n.markdown .hljs-strong,\n.hljs-request,\n.hljs-status,\n.tp .hljs-data,\n.tp .hljs-io {\n font-weight: bold;\n}\n\n.asciidoc .hljs-emphasis,\n.markdown .hljs-emphasis,\n.tp .hljs-units {\n font-style: italic;\n}\n\n.nginx .hljs-built_in {\n font-weight: normal;\n}\n\n.coffeescript .javascript,\n.javascript .xml,\n.lasso .markup,\n.tex .hljs-formula,\n.xml .javascript,\n.xml .vbscript,\n.xml .css,\n.xml .hljs-cdata {\n opacity: 0.5;\n}\n"
},
"$:/plugins/tiddlywiki/highlight/highlightblock.js": {
"text": "/*\\\ntitle: $:/plugins/tiddlywiki/highlight/highlightblock.js\ntype: application/javascript\nmodule-type: widget\n\nWraps up the fenced code blocks parser for highlight and use in TiddlyWiki5\n\n\\*/\n(function() {\n\n/*jslint node: true, browser: true */\n/*global $tw: false */\n\"use strict\";\n\nvar CodeBlockWidget = require(\"$:/core/modules/widgets/codeblock.js\").codeblock;\n\nvar hljs = require(\"$:/plugins/tiddlywiki/highlight/highlight.js\");\n\nhljs.configure({tabReplace: \" \"});\t\n\nCodeBlockWidget.prototype.postRender = function() {\n\tvar domNode = this.domNodes[0];\n\tif($tw.browser && this.document !== $tw.fakeDocument && this.language) {\n\t\tdomNode.className = this.language.toLowerCase();\n\t\thljs.highlightBlock(domNode);\n\t} else if(!$tw.browser && this.language && this.language.indexOf(\"/\") === -1 ){\n\t\ttry {\n\t\t\tdomNode.className = this.language.toLowerCase() + \" hljs\";\n\t\t\tdomNode.children[0].innerHTML = hljs.fixMarkup(hljs.highlight(this.language, this.getAttribute(\"code\")).value);\n\t\t}\n\t\tcatch(err) {\n\t\t\t// Can't easily tell if a language is registered or not in the packed version of hightlight.js,\n\t\t\t// so we silently fail and the codeblock remains unchanged\n\t\t}\n\t}\t\n};\n\n})();\n",
"title": "$:/plugins/tiddlywiki/highlight/highlightblock.js",
"type": "application/javascript",
"module-type": "widget"
},
"$:/plugins/tiddlywiki/highlight/license": {
"title": "$:/plugins/tiddlywiki/highlight/license",
"type": "text/plain",
"text": "Copyright (c) 2006, Ivan Sagalaev\nAll rights reserved.\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are met:\n\n * Redistributions of source code must retain the above copyright\n notice, this list of conditions and the following disclaimer.\n * Redistributions in binary form must reproduce the above copyright\n notice, this list of conditions and the following disclaimer in the\n documentation and/or other materials provided with the distribution.\n * Neither the name of highlight.js nor the names of its contributors\n may be used to endorse or promote products derived from this software\n without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY\nEXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED\nWARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE\nDISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE FOR ANY\nDIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES\n(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;\nLOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND\nON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS\nSOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n"
},
"$:/plugins/tiddlywiki/highlight/readme": {
"title": "$:/plugins/tiddlywiki/highlight/readme",
"text": "This plugin provides syntax highlighting of code blocks using v8.8.0 of [[highlight.js|https://github.com/isagalaev/highlight.js]] from Ivan Sagalaev.\n\n! Usage\n\nWhen the plugin is installed it automatically applies highlighting to all codeblocks defined with triple backticks or with the CodeBlockWidget.\n\nThe language can optionally be specified after the opening triple braces:\n\n<$codeblock code=\"\"\"```css\n * { margin: 0; padding: 0; } /* micro reset */\n\nhtml { font-size: 62.5%; }\nbody { font-size: 14px; font-size: 1.4rem; } /* =14px */\nh1 { font-size: 24px; font-size: 2.4rem; } /* =24px */\n```\"\"\"/>\n\nIf no language is specified highlight.js will attempt to automatically detect the language.\n\n! Built-in Language Brushes\n\nThe plugin includes support for the following languages (referred to as \"brushes\" by highlight.js):\n\n* apache\n* bash\n* coffeescript\n* cpp\n* cs\n* css\n* diff\n* http\n* ini\n* java\n* javascript\n* json\n* makefile\n* markdown\n* nginx\n* objectivec\n* perl\n* php\n* python\n* ruby\n* sql\n* xml\n\n"
},
"$:/plugins/tiddlywiki/highlight/styles": {
"title": "$:/plugins/tiddlywiki/highlight/styles",
"tags": "[[$:/tags/Stylesheet]]",
"text": ".hljs{display:block;overflow-x:auto;padding:.5em;color:#333;background:#f8f8f8;-webkit-text-size-adjust:none}.hljs-comment,.diff .hljs-header,.hljs-javadoc{color:#998;font-style:italic}.hljs-keyword,.css .rule .hljs-keyword,.hljs-winutils,.nginx .hljs-title,.hljs-subst,.hljs-request,.hljs-status{color:#333;font-weight:bold}.hljs-number,.hljs-hexcolor,.ruby .hljs-constant{color:teal}.hljs-string,.hljs-tag .hljs-value,.hljs-phpdoc,.hljs-dartdoc,.tex .hljs-formula{color:#d14}.hljs-title,.hljs-id,.scss .hljs-preprocessor{color:#900;font-weight:bold}.hljs-list .hljs-keyword,.hljs-subst{font-weight:normal}.hljs-class .hljs-title,.hljs-type,.vhdl .hljs-literal,.tex .hljs-command{color:#458;font-weight:bold}.hljs-tag,.hljs-tag .hljs-title,.hljs-rule .hljs-property,.django .hljs-tag .hljs-keyword{color:navy;font-weight:normal}.hljs-attribute,.hljs-variable,.lisp .hljs-body,.hljs-name{color:teal}.hljs-regexp{color:#009926}.hljs-symbol,.ruby .hljs-symbol .hljs-string,.lisp .hljs-keyword,.clojure .hljs-keyword,.scheme .hljs-keyword,.tex .hljs-special,.hljs-prompt{color:#990073}.hljs-built_in{color:#0086b3}.hljs-preprocessor,.hljs-pragma,.hljs-pi,.hljs-doctype,.hljs-shebang,.hljs-cdata{color:#999;font-weight:bold}.hljs-deletion{background:#fdd}.hljs-addition{background:#dfd}.diff .hljs-change{background:#0086b3}.hljs-chunk{color:#aaa}"
},
"$:/plugins/tiddlywiki/highlight/usage": {
"title": "$:/plugins/tiddlywiki/highlight/usage",
"text": "! Usage\n\nFenced code blocks can have a language specifier added to trigger highlighting in a specific language. Otherwise heuristics are used to detect the language.\n\n```\n ```js\n var a = b + c; // Highlighted as JavaScript\n ```\n```\n! Adding Themes\n\nYou can add themes from highlight.js by copying the CSS to a new tiddler and tagging it with [[$:/tags/Stylesheet]]. The available themes can be found on GitHub:\n\nhttps://github.com/isagalaev/highlight.js/tree/master/src/styles\n"
}
}
}
{
"tiddlers": {
"$:/plugins/wimmoermans/history/fhistory.js": {
"created": "20160511174147745",
"creator": "wjam",
"text": "/*\\\ntitle: $:/plugins/wimmoermans/fhistory.js\ntype: application/javascript\nmodule-type: filteroperator\n\na filter to generate ALL tiddler titles from historylist, \nrepairs escaped characters \" \\\n\nassumptions format of historylist \n \"title\":\\s\"(.*)\" where .* is the title of the visited tiddler\n\n@preserve\n\\*/\n\n (function() {\n \"use strict\";\n exports.fullhistory = function(e, t, i) {\n var o = [],\n match=\"\",\n regexp= \"\",\n ttt=\"\",\n text=\"\";\n regexp = new RegExp(\"\\\"title\\\": \\\"(.+)\\\"\", \"ig\");\n text = $tw.wiki.getTiddlerText(\"$:/HistoryList\");\n while (match = regexp.exec(text)) {\n ttt=match[1].replace(/\\\\\\\"/g,\"\\\"\");\n ttt=ttt.replace(/\\\\\\\\/g,\"\\\\\");\n o.push(ttt); /* oldest first */\n }; /*while*/\n return o;\n }; /* export */\n\n } )();",
"type": "application/javascript",
"title": "$:/plugins/wimmoermans/history/fhistory.js",
"tags": "historyplugin",
"module-type": "filteroperator",
"modifier": "wjam",
"modified": "20160513184814825"
},
"$:/plugins/wimmoermans/history/history.js": {
"created": "20160505064231013",
"creator": "Wim Moermans",
"text": "/*\\\ntitle: $:/plugins/wimmoermans/history.js\ntype: application/javascript\nmodule-type: filteroperator\n\na filter to generate tiddler titles from historylist, reverse order, no duplicates (only most recent), no drafts.\n\nassumptions\n \"title\":\\s\"(.*)\" where .* is the title\n \"Draft of '\" is the prefix for tiddler in edit mode\n\n@preserve\n\\*/\n\n (function() {\n \"use strict\";\n exports.history = function(e, t, i) {\n var results = new Array(),\n o = [],\n match=\"\",\n regexp= \"\",\n text=\"\",\nttt=\"\",\n i=0,\n j=0,\n entries=0,\n found=0;\n regexp = new RegExp(\"\\\"title\\\": \\\"(.+)\\\"\", \"ig\");\n text = $tw.wiki.getTiddlerText(\"$:/HistoryList\");\n while (match = regexp.exec(text)) {\n ttt=match[1].replace(/\\\\\\\"/g,\"\\\"\");\n ttt=ttt.replace(/\\\\\\\\/g,\"\\\\\");\n if (ttt.substr(0, 10) !== \"Draft of '\") {\n results.push(ttt); /* oldest first */\n entries = entries + 1;\n }\n }\n i = entries-1;\n while (i >= 0) {\n j = i + 1;\n found = 0;\n while ((j <= entries) && (found === 0)) {\n if (results[i] === results[j]) {\n found = 1;\n }\n j = j + 1;\n }\n if (found === 0) {\n\n if( results[i] !== \"\"){\n o.push(results[i]);\n }\n }\n i = i - 1;\n };\n return o;\n }\n\n } )();",
"type": "application/javascript",
"title": "$:/plugins/wimmoermans/history/history.js",
"tags": "historyplugin",
"module-type": "filteroperator",
"modifier": "wjam",
"modified": "20160513175106215"
},
"$:/plugins/wimmoermans/history/HistoryTab": {
"created": "20160504135142490",
"creator": "Wim Moermans",
"text": "<small>breadcrumbs:</small>\n\n{{{ [history[]] }}}\n\n\n",
"title": "$:/plugins/wimmoermans/history/HistoryTab",
"tags": "$:/tags/SideBar historyplugin",
"modifier": "wjam",
"modified": "20160507201121730",
"caption": "History"
},
"$:/plugins/wimmoermans/history/HistoryTab2": {
"text": "<$linkcatcher to=\"$:/temp/advancedsearch\">\n\n<<lingo Shadows/Hint>>\n\n<div class=\"tc-search\">\n<$edit-text tiddler=\"$:/temp/advancedsearch\" type=\"search\" tag=\"input\"/>\n<$reveal state=\"$:/temp/advancedsearch\" type=\"nomatch\" text=\"\">\n<$button class=\"tc-btn-invisible\">\n<$action-setfield $tiddler=\"$:/temp/advancedsearch\" $field=\"text\" $value=\"\"/>\n{{$:/core/images/close-button}}\n</$button>\n</$reveal>\n</div>\n\n</$linkcatcher>\n{{{ [history[]search{$:/temp/advancedsearch}limit[26]] }}}\n",
"title": "$:/plugins/wimmoermans/history/HistoryTab2",
"tags": "$:/tags/AdvancedSearch historyplugin",
"modifier": "wjam",
"modified": "20160507171948465",
"creator": "Wim Moermans",
"created": "20160505094007336",
"caption": "History2"
},
"$:/plugins/wimmoermans/history/icon": {
"created": "20160508110003253",
"title": "$:/plugins/wimmoermans/history/icon",
"type": "image/svg+xml",
"text": "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?>\n<!-- Created with Inkscape (http://www.inkscape.org/) -->\n\n<svg\n xmlns:dc=\"http://purl.org/dc/elements/1.1/\"\n xmlns:cc=\"http://creativecommons.org/ns#\"\n xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\"\n xmlns:svg=\"http://www.w3.org/2000/svg\"\n xmlns=\"http://www.w3.org/2000/svg\"\n xmlns:xlink=\"http://www.w3.org/1999/xlink\"\n xmlns:sodipodi=\"http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd\"\n xmlns:inkscape=\"http://www.inkscape.org/namespaces/inkscape\"\n width=\"22\"\n height=\"21\"\n id=\"svg4046\"\n version=\"1.1\"\n inkscape:version=\"0.47 r22583\"\n sodipodi:docname=\"cat backtrack red sign 22x21.svg\">\n <defs\n id=\"defs4048\">\n <linearGradient\n inkscape:collect=\"always\"\n id=\"linearGradient3600\">\n <stop\n style=\"stop-color:#ff0024;stop-opacity:1;\"\n offset=\"0\"\n id=\"stop3602\" />\n <stop\n style=\"stop-color:#ff0024;stop-opacity:0;\"\n offset=\"1\"\n id=\"stop3604\" />\n </linearGradient>\n <inkscape:perspective\n sodipodi:type=\"inkscape:persp3d\"\n inkscape:vp_x=\"0 : 526.18109 : 1\"\n inkscape:vp_y=\"0 : 1000 : 0\"\n inkscape:vp_z=\"744.09448 : 526.18109 : 1\"\n inkscape:persp3d-origin=\"372.04724 : 350.78739 : 1\"\n id=\"perspective4054\" />\n <inkscape:perspective\n id=\"perspective4064\"\n inkscape:persp3d-origin=\"0.5 : 0.33333333 : 1\"\n inkscape:vp_z=\"1 : 0.5 : 1\"\n inkscape:vp_y=\"0 : 1000 : 0\"\n inkscape:vp_x=\"0 : 0.5 : 1\"\n sodipodi:type=\"inkscape:persp3d\" />\n <linearGradient\n inkscape:collect=\"always\"\n xlink:href=\"#linearGradient3600\"\n id=\"linearGradient3606\"\n x1=\"-17.230652\"\n y1=\"4.6165885\"\n x2=\"-3.4143419\"\n y2=\"4.6165885\"\n gradientUnits=\"userSpaceOnUse\" />\n </defs>\n <sodipodi:namedview\n id=\"base\"\n pagecolor=\"#ffffff\"\n bordercolor=\"#666666\"\n borderopacity=\"1.0\"\n inkscape:pageopacity=\"0.0\"\n inkscape:pageshadow=\"2\"\n inkscape:zoom=\"31.678384\"\n inkscape:cx=\"9.633971\"\n inkscape:cy=\"9.3724875\"\n inkscape:document-units=\"px\"\n inkscape:current-layer=\"layer1\"\n showgrid=\"false\"\n inkscape:window-width=\"1690\"\n inkscape:window-height=\"1181\"\n inkscape:window-x=\"-5\"\n inkscape:window-y=\"-5\"\n inkscape:window-maximized=\"1\" />\n <metadata\n id=\"metadata4051\">\n <rdf:RDF>\n <cc:Work\n rdf:about=\"\">\n <dc:format>image/svg+xml</dc:format>\n <dc:type\n rdf:resource=\"http://purl.org/dc/dcmitype/StillImage\" />\n <dc:title></dc:title>\n </cc:Work>\n </rdf:RDF>\n </metadata>\n <g\n inkscape:label=\"Layer 1\"\n inkscape:groupmode=\"layer\"\n id=\"layer1\"\n transform=\"translate(-12.564828,-228.71506)\">\n <path\n sodipodi:type=\"arc\"\n style=\"fill:#fcfcfc;fill-opacity:1;fill-rule:nonzero\"\n id=\"path6042\"\n sodipodi:cx=\"-1.4836615\"\n sodipodi:cy=\"-1.6968651\"\n sodipodi:rx=\"6.976366\"\n sodipodi:ry=\"6.8500967\"\n d=\"m 5.4927045,-1.6968651 a 6.976366,6.8500967 0 1 1 -13.9527321,0 6.976366,6.8500967 0 1 1 13.9527321,0 z\"\n transform=\"matrix(1.2556561,0,0,1.2788018,25.334287,241.26263)\" />\n <path\n id=\"path6044\"\n d=\"m 30.4446,244.31397 c 0.310834,-0.28767 0.606444,-0.65004 0.656841,-0.80533 0.226308,-0.69733 -1.75153,-1.35182 -2.563323,-0.84824 -0.640438,0.39727 -1.154161,1.973 -0.807158,2.47583 0.257232,0.37275 0.420332,0.39322 1.137559,0.14288 0.460496,-0.16076 0.876334,-0.32601 0.924074,-0.36721 0.04729,-0.042 0.341159,-0.31027 0.65198,-0.59797 l 2.7e-5,4e-5 z m 0.597108,-2.74293 c 0.09612,-0.164 0.0099,-0.46244 -0.199577,-0.69068 -0.46117,-0.50252 -1.166755,-0.22586 
-1.371622,0.53779 -0.138492,0.51626 -0.124003,0.53781 0.418457,0.62237 0.608375,0.0949 0.889192,-0.0195 1.152742,-0.46948 z m -3.686825,2.07878 c 0.168572,-0.62841 -0.06485,-0.93373 -0.745912,-0.97577 -0.770729,-0.0477 -1.241044,0.64384 -0.836604,1.22992 0.512291,0.74232 1.35136,0.60756 1.582532,-0.25415 l -1.6e-5,0 z m 1.462533,-2.12446 0.185272,-0.64054 -0.625699,-0.0677 c -0.706134,-0.0764 -0.924717,0.0207 -1.305369,0.57977 -0.335314,0.49243 -0.04392,0.93382 0.644496,0.97629 0.707662,0.0437 0.882331,-0.0906 1.101289,-0.84784 l 1.1e-5,-4e-5 z m -7.797022,1.15185 c 0.76937,-0.85185 0.741916,-1.28981 -0.106461,-1.69843 -0.998166,-0.48078 -1.914981,-0.37475 -2.454339,0.28389 -0.516439,0.63069 -0.583894,1.63687 -0.151704,2.26314 0.51397,0.74476 1.572512,0.41361 2.712495,-0.8486 z m -3.48099,-0.42697 c 0.0896,-0.69621 -0.04686,-0.87565 -0.696238,-0.91572 -1.139297,-0.0703 -1.566432,0.84984 -0.702808,1.51406 0.586816,0.4513 1.303444,0.14483 1.399073,-0.59834 l -2.7e-5,0 z m 3.354628,-2.52461 c 0.149115,-0.45951 -0.275478,-0.99883 -0.833864,-1.05921 -0.603977,-0.0653 -0.7421,0.0289 -0.89905,0.61367 -0.166828,0.62185 0.06374,0.9337 0.720441,0.97418 0.628634,0.0389 0.868921,-0.0867 1.012367,-0.52882 l 1.06e-4,1.8e-4 z m -2.408088,0.34458 c 0.112063,-0.75445 -0.0033,-0.89128 -0.721233,-0.85538 -0.828289,0.0413 -1.07332,0.23945 -1.137099,0.9192 -0.05268,0.56122 -0.02343,0.59189 0.653277,0.68515 0.878304,0.12109 1.095906,-0.0141 1.204881,-0.74921 l 1.74e-4,2.4e-4 z m 5.888163,-5.33851 c 0.142599,-0.43933 -0.245444,-0.96317 -1.034761,-1.39674 -0.659415,-0.36226 -1.526134,-0.27635 -1.956444,0.1939 -0.468183,0.51161 -0.852424,1.97658 -0.610417,2.32725 0.48829,0.70756 3.291025,-0.16736 3.601586,-1.12433 l 3.6e-5,-8e-5 z m 0.05327,-2.11052 c 0.567019,-0.52796 -0.337185,-1.89786 -1.117088,-1.69249 -0.480085,0.12648 -0.794832,1.02942 -0.505121,1.44923 0.309844,0.44897 1.249847,0.58994 1.622222,0.24325 l -1.3e-5,1e-5 z m -3.840095,1.12289 c 0.05032,-0.53627 0.0115,-0.59251 -0.526932,-0.76354 -0.319703,-0.10149 -0.703975,-0.10859 -0.853942,-0.0154 -0.412123,0.25566 -0.580704,0.98457 -0.316321,1.36768 0.511143,0.74066 1.608153,0.36021 1.697198,-0.58862 l -3e-6,-7e-5 z m 1.399399,-1.72835 c 0.13752,-0.4755 0.08353,-0.73271 -0.201357,-0.9592 -0.777497,-0.6182 -2.043348,0.0734 -1.830727,1.00011 0.08032,0.34992 1.408324,0.87902 1.720388,0.68544 0.06804,-0.0423 0.208269,-0.3691 0.311631,-0.72643 l 6.5e-5,8e-5 z\"\n style=\"fill:#000000\"\n sodipodi:nodetypes=\"cssssscccsssccsscccccsscccsssccsscccssscccssscccssscccsscccssscccssscc\" />\n </g>\n</svg>\n",
"modified": "20160508110047926"
},
"$:/plugins/wimmoermans/history/readme": {
"created": "20160505113313287",
"creator": "wjam",
"text": "!!history filter\nTom Tumb (Dutch: Klein Duimpje).used breadcrumbs because he didn't want to get lost in the woods. \n\nWhen reading or editing a large ~TiddlyWiki you sometimes get lost and revisit tiddlers over and over. This plugin ''automatically creates a list of all visited tiddlers'', and allows you to answer questions like \"Where did I read that?\", \"Did I update tiddler XXYY already?\", \"Which system tiddlers did I view/edit?\" \"Which tiddlers did I rename/delete?\". \n\n!!functionality \n\n*The ''plugin/filter'' generates the tiddlers which you visited since loading the ~TiddlyWiki. \n*Like ~OpenTab all* tiddlers from the story river are shown in the history. When you ''close a tiddler'' it is removed from the ~OpenTab but is ''still present in the the history''. \n*Tiddler titles which were opened using tiddlers like $:/DefaultTiddlers are not present in the history.\n*Like ~RecentTab, the history list includes the tiddlers you created or modified during this session. When you ''delete or rename'' a tiddler during your session the old tiddler title will be in ''//italics//''. \n\ncompare ''history[] and ''fullhistory[]\n\n| |''history[]''|fullhistory[]|\n| most recent visited tiddlers|''most recent visited appear first''|most recent appear last|\n| Draft titles|''drafts not included ''|all drafts ^^dangerous[1]^^|\n| visited multiple times|''no duplicates, only most recent title''|includes all duplicates|\n| usage|normal use|advanced use only|\n\n!!examples\n\nTo display all visited tiddlers so far use\n\n ``{{{[history[]]}}}`` \n\nYou can sort the list alphabetically, ''search'' the tiddlers and ''limit'' the number of results to 25. e.g.\n\n``{{{[history[]search{$:/temp/search}limit[25]]}}}``\n\nif you want to exclude system tiddlers from the list:\n\n``{{{[history[]!is[system]]}}}``\n\nIf you want modified but possibly not yet saved tiddlers (incl renamed, deleted but excluding Draft. \n\n``{{{[history[]haschanged[]]}}}``\n\n''fullhistory[]'' is only included for //advanced users//. To generate the same list as history[] you would have to write \n``{{{[fullhistory[]!prefix[Draft]reverse[]unique[]]}}}`` ^^[2]^^\n\n!!how to install \n\n''Drag'' the link $:/plugins/wimmoermans/history to your wiki, ''import'' the tiddler and ''save'' your wiki, then ''LOAD'' the newly saved wiki.\nOr ''open'' the history tiddler in this wiki and use the top right icon ''V'', ''export tiddler'', ''JSON file'' to save the tiddler to disk, then in your wiki in the sidebar use ''Tools'', ''import (paperclip)'' to import the JSON file you just saved, ''save'' your wiki, ''LOAD'' the saved wiki.\n\n# history filter <br>[[$:/plugins/wimmoermans/history/history.js]]\n\n#fullhistory filter <br>[[$:/plugins/wimmoermans/history/fhistory.js]]\n\n#History tab in the Sidebar.<br>[[$:/plugins/wimmoermans/history/HistoryTab]]<br><small>(to disable remove the ~$:/tags/SideBar tag)</small>\n# History2 tab for advanced seard tiddler <br>[[$:/plugins/wimmoermans/history/HistoryTab2]]<br><small>(to disable remove the ~$:/tags/AdvancedSearch tag)</small>\n#$:/plugins/wimmoermans/history/readme this tiddler\n# $:/plugins/wimmoermans/history/icon three cat paw prints (by Motovun ?)\n\n!!Google plus forum to discuss the history filters\nhttps://groups.google.com/forum/#!topic/tiddlywiki/u4lN-olqnPc\n\n\n!! ~TiddlyWiki version compatibility [3]\nhistory and fullhistory were tested on version 5.1.12 pre-release, 5.1.11, 5.1.9, 5.0.8-beta. 
For 5.0.8-beta the tab-example tiddlers require manually adding the field named 'caption' value 'History' and 'History2' to present the Tab captions.\n\n!!notes/warning\n[1] clicking on ''Draft'' titles in the history is ''dangerous'' especially when the tiddler is already open.\n\n[2] ''unique[]'' is a undocumented filter present in ~TiddlyWiki boot.js.\n\n[3] history scan the $:/HistoryList tiddler for \"title\"://single space//\"//tiddler title//\" and displays the //tiddler title// value. It correctly handles double quote and backslahs in tiddler titles.\n",
"title": "$:/plugins/wimmoermans/history/readme",
"tags": "historyplugin sh",
"modifier": "wjam",
"modified": "20160514063831746"
}
}
}
<small>breadcrumbs:</small>
{{{ [history[]] }}}
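<small>a possible variant (a sketch combining the plugin's history[] filter with the !is[system] and limit operators shown in its readme) that hides system tiddlers and keeps only the 25 most recent:</small>
{{{ [history[]!is[system]limit[25]] }}}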
\rules except wikilink
Proceedings of INTERSPEECH 2021
\rules except wikilink
INTERSPEECH 2021
{
"tiddlers": {
"$:/info/browser": {
"title": "$:/info/browser",
"text": "yes"
},
"$:/info/node": {
"title": "$:/info/node",
"text": "no"
}
}
}
{
"tiddlers": {
"$:/themes/tiddlywiki/snowwhite/base": {
"title": "$:/themes/tiddlywiki/snowwhite/base",
"tags": "[[$:/tags/Stylesheet]]",
"text": "\\rules only filteredtranscludeinline transcludeinline macrodef macrocallinline\n\n.tc-sidebar-header {\n\ttext-shadow: 0 1px 0 <<colour sidebar-foreground-shadow>>;\n}\n\n.tc-tiddler-info {\n\t<<box-shadow \"inset 1px 2px 3px rgba(0,0,0,0.1)\">>\n}\n\n@media screen {\n\t.tc-tiddler-frame {\n\t\t<<box-shadow \"1px 1px 5px rgba(0, 0, 0, 0.3)\">>\n\t}\n}\n\n@media (max-width: {{$:/themes/tiddlywiki/vanilla/metrics/sidebarbreakpoint}}) {\n\t.tc-tiddler-frame {\n\t\t<<box-shadow none>>\n\t}\n}\n\n.tc-page-controls button svg, .tc-tiddler-controls button svg, .tc-topbar button svg {\n\t<<transition \"fill 150ms ease-in-out\">>\n}\n\n.tc-tiddler-controls button.tc-selected,\n.tc-page-controls button.tc-selected {\n\t<<filter \"drop-shadow(0px -1px 2px rgba(0,0,0,0.25))\">>\n}\n\n.tc-tiddler-frame input.tc-edit-texteditor {\n\t<<box-shadow \"inset 0 1px 8px rgba(0, 0, 0, 0.15)\">>\n}\n\n.tc-edit-tags {\n\t<<box-shadow \"inset 0 1px 8px rgba(0, 0, 0, 0.15)\">>\n}\n\n.tc-tiddler-frame .tc-edit-tags input.tc-edit-texteditor {\n\t<<box-shadow \"none\">>\n\tborder: none;\n\toutline: none;\n}\n\ncanvas.tc-edit-bitmapeditor {\n\t<<box-shadow \"2px 2px 5px rgba(0, 0, 0, 0.5)\">>\n}\n\n.tc-drop-down {\n\tborder-radius: 4px;\n\t<<box-shadow \"2px 2px 10px rgba(0, 0, 0, 0.5)\">>\n}\n\n.tc-block-dropdown {\n\tborder-radius: 4px;\n\t<<box-shadow \"2px 2px 10px rgba(0, 0, 0, 0.5)\">>\n}\n\n.tc-modal {\n\tborder-radius: 6px;\n\t<<box-shadow \"0 3px 7px rgba(0,0,0,0.3)\">>\n}\n\n.tc-modal-footer {\n\tborder-radius: 0 0 6px 6px;\n\t<<box-shadow \"inset 0 1px 0 #fff\">>;\n}\n\n\n.tc-alert {\n\tborder-radius: 6px;\n\t<<box-shadow \"0 3px 7px rgba(0,0,0,0.6)\">>\n}\n\n.tc-notification {\n\tborder-radius: 6px;\n\t<<box-shadow \"0 3px 7px rgba(0,0,0,0.3)\">>\n\ttext-shadow: 0 1px 0 rgba(255,255,255, 0.8);\n}\n\n.tc-sidebar-lists .tc-tab-set .tc-tab-divider {\n\tborder-top: none;\n\theight: 1px;\n\t<<background-linear-gradient \"left, rgba(0,0,0,0.15) 0%, rgba(0,0,0,0.0) 100%\">>\n}\n\n.tc-more-sidebar .tc-tab-buttons button {\n\t<<background-linear-gradient \"left, rgba(0,0,0,0.01) 0%, rgba(0,0,0,0.1) 100%\">>\n}\n\n.tc-more-sidebar .tc-tab-buttons button.tc-tab-selected {\n\t<<background-linear-gradient \"left, rgba(0,0,0,0.05) 0%, rgba(255,255,255,0.05) 100%\">>\n}\n\n.tc-message-box img {\n\t<<box-shadow \"1px 1px 3px rgba(0,0,0,0.5)\">>\n}\n\n.tc-plugin-info {\n\t<<box-shadow \"1px 1px 3px rgba(0,0,0,0.5)\">>\n}\n"
}
}
}
{
"tiddlers": {
"$:/themes/tiddlywiki/vanilla/base": {
"title": "$:/themes/tiddlywiki/vanilla/base",
"tags": "[[$:/tags/Stylesheet]]",
"text": "\\define custom-background-datauri()\n<$set name=\"background\" value={{$:/themes/tiddlywiki/vanilla/settings/backgroundimage}}>\n<$list filter=\"[<background>is[image]]\">\n`background: url(`\n<$list filter=\"[<background>!has[_canonical_uri]]\">\n<$macrocall $name=\"datauri\" title={{$:/themes/tiddlywiki/vanilla/settings/backgroundimage}}/>\n</$list>\n<$list filter=\"[<background>has[_canonical_uri]]\">\n<$view tiddler={{$:/themes/tiddlywiki/vanilla/settings/backgroundimage}} field=\"_canonical_uri\"/>\n</$list>\n`) center center;`\n`background-attachment: `{{$:/themes/tiddlywiki/vanilla/settings/backgroundimageattachment}}`;\n-webkit-background-size:` {{$:/themes/tiddlywiki/vanilla/settings/backgroundimagesize}}`;\n-moz-background-size:` {{$:/themes/tiddlywiki/vanilla/settings/backgroundimagesize}}`;\n-o-background-size:` {{$:/themes/tiddlywiki/vanilla/settings/backgroundimagesize}}`;\nbackground-size:` {{$:/themes/tiddlywiki/vanilla/settings/backgroundimagesize}}`;`\n</$list>\n</$set>\n\\end\n\n\\define if-fluid-fixed(text,hiddenSidebarText)\n<$reveal state=\"$:/themes/tiddlywiki/vanilla/options/sidebarlayout\" type=\"match\" text=\"fluid-fixed\">\n$text$\n<$reveal state=\"$:/state/sidebar\" type=\"nomatch\" text=\"yes\" default=\"yes\">\n$hiddenSidebarText$\n</$reveal>\n</$reveal>\n\\end\n\n\\rules only filteredtranscludeinline transcludeinline macrodef macrocallinline macrocallblock\n\n/*\n** Start with the normalize CSS reset, and then belay some of its effects\n*/\n\n{{$:/themes/tiddlywiki/vanilla/reset}}\n\n*, input[type=\"search\"] {\n\tbox-sizing: border-box;\n\t-moz-box-sizing: border-box;\n\t-webkit-box-sizing: border-box;\n}\n\nhtml button {\n\tline-height: 1.2;\n\tcolor: <<colour button-foreground>>;\n\tbackground: <<colour button-background>>;\n\tborder-color: <<colour button-border>>;\n}\n\n/*\n** Basic element styles\n*/\n\nhtml {\n\tfont-family: {{$:/themes/tiddlywiki/vanilla/settings/fontfamily}};\n\ttext-rendering: optimizeLegibility; /* Enables kerning and ligatures etc. 
*/\n\t-webkit-font-smoothing: antialiased;\n\t-moz-osx-font-smoothing: grayscale;\n}\n\nhtml:-webkit-full-screen {\n\tbackground-color: <<colour page-background>>;\n}\n\nbody.tc-body {\n\tfont-size: {{$:/themes/tiddlywiki/vanilla/metrics/fontsize}};\n\tline-height: {{$:/themes/tiddlywiki/vanilla/metrics/lineheight}};\n\tcolor: <<colour foreground>>;\n\tbackground-color: <<colour page-background>>;\n\tfill: <<colour foreground>>;\n\tword-wrap: break-word;\n\t<<custom-background-datauri>>\n}\n\nh1, h2, h3, h4, h5, h6 {\n\tline-height: 1.2;\n\tfont-weight: 300;\n}\n\npre {\n\tdisplay: block;\n\tpadding: 14px;\n\tmargin-top: 1em;\n\tmargin-bottom: 1em;\n\tword-break: normal;\n\tword-wrap: break-word;\n\twhite-space: {{$:/themes/tiddlywiki/vanilla/options/codewrapping}};\n\tbackground-color: <<colour pre-background>>;\n\tborder: 1px solid <<colour pre-border>>;\n\tpadding: 0 3px 2px;\n\tborder-radius: 3px;\n\tfont-family: {{$:/themes/tiddlywiki/vanilla/settings/codefontfamily}};\n}\n\ncode {\n\tcolor: <<colour code-foreground>>;\n\tbackground-color: <<colour code-background>>;\n\tborder: 1px solid <<colour code-border>>;\n\twhite-space: {{$:/themes/tiddlywiki/vanilla/options/codewrapping}};\n\tpadding: 0 3px 2px;\n\tborder-radius: 3px;\n\tfont-family: {{$:/themes/tiddlywiki/vanilla/settings/codefontfamily}};\n}\n\nblockquote {\n\tborder-left: 5px solid <<colour blockquote-bar>>;\n\tmargin-left: 25px;\n\tpadding-left: 10px;\n}\n\ndl dt {\n\tfont-weight: bold;\n\tmargin-top: 6px;\n}\n\ntextarea,\ninput[type=text],\ninput[type=search],\ninput[type=\"\"],\ninput:not([type]) {\n\tcolor: <<colour foreground>>;\n\tbackground: <<colour background>>;\n}\n\n.tc-muted {\n\tcolor: <<colour muted-foreground>>;\n}\n\nsvg.tc-image-button {\n\tpadding: 0px 1px 1px 0px;\n}\n\nkbd {\n\tdisplay: inline-block;\n\tpadding: 3px 5px;\n\tfont-size: 0.8em;\n\tline-height: 1.2;\n\tcolor: <<colour foreground>>;\n\tvertical-align: middle;\n\tbackground-color: <<colour background>>;\n\tborder: solid 1px <<colour muted-foreground>>;\n\tborder-bottom-color: <<colour muted-foreground>>;\n\tborder-radius: 3px;\n\tbox-shadow: inset 0 -1px 0 <<colour muted-foreground>>;\n}\n\n/*\nMarkdown likes putting code elements inside pre elements\n*/\npre > code {\n\tpadding: 0;\n\tborder: none;\n\tbackground-color: inherit;\n\tcolor: inherit;\n}\n\ntable {\n\tborder: 1px solid <<colour table-border>>;\n\twidth: auto;\n\tmax-width: 100%;\n\tcaption-side: bottom;\n\tmargin-top: 1em;\n\tmargin-bottom: 1em;\n}\n\ntable th, table td {\n\tpadding: 0 7px 0 7px;\n\tborder-top: 1px solid <<colour table-border>>;\n\tborder-left: 1px solid <<colour table-border>>;\n}\n\ntable thead tr td, table th {\n\tbackground-color: <<colour table-header-background>>;\n\tfont-weight: bold;\n}\n\ntable tfoot tr td {\n\tbackground-color: <<colour table-footer-background>>;\n}\n\n.tc-csv-table {\n\twhite-space: nowrap;\n}\n\n.tc-tiddler-frame img,\n.tc-tiddler-frame svg,\n.tc-tiddler-frame canvas,\n.tc-tiddler-frame embed,\n.tc-tiddler-frame iframe {\n\tmax-width: 100%;\n}\n\n.tc-tiddler-body > embed,\n.tc-tiddler-body > iframe {\n\twidth: 100%;\n\theight: 600px;\n}\n\n/*\n** Links\n*/\n\nbutton.tc-tiddlylink,\na.tc-tiddlylink {\n\ttext-decoration: none;\n\tfont-weight: normal;\n\tcolor: <<colour tiddler-link-foreground>>;\n\t-webkit-user-select: inherit; /* Otherwise the draggable attribute makes links impossible to select */\n}\n\n.tc-sidebar-lists a.tc-tiddlylink {\n\tcolor: <<colour sidebar-tiddler-link-foreground>>;\n}\n\n.tc-sidebar-lists 
a.tc-tiddlylink:hover {\n\tcolor: <<colour sidebar-tiddler-link-foreground-hover>>;\n}\n\nbutton.tc-tiddlylink:hover,\na.tc-tiddlylink:hover {\n\ttext-decoration: underline;\n}\n\na.tc-tiddlylink-resolves {\n}\n\na.tc-tiddlylink-shadow {\n\tfont-weight: bold;\n}\n\na.tc-tiddlylink-shadow.tc-tiddlylink-resolves {\n\tfont-weight: normal;\n}\n\na.tc-tiddlylink-missing {\n\tfont-style: italic;\n}\n\na.tc-tiddlylink-external {\n\ttext-decoration: underline;\n\tcolor: <<colour external-link-foreground>>;\n\tbackground-color: <<colour external-link-background>>;\n}\n\na.tc-tiddlylink-external:visited {\n\tcolor: <<colour external-link-foreground-visited>>;\n\tbackground-color: <<colour external-link-background-visited>>;\n}\n\na.tc-tiddlylink-external:hover {\n\tcolor: <<colour external-link-foreground-hover>>;\n\tbackground-color: <<colour external-link-background-hover>>;\n}\n\n/*\n** Drag and drop styles\n*/\n\n.tc-tiddler-dragger {\n\tposition: relative;\n\tz-index: -10000;\n}\n\n.tc-tiddler-dragger-inner {\n\tposition: absolute;\n\tdisplay: inline-block;\n\tpadding: 8px 20px;\n\tfont-size: 16.9px;\n\tfont-weight: bold;\n\tline-height: 20px;\n\tcolor: <<colour dragger-foreground>>;\n\ttext-shadow: 0 1px 0 rgba(0, 0, 0, 1);\n\twhite-space: nowrap;\n\tvertical-align: baseline;\n\tbackground-color: <<colour dragger-background>>;\n\tborder-radius: 20px;\n}\n\n.tc-tiddler-dragger-cover {\n\tposition: absolute;\n\tbackground-color: <<colour page-background>>;\n}\n\n.tc-dropzone {\n\tposition: relative;\n}\n\n.tc-dropzone.tc-dragover:before {\n\tz-index: 10000;\n\tdisplay: block;\n\tposition: fixed;\n\ttop: 0;\n\tleft: 0;\n\tright: 0;\n\tbackground: <<colour dropzone-background>>;\n\ttext-align: center;\n\tcontent: \"<<lingo DropMessage>>\";\n}\n\n/*\n** Plugin reload warning\n*/\n\n.tc-plugin-reload-warning {\n\tz-index: 1000;\n\tdisplay: block;\n\tposition: fixed;\n\ttop: 0;\n\tleft: 0;\n\tright: 0;\n\tbackground: <<colour alert-background>>;\n\ttext-align: center;\n}\n\n/*\n** Buttons\n*/\n\nbutton svg, button img, label svg, label img {\n\tvertical-align: middle;\n}\n\n.tc-btn-invisible {\n\tpadding: 0;\n\tmargin: 0;\n\tbackground: none;\n\tborder: none;\n}\n\n.tc-btn-boxed {\n\tfont-size: 0.6em;\n\tpadding: 0.2em;\n\tmargin: 1px;\n\tbackground: none;\n\tborder: 1px solid <<colour tiddler-controls-foreground>>;\n\tborder-radius: 0.25em;\n}\n\nhtml body.tc-body .tc-btn-boxed svg {\n\tfont-size: 1.6666em;\n}\n\n.tc-btn-boxed:hover {\n\tbackground: <<colour muted-foreground>>;\n\tcolor: <<colour background>>;\n}\n\nhtml body.tc-body .tc-btn-boxed:hover svg {\n\tfill: <<colour background>>;\n}\n\n.tc-btn-rounded {\n\tfont-size: 0.5em;\n\tline-height: 2;\n\tpadding: 0em 0.3em 0.2em 0.4em;\n\tmargin: 1px;\n\tborder: 1px solid <<colour muted-foreground>>;\n\tbackground: <<colour muted-foreground>>;\n\tcolor: <<colour background>>;\n\tborder-radius: 2em;\n}\n\nhtml body.tc-body .tc-btn-rounded svg {\n\tfont-size: 1.6666em;\n\tfill: <<colour background>>;\n}\n\n.tc-btn-rounded:hover {\n\tborder: 1px solid <<colour muted-foreground>>;\n\tbackground: <<colour background>>;\n\tcolor: <<colour muted-foreground>>;\n}\n\nhtml body.tc-body .tc-btn-rounded:hover svg {\n\tfill: <<colour muted-foreground>>;\n}\n\n.tc-btn-icon svg {\n\theight: 1em;\n\twidth: 1em;\n\tfill: <<colour muted-foreground>>;\n}\n\n.tc-btn-text {\n\tpadding: 0;\n\tmargin: 0;\n}\n\n.tc-btn-big-green {\n\tdisplay: inline-block;\n\tpadding: 8px;\n\tmargin: 4px 8px 4px 8px;\n\tbackground: <<colour download-background>>;\n\tcolor: <<colour 
download-foreground>>;\n\tfill: <<colour download-foreground>>;\n\tborder: none;\n\tfont-size: 1.2em;\n\tline-height: 1.4em;\n\ttext-decoration: none;\n}\n\n.tc-btn-big-green svg,\n.tc-btn-big-green img {\n\theight: 2em;\n\twidth: 2em;\n\tvertical-align: middle;\n\tfill: <<colour download-foreground>>;\n}\n\n.tc-sidebar-lists input {\n\tcolor: <<colour foreground>>;\n}\n\n.tc-sidebar-lists button {\n\tcolor: <<colour sidebar-button-foreground>>;\n\tfill: <<colour sidebar-button-foreground>>;\n}\n\n.tc-sidebar-lists button.tc-btn-mini {\n\tcolor: <<colour sidebar-muted-foreground>>;\n}\n\n.tc-sidebar-lists button.tc-btn-mini:hover {\n\tcolor: <<colour sidebar-muted-foreground-hover>>;\n}\n\nbutton svg.tc-image-button, button .tc-image-button img {\n\theight: 1em;\n\twidth: 1em;\n}\n\n.tc-unfold-banner {\n\tposition: absolute;\n\tpadding: 0;\n\tmargin: 0;\n\tbackground: none;\n\tborder: none;\n\twidth: 100%;\n\twidth: calc(100% + 2px);\n\tmargin-left: -43px;\n\ttext-align: center;\n\tborder-top: 2px solid <<colour tiddler-info-background>>;\n\tmargin-top: 4px;\n}\n\n.tc-unfold-banner:hover {\n\tbackground: <<colour tiddler-info-background>>;\n\tborder-top: 2px solid <<colour tiddler-info-border>>;\n}\n\n.tc-unfold-banner svg, .tc-fold-banner svg {\n\theight: 0.75em;\n\tfill: <<colour tiddler-controls-foreground>>;\n}\n\n.tc-unfold-banner:hover svg, .tc-fold-banner:hover svg {\n\tfill: <<colour tiddler-controls-foreground-hover>>;\n}\n\n.tc-fold-banner {\n\tposition: absolute;\n\tpadding: 0;\n\tmargin: 0;\n\tbackground: none;\n\tborder: none;\n\twidth: 23px;\n\ttext-align: center;\n\tmargin-left: -35px;\n\ttop: 6px;\n\tbottom: 6px;\n}\n\n.tc-fold-banner:hover {\n\tbackground: <<colour tiddler-info-background>>;\n}\n\n@media (max-width: {{$:/themes/tiddlywiki/vanilla/metrics/sidebarbreakpoint}}) {\n\n\t.tc-unfold-banner {\n\t\tposition: static;\n\t\twidth: calc(100% + 59px);\n\t}\n\n\t.tc-fold-banner {\n\t\twidth: 16px;\n\t\tmargin-left: -16px;\n\t\tfont-size: 0.75em;\n\t}\n\n}\n\n/*\n** Tags and missing tiddlers\n*/\n\n.tc-tag-list-item {\n\tposition: relative;\n\tdisplay: inline-block;\n\tmargin-right: 7px;\n}\n\n.tc-tags-wrapper {\n\tmargin: 4px 0 14px 0;\n}\n\n.tc-missing-tiddler-label {\n\tfont-style: italic;\n\tfont-weight: normal;\n\tdisplay: inline-block;\n\tfont-size: 11.844px;\n\tline-height: 14px;\n\twhite-space: nowrap;\n\tvertical-align: baseline;\n}\n\nbutton.tc-tag-label, span.tc-tag-label {\n\tdisplay: inline-block;\n\tpadding: 0.16em 0.7em;\n\tfont-size: 0.9em;\n\tfont-weight: 300;\n\tline-height: 1.2em;\n\tcolor: <<colour tag-foreground>>;\n\twhite-space: nowrap;\n\tvertical-align: baseline;\n\tbackground-color: <<colour tag-background>>;\n\tborder-radius: 1em;\n}\n\n.tc-untagged-separator {\n\twidth: 10em;\n\tleft: 0;\n\tmargin-left: 0;\n\tborder: 0;\n\theight: 1px;\n\tbackground: <<colour tab-divider>>;\n}\n\nbutton.tc-untagged-label {\n\tbackground-color: <<colour untagged-background>>;\n}\n\n.tc-tag-label svg, .tc-tag-label img {\n\theight: 1em;\n\twidth: 1em;\n\tfill: <<colour tag-foreground>>;\n}\n\n.tc-tag-manager-table .tc-tag-label {\n\twhite-space: normal;\n}\n\n.tc-tag-manager-tag {\n\twidth: 100%;\n}\n\n/*\n** Page layout\n*/\n\n.tc-topbar {\n\tposition: fixed;\n\tz-index: 1200;\n}\n\n.tc-topbar-left {\n\tleft: 29px;\n\ttop: 5px;\n}\n\n.tc-topbar-right {\n\ttop: 5px;\n\tright: 29px;\n}\n\n.tc-topbar button {\n\tpadding: 8px;\n}\n\n.tc-topbar svg {\n\tfill: <<colour muted-foreground>>;\n}\n\n.tc-topbar button:hover svg {\n\tfill: <<colour 
foreground>>;\n}\n\n.tc-sidebar-header {\n\tcolor: <<colour sidebar-foreground>>;\n\tfill: <<colour sidebar-foreground>>;\n}\n\n.tc-sidebar-header .tc-title a.tc-tiddlylink-resolves {\n\tfont-weight: 300;\n}\n\n.tc-sidebar-header .tc-sidebar-lists p {\n\tmargin-top: 3px;\n\tmargin-bottom: 3px;\n}\n\n.tc-sidebar-header .tc-missing-tiddler-label {\n\tcolor: <<colour sidebar-foreground>>;\n}\n\n.tc-advanced-search input {\n\twidth: 60%;\n}\n\n.tc-search a svg {\n\twidth: 1.2em;\n\theight: 1.2em;\n\tvertical-align: middle;\n}\n\n.tc-page-controls {\n\tmargin-top: 14px;\n\tfont-size: 1.5em;\n}\n\n.tc-page-controls button {\n\tmargin-right: 0.5em;\n}\n\n.tc-page-controls a.tc-tiddlylink:hover {\n\ttext-decoration: none;\n}\n\n.tc-page-controls img {\n\twidth: 1em;\n}\n\n.tc-page-controls svg {\n\tfill: <<colour sidebar-controls-foreground>>;\n}\n\n.tc-page-controls button:hover svg, .tc-page-controls a:hover svg {\n\tfill: <<colour sidebar-controls-foreground-hover>>;\n}\n\n.tc-menu-list-item {\n\twhite-space: nowrap;\n}\n\n.tc-menu-list-count {\n\tfont-weight: bold;\n}\n\n.tc-menu-list-subitem {\n\tpadding-left: 7px;\n}\n\n.tc-story-river {\n\tposition: relative;\n}\n\n@media (max-width: {{$:/themes/tiddlywiki/vanilla/metrics/sidebarbreakpoint}}) {\n\n\t.tc-sidebar-header {\n\t\tpadding: 14px;\n\t\tmin-height: 32px;\n\t\tmargin-top: {{$:/themes/tiddlywiki/vanilla/metrics/storytop}};\n\t}\n\n\t.tc-story-river {\n\t\tposition: relative;\n\t\tpadding: 0;\n\t}\n}\n\n@media (min-width: {{$:/themes/tiddlywiki/vanilla/metrics/sidebarbreakpoint}}) {\n\n\t.tc-message-box {\n\t\tmargin: 21px -21px 21px -21px;\n\t}\n\n\t.tc-sidebar-scrollable {\n\t\tposition: fixed;\n\t\ttop: {{$:/themes/tiddlywiki/vanilla/metrics/storytop}};\n\t\tleft: {{$:/themes/tiddlywiki/vanilla/metrics/storyright}};\n\t\tbottom: 0;\n\t\tright: 0;\n\t\toverflow-y: auto;\n\t\toverflow-x: auto;\n\t\t-webkit-overflow-scrolling: touch;\n\t\tmargin: 0 0 0 -42px;\n\t\tpadding: 71px 0 28px 42px;\n\t}\n\n\t.tc-story-river {\n\t\tposition: relative;\n\t\tleft: {{$:/themes/tiddlywiki/vanilla/metrics/storyleft}};\n\t\ttop: {{$:/themes/tiddlywiki/vanilla/metrics/storytop}};\n\t\twidth: {{$:/themes/tiddlywiki/vanilla/metrics/storywidth}};\n\t\tpadding: 42px 42px 42px 42px;\n\t}\n\n<<if-no-sidebar \"\n\n\t.tc-story-river {\n\t\twidth: calc(100% - {{$:/themes/tiddlywiki/vanilla/metrics/storyleft}});\n\t}\n\n\">>\n\n}\n\n@media print {\n\n\tbody.tc-body {\n\t\tbackground-color: transparent;\n\t}\n\n\t.tc-sidebar-header, .tc-topbar {\n\t\tdisplay: none;\n\t}\n\n\t.tc-story-river {\n\t\tmargin: 0;\n\t\tpadding: 0;\n\t}\n\n\t.tc-story-river .tc-tiddler-frame {\n\t\tmargin: 0;\n\t\tborder: none;\n\t\tpadding: 0;\n\t}\n}\n\n/*\n** Tiddler styles\n*/\n\n.tc-tiddler-frame {\n\tposition: relative;\n\tmargin-bottom: 28px;\n\tbackground-color: <<colour tiddler-background>>;\n\tborder: 1px solid <<colour tiddler-border>>;\n}\n\n{{$:/themes/tiddlywiki/vanilla/sticky}}\n\n.tc-tiddler-info {\n\tpadding: 14px 42px 14px 42px;\n\tbackground-color: <<colour tiddler-info-background>>;\n\tborder-top: 1px solid <<colour tiddler-info-border>>;\n\tborder-bottom: 1px solid <<colour tiddler-info-border>>;\n}\n\n.tc-tiddler-info p {\n\tmargin-top: 3px;\n\tmargin-bottom: 3px;\n}\n\n.tc-tiddler-info .tc-tab-buttons button.tc-tab-selected {\n\tbackground-color: <<colour tiddler-info-tab-background>>;\n\tborder-bottom: 1px solid <<colour tiddler-info-tab-background>>;\n}\n\n.tc-view-field-table {\n\twidth: 100%;\n}\n\n.tc-view-field-name {\n\twidth: 1%; /* Makes this column be 
as narrow as possible */\n\ttext-align: right;\n\tfont-style: italic;\n\tfont-weight: 200;\n}\n\n.tc-view-field-value {\n}\n\n@media (max-width: {{$:/themes/tiddlywiki/vanilla/metrics/sidebarbreakpoint}}) {\n\t.tc-tiddler-frame {\n\t\tpadding: 14px 14px 14px 14px;\n\t}\n\n\t.tc-tiddler-info {\n\t\tmargin: 0 -14px 0 -14px;\n\t}\n}\n\n@media (min-width: {{$:/themes/tiddlywiki/vanilla/metrics/sidebarbreakpoint}}) {\n\t.tc-tiddler-frame {\n\t\tpadding: 28px 42px 42px 42px;\n\t\twidth: {{$:/themes/tiddlywiki/vanilla/metrics/tiddlerwidth}};\n\t\tborder-radius: 2px;\n\t}\n\n<<if-no-sidebar \"\n\n\t.tc-tiddler-frame {\n\t\twidth: 100%;\n\t}\n\n\">>\n\n\t.tc-tiddler-info {\n\t\tmargin: 0 -42px 0 -42px;\n\t}\n}\n\n.tc-site-title,\n.tc-titlebar {\n\tfont-weight: 300;\n\tfont-size: 2.35em;\n\tline-height: 1.2em;\n\tcolor: <<colour tiddler-title-foreground>>;\n\tmargin: 0;\n}\n\n.tc-site-title {\n\tcolor: <<colour site-title-foreground>>;\n}\n\n.tc-tiddler-title-icon {\n\tvertical-align: middle;\n}\n\n.tc-system-title-prefix {\n\tcolor: <<colour muted-foreground>>;\n}\n\n.tc-titlebar h2 {\n\tfont-size: 1em;\n\tdisplay: inline;\n}\n\n.tc-titlebar img {\n\theight: 1em;\n}\n\n.tc-subtitle {\n\tfont-size: 0.9em;\n\tcolor: <<colour tiddler-subtitle-foreground>>;\n\tfont-weight: 300;\n}\n\n.tc-tiddler-missing .tc-title {\n font-style: italic;\n font-weight: normal;\n}\n\n.tc-tiddler-frame .tc-tiddler-controls {\n\tfloat: right;\n}\n\n.tc-tiddler-controls .tc-drop-down {\n\tfont-size: 0.6em;\n}\n\n.tc-tiddler-controls .tc-drop-down .tc-drop-down {\n\tfont-size: 1em;\n}\n\n.tc-tiddler-controls > span > button {\n\tvertical-align: baseline;\n\tmargin-left:5px;\n}\n\n.tc-tiddler-controls button svg, .tc-tiddler-controls button img,\n.tc-search button svg, .tc-search a svg {\n\theight: 0.75em;\n\tfill: <<colour tiddler-controls-foreground>>;\n}\n\n.tc-tiddler-controls button.tc-selected svg,\n.tc-page-controls button.tc-selected svg {\n\tfill: <<colour tiddler-controls-foreground-selected>>;\n}\n\n.tc-tiddler-controls button.tc-btn-invisible:hover svg,\n.tc-search button:hover svg, .tc-search a:hover svg {\n\tfill: <<colour tiddler-controls-foreground-hover>>;\n}\n\n@media print {\n\t.tc-tiddler-controls {\n\t\tdisplay: none;\n\t}\n}\n\n.tc-tiddler-help { /* Help prompts within tiddler template */\n\tcolor: <<colour muted-foreground>>;\n\tmargin-top: 14px;\n}\n\n.tc-tiddler-help a.tc-tiddlylink {\n\tcolor: <<colour very-muted-foreground>>;\n}\n\n.tc-tiddler-frame .tc-edit-texteditor {\n\twidth: 100%;\n\tmargin: 4px 0 4px 0;\n}\n\n.tc-tiddler-frame input.tc-edit-texteditor,\n.tc-tiddler-frame textarea.tc-edit-texteditor,\n.tc-tiddler-frame iframe.tc-edit-texteditor {\n\tpadding: 3px 3px 3px 3px;\n\tborder: 1px solid <<colour tiddler-editor-border>>;\n\tline-height: 1.3em;\n\t-webkit-appearance: none;\n}\n\n.tc-tiddler-frame .tc-binary-warning {\n\twidth: 100%;\n\theight: 5em;\n\ttext-align: center;\n\tpadding: 3em 3em 6em 3em;\n\tbackground: <<colour alert-background>>;\n\tborder: 1px solid <<colour alert-border>>;\n}\n\n.tc-tiddler-frame input.tc-edit-texteditor {\n\tbackground-color: <<colour tiddler-editor-background>>;\n}\n\ncanvas.tc-edit-bitmapeditor {\n\tborder: 6px solid <<colour tiddler-editor-border-image>>;\n\tcursor: crosshair;\n\t-moz-user-select: none;\n\t-webkit-user-select: none;\n\t-ms-user-select: none;\n\tmargin-top: 6px;\n\tmargin-bottom: 6px;\n}\n\n.tc-edit-bitmapeditor-width {\n\tdisplay: block;\n}\n\n.tc-edit-bitmapeditor-height {\n\tdisplay: block;\n}\n\n.tc-tiddler-body {\n\tclear: 
both;\n}\n\n.tc-tiddler-frame .tc-tiddler-body {\n\tfont-size: {{$:/themes/tiddlywiki/vanilla/metrics/bodyfontsize}};\n\tline-height: {{$:/themes/tiddlywiki/vanilla/metrics/bodylineheight}};\n}\n\n.tc-titlebar, .tc-tiddler-edit-title {\n\toverflow: hidden; /* https://github.com/Jermolene/TiddlyWiki5/issues/282 */\n}\n\nhtml body.tc-body.tc-single-tiddler-window {\n\tmargin: 1em;\n\tbackground: <<colour tiddler-background>>;\n}\n\n.tc-single-tiddler-window img,\n.tc-single-tiddler-window svg,\n.tc-single-tiddler-window canvas,\n.tc-single-tiddler-window embed,\n.tc-single-tiddler-window iframe {\n\tmax-width: 100%;\n}\n\n/*\n** Editor\n*/\n\n.tc-editor-toolbar {\n\tmargin-top: 8px;\n}\n\n.tc-editor-toolbar button {\n\tvertical-align: middle;\n\tbackground-color: <<colour tiddler-controls-foreground>>;\n\tfill: <<colour tiddler-controls-foreground-selected>>;\n\tborder-radius: 4px;\n\tpadding: 3px;\n\tmargin: 2px 0 2px 4px;\n}\n\n.tc-editor-toolbar button.tc-text-editor-toolbar-item-adjunct {\n\tmargin-left: 1px;\n\twidth: 1em;\n\tborder-radius: 8px;\n}\n\n.tc-editor-toolbar button.tc-text-editor-toolbar-item-start-group {\n\tmargin-left: 11px;\n}\n\n.tc-editor-toolbar button.tc-selected {\n\tbackground-color: <<colour primary>>;\n}\n\n.tc-editor-toolbar button svg {\n\twidth: 1.6em;\n\theight: 1.2em;\n}\n\n.tc-editor-toolbar button:hover {\n\tbackground-color: <<colour tiddler-controls-foreground-selected>>;\n\tfill: <<colour background>>;\n}\n\n.tc-editor-toolbar .tc-text-editor-toolbar-more {\n\twhite-space: normal;\n}\n\n.tc-editor-toolbar .tc-text-editor-toolbar-more button {\n\tdisplay: inline-block;\n\tpadding: 3px;\n\twidth: auto;\n}\n\n.tc-editor-toolbar .tc-search-results {\n\tpadding: 0;\n}\n\n/*\n** Adjustments for fluid-fixed mode\n*/\n\n@media (min-width: {{$:/themes/tiddlywiki/vanilla/metrics/sidebarbreakpoint}}) {\n\n<<if-fluid-fixed text:\"\"\"\n\n\t.tc-story-river {\n\t\tpadding-right: 0;\n\t\tposition: relative;\n\t\twidth: auto;\n\t\tleft: 0;\n\t\tmargin-left: {{$:/themes/tiddlywiki/vanilla/metrics/storyleft}};\n\t\tmargin-right: {{$:/themes/tiddlywiki/vanilla/metrics/sidebarwidth}};\n\t}\n\n\t.tc-tiddler-frame {\n\t\twidth: 100%;\n\t}\n\n\t.tc-sidebar-scrollable {\n\t\tleft: auto;\n\t\tbottom: 0;\n\t\tright: 0;\n\t\twidth: {{$:/themes/tiddlywiki/vanilla/metrics/sidebarwidth}};\n\t}\n\n\tbody.tc-body .tc-storyview-zoomin-tiddler {\n\t\twidth: 100%;\n\t\twidth: calc(100% - 42px);\n\t}\n\n\"\"\" hiddenSidebarText:\"\"\"\n\n\t.tc-story-river {\n\t\tpadding-right: 3em;\n\t\tmargin-right: 0;\n\t}\n\n\tbody.tc-body .tc-storyview-zoomin-tiddler {\n\t\twidth: 100%;\n\t\twidth: calc(100% - 84px);\n\t}\n\n\"\"\">>\n\n}\n\n/*\n** Toolbar buttons\n*/\n\n.tc-page-controls svg.tc-image-new-button {\n fill: <<colour toolbar-new-button>>;\n}\n\n.tc-page-controls svg.tc-image-options-button {\n fill: <<colour toolbar-options-button>>;\n}\n\n.tc-page-controls svg.tc-image-save-button {\n fill: <<colour toolbar-save-button>>;\n}\n\n.tc-tiddler-controls button svg.tc-image-info-button {\n fill: <<colour toolbar-info-button>>;\n}\n\n.tc-tiddler-controls button svg.tc-image-edit-button {\n fill: <<colour toolbar-edit-button>>;\n}\n\n.tc-tiddler-controls button svg.tc-image-close-button {\n fill: <<colour toolbar-close-button>>;\n}\n\n.tc-tiddler-controls button svg.tc-image-delete-button {\n fill: <<colour toolbar-delete-button>>;\n}\n\n.tc-tiddler-controls button svg.tc-image-cancel-button {\n fill: <<colour toolbar-cancel-button>>;\n}\n\n.tc-tiddler-controls button svg.tc-image-done-button 
{\n fill: <<colour toolbar-done-button>>;\n}\n\n/*\n** Tiddler edit mode\n*/\n\n.tc-tiddler-edit-frame em.tc-edit {\n\tcolor: <<colour muted-foreground>>;\n\tfont-style: normal;\n}\n\n.tc-edit-type-dropdown a.tc-tiddlylink-missing {\n\tfont-style: normal;\n}\n\n.tc-edit-tags {\n\tborder: 1px solid <<colour tiddler-editor-border>>;\n\tpadding: 4px 8px 4px 8px;\n}\n\n.tc-edit-add-tag {\n\tdisplay: inline-block;\n}\n\n.tc-edit-add-tag .tc-add-tag-name input {\n\twidth: 50%;\n}\n\n.tc-edit-tags .tc-tag-label {\n\tdisplay: inline-block;\n}\n\n.tc-edit-tags-list {\n\tmargin: 14px 0 14px 0;\n}\n\n.tc-remove-tag-button {\n\tpadding-left: 4px;\n}\n\n.tc-tiddler-preview {\n\toverflow: auto;\n}\n\n.tc-tiddler-preview-preview {\n\tfloat: right;\n\twidth: 49%;\n\tborder: 1px solid <<colour tiddler-editor-border>>;\n\tmargin: 4px 3px 3px 3px;\n\tpadding: 3px 3px 3px 3px;\n}\n\n.tc-tiddler-frame .tc-tiddler-preview .tc-edit-texteditor {\n\twidth: 49%;\n}\n\n.tc-tiddler-frame .tc-tiddler-preview canvas.tc-edit-bitmapeditor {\n\tmax-width: 49%;\n}\n\n.tc-edit-fields {\n\twidth: 100%;\n}\n\n\n.tc-edit-fields table, .tc-edit-fields tr, .tc-edit-fields td {\n\tborder: none;\n\tpadding: 4px;\n}\n\n.tc-edit-fields > tbody > .tc-edit-field:nth-child(odd) {\n\tbackground-color: <<colour tiddler-editor-fields-odd>>;\n}\n\n.tc-edit-fields > tbody > .tc-edit-field:nth-child(even) {\n\tbackground-color: <<colour tiddler-editor-fields-even>>;\n}\n\n.tc-edit-field-name {\n\ttext-align: right;\n}\n\n.tc-edit-field-value input {\n\twidth: 100%;\n}\n\n.tc-edit-field-remove {\n}\n\n.tc-edit-field-remove svg {\n\theight: 1em;\n\twidth: 1em;\n\tfill: <<colour muted-foreground>>;\n\tvertical-align: middle;\n}\n\n.tc-edit-field-add-name {\n\tdisplay: inline-block;\n\twidth: 15%;\n}\n\n.tc-edit-field-add-value {\n\tdisplay: inline-block;\n\twidth: 40%;\n}\n\n.tc-edit-field-add-button {\n\tdisplay: inline-block;\n\twidth: 10%;\n}\n\n/*\n** Storyview Classes\n*/\n\n.tc-storyview-zoomin-tiddler {\n\tposition: absolute;\n\tdisplay: block;\n\twidth: 100%;\n}\n\n@media (min-width: {{$:/themes/tiddlywiki/vanilla/metrics/sidebarbreakpoint}}) {\n\n\t.tc-storyview-zoomin-tiddler {\n\t\twidth: calc(100% - 84px);\n\t}\n\n}\n\n/*\n** Dropdowns\n*/\n\n.tc-btn-dropdown {\n\ttext-align: left;\n}\n\n.tc-btn-dropdown svg, .tc-btn-dropdown img {\n\theight: 1em;\n\twidth: 1em;\n\tfill: <<colour muted-foreground>>;\n}\n\n.tc-drop-down-wrapper {\n\tposition: relative;\n}\n\n.tc-drop-down {\n\tmin-width: 380px;\n\tborder: 1px solid <<colour dropdown-border>>;\n\tbackground-color: <<colour dropdown-background>>;\n\tpadding: 7px 0 7px 0;\n\tmargin: 4px 0 0 0;\n\twhite-space: nowrap;\n\ttext-shadow: none;\n\tline-height: 1.4;\n}\n\n.tc-drop-down .tc-drop-down {\n\tmargin-left: 14px;\n}\n\n.tc-drop-down button svg, .tc-drop-down a svg {\n\tfill: <<colour foreground>>;\n}\n\n.tc-drop-down button.tc-btn-invisible:hover svg {\n\tfill: <<colour foreground>>;\n}\n\n.tc-drop-down p {\n\tpadding: 0 14px 0 14px;\n}\n\n.tc-drop-down svg {\n\twidth: 1em;\n\theight: 1em;\n}\n\n.tc-drop-down img {\n\twidth: 1em;\n}\n\n.tc-drop-down-language-chooser img {\n\twidth: 2em;\n\tvertical-align: baseline;\n}\n\n.tc-drop-down a, .tc-drop-down button {\n\tdisplay: block;\n\tpadding: 0 14px 0 14px;\n\twidth: 100%;\n\ttext-align: left;\n\tcolor: <<colour foreground>>;\n\tline-height: 1.4;\n}\n\n.tc-drop-down .tc-tab-set .tc-tab-buttons button {\n\tdisplay: inline-block;\n width: auto;\n margin-bottom: 0px;\n border-bottom-left-radius: 0;\n border-bottom-right-radius: 
0;\n}\n\n.tc-drop-down .tc-prompt {\n\tpadding: 0 14px;\n}\n\n.tc-drop-down .tc-chooser {\n\tborder: none;\n}\n\n.tc-drop-down .tc-chooser .tc-swatches-horiz {\n\tfont-size: 0.4em;\n\tpadding-left: 1.2em;\n}\n\n.tc-drop-down .tc-file-input-wrapper {\n\twidth: 100%;\n}\n\n.tc-drop-down .tc-file-input-wrapper button {\n\tcolor: <<colour foreground>>;\n}\n\n.tc-drop-down a:hover, .tc-drop-down button:hover, .tc-drop-down .tc-file-input-wrapper:hover button {\n\tcolor: <<colour tiddler-link-background>>;\n\tbackground-color: <<colour tiddler-link-foreground>>;\n\ttext-decoration: none;\n}\n\n.tc-drop-down .tc-tab-buttons button {\n\tbackground-color: <<colour dropdown-tab-background>>;\n}\n\n.tc-drop-down .tc-tab-buttons button.tc-tab-selected {\n\tbackground-color: <<colour dropdown-tab-background-selected>>;\n\tborder-bottom: 1px solid <<colour dropdown-tab-background-selected>>;\n}\n\n.tc-drop-down-bullet {\n\tdisplay: inline-block;\n\twidth: 0.5em;\n}\n\n.tc-drop-down .tc-tab-contents a {\n\tpadding: 0 0.5em 0 0.5em;\n}\n\n.tc-block-dropdown-wrapper {\n\tposition: relative;\n}\n\n.tc-block-dropdown {\n\tposition: absolute;\n\tmin-width: 220px;\n\tborder: 1px solid <<colour dropdown-border>>;\n\tbackground-color: <<colour dropdown-background>>;\n\tpadding: 7px 0;\n\tmargin: 4px 0 0 0;\n\twhite-space: nowrap;\n\tz-index: 1000;\n\ttext-shadow: none;\n}\n\n.tc-block-dropdown.tc-search-drop-down {\n\tmargin-left: -12px;\n}\n\n.tc-block-dropdown a {\n\tdisplay: block;\n\tpadding: 4px 14px 4px 14px;\n}\n\n.tc-block-dropdown.tc-search-drop-down a {\n\tdisplay: block;\n\tpadding: 0px 10px 0px 10px;\n}\n\n.tc-drop-down .tc-dropdown-item-plain,\n.tc-block-dropdown .tc-dropdown-item-plain {\n\tpadding: 4px 14px 4px 7px;\n}\n\n.tc-drop-down .tc-dropdown-item,\n.tc-block-dropdown .tc-dropdown-item {\n\tpadding: 4px 14px 4px 7px;\n\tcolor: <<colour muted-foreground>>;\n}\n\n.tc-block-dropdown a:hover {\n\tcolor: <<colour tiddler-link-background>>;\n\tbackground-color: <<colour tiddler-link-foreground>>;\n\ttext-decoration: none;\n}\n\n.tc-search-results {\n\tpadding: 0 7px 0 7px;\n}\n\n.tc-image-chooser, .tc-colour-chooser {\n\twhite-space: normal;\n}\n\n.tc-image-chooser a,\n.tc-colour-chooser a {\n\tdisplay: inline-block;\n\tvertical-align: top;\n\ttext-align: center;\n\tposition: relative;\n}\n\n.tc-image-chooser a {\n\tborder: 1px solid <<colour muted-foreground>>;\n\tpadding: 2px;\n\tmargin: 2px;\n\twidth: 4em;\n\theight: 4em;\n}\n\n.tc-colour-chooser a {\n\tpadding: 3px;\n\twidth: 2em;\n\theight: 2em;\n\tvertical-align: middle;\n}\n\n.tc-image-chooser a:hover,\n.tc-colour-chooser a:hover {\n\tbackground: <<colour primary>>;\n\tpadding: 0px;\n\tborder: 3px solid <<colour primary>>;\n}\n\n.tc-image-chooser a svg,\n.tc-image-chooser a img {\n\tdisplay: inline-block;\n\twidth: auto;\n\theight: auto;\n\tmax-width: 3.5em;\n\tmax-height: 3.5em;\n\tposition: absolute;\n\ttop: 0;\n\tbottom: 0;\n\tleft: 0;\n\tright: 0;\n\tmargin: auto;\n}\n\n/*\n** Modals\n*/\n\n.tc-modal-wrapper {\n\tposition: fixed;\n\toverflow: auto;\n\toverflow-y: scroll;\n\ttop: 0;\n\tright: 0;\n\tbottom: 0;\n\tleft: 0;\n\tz-index: 900;\n}\n\n.tc-modal-backdrop {\n\tposition: fixed;\n\ttop: 0;\n\tright: 0;\n\tbottom: 0;\n\tleft: 0;\n\tz-index: 1000;\n\tbackground-color: <<colour modal-backdrop>>;\n}\n\n.tc-modal {\n\tz-index: 1100;\n\tbackground-color: <<colour modal-background>>;\n\tborder: 1px solid <<colour modal-border>>;\n}\n\n@media (max-width: 55em) {\n\t.tc-modal {\n\t\tposition: fixed;\n\t\ttop: 1em;\n\t\tleft: 1em;\n\t\tright: 
1em;\n\t}\n\n\t.tc-modal-body {\n\t\toverflow-y: auto;\n\t\tmax-height: 400px;\n\t\tmax-height: 60vh;\n\t}\n}\n\n@media (min-width: 55em) {\n\t.tc-modal {\n\t\tposition: fixed;\n\t\ttop: 2em;\n\t\tleft: 25%;\n\t\twidth: 50%;\n\t}\n\n\t.tc-modal-body {\n\t\toverflow-y: auto;\n\t\tmax-height: 400px;\n\t\tmax-height: 60vh;\n\t}\n}\n\n.tc-modal-header {\n\tpadding: 9px 15px;\n\tborder-bottom: 1px solid <<colour modal-header-border>>;\n}\n\n.tc-modal-header h3 {\n\tmargin: 0;\n\tline-height: 30px;\n}\n\n.tc-modal-header img, .tc-modal-header svg {\n\twidth: 1em;\n\theight: 1em;\n}\n\n.tc-modal-body {\n\tpadding: 15px;\n}\n\n.tc-modal-footer {\n\tpadding: 14px 15px 15px;\n\tmargin-bottom: 0;\n\ttext-align: right;\n\tbackground-color: <<colour modal-footer-background>>;\n\tborder-top: 1px solid <<colour modal-footer-border>>;\n}\n\n/*\n** Notifications\n*/\n\n.tc-notification {\n\tposition: fixed;\n\ttop: 14px;\n\tright: 42px;\n\tz-index: 1300;\n\tmax-width: 280px;\n\tpadding: 0 14px 0 14px;\n\tbackground-color: <<colour notification-background>>;\n\tborder: 1px solid <<colour notification-border>>;\n}\n\n/*\n** Tabs\n*/\n\n.tc-tab-set.tc-vertical {\n\tdisplay: -webkit-flex;\n\tdisplay: flex;\n}\n\n.tc-tab-buttons {\n\tfont-size: 0.85em;\n\tpadding-top: 1em;\n\tmargin-bottom: -2px;\n}\n\n.tc-tab-buttons.tc-vertical {\n\tz-index: 100;\n\tdisplay: block;\n\tpadding-top: 14px;\n\tvertical-align: top;\n\ttext-align: right;\n\tmargin-bottom: inherit;\n\tmargin-right: -1px;\n\tmax-width: 33%;\n\t-webkit-flex: 0 0 auto;\n\tflex: 0 0 auto;\n}\n\n.tc-tab-buttons button.tc-tab-selected {\n\tcolor: <<colour tab-foreground-selected>>;\n\tbackground-color: <<colour tab-background-selected>>;\n\tborder-left: 1px solid <<colour tab-border-selected>>;\n\tborder-top: 1px solid <<colour tab-border-selected>>;\n\tborder-right: 1px solid <<colour tab-border-selected>>;\n}\n\n.tc-tab-buttons button {\n\tcolor: <<colour tab-foreground>>;\n\tpadding: 3px 5px 3px 5px;\n\tmargin-right: 0.3em;\n\tfont-weight: 300;\n\tborder: none;\n\tbackground: inherit;\n\tbackground-color: <<colour tab-background>>;\n\tborder-left: 1px solid <<colour tab-border>>;\n\tborder-top: 1px solid <<colour tab-border>>;\n\tborder-right: 1px solid <<colour tab-border>>;\n\tborder-top-left-radius: 2px;\n\tborder-top-right-radius: 2px;\n}\n\n.tc-tab-buttons.tc-vertical button {\n\tdisplay: block;\n\twidth: 100%;\n\tmargin-top: 3px;\n\tmargin-right: 0;\n\ttext-align: right;\n\tbackground-color: <<colour tab-background>>;\n\tborder-left: 1px solid <<colour tab-border>>;\n\tborder-bottom: 1px solid <<colour tab-border>>;\n\tborder-right: none;\n\tborder-top-left-radius: 2px;\n\tborder-bottom-left-radius: 2px;\n}\n\n.tc-tab-buttons.tc-vertical button.tc-tab-selected {\n\tbackground-color: <<colour tab-background-selected>>;\n\tborder-right: 1px solid <<colour tab-background-selected>>;\n}\n\n.tc-tab-divider {\n\tborder-top: 1px solid <<colour tab-divider>>;\n}\n\n.tc-tab-divider.tc-vertical {\n\tdisplay: none;\n}\n\n.tc-tab-content {\n\tmargin-top: 14px;\n}\n\n.tc-tab-content.tc-vertical {\n\tdisplay: inline-block;\n\tvertical-align: top;\n\tpadding-top: 0;\n\tpadding-left: 14px;\n\tborder-left: 1px solid <<colour tab-border>>;\n\t-webkit-flex: 1 0 70%;\n\tflex: 1 0 70%;\n}\n\n.tc-sidebar-lists .tc-tab-buttons {\n\tmargin-bottom: -1px;\n}\n\n.tc-sidebar-lists .tc-tab-buttons button.tc-tab-selected {\n\tbackground-color: <<colour sidebar-tab-background-selected>>;\n\tcolor: <<colour sidebar-tab-foreground-selected>>;\n\tborder-left: 1px solid <<colour 
sidebar-tab-border-selected>>;\n\tborder-top: 1px solid <<colour sidebar-tab-border-selected>>;\n\tborder-right: 1px solid <<colour sidebar-tab-border-selected>>;\n}\n\n.tc-sidebar-lists .tc-tab-buttons button {\n\tbackground-color: <<colour sidebar-tab-background>>;\n\tcolor: <<colour sidebar-tab-foreground>>;\n\tborder-left: 1px solid <<colour sidebar-tab-border>>;\n\tborder-top: 1px solid <<colour sidebar-tab-border>>;\n\tborder-right: 1px solid <<colour sidebar-tab-border>>;\n}\n\n.tc-sidebar-lists .tc-tab-divider {\n\tborder-top: 1px solid <<colour sidebar-tab-divider>>;\n}\n\n.tc-more-sidebar .tc-tab-buttons button {\n\tdisplay: block;\n\twidth: 100%;\n\tbackground-color: <<colour sidebar-tab-background>>;\n\tborder-top: none;\n\tborder-left: none;\n\tborder-bottom: none;\n\tborder-right: 1px solid #ccc;\n\tmargin-bottom: inherit;\n}\n\n.tc-more-sidebar .tc-tab-buttons button.tc-tab-selected {\n\tbackground-color: <<colour sidebar-tab-background-selected>>;\n\tborder: none;\n}\n\n/*\n** Alerts\n*/\n\n.tc-alerts {\n\tposition: fixed;\n\ttop: 0;\n\tleft: 0;\n\tmax-width: 500px;\n\tz-index: 20000;\n}\n\n.tc-alert {\n\tposition: relative;\n\tmargin: 28px;\n\tpadding: 14px 14px 14px 14px;\n\tborder: 2px solid <<colour alert-border>>;\n\tbackground-color: <<colour alert-background>>;\n}\n\n.tc-alert-toolbar {\n\tposition: absolute;\n\ttop: 14px;\n\tright: 14px;\n}\n\n.tc-alert-toolbar svg {\n\tfill: <<colour alert-muted-foreground>>;\n}\n\n.tc-alert-subtitle {\n\tcolor: <<colour alert-muted-foreground>>;\n\tfont-weight: bold;\n}\n\n.tc-alert-highlight {\n\tcolor: <<colour alert-highlight>>;\n}\n\n@media (min-width: {{$:/themes/tiddlywiki/vanilla/metrics/sidebarbreakpoint}}) {\n\n\t.tc-static-alert {\n\t\tposition: relative;\n\t}\n\n\t.tc-static-alert-inner {\n\t\tposition: absolute;\n\t\tz-index: 100;\n\t}\n\n}\n\n.tc-static-alert-inner {\n\tpadding: 0 2px 2px 42px;\n\tcolor: <<colour static-alert-foreground>>;\n}\n\n/*\n** Control panel\n*/\n\n.tc-control-panel td {\n\tpadding: 4px;\n}\n\n.tc-control-panel table, .tc-control-panel table input, .tc-control-panel table textarea {\n\twidth: 100%;\n}\n\n.tc-plugin-info {\n\tdisplay: block;\n\tborder: 1px solid <<colour muted-foreground>>;\n\tbackground-colour: <<colour background>>;\n\tmargin: 0.5em 0 0.5em 0;\n\tpadding: 4px;\n}\n\n.tc-plugin-info-disabled {\n\tbackground: -webkit-repeating-linear-gradient(45deg, #ff0, #ff0 10px, #eee 10px, #eee 20px);\n\tbackground: repeating-linear-gradient(45deg, #ff0, #ff0 10px, #eee 10px, #eee 20px);\n}\n\n.tc-plugin-info-disabled:hover {\n\tbackground: -webkit-repeating-linear-gradient(45deg, #aa0, #aa0 10px, #888 10px, #888 20px);\n\tbackground: repeating-linear-gradient(45deg, #aa0, #aa0 10px, #888 10px, #888 20px);\n}\n\na.tc-tiddlylink.tc-plugin-info:hover {\n\ttext-decoration: none;\n\tbackground-color: <<colour primary>>;\n\tcolor: <<colour background>>;\n\tfill: <<colour foreground>>;\n}\n\na.tc-tiddlylink.tc-plugin-info:hover .tc-plugin-info > .tc-plugin-info-chunk > svg {\n\tfill: <<colour foreground>>;\n}\n\n.tc-plugin-info-chunk {\n\tdisplay: inline-block;\n\tvertical-align: middle;\n}\n\n.tc-plugin-info-chunk h1 {\n\tfont-size: 1em;\n\tmargin: 2px 0 2px 0;\n}\n\n.tc-plugin-info-chunk h2 {\n\tfont-size: 0.8em;\n\tmargin: 2px 0 2px 0;\n}\n\n.tc-plugin-info-chunk div {\n\tfont-size: 0.7em;\n\tmargin: 2px 0 2px 0;\n}\n\n.tc-plugin-info:hover > .tc-plugin-info-chunk > img, .tc-plugin-info:hover > .tc-plugin-info-chunk > svg {\n\twidth: 2em;\n\theight: 2em;\n\tfill: <<colour 
foreground>>;\n}\n\n.tc-plugin-info > .tc-plugin-info-chunk > img, .tc-plugin-info > .tc-plugin-info-chunk > svg {\n\twidth: 2em;\n\theight: 2em;\n\tfill: <<colour muted-foreground>>;\n}\n\n.tc-plugin-info.tc-small-icon > .tc-plugin-info-chunk > img, .tc-plugin-info.tc-small-icon > .tc-plugin-info-chunk > svg {\n\twidth: 1em;\n\theight: 1em;\n}\n\n.tc-plugin-info-dropdown {\n\tborder: 1px solid <<colour muted-foreground>>;\n\tmargin-top: -8px;\n}\n\n.tc-plugin-info-dropdown-message {\n\tbackground: <<colour message-background>>;\n\tpadding: 0.5em 1em 0.5em 1em;\n\tfont-weight: bold;\n\tfont-size: 0.8em;\n}\n\n.tc-plugin-info-dropdown-body {\n\tpadding: 1em 1em 1em 1em;\n}\n\n/*\n** Message boxes\n*/\n\n.tc-message-box {\n\tborder: 1px solid <<colour message-border>>;\n\tbackground: <<colour message-background>>;\n\tpadding: 0px 21px 0px 21px;\n\tfont-size: 12px;\n\tline-height: 18px;\n\tcolor: <<colour message-foreground>>;\n}\n\n/*\n** Pictures\n*/\n\n.tc-bordered-image {\n\tborder: 1px solid <<colour muted-foreground>>;\n\tpadding: 5px;\n\tmargin: 5px;\n}\n\n/*\n** Floats\n*/\n\n.tc-float-right {\n\tfloat: right;\n}\n\n/*\n** Chooser\n*/\n\n.tc-chooser {\n\tborder: 1px solid <<colour table-border>>;\n}\n\n.tc-chooser-item {\n\tborder: 8px;\n\tpadding: 2px 4px;\n}\n\n.tc-chooser-item a.tc-tiddlylink {\n\tdisplay: block;\n\ttext-decoration: none;\n\tcolor: <<colour tiddler-link-foreground>>;\n\tbackground-color: <<colour tiddler-link-background>>;\n}\n\n.tc-chooser-item a.tc-tiddlylink:hover {\n\ttext-decoration: none;\n\tcolor: <<colour tiddler-link-background>>;\n\tbackground-color: <<colour tiddler-link-foreground>>;\n}\n\n/*\n** Palette swatches\n*/\n\n.tc-swatches-horiz {\n}\n\n.tc-swatches-horiz .tc-swatch {\n\tdisplay: inline-block;\n}\n\n.tc-swatch {\n\twidth: 2em;\n\theight: 2em;\n\tmargin: 0.4em;\n\tborder: 1px solid #888;\n}\n\n/*\n** Table of contents\n*/\n\n.tc-sidebar-lists .tc-table-of-contents {\n\twhite-space: nowrap;\n}\n\n.tc-table-of-contents button {\n\tcolor: <<colour sidebar-foreground>>;\n}\n\n.tc-table-of-contents svg {\n\twidth: 0.7em;\n\theight: 0.7em;\n\tvertical-align: middle;\n\tfill: <<colour sidebar-foreground>>;\n}\n\n.tc-table-of-contents ol {\n\tlist-style-type: none;\n\tpadding-left: 0;\n}\n\n.tc-table-of-contents ol ol {\n\tpadding-left: 1em;\n}\n\n.tc-table-of-contents li {\n\tfont-size: 1.0em;\n\tfont-weight: bold;\n}\n\n.tc-table-of-contents li a {\n\tfont-weight: bold;\n}\n\n.tc-table-of-contents li li {\n\tfont-size: 0.95em;\n\tfont-weight: normal;\n\tline-height: 1.4;\n}\n\n.tc-table-of-contents li li a {\n\tfont-weight: normal;\n}\n\n.tc-table-of-contents li li li {\n\tfont-size: 0.95em;\n\tfont-weight: 200;\n\tline-height: 1.5;\n}\n\n.tc-table-of-contents li li li a {\n\tfont-weight: bold;\n}\n\n.tc-table-of-contents li li li li {\n\tfont-size: 0.95em;\n\tfont-weight: 200;\n}\n\n.tc-tabbed-table-of-contents {\n\tdisplay: -webkit-flex;\n\tdisplay: flex;\n}\n\n.tc-tabbed-table-of-contents .tc-table-of-contents {\n\tz-index: 100;\n\tdisplay: inline-block;\n\tpadding-left: 1em;\n\tmax-width: 50%;\n\t-webkit-flex: 0 0 auto;\n\tflex: 0 0 auto;\n\tbackground: <<colour tab-background>>;\n\tborder-left: 1px solid <<colour tab-border>>;\n\tborder-top: 1px solid <<colour tab-border>>;\n\tborder-bottom: 1px solid <<colour tab-border>>;\n}\n\n.tc-tabbed-table-of-contents .tc-table-of-contents .toc-item > a,\n.tc-tabbed-table-of-contents .tc-table-of-contents .toc-item-selected > a {\n\tdisplay: block;\n\tpadding: 0.12em 1em 0.12em 
0.25em;\n}\n\n.tc-tabbed-table-of-contents .tc-table-of-contents .toc-item > a {\n\tborder-top: 1px solid <<colour tab-background>>;\n\tborder-left: 1px solid <<colour tab-background>>;\n\tborder-bottom: 1px solid <<colour tab-background>>;\n}\n\n.tc-tabbed-table-of-contents .tc-table-of-contents .toc-item > a:hover {\n\ttext-decoration: none;\n\tborder-top: 1px solid <<colour tab-border>>;\n\tborder-left: 1px solid <<colour tab-border>>;\n\tborder-bottom: 1px solid <<colour tab-border>>;\n\tbackground: <<colour tab-border>>;\n}\n\n.tc-tabbed-table-of-contents .tc-table-of-contents .toc-item-selected > a {\n\tborder-top: 1px solid <<colour tab-border>>;\n\tborder-left: 1px solid <<colour tab-border>>;\n\tborder-bottom: 1px solid <<colour tab-border>>;\n\tbackground: <<colour background>>;\n\tmargin-right: -1px;\n}\n\n.tc-tabbed-table-of-contents .tc-table-of-contents .toc-item-selected > a:hover {\n\ttext-decoration: none;\n}\n\n.tc-tabbed-table-of-contents .tc-tabbed-table-of-contents-content {\n\tdisplay: inline-block;\n\tvertical-align: top;\n\tpadding-left: 1.5em;\n\tpadding-right: 1.5em;\n\tborder: 1px solid <<colour tab-border>>;\n\t-webkit-flex: 1 0 50%;\n\tflex: 1 0 50%;\n}\n\n/*\n** Dirty indicator\n*/\n\nbody.tc-dirty span.tc-dirty-indicator, body.tc-dirty span.tc-dirty-indicator svg {\n\tfill: <<colour dirty-indicator>>;\n\tcolor: <<colour dirty-indicator>>;\n}\n\n/*\n** File inputs\n*/\n\n.tc-file-input-wrapper {\n\tposition: relative;\n\toverflow: hidden;\n\tdisplay: inline-block;\n\tvertical-align: middle;\n}\n\n.tc-file-input-wrapper input[type=file] {\n\tposition: absolute;\n\ttop: 0;\n\tleft: 0;\n\tright: 0;\n\tbottom: 0;\n\tfont-size: 999px;\n\tmax-width: 100%;\n\tmax-height: 100%;\n\tfilter: alpha(opacity=0);\n\topacity: 0;\n\toutline: none;\n\tbackground: white;\n\tcursor: pointer;\n\tdisplay: inline-block;\n}\n\n/*\n** Thumbnail macros\n*/\n\n.tc-thumbnail-wrapper {\n\tposition: relative;\n\tdisplay: inline-block;\n\tmargin: 6px;\n\tvertical-align: top;\n}\n\n.tc-thumbnail-right-wrapper {\n\tfloat:right;\n\tmargin: 0.5em 0 0.5em 0.5em;\n}\n\n.tc-thumbnail-image {\n\ttext-align: center;\n\toverflow: hidden;\n\tborder-radius: 3px;\n}\n\n.tc-thumbnail-image svg,\n.tc-thumbnail-image img {\n\tfilter: alpha(opacity=1);\n\topacity: 1;\n\tmin-width: 100%;\n\tmin-height: 100%;\n\tmax-width: 100%;\n}\n\n.tc-thumbnail-wrapper:hover .tc-thumbnail-image svg,\n.tc-thumbnail-wrapper:hover .tc-thumbnail-image img {\n\tfilter: alpha(opacity=0.8);\n\topacity: 0.8;\n}\n\n.tc-thumbnail-background {\n\tposition: absolute;\n\tborder-radius: 3px;\n}\n\n.tc-thumbnail-icon svg,\n.tc-thumbnail-icon img {\n\twidth: 3em;\n\theight: 3em;\n\t<<filter \"drop-shadow(2px 2px 4px rgba(0,0,0,0.3))\">>\n}\n\n.tc-thumbnail-wrapper:hover .tc-thumbnail-icon svg,\n.tc-thumbnail-wrapper:hover .tc-thumbnail-icon img {\n\tfill: #fff;\n\t<<filter \"drop-shadow(3px 3px 4px rgba(0,0,0,0.6))\">>\n}\n\n.tc-thumbnail-icon {\n\tposition: absolute;\n\ttop: 0;\n\tleft: 0;\n\tright: 0;\n\tbottom: 0;\n\tdisplay: -webkit-flex;\n\t-webkit-align-items: center;\n\t-webkit-justify-content: center;\n\tdisplay: flex;\n\talign-items: center;\n\tjustify-content: center;\n}\n\n.tc-thumbnail-caption {\n\tposition: absolute;\n\tbackground-color: #777;\n\tcolor: #fff;\n\ttext-align: center;\n\tbottom: 0;\n\twidth: 100%;\n\tfilter: alpha(opacity=0.9);\n\topacity: 0.9;\n\tline-height: 1.4;\n\tborder-bottom-left-radius: 3px;\n\tborder-bottom-right-radius: 3px;\n}\n\n.tc-thumbnail-wrapper:hover .tc-thumbnail-caption {\n\tfilter: 
alpha(opacity=1);\n\topacity: 1;\n}\n\n/*\n** Errors\n*/\n\n.tc-error {\n\tbackground: #f00;\n\tcolor: #fff;\n}\n"
},
"$:/themes/tiddlywiki/vanilla/metrics/bodyfontsize": {
"title": "$:/themes/tiddlywiki/vanilla/metrics/bodyfontsize",
"text": "15px"
},
"$:/themes/tiddlywiki/vanilla/metrics/bodylineheight": {
"title": "$:/themes/tiddlywiki/vanilla/metrics/bodylineheight",
"text": "22px"
},
"$:/themes/tiddlywiki/vanilla/metrics/fontsize": {
"title": "$:/themes/tiddlywiki/vanilla/metrics/fontsize",
"text": "14px"
},
"$:/themes/tiddlywiki/vanilla/metrics/lineheight": {
"title": "$:/themes/tiddlywiki/vanilla/metrics/lineheight",
"text": "20px"
},
"$:/themes/tiddlywiki/vanilla/metrics/storyleft": {
"title": "$:/themes/tiddlywiki/vanilla/metrics/storyleft",
"text": "0px"
},
"$:/themes/tiddlywiki/vanilla/metrics/storytop": {
"title": "$:/themes/tiddlywiki/vanilla/metrics/storytop",
"text": "0px"
},
"$:/themes/tiddlywiki/vanilla/metrics/storyright": {
"title": "$:/themes/tiddlywiki/vanilla/metrics/storyright",
"text": "770px"
},
"$:/themes/tiddlywiki/vanilla/metrics/storywidth": {
"title": "$:/themes/tiddlywiki/vanilla/metrics/storywidth",
"text": "770px"
},
"$:/themes/tiddlywiki/vanilla/metrics/tiddlerwidth": {
"title": "$:/themes/tiddlywiki/vanilla/metrics/tiddlerwidth",
"text": "686px"
},
"$:/themes/tiddlywiki/vanilla/metrics/sidebarbreakpoint": {
"title": "$:/themes/tiddlywiki/vanilla/metrics/sidebarbreakpoint",
"text": "960px"
},
"$:/themes/tiddlywiki/vanilla/metrics/sidebarwidth": {
"title": "$:/themes/tiddlywiki/vanilla/metrics/sidebarwidth",
"text": "350px"
},
"$:/themes/tiddlywiki/vanilla/options/stickytitles": {
"title": "$:/themes/tiddlywiki/vanilla/options/stickytitles",
"text": "no"
},
"$:/themes/tiddlywiki/vanilla/options/sidebarlayout": {
"title": "$:/themes/tiddlywiki/vanilla/options/sidebarlayout",
"text": "fixed-fluid"
},
"$:/themes/tiddlywiki/vanilla/options/codewrapping": {
"title": "$:/themes/tiddlywiki/vanilla/options/codewrapping",
"text": "pre-wrap"
},
"$:/themes/tiddlywiki/vanilla/reset": {
"title": "$:/themes/tiddlywiki/vanilla/reset",
"type": "text/plain",
"text": "/*! normalize.css v3.0.0 | MIT License | git.io/normalize */\n\n/**\n * 1. Set default font family to sans-serif.\n * 2. Prevent iOS text size adjust after orientation change, without disabling\n * user zoom.\n */\n\nhtml {\n font-family: sans-serif; /* 1 */\n -ms-text-size-adjust: 100%; /* 2 */\n -webkit-text-size-adjust: 100%; /* 2 */\n}\n\n/**\n * Remove default margin.\n */\n\nbody {\n margin: 0;\n}\n\n/* HTML5 display definitions\n ========================================================================== */\n\n/**\n * Correct `block` display not defined in IE 8/9.\n */\n\narticle,\naside,\ndetails,\nfigcaption,\nfigure,\nfooter,\nheader,\nhgroup,\nmain,\nnav,\nsection,\nsummary {\n display: block;\n}\n\n/**\n * 1. Correct `inline-block` display not defined in IE 8/9.\n * 2. Normalize vertical alignment of `progress` in Chrome, Firefox, and Opera.\n */\n\naudio,\ncanvas,\nprogress,\nvideo {\n display: inline-block; /* 1 */\n vertical-align: baseline; /* 2 */\n}\n\n/**\n * Prevent modern browsers from displaying `audio` without controls.\n * Remove excess height in iOS 5 devices.\n */\n\naudio:not([controls]) {\n display: none;\n height: 0;\n}\n\n/**\n * Address `[hidden]` styling not present in IE 8/9.\n * Hide the `template` element in IE, Safari, and Firefox < 22.\n */\n\n[hidden],\ntemplate {\n display: none;\n}\n\n/* Links\n ========================================================================== */\n\n/**\n * Remove the gray background color from active links in IE 10.\n */\n\na {\n background: transparent;\n}\n\n/**\n * Improve readability when focused and also mouse hovered in all browsers.\n */\n\na:active,\na:hover {\n outline: 0;\n}\n\n/* Text-level semantics\n ========================================================================== */\n\n/**\n * Address styling not present in IE 8/9, Safari 5, and Chrome.\n */\n\nabbr[title] {\n border-bottom: 1px dotted;\n}\n\n/**\n * Address style set to `bolder` in Firefox 4+, Safari 5, and Chrome.\n */\n\nb,\nstrong {\n font-weight: bold;\n}\n\n/**\n * Address styling not present in Safari 5 and Chrome.\n */\n\ndfn {\n font-style: italic;\n}\n\n/**\n * Address variable `h1` font-size and margin within `section` and `article`\n * contexts in Firefox 4+, Safari 5, and Chrome.\n */\n\nh1 {\n font-size: 2em;\n margin: 0.67em 0;\n}\n\n/**\n * Address styling not present in IE 8/9.\n */\n\nmark {\n background: #ff0;\n color: #000;\n}\n\n/**\n * Address inconsistent and variable font size in all browsers.\n */\n\nsmall {\n font-size: 80%;\n}\n\n/**\n * Prevent `sub` and `sup` affecting `line-height` in all browsers.\n */\n\nsub,\nsup {\n font-size: 75%;\n line-height: 0;\n position: relative;\n vertical-align: baseline;\n}\n\nsup {\n top: -0.5em;\n}\n\nsub {\n bottom: -0.25em;\n}\n\n/* Embedded content\n ========================================================================== */\n\n/**\n * Remove border when inside `a` element in IE 8/9.\n */\n\nimg {\n border: 0;\n}\n\n/**\n * Correct overflow displayed oddly in IE 9.\n */\n\nsvg:not(:root) {\n overflow: hidden;\n}\n\n/* Grouping content\n ========================================================================== */\n\n/**\n * Address margin not present in IE 8/9 and Safari 5.\n */\n\nfigure {\n margin: 1em 40px;\n}\n\n/**\n * Address differences between Firefox and other browsers.\n */\n\nhr {\n -moz-box-sizing: content-box;\n box-sizing: content-box;\n height: 0;\n}\n\n/**\n * Contain overflow in all browsers.\n */\n\npre {\n overflow: auto;\n}\n\n/**\n * Address odd 
`em`-unit font size rendering in all browsers.\n */\n\ncode,\nkbd,\npre,\nsamp {\n font-family: monospace, monospace;\n font-size: 1em;\n}\n\n/* Forms\n ========================================================================== */\n\n/**\n * Known limitation: by default, Chrome and Safari on OS X allow very limited\n * styling of `select`, unless a `border` property is set.\n */\n\n/**\n * 1. Correct color not being inherited.\n * Known issue: affects color of disabled elements.\n * 2. Correct font properties not being inherited.\n * 3. Address margins set differently in Firefox 4+, Safari 5, and Chrome.\n */\n\nbutton,\ninput,\noptgroup,\nselect,\ntextarea {\n color: inherit; /* 1 */\n font: inherit; /* 2 */\n margin: 0; /* 3 */\n}\n\n/**\n * Address `overflow` set to `hidden` in IE 8/9/10.\n */\n\nbutton {\n overflow: visible;\n}\n\n/**\n * Address inconsistent `text-transform` inheritance for `button` and `select`.\n * All other form control elements do not inherit `text-transform` values.\n * Correct `button` style inheritance in Firefox, IE 8+, and Opera\n * Correct `select` style inheritance in Firefox.\n */\n\nbutton,\nselect {\n text-transform: none;\n}\n\n/**\n * 1. Avoid the WebKit bug in Android 4.0.* where (2) destroys native `audio`\n * and `video` controls.\n * 2. Correct inability to style clickable `input` types in iOS.\n * 3. Improve usability and consistency of cursor style between image-type\n * `input` and others.\n */\n\nbutton,\nhtml input[type=\"button\"], /* 1 */\ninput[type=\"reset\"],\ninput[type=\"submit\"] {\n -webkit-appearance: button; /* 2 */\n cursor: pointer; /* 3 */\n}\n\n/**\n * Re-set default cursor for disabled elements.\n */\n\nbutton[disabled],\nhtml input[disabled] {\n cursor: default;\n}\n\n/**\n * Remove inner padding and border in Firefox 4+.\n */\n\nbutton::-moz-focus-inner,\ninput::-moz-focus-inner {\n border: 0;\n padding: 0;\n}\n\n/**\n * Address Firefox 4+ setting `line-height` on `input` using `!important` in\n * the UA stylesheet.\n */\n\ninput {\n line-height: normal;\n}\n\n/**\n * It's recommended that you don't attempt to style these elements.\n * Firefox's implementation doesn't respect box-sizing, padding, or width.\n *\n * 1. Address box sizing set to `content-box` in IE 8/9/10.\n * 2. Remove excess padding in IE 8/9/10.\n */\n\ninput[type=\"checkbox\"],\ninput[type=\"radio\"] {\n box-sizing: border-box; /* 1 */\n padding: 0; /* 2 */\n}\n\n/**\n * Fix the cursor style for Chrome's increment/decrement buttons. For certain\n * `font-size` values of the `input`, it causes the cursor style of the\n * decrement button to change from `default` to `text`.\n */\n\ninput[type=\"number\"]::-webkit-inner-spin-button,\ninput[type=\"number\"]::-webkit-outer-spin-button {\n height: auto;\n}\n\n/**\n * 1. Address `appearance` set to `searchfield` in Safari 5 and Chrome.\n * 2. 
Address `box-sizing` set to `border-box` in Safari 5 and Chrome\n * (include `-moz` to future-proof).\n */\n\ninput[type=\"search\"] {\n -webkit-appearance: textfield; /* 1 */\n -moz-box-sizing: content-box;\n -webkit-box-sizing: content-box; /* 2 */\n box-sizing: content-box;\n}\n\n/**\n * Remove inner padding and search cancel button in Safari and Chrome on OS X.\n * Safari (but not Chrome) clips the cancel button when the search input has\n * padding (and `textfield` appearance).\n */\n\ninput[type=\"search\"]::-webkit-search-cancel-button,\ninput[type=\"search\"]::-webkit-search-decoration {\n -webkit-appearance: none;\n}\n\n/**\n * Define consistent border, margin, and padding.\n */\n\nfieldset {\n border: 1px solid #c0c0c0;\n margin: 0 2px;\n padding: 0.35em 0.625em 0.75em;\n}\n\n/**\n * 1. Correct `color` not being inherited in IE 8/9.\n * 2. Remove padding so people aren't caught out if they zero out fieldsets.\n */\n\nlegend {\n border: 0; /* 1 */\n padding: 0; /* 2 */\n}\n\n/**\n * Remove default vertical scrollbar in IE 8/9.\n */\n\ntextarea {\n overflow: auto;\n}\n\n/**\n * Don't inherit the `font-weight` (applied by a rule above).\n * NOTE: the default cannot safely be changed in Chrome and Safari on OS X.\n */\n\noptgroup {\n font-weight: bold;\n}\n\n/* Tables\n ========================================================================== */\n\n/**\n * Remove most spacing between table cells.\n */\n\ntable {\n border-collapse: collapse;\n border-spacing: 0;\n}\n\ntd,\nth {\n padding: 0;\n}\n"
},
"$:/themes/tiddlywiki/vanilla/settings/fontfamily": {
"title": "$:/themes/tiddlywiki/vanilla/settings/fontfamily",
"text": "\"Helvetica Neue\", Helvetica, Arial, \"Lucida Grande\", \"DejaVu Sans\", sans-serif"
},
"$:/themes/tiddlywiki/vanilla/settings/codefontfamily": {
"title": "$:/themes/tiddlywiki/vanilla/settings/codefontfamily",
"text": "Monaco, Consolas, \"Lucida Console\", \"DejaVu Sans Mono\", monospace"
},
"$:/themes/tiddlywiki/vanilla/settings/backgroundimageattachment": {
"title": "$:/themes/tiddlywiki/vanilla/settings/backgroundimageattachment",
"text": "fixed"
},
"$:/themes/tiddlywiki/vanilla/settings/backgroundimagesize": {
"title": "$:/themes/tiddlywiki/vanilla/settings/backgroundimagesize",
"text": "auto"
},
"$:/themes/tiddlywiki/vanilla/sticky": {
"title": "$:/themes/tiddlywiki/vanilla/sticky",
"text": "<$reveal state=\"$:/themes/tiddlywiki/vanilla/options/stickytitles\" type=\"match\" text=\"yes\">\n``\n.tc-tiddler-title {\n\tposition: -webkit-sticky;\n\tposition: -moz-sticky;\n\tposition: -o-sticky;\n\tposition: -ms-sticky;\n\tposition: sticky;\n\ttop: 0px;\n\tbackground: ``<<colour tiddler-background>>``;\n\tz-index: 500;\n}\n``\n</$reveal>\n"
},
"$:/themes/tiddlywiki/vanilla/themetweaks": {
"title": "$:/themes/tiddlywiki/vanilla/themetweaks",
"tags": "$:/tags/ControlPanel/Appearance",
"caption": "{{$:/language/ThemeTweaks/ThemeTweaks}}",
"text": "\\define lingo-base() $:/language/ThemeTweaks/\n\n\\define replacement-text()\n[img[$(imageTitle)$]]\n\\end\n\n\\define backgroundimage-dropdown()\n<div class=\"tc-drop-down-wrapper\">\n<$button popup=<<qualify \"$:/state/popup/themetweaks/backgroundimage\">> class=\"tc-btn-invisible tc-btn-dropdown\">{{$:/core/images/down-arrow}}</$button>\n<$reveal state=<<qualify \"$:/state/popup/themetweaks/backgroundimage\">> type=\"popup\" position=\"belowleft\" text=\"\" default=\"\">\n<div class=\"tc-drop-down\">\n<$macrocall $name=\"image-picker\" actions=\"\"\"\n\n<$action-setfield\n\t$tiddler=\"$:/themes/tiddlywiki/vanilla/settings/backgroundimage\"\n\t$value=<<imageTitle>>\n/>\n\n\"\"\"/>\n</div>\n</$reveal>\n</div>\n\\end\n\n\\define backgroundimageattachment-dropdown()\n<$select tiddler=\"$:/themes/tiddlywiki/vanilla/settings/backgroundimageattachment\" default=\"scroll\">\n<option value=\"scroll\"><<lingo Settings/BackgroundImageAttachment/Scroll>></option>\n<option value=\"fixed\"><<lingo Settings/BackgroundImageAttachment/Fixed>></option>\n</$select>\n\\end\n\n\\define backgroundimagesize-dropdown()\n<$select tiddler=\"$:/themes/tiddlywiki/vanilla/settings/backgroundimagesize\" default=\"scroll\">\n<option value=\"auto\"><<lingo Settings/BackgroundImageSize/Auto>></option>\n<option value=\"cover\"><<lingo Settings/BackgroundImageSize/Cover>></option>\n<option value=\"contain\"><<lingo Settings/BackgroundImageSize/Contain>></option>\n</$select>\n\\end\n\n<<lingo ThemeTweaks/Hint>>\n\n! <<lingo Options>>\n\n|<$link to=\"$:/themes/tiddlywiki/vanilla/options/sidebarlayout\"><<lingo Options/SidebarLayout>></$link> |<$select tiddler=\"$:/themes/tiddlywiki/vanilla/options/sidebarlayout\"><option value=\"fixed-fluid\"><<lingo Options/SidebarLayout/Fixed-Fluid>></option><option value=\"fluid-fixed\"><<lingo Options/SidebarLayout/Fluid-Fixed>></option></$select> |\n|<$link to=\"$:/themes/tiddlywiki/vanilla/options/stickytitles\"><<lingo Options/StickyTitles>></$link><br>//<<lingo Options/StickyTitles/Hint>>// |<$select tiddler=\"$:/themes/tiddlywiki/vanilla/options/stickytitles\"><option value=\"no\">{{$:/language/No}}</option><option value=\"yes\">{{$:/language/Yes}}</option></$select> |\n|<$link to=\"$:/themes/tiddlywiki/vanilla/options/codewrapping\"><<lingo Options/CodeWrapping>></$link> |<$select tiddler=\"$:/themes/tiddlywiki/vanilla/options/codewrapping\"><option value=\"pre\">{{$:/language/No}}</option><option value=\"pre-wrap\">{{$:/language/Yes}}</option></$select> |\n\n! 
<<lingo Settings>>\n\n|<$link to=\"$:/themes/tiddlywiki/vanilla/settings/fontfamily\"><<lingo Settings/FontFamily>></$link> |<$edit-text tiddler=\"$:/themes/tiddlywiki/vanilla/settings/fontfamily\" default=\"\" tag=\"input\"/> | |\n|<$link to=\"$:/themes/tiddlywiki/vanilla/settings/codefontfamily\"><<lingo Settings/CodeFontFamily>></$link> |<$edit-text tiddler=\"$:/themes/tiddlywiki/vanilla/settings/codefontfamily\" default=\"\" tag=\"input\"/> | |\n|<$link to=\"$:/themes/tiddlywiki/vanilla/settings/backgroundimage\"><<lingo Settings/BackgroundImage>></$link> |<$edit-text tiddler=\"$:/themes/tiddlywiki/vanilla/settings/backgroundimage\" default=\"\" tag=\"input\"/> |<<backgroundimage-dropdown>> |\n|<$link to=\"$:/themes/tiddlywiki/vanilla/settings/backgroundimageattachment\"><<lingo Settings/BackgroundImageAttachment>></$link> |<<backgroundimageattachment-dropdown>> | |\n|<$link to=\"$:/themes/tiddlywiki/vanilla/settings/backgroundimagesize\"><<lingo Settings/BackgroundImageSize>></$link> |<<backgroundimagesize-dropdown>> | |\n\n! <<lingo Metrics>>\n\n|<$link to=\"$:/themes/tiddlywiki/vanilla/metrics/fontsize\"><<lingo Metrics/FontSize>></$link> |<$edit-text tiddler=\"$:/themes/tiddlywiki/vanilla/metrics/fontsize\" default=\"\" tag=\"input\"/> |\n|<$link to=\"$:/themes/tiddlywiki/vanilla/metrics/lineheight\"><<lingo Metrics/LineHeight>></$link> |<$edit-text tiddler=\"$:/themes/tiddlywiki/vanilla/metrics/lineheight\" default=\"\" tag=\"input\"/> |\n|<$link to=\"$:/themes/tiddlywiki/vanilla/metrics/bodyfontsize\"><<lingo Metrics/BodyFontSize>></$link> |<$edit-text tiddler=\"$:/themes/tiddlywiki/vanilla/metrics/bodyfontsize\" default=\"\" tag=\"input\"/> |\n|<$link to=\"$:/themes/tiddlywiki/vanilla/metrics/bodylineheight\"><<lingo Metrics/BodyLineHeight>></$link> |<$edit-text tiddler=\"$:/themes/tiddlywiki/vanilla/metrics/bodylineheight\" default=\"\" tag=\"input\"/> |\n|<$link to=\"$:/themes/tiddlywiki/vanilla/metrics/storyleft\"><<lingo Metrics/StoryLeft>></$link><br>//<<lingo Metrics/StoryLeft/Hint>>// |^<$edit-text tiddler=\"$:/themes/tiddlywiki/vanilla/metrics/storyleft\" default=\"\" tag=\"input\"/> |\n|<$link to=\"$:/themes/tiddlywiki/vanilla/metrics/storytop\"><<lingo Metrics/StoryTop>></$link><br>//<<lingo Metrics/StoryTop/Hint>>// |^<$edit-text tiddler=\"$:/themes/tiddlywiki/vanilla/metrics/storytop\" default=\"\" tag=\"input\"/> |\n|<$link to=\"$:/themes/tiddlywiki/vanilla/metrics/storyright\"><<lingo Metrics/StoryRight>></$link><br>//<<lingo Metrics/StoryRight/Hint>>// |^<$edit-text tiddler=\"$:/themes/tiddlywiki/vanilla/metrics/storyright\" default=\"\" tag=\"input\"/> |\n|<$link to=\"$:/themes/tiddlywiki/vanilla/metrics/storywidth\"><<lingo Metrics/StoryWidth>></$link><br>//<<lingo Metrics/StoryWidth/Hint>>// |^<$edit-text tiddler=\"$:/themes/tiddlywiki/vanilla/metrics/storywidth\" default=\"\" tag=\"input\"/> |\n|<$link to=\"$:/themes/tiddlywiki/vanilla/metrics/tiddlerwidth\"><<lingo Metrics/TiddlerWidth>></$link><br>//<<lingo Metrics/TiddlerWidth/Hint>>//<br> |^<$edit-text tiddler=\"$:/themes/tiddlywiki/vanilla/metrics/tiddlerwidth\" default=\"\" tag=\"input\"/> |\n|<$link to=\"$:/themes/tiddlywiki/vanilla/metrics/sidebarbreakpoint\"><<lingo Metrics/SidebarBreakpoint>></$link><br>//<<lingo Metrics/SidebarBreakpoint/Hint>>// |^<$edit-text tiddler=\"$:/themes/tiddlywiki/vanilla/metrics/sidebarbreakpoint\" default=\"\" tag=\"input\"/> |\n|<$link to=\"$:/themes/tiddlywiki/vanilla/metrics/sidebarwidth\"><<lingo Metrics/SidebarWidth>></$link><br>//<<lingo 
Metrics/SidebarWidth/Hint>>// |^<$edit-text tiddler=\"$:/themes/tiddlywiki/vanilla/metrics/sidebarwidth\" default=\"\" tag=\"input\"/> |\n"
}
}
}
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211237.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-1-2|PAPER Fri-M-O-1-2 — ETLT 2021: Shared Task on Automatic Speech Recognition for Non-Native Children’s Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ETLT 2021: Shared Task on Automatic Speech Recognition for Non-Native Children’s Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212227.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-3-5|PAPER Wed-E-O-3-5 — Audio Retrieval with Natural Language Queries]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio Retrieval with Natural Language Queries</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211767.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-12|PAPER Wed-A-V-2-12 — A Discriminative Entity-Aware Language Model for Virtual Assistants]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Discriminative Entity-Aware Language Model for Virtual Assistants</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212217.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-1-10|PAPER Thu-A-V-1-10 — Acoustic Features and Neural Representations for Categorical Emotion Recognition from Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Features and Neural Representations for Categorical Emotion Recognition from Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210046.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-2|PAPER Tue-M-V-3-2 — Temporal Convolutional Network with Frequency Dimension Adaptive Attention for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Temporal Convolutional Network with Frequency Dimension Adaptive Attention for Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210351.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-5-3|PAPER Tue-A-V-5-3 — Disfluency Detection with Unlabeled Data and Small BERT Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Disfluency Detection with Unlabeled Data and Small BERT Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211735.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-3-4|PAPER Thu-M-O-3-4 — Modeling Dialectal Variation for Swiss German Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modeling Dialectal Variation for Swiss German Automatic Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211775.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-9|PAPER Tue-E-V-3-9 — SUPERB: Speech Processing Universal PERformance Benchmark]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SUPERB: Speech Processing Universal PERformance Benchmark</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210329.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-3|PAPER Wed-E-V-1-3 — Unsupervised Cross-Lingual Representation Learning for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Cross-Lingual Representation Learning for Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210475.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-6|PAPER Thu-A-V-5-6 — Speech Resynthesis from Discrete Disentangled Self-Supervised Representations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Resynthesis from Discrete Disentangled Self-Supervised Representations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210332.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-3|PAPER Thu-A-SS-2-3 — WavBERT: Exploiting Semantic and Non-Semantic Speech Using Wav2vec and BERT for Dementia Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">WavBERT: Exploiting Semantic and Non-Semantic Speech Using Wav2vec and BERT for Dementia Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211429.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-7|PAPER Tue-E-V-3-7 — Raw Speech-to-Articulatory Inversion by Temporal Filtering and Decimation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Raw Speech-to-Articulatory Inversion by Temporal Filtering and Decimation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211974.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-3|PAPER Tue-E-V-5-3 — Low Resource German ASR with Untranscribed Data Spoken by Non-Native Children — INTERSPEECH 2021 Shared Task SPAPL System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Low Resource German ASR with Untranscribed Data Spoken by Non-Native Children — INTERSPEECH 2021 Shared Task SPAPL System</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211955.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-1|PAPER Thu-A-SS-1-1 — An Improved Single Step Non-Autoregressive Transformer for Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Improved Single Step Non-Autoregressive Transformer for Automatic Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211375.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-1|PAPER Tue-E-V-3-1 — Estimating Articulatory Movements in Speech Production with Transformer Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Estimating Articulatory Movements in Speech Production with Transformer Networks</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218033.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-S&T-1-8|PAPER Fri-A-S&T-1-8 — Web Interface for Estimating Articulatory Movements in Speech Production from Acoustics and Text]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Web Interface for Estimating Articulatory Movements in Speech Production from Acoustics and Text</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211395.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-13|PAPER Fri-M-V-6-13 — Teaching Keyword Spotters to Spot New Keywords with Limited Examples]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Teaching Keyword Spotters to Spot New Keywords with Limited Examples</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211929.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-1|PAPER Wed-A-V-2-1 — Semantic Distance: A New Metric for ASR Performance Analysis Towards Spoken Language Understanding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semantic Distance: A New Metric for ASR Performance Analysis Towards Spoken Language Understanding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211229.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-7|PAPER Wed-E-V-6-7 — Perception of Social Speaker Characteristics in Synthetic Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Perception of Social Speaker Characteristics in Synthetic Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211578.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-9|PAPER Wed-E-V-1-9 — SRI-B End-to-End System for Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SRI-B End-to-End System for Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212135.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-1|PAPER Wed-A-V-1-1 — Cross-Modal Learning for Audio-Visual Video Parsing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Modal Learning for Audio-Visual Video Parsing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210467.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-5|PAPER Tue-E-V-5-5 — Speaker Normalization Using Joint Variational Autoencoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Normalization Using Joint Variational Autoencoder</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211578.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-9|PAPER Wed-E-V-1-9 — SRI-B End-to-End System for Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SRI-B End-to-End System for Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210294.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-4-3|PAPER Wed-E-V-4-3 — An Experiment in Paratone Detection in a Prosodically Annotated EAP Spoken Corpus]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Experiment in Paratone Detection in a Prosodically Annotated EAP Spoken Corpus</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210023.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-8|PAPER Tue-A-V-2-8 — Silent versus Modal Multi-Speaker Speech Recognition from Ultrasound and Video]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Silent versus Modal Multi-Speaker Speech Recognition from Ultrasound and Video</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211555.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-10|PAPER Wed-M-V-4-10 — Improving the Expressiveness of Neural Vocoding with Non-Affine Normalizing Flows]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving the Expressiveness of Neural Vocoding with Non-Affine Normalizing Flows</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210069.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-6|PAPER Thu-M-V-1-6 — Assessing Posterior-Based Mispronunciation Detection on Field-Collected Recordings from Child Speech Therapy Sessions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Assessing Posterior-Based Mispronunciation Detection on Field-Collected Recordings from Child Speech Therapy Sessions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211746.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-6-8|PAPER Thu-M-V-6-8 — Modeling Sensorimotor Adaptation in Speech Through Alterations to Forward and Inverse Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modeling Sensorimotor Adaptation in Speech Through Alterations to Forward and Inverse Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210475.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-6|PAPER Thu-A-V-5-6 — Speech Resynthesis from Discrete Disentangled Self-Supervised Representations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Resynthesis from Discrete Disentangled Self-Supervised Representations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210939.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-2-2|PAPER Tue-E-O-2-2 — Synchronic Fortition in Five Romance Languages? A Large Corpus-Based Study of Word-Initial Devoicing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Synchronic Fortition in Five Romance Languages? A Large Corpus-Based Study of Word-Initial Devoicing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210742.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-3-2|PAPER Tue-A-V-3-2 — Alpha-Stable Autoregressive Fast Multichannel Nonnegative Matrix Factorization for Joint Speech Enhancement and Dereverberation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Alpha-Stable Autoregressive Fast Multichannel Nonnegative Matrix Factorization for Joint Speech Enhancement and Dereverberation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212011.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-1-4|PAPER Tue-E-O-1-4 — Fearless Steps Challenge Phase-3 (FSC P3): Advancing SLT for Unseen Channel and Mission Data Across NASA Apollo Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fearless Steps Challenge Phase-3 (FSC P3): Advancing SLT for Unseen Channel and Mission Data Across NASA Apollo Audio</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-3|PAPER Tue-A-V-2-3 — A Comparative Study of Different EMG Features for Acoustics-to-EMG Mapping]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparative Study of Different EMG Features for Acoustics-to-EMG Mapping</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210201.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-10|PAPER Wed-A-V-6-10 — Towards Simultaneous Machine Interpretation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Simultaneous Machine Interpretation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211905.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-10|PAPER Thu-A-V-6-10 — Europarl-ASR: A Large Corpus of Parliamentary Debates for Streaming ASR Benchmarking and Speech Data Filtering/Verbatimization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Europarl-ASR: A Large Corpus of Parliamentary Debates for Streaming ASR Benchmarking and Speech Data Filtering/Verbatimization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211052.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-9|PAPER Tue-A-SS-2-9 — Cough-Based COVID-19 Detection with Contextual Attention Convolutional Neural Networks and Gender Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cough-Based COVID-19 Detection with Contextual Attention Convolutional Neural Networks and Gender Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211465.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-1-1|PAPER Tue-E-O-1-1 — Information Retrieval for ZeroSpeech 2021: The Submission by University of Wroclaw]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Information Retrieval for ZeroSpeech 2021: The Submission by University of Wroclaw</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211544.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-1-2|PAPER Tue-E-O-1-2 — Aligned Contrastive Predictive Coding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Aligned Contrastive Predictive Coding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211481.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-2-9|PAPER Fri-M-V-2-9 — Speech Perception and Loanword Adaptations: The Case of Copy-Vowel Epenthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Perception and Loanword Adaptations: The Case of Copy-Vowel Epenthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210294.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-4-3|PAPER Wed-E-V-4-3 — An Experiment in Paratone Detection in a Prosodically Annotated EAP Spoken Corpus]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Experiment in Paratone Detection in a Prosodically Annotated EAP Spoken Corpus</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218005.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-S&T-1-5|PAPER Tue-A-S&T-1-5 — The LIUM Human Active Correction Platform for Speaker Diarization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The LIUM Human Active Correction Platform for Speaker Diarization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211796.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-7|PAPER Wed-A-V-1-7 — Investigating the Interplay Between Affective, Phonatory and Motoric Subsystems in Autism Spectrum Disorder Using a Multimodal Dialogue Agent]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Interplay Between Affective, Phonatory and Motoric Subsystems in Autism Spectrum Disorder Using a Multimodal Dialogue Agent</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210524.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-4|PAPER Fri-M-V-5-4 — Fake Audio Detection in Resource-Constrained Settings Using Microfeatures]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fake Audio Detection in Resource-Constrained Settings Using Microfeatures</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211809.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-11|PAPER Wed-E-V-1-11 — Towards One Model to Rule All: Multilingual Strategy for Dialectal Code-Switching Arabic ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards One Model to Rule All: Multilingual Strategy for Dialectal Code-Switching Arabic ASR</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211809.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-11|PAPER Wed-E-V-1-11 — Towards One Model to Rule All: Multilingual Strategy for Dialectal Code-Switching Arabic ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards One Model to Rule All: Multilingual Strategy for Dialectal Code-Switching Arabic ASR</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212231.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-11|PAPER Thu-A-V-2-11 — Arabic Code-Switching Speech Recognition Using Monolingual Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Arabic Code-Switching Speech Recognition Using Monolingual Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210378.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-2|PAPER Fri-M-V-5-2 — A Multi-Branch Deep Learning Network for Automated Detection of COVID-19]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Multi-Branch Deep Learning Network for Automated Detection of COVID-19</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212124.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-10|PAPER Wed-E-V-6-10 — KazakhTTS: An Open-Source Kazakh Text-to-Speech Synthesis Dataset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">KazakhTTS: An Open-Source Kazakh Text-to-Speech Synthesis Dataset</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210327.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-2|PAPER Wed-M-V-3-2 — Cross-Lingual Low Resource Speaker Adaptation Using Phonological Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Lingual Low Resource Speaker Adaptation Using Phonological Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212190.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-11|PAPER Fri-A-V-4-11 — Masked Proxy Loss for Text-Independent Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Masked Proxy Loss for Text-Independent Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211155.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-4|PAPER Tue-A-V-2-4 — Image-Based Assessment of Jaw Parameters and Jaw Kinematics for Articulatory Simulation: Preliminary Results]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Image-Based Assessment of Jaw Parameters and Jaw Kinematics for Articulatory Simulation: Preliminary Results</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211339.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-7|PAPER Wed-E-V-1-7 — MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210731.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-6|PAPER Tue-M-V-6-6 — Enrollment-Less Training for Personalized Voice Activity Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Enrollment-Less Training for Personalized Voice Activity Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211607.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-5-5|PAPER Tue-A-V-5-5 — Zero-Shot Joint Modeling of Multiple Spoken-Text-Style Conversion Tasks Using Switching Tokens]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Zero-Shot Joint Modeling of Multiple Spoken-Text-Style Conversion Tasks Using Switching Tokens</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-13|PAPER Wed-E-V-3-13 — Unified Autoregressive Modeling for Joint End-to-End Multi-Talker Overlapped Speech Recognition and Speaker Attribute Estimation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unified Autoregressive Modeling for Joint End-to-End Multi-Talker Overlapped Speech Recognition and Speaker Attribute Estimation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211992.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-6|PAPER Fri-M-V-3-6 — Cross-Modal Transformer-Based Neural Correction Models for Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Modal Transformer-Based Neural Correction Models for Automatic Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211981.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-11|PAPER Fri-A-V-1-11 — End-to-End Rich Transcription-Style Automatic Speech Recognition with Semi-Supervised Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Rich Transcription-Style Automatic Speech Recognition with Semi-Supervised Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210381.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-4-5|PAPER Tue-M-V-4-5 — Neural Spoken-Response Generation Using Prosodic and Linguistic Context for Conversational Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Neural Spoken-Response Generation Using Prosodic and Linguistic Context for Conversational Systems</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211132.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-14|PAPER Fri-A-V-1-14 — Improvement of Automatic English Pronunciation Assessment with Small Number of Utterances Using Sentence Speakability]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improvement of Automatic English Pronunciation Assessment with Small Number of Utterances Using Sentence Speakability</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210792.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-7|PAPER Tue-M-V-6-7 — Voice Activity Detection for Live Speech of Baseball Game Based on Tandem Connection with Speech/Noise Separation Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voice Activity Detection for Live Speech of Baseball Game Based on Tandem Connection with Speech/Noise Separation Model</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210390.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-4|PAPER Wed-E-V-1-4 — Language and Speaker-Independent Feature Transformation for End-to-End Multilingual Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Language and Speaker-Independent Feature Transformation for End-to-End Multilingual Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210492.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-8|PAPER Tue-E-V-6-8 — StarGAN-VC+ASR: StarGAN-Based Non-Parallel Voice Conversion Regularized by Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">StarGAN-VC+ASR: StarGAN-Based Non-Parallel Voice Conversion Regularized by Automatic Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210231.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-6-1|PAPER Thu-M-V-6-1 — A Simplified Model for the Vocal Tract of [s] with Inclined Incisors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Simplified Model for the Vocal Tract of [s] with Inclined Incisors</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210929.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-6-4|PAPER Thu-M-V-6-4 — Comparison Between Lumped-Mass Modeling and Flow Simulation of the Reed-Type Artificial Vocal Fold]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparison Between Lumped-Mass Modeling and Flow Simulation of the Reed-Type Artificial Vocal Fold</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218022.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-S&T-1-7|PAPER Thu-M-S&T-1-7 — ViSTAFAE: A Visual Speech-Training Aid with Feedback of Articulatory Efforts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ViSTAFAE: A Visual Speech-Training Aid with Feedback of Articulatory Efforts</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211710.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-3|PAPER Wed-M-O-1-3 — Self-Supervised End-to-End ASR for Low Resource L2 Swedish]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Self-Supervised End-to-End ASR for Low Resource L2 Swedish</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211796.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-7|PAPER Wed-A-V-1-7 — Investigating the Interplay Between Affective, Phonatory and Motoric Subsystems in Autism Spectrum Disorder Using a Multimodal Dialogue Agent]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Interplay Between Affective, Phonatory and Motoric Subsystems in Autism Spectrum Disorder Using a Multimodal Dialogue Agent</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211211.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-8|PAPER Fri-A-V-4-8 — Presentation Matters: Evaluating Speaker Identification Tasks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Presentation Matters: Evaluating Speaker Identification Tasks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211807.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-9|PAPER Thu-A-V-4-9 — Speaker Diarization Using Two-Pass Leave-One-Out Gaussian PLDA Clustering of DNN Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Diarization Using Two-Pass Leave-One-Out Gaussian PLDA Clustering of DNN Embeddings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211923.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-5-8|PAPER Tue-A-V-5-8 — Multimodal Speech Summarization Through Semantic Concept Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multimodal Speech Summarization Through Semantic Concept Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211537.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-10|PAPER Tue-E-V-4-10 — Rethinking End-to-End Evaluation of Decomposable Tasks: A Case Study on Spoken Language Understanding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Rethinking End-to-End Evaluation of Decomposable Tasks: A Case Study on Spoken Language Understanding</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211803.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-10|PAPER Wed-E-V-1-10 — Hierarchical Phone Recognition with Compositional Phonetics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Hierarchical Phone Recognition with Compositional Phonetics</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210125.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-2-5|PAPER Fri-M-O-2-5 — Prosodic Boundary Prediction Model for Vietnamese Text-To-Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prosodic Boundary Prediction Model for Vietnamese Text-To-Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210201.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-10|PAPER Wed-A-V-6-10 — Towards Simultaneous Machine Interpretation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Simultaneous Machine Interpretation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211905.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-10|PAPER Thu-A-V-6-10 — Europarl-ASR: A Large Corpus of Parliamentary Debates for Streaming ASR Benchmarking and Speech Data Filtering/Verbatimization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Europarl-ASR: A Large Corpus of Parliamentary Debates for Streaming ASR Benchmarking and Speech Data Filtering/Verbatimization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210541.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-5|PAPER Tue-E-V-1-5 — Out of a Hundred Trials, How Many Errors Does Your Speaker Verifier Make?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Out of a Hundred Trials, How Many Errors Does Your Speaker Verifier Make?</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211510.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-3|PAPER Wed-A-V-3-3 — Librispeech Transducer Model with Internal Language Model Prior Correction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Librispeech Transducer Model with Internal Language Model Prior Correction</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211255.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-1-5|PAPER Thu-M-O-1-5 — Investigating Methods to Improve Language Model Integration for Attention-Based Encoder-Decoder ASR Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Methods to Improve Language Model Integration for Attention-Based Encoder-Decoder ASR Models</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211671.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-3-3|PAPER Thu-M-O-3-3 — Equivalence of Segmental and Neural Transducer Modeling: A Proof of Concept]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Equivalence of Segmental and Neural Transducer Modeling: A Proof of Concept</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211702.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-2|PAPER Tue-M-SS-1-2 — Transfer Learning-Based Cough Representations for Automatic Detection of COVID-19]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transfer Learning-Based Cough Representations for Automatic Detection of COVID-19</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211717.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-9|PAPER Wed-E-V-2-9 — Visual Speech for Obstructive Sleep Apnea Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Visual Speech for Obstructive Sleep Apnea Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210645.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-3-2|PAPER Tue-M-O-3-2 — Fricative Phoneme Detection Using Deep Neural Networks and its Comparison to Traditional Methods]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fricative Phoneme Detection Using Deep Neural Networks and its Comparison to Traditional Methods</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210633.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-3-1|PAPER Tue-A-V-3-1 — LACOPE: Latency-Constrained Pitch Estimation for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">LACOPE: Latency-Constrained Pitch Estimation for Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210228.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-2-2|PAPER Tue-A-O-2-2 — Variation in Perceptual Sensitivity and Compensation for Coarticulation Across Adult and Child Naturally-Produced and TTS Voices]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Variation in Perceptual Sensitivity and Compensation for Coarticulation Across Adult and Child Naturally-Produced and TTS Voices</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^{{$:/causal/NO-PDF Marker}}|^<div class="cpauthorindexpersoncardpapercode">[[Fri-Survey|PAPER Fri-Survey — Child Language Acquisition Studied with Wearables]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Child Language Acquisition Studied with Wearables</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211062.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-2|PAPER Tue-A-SS-2-2 — PANACEA Cough Sound-Based Diagnosis of COVID-19 for the DiCOVA 2021 Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PANACEA Cough Sound-Based Diagnosis of COVID-19 for the DiCOVA 2021 Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210201.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-10|PAPER Wed-A-V-6-10 — Towards Simultaneous Machine Interpretation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Simultaneous Machine Interpretation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211905.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-10|PAPER Thu-A-V-6-10 — Europarl-ASR: A Large Corpus of Parliamentary Debates for Streaming ASR Benchmarking and Speech Data Filtering/Verbatimization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Europarl-ASR: A Large Corpus of Parliamentary Debates for Streaming ASR Benchmarking and Speech Data Filtering/Verbatimization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211255.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-1-5|PAPER Thu-M-O-1-5 — Investigating Methods to Improve Language Model Integration for Attention-Based Encoder-Decoder ASR Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Methods to Improve Language Model Integration for Attention-Based Encoder-Decoder ASR Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211716.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-2|PAPER Fri-M-V-3-2 — LT-LM: A Novel Non-Autoregressive Language Model for Single-Shot Lattice Rescoring]]</div>|^<div class="cpauthorindexpersoncardpapertitle">LT-LM: A Novel Non-Autoregressive Language Model for Single-Shot Lattice Rescoring</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211737.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-SS-1-3|PAPER Wed-A-SS-1-3 — SdSVC Challenge 2021: Tips and Tricks to Boost the Short-Duration Speaker Verification System Performance]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SdSVC Challenge 2021: Tips and Tricks to Boost the Short-Duration Speaker Verification System Performance</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211716.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-2|PAPER Fri-M-V-3-2 — LT-LM: A Novel Non-Autoregressive Language Model for Single-Shot Lattice Rescoring]]</div>|^<div class="cpauthorindexpersoncardpapertitle">LT-LM: A Novel Non-Autoregressive Language Model for Single-Shot Lattice Rescoring</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211716.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-2|PAPER Fri-M-V-3-2 — LT-LM: A Novel Non-Autoregressive Language Model for Single-Shot Lattice Rescoring]]</div>|^<div class="cpauthorindexpersoncardpapertitle">LT-LM: A Novel Non-Autoregressive Language Model for Single-Shot Lattice Rescoring</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211821.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-11|PAPER Tue-M-SS-1-11 — Ensemble-Within-Ensemble Classification for Escalation Prediction from Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ensemble-Within-Ensemble Classification for Escalation Prediction from Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211328.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-2-3|PAPER Wed-M-O-2-3 — An Exploration of the Acoustic Space of Rhotics and Laterals in Ruruuli]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Exploration of the Acoustic Space of Rhotics and Laterals in Ruruuli</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218009.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-S&T-1-2|PAPER Wed-A-S&T-1-2 — Live TV Subtitling Through Respeaking]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Live TV Subtitling Through Respeaking</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211704.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-O-2-4|PAPER Fri-A-O-2-4 — Spoken Term Detection and Relevance Score Estimation Using Dot-Product of Pronunciation Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spoken Term Detection and Relevance Score Estimation Using Dot-Product of Pronunciation Embeddings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211100.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-6|PAPER Wed-E-V-2-6 — Emotion Carrier Recognition from Personal Narratives]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Emotion Carrier Recognition from Personal Narratives</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210904.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-4|PAPER Wed-E-V-2-4 — Stacked Recurrent Neural Networks for Speech-Based Inference of Attachment Condition in School Age Children]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Stacked Recurrent Neural Networks for Speech-Based Inference of Attachment Condition in School Age Children</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210928.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-5|PAPER Wed-E-V-2-5 — Language or Paralanguage, This is the Problem: Comparing Depressed and Non-Depressed Speakers Through the Analysis of Gated Multimodal Units]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Language or Paralanguage, This is the Problem: Comparing Depressed and Non-Depressed Speakers Through the Analysis of Gated Multimodal Units</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211315.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-1-4|PAPER Fri-M-O-1-4 — Learning to Rank Microphones for Distant Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning to Rank Microphones for Distant Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211877.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-4|PAPER Tue-E-V-4-4 — DEXTER: Deep Encoding of External Knowledge for Named Entity Recognition in Virtual Assistants]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DEXTER: Deep Encoding of External Knowledge for Named Entity Recognition in Virtual Assistants</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218017.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-S&T-1-3|PAPER Thu-M-S&T-1-3 — The INGENIOUS Multilingual Operations App]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INGENIOUS Multilingual Operations App</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211114.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-1|PAPER Wed-M-V-6-1 — Super-Human Performance in Online Low-Latency Recognition of Conversational Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Super-Human Performance in Online Low-Latency Recognition of Conversational Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210216.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-2|PAPER Wed-E-V-1-2 — Efficient Weight Factorization for Multilingual Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Efficient Weight Factorization for Multilingual Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211272.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-1|PAPER Wed-A-V-3-1 — Dynamic Encoder Transducer: A Flexible Solution for Trading Off Accuracy for Latency]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dynamic Encoder Transducer: A Flexible Solution for Trading Off Accuracy for Latency</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211921.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-14|PAPER Wed-A-V-3-14 — Flexi-Transducer: Optimizing Latency, Accuracy and Compute for Multi-Domain On-Device Scenarios]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Flexi-Transducer: Optimizing Latency, Accuracy and Compute for Multi-Domain On-Device Scenarios</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211553.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-SS-1-1|PAPER Wed-A-SS-1-1 — The ID R&D System Description for Short-Duration Speaker Verification Challenge 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The ID R&D System Description for Short-Duration Speaker Verification Challenge 2021</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211619.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-2-5|PAPER Thu-M-SS-2-5 — Boosting of Contextual Information in ASR for Air-Traffic Call-Sign Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Boosting of Contextual Information in ASR for Air-Traffic Call-Sign Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210462.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-1|PAPER Wed-M-O-1-1 — Golos: Russian Dataset for Speech Research]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Golos: Russian Dataset for Speech Research</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211067.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-1-4|PAPER Wed-A-O-1-4 — On Sampling-Based Training Criteria for Neural Language Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On Sampling-Based Training Criteria for Neural Language Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210349.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-4|PAPER Thu-A-SS-1-4 — Non-Autoregressive Predictive Coding for Learning Speech Representations from Local Dependencies]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Non-Autoregressive Predictive Coding for Learning Speech Representations from Local Dependencies</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211732.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-O-1-4|PAPER Thu-A-O-1-4 — Importance of Parasagittal Sensor Information in Tongue Motion Capture Through a Diphonic Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Importance of Parasagittal Sensor Information in Tongue Motion Capture Through a Diphonic Analysis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210141.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-1|PAPER Fri-A-V-3-1 — Compressing 1D Time-Channel Separable Convolutions Using Sparse Random Ternary Matrices]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Compressing 1D Time-Channel Separable Convolutions Using Sparse Random Ternary Matrices</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211503.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-8|PAPER Wed-M-V-2-8 — Speech Representation Learning Combining Conformer CPC with Deep Cluster for the ZeroSpeech Challenge 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Representation Learning Combining Conformer CPC with Deep Cluster for the ZeroSpeech Challenge 2021</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211840.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-1-1|PAPER Thu-A-V-1-1 — Temporal Context in Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Temporal Context in Speech Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211446.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-11|PAPER Fri-A-V-5-11 — Synthesis of Expressive Speaking Styles with Limited Training Data in a Multi-Speaker, Prosody-Controllable Sequence-to-Sequence Architecture]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Synthesis of Expressive Speaking Styles with Limited Training Data in a Multi-Speaker, Prosody-Controllable Sequence-to-Sequence Architecture</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211801.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-2|PAPER Fri-A-SS-2-2 — Investigating the Utility of Multimodal Conversational Technology and Audiovisual Analytic Measures for the Assessment and Monitoring of Amyotrophic Lateral Sclerosis at Scale]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Utility of Multimodal Conversational Technology and Audiovisual Analytic Measures for the Assessment and Monitoring of Amyotrophic Lateral Sclerosis at Scale</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218016.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-S&T-1-2|PAPER Thu-M-S&T-1-2 — Articulatory Data Recorder: A Framework for Real-Time Articulatory Data Recording]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Articulatory Data Recorder: A Framework for Real-Time Articulatory Data Recording</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210223.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-2-3|PAPER Tue-M-O-2-3 — Distortion of Voiced Obstruents for Differential Diagnosis Between Parkinson’s Disease and Multiple System Atrophy]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Distortion of Voiced Obstruents for Differential Diagnosis Between Parkinson’s Disease and Multiple System Atrophy</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211466.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-3-6|PAPER Wed-A-O-3-6 — Neural Speaker Embeddings for Ultrasound-Based Silent Speech Interfaces]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Neural Speaker Embeddings for Ultrasound-Based Silent Speech Interfaces</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211583.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-2-3|PAPER Fri-M-O-2-3 — Ctrl-P: Temporal Control of Prosodic Variation for Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ctrl-P: Temporal Control of Prosodic Variation for Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211610.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-2-4|PAPER Fri-M-O-2-4 — ADEPT: A Dataset for Evaluating Prosody Transfer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ADEPT: A Dataset for Evaluating Prosody Transfer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210556.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-5|PAPER Wed-M-O-1-5 — //LeBenchmark//: A Reproducible Framework for Assessing Self-Supervised Representation Learning from Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">//LeBenchmark//: A Reproducible Framework for Assessing Self-Supervised Representation Learning from Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218005.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-S&T-1-5|PAPER Tue-A-S&T-1-5 — The LIUM Human Active Correction Platform for Speaker Diarization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The LIUM Human Active Correction Platform for Speaker Diarization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211735.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-3-4|PAPER Thu-M-O-3-4 — Modeling Dialectal Variation for Swiss German Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modeling Dialectal Variation for Swiss German Automatic Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211739.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-2-3|PAPER Thu-M-O-2-3 — M³: MultiModal Masking Applied to Sentiment Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">M³: MultiModal Masking Applied to Sentiment Analysis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210262.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-5|PAPER Tue-A-SS-1-5 — Privacy-Preserving Feature Extraction for Cloud-Based Wake Word Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Privacy-Preserving Feature Extraction for Cloud-Based Wake Word Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210236.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-4|PAPER Tue-A-V-4-4 — Robust wav2vec 2.0: Analyzing Domain Shift in Self-Supervised Pre-Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust wav2vec 2.0: Analyzing Domain Shift in Self-Supervised Pre-Training</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211912.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-3|PAPER Wed-A-V-6-3 — Large-Scale Self- and Semi-Supervised Learning for Speech Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Large-Scale Self- and Semi-Supervised Learning for Speech Translation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210329.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-3|PAPER Wed-E-V-1-3 — Unsupervised Cross-Lingual Representation Learning for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Cross-Lingual Representation Learning for Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211821.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-11|PAPER Tue-M-SS-1-11 — Ensemble-Within-Ensemble Classification for Escalation Prediction from Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ensemble-Within-Ensemble Classification for Escalation Prediction from Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211636.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-9|PAPER Thu-A-V-6-9 — Annotation Confidence vs. Training Sample Size: Trade-Off Solution for Partially-Continuous Categorical Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Annotation Confidence vs. Training Sample Size: Trade-Off Solution for Partially-Continuous Categorical Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211912.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-3|PAPER Wed-A-V-6-3 — Large-Scale Self- and Semi-Supervised Learning for Speech Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Large-Scale Self- and Semi-Supervised Learning for Speech Translation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210329.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-3|PAPER Wed-E-V-1-3 — Unsupervised Cross-Lingual Representation Learning for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Cross-Lingual Representation Learning for Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210528.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-7|PAPER Thu-A-V-5-7 — A Learned Conditional Prior for the VAE Acoustic Space of a TTS System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Learned Conditional Prior for the VAE Acoustic Space of a TTS System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210201.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-10|PAPER Wed-A-V-6-10 — Towards Simultaneous Machine Interpretation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Simultaneous Machine Interpretation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211905.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-10|PAPER Thu-A-V-6-10 — Europarl-ASR: A Large Corpus of Parliamentary Debates for Streaming ASR Benchmarking and Speech Data Filtering/Verbatimization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Europarl-ASR: A Large Corpus of Parliamentary Debates for Streaming ASR Benchmarking and Speech Data Filtering/Verbatimization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211085.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-1-3|PAPER Wed-E-O-1-3 — Log-Likelihood-Ratio Cost Function as Objective Loss for Speaker Verification Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Log-Likelihood-Ratio Cost Function as Objective Loss for Speaker Verification Systems</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210309.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-O-1-1|PAPER Fri-A-O-1-1 — Unsupervised Representation Learning for Speech Activity Detection in the Fearless Steps Challenge 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Representation Learning for Speech Activity Detection in the Fearless Steps Challenge 2021</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210047.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-3|PAPER Tue-E-V-3-3 — Speech Decomposition Based on a Hybrid Speech Model and Optimal Segmentation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Decomposition Based on a Hybrid Speech Model and Optimal Segmentation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210319.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-6|PAPER Tue-E-V-6-6 — StarGANv2-VC: A Diverse, Unsupervised, Non-Parallel Framework for Natural-Sounding Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">StarGANv2-VC: A Diverse, Unsupervised, Non-Parallel Framework for Natural-Sounding Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211399.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-O-2-2|PAPER Fri-A-O-2-2 — End-to-End Open Vocabulary Keyword Search]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Open Vocabulary Keyword Search</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210019.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-1|PAPER Tue-M-SS-1-1 — The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211123.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-5-8|PAPER Thu-M-V-5-8 — A Prototypical Network Approach for Evaluating Generated Emotional Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Prototypical Network Approach for Evaluating Generated Emotional Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210294.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-4-3|PAPER Wed-E-V-4-3 — An Experiment in Paratone Detection in a Prosodically Annotated EAP Spoken Corpus]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Experiment in Paratone Detection in a Prosodically Annotated EAP Spoken Corpus</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210086.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-2|PAPER Fri-M-V-1-2 — Detection of Lexical Stress Errors in Non-Native (L2) English with Data Augmentation and Attention]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detection of Lexical Stress Errors in Non-Native (L2) English with Data Augmentation and Attention</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210701.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-4|PAPER Wed-A-V-1-4 — Effects of Voice Type and Task on L2 Learners’ Awareness of Pronunciation Errors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effects of Voice Type and Task on L2 Learners’ Awareness of Pronunciation Errors</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-6|PAPER Tue-A-V-2-6 — Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211737.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-SS-1-3|PAPER Wed-A-SS-1-3 — SdSVC Challenge 2021: Tips and Tricks to Boost the Short-Duration Speaker Verification System Performance]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SdSVC Challenge 2021: Tips and Tricks to Boost the Short-Duration Speaker Verification System Performance</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211378.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-7|PAPER Thu-A-V-3-7 — AvaTr: One-Shot Speaker Extraction with Transformers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AvaTr: One-Shot Speaker Extraction with Transformers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211537.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-10|PAPER Tue-E-V-4-10 — Rethinking End-to-End Evaluation of Decomposable Tasks: A Case Study on Spoken Language Understanding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Rethinking End-to-End Evaluation of Decomposable Tasks: A Case Study on Spoken Language Understanding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211441.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-5|PAPER Wed-A-V-1-5 — Lexical Entrainment and Intra-Speaker Variability in Cooperative Dialogues]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Lexical Entrainment and Intra-Speaker Variability in Cooperative Dialogues</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212124.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-10|PAPER Wed-E-V-6-10 — KazakhTTS: An Open-Source Kazakh Text-to-Speech Synthesis Dataset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">KazakhTTS: An Open-Source Kazakh Text-to-Speech Synthesis Dataset</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211684.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-4-11|PAPER Wed-E-V-4-11 — Parsing Speech for Grouping and Prominence, and the Typology of Rhythm]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Parsing Speech for Grouping and Prominence, and the Typology of Rhythm</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211666.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-11|PAPER Fri-M-V-3-11 — Sequence-Level Confidence Classifier for ASR Utterance Accuracy and Application to Acoustic Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sequence-Level Confidence Classifier for ASR Utterance Accuracy and Application to Acoustic Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218005.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-S&T-1-5|PAPER Tue-A-S&T-1-5 — The LIUM Human Active Correction Platform for Speaker Diarization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The LIUM Human Active Correction Platform for Speaker Diarization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211063.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-SS-1-5|PAPER Fri-M-SS-1-5 — The TNT Team System Descriptions of Cantonese and Mongolian for IARPA OpenASR20]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The TNT Team System Descriptions of Cantonese and Mongolian for IARPA OpenASR20</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211487.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-7-7|PAPER Tue-M-V-7-7 — A Comparison of the Accuracy of Dissen and Keshet’s (2016) DeepFormants and Traditional LPC Methods for Semi-Automatic Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparison of the Accuracy of Dissen and Keshet’s (2016) DeepFormants and Traditional LPC Methods for Semi-Automatic Speaker Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211225.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-8|PAPER Wed-M-V-5-8 — Human Spoofing Detection Performance on Degraded Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Human Spoofing Detection Performance on Degraded Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211400.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-2-8|PAPER Fri-M-V-2-8 — Quantifying Vocal Tract Shape Variation and its Acoustic Impact: A Geometric Morphometric Approach]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Quantifying Vocal Tract Shape Variation and its Acoustic Impact: A Geometric Morphometric Approach</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211525.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-9|PAPER Wed-M-V-2-9 — Identifying Indicators of Vulnerability from Short Speech Segments Using Acoustic and Textual Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Identifying Indicators of Vulnerability from Short Speech Segments Using Acoustic and Textual Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210605.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-10|PAPER Tue-M-V-3-10 — A Spectro-Temporal Glimpsing Index (STGI) for Speech Intelligibility Prediction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Spectro-Temporal Glimpsing Index (STGI) for Speech Intelligibility Prediction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211882.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-9|PAPER Tue-A-SS-1-9 — SynthASR: Unlocking Synthetic Data for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SynthASR: Unlocking Synthetic Data for Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211466.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-3-6|PAPER Wed-A-O-3-6 — Neural Speaker Embeddings for Ultrasound-Based Silent Speech Interfaces]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Neural Speaker Embeddings for Ultrasound-Based Silent Speech Interfaces</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212197.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-11|PAPER Tue-A-SS-2-11 — Investigating Feature Selection and Explainability for COVID-19 Diagnostics from Cough Sounds]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Feature Selection and Explainability for COVID-19 Diagnostics from Cough Sounds</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211809.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-11|PAPER Wed-E-V-1-11 — Towards One Model to Rule All: Multilingual Strategy for Dialectal Code-Switching Arabic ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards One Model to Rule All: Multilingual Strategy for Dialectal Code-Switching Arabic ASR</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212231.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-11|PAPER Thu-A-V-2-11 — Arabic Code-Switching Speech Recognition Using Monolingual Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Arabic Code-Switching Speech Recognition Using Monolingual Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210722.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-1-6|PAPER Fri-A-SS-1-6 — Nonlinear Acoustic Echo Cancellation with Deep Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Nonlinear Acoustic Echo Cancellation with Deep Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211298.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-2|PAPER Wed-M-V-6-2 — Multiple Softmax Architecture for Streaming Multilingual End-to-End ASR Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multiple Softmax Architecture for Streaming Multilingual End-to-End ASR Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210087.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-9|PAPER Thu-M-V-4-9 — A Thousand Words are Worth More Than One Recording: //Word-Embedding// Based Speaker Change Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Thousand Words are Worth More Than One Recording: //Word-Embedding// Based Speaker Change Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210528.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-7|PAPER Thu-A-V-5-7 — A Learned Conditional Prior for the VAE Acoustic Space of a TTS System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Learned Conditional Prior for the VAE Acoustic Space of a TTS System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-6|PAPER Tue-A-V-2-6 — Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210083.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-1|PAPER Fri-M-V-5-1 — An Agent for Competing with Humans in a Deceptive Game Based on Vocal Cues]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Agent for Competing with Humans in a Deceptive Game Based on Vocal Cues</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210154.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-8|PAPER Tue-M-SS-1-8 — Introducing a Central African Primate Vocalisation Dataset for Automated Species Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Introducing a Central African Primate Vocalisation Dataset for Automated Species Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210688.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-SS-1-6|PAPER Tue-E-SS-1-6 — Articulatory Coordination for Speech Motor Tracking in Huntington Disease]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Articulatory Coordination for Speech Motor Tracking in Huntington Disease</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211694.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-3-1|PAPER Wed-A-O-3-1 — Automatically Detecting Errors and Disfluencies in Read Speech to Predict Cognitive Impairment in People with Parkinson’s Disease]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatically Detecting Errors and Disfluencies in Read Speech to Predict Cognitive Impairment in People with Parkinson’s Disease</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211373.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-2-4|PAPER Thu-M-SS-2-4 — Contextual Semi-Supervised Learning: An Approach to Leverage Air-Surveillance and Untranscribed ATC Data in ASR Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Contextual Semi-Supervised Learning: An Approach to Leverage Air-Surveillance and Untranscribed ATC Data in ASR Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211765.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-2-6|PAPER Tue-E-O-2-6 — Automatic Classification of Phonation Types in Spontaneous Speech: Towards a New Workflow for the Characterization of Speakers’ Voice Quality]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Classification of Phonation Types in Spontaneous Speech: Towards a New Workflow for the Characterization of Speakers’ Voice Quality</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211588.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-8|PAPER Thu-A-V-4-8 — Anonymous Speaker Clusters: Making Distinctions Between Anonymised Speech Recordings with Clustering Interface]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Anonymous Speaker Clusters: Making Distinctions Between Anonymised Speech Recordings with Clustering Interface</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210978.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-6|PAPER Wed-E-V-1-6 — Dual Script E2E Framework for Multilingual and Code-Switching ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dual Script E2E Framework for Multilingual and Code-Switching ASR</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210153.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-4|PAPER Tue-A-SS-1-4 — Communication-Efficient Agnostic Federated Averaging]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Communication-Efficient Agnostic Federated Averaging</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210654.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-6|PAPER Tue-A-V-4-6 — A Comparison of Supervised and Unsupervised Pre-Training of End-to-End Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparison of Supervised and Unsupervised Pre-Training of End-to-End Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210074.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-1|PAPER Tue-A-SS-2-1 — DiCOVA Challenge: Dataset, Task, and Baseline System for COVID-19 Diagnosis Using Acoustics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DiCOVA Challenge: Dataset, Task, and Baseline System for COVID-19 Diagnosis Using Acoustics</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212197.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-11|PAPER Tue-A-SS-2-11 — Investigating Feature Selection and Explainability for COVID-19 Diagnostics from Cough Sounds]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Feature Selection and Explainability for COVID-19 Diagnostics from Cough Sounds</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211821.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-11|PAPER Tue-M-SS-1-11 — Ensemble-Within-Ensemble Classification for Escalation Prediction from Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ensemble-Within-Ensemble Classification for Escalation Prediction from Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212007.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-12|PAPER Wed-A-V-6-12 — Optimally Encoding Inductive Biases into the Transformer Improves End-to-End Speech Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Optimally Encoding Inductive Biases into the Transformer Improves End-to-End Speech Translation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211774.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-12|PAPER Thu-A-V-5-12 — SC-GlowTTS: An Efficient Zero-Shot Multi-Speaker Text-To-Speech Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SC-GlowTTS: An Efficient Zero-Shot Multi-Speaker Text-To-Speech Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211569.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-3|PAPER Tue-E-V-4-3 — Sequential End-to-End Intent and Slot Label Classification and Localization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sequential End-to-End Intent and Slot Label Classification and Localization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211870.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-1-1|PAPER Fri-A-SS-1-1 — INTERSPEECH 2021 Acoustic Echo Cancellation Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">INTERSPEECH 2021 Acoustic Echo Cancellation Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210238.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-6|PAPER Tue-M-V-3-6 — Know Your Enemy, Know Yourself: A Unified Two-Stage Framework for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Know Your Enemy, Know Yourself: A Unified Two-Stage Framework for Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211137.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-2|PAPER Wed-E-SS-1-2 — A Simultaneous Denoising and Dereverberation Framework with Target Decoupling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Simultaneous Denoising and Dereverberation Framework with Target Decoupling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211510.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-3|PAPER Wed-A-V-3-3 — Librispeech Transducer Model with Internal Language Model Prior Correction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Librispeech Transducer Model with Internal Language Model Prior Correction</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211671.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-3-3|PAPER Thu-M-O-3-3 — Equivalence of Segmental and Neural Transducer Modeling: A Proof of Concept]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Equivalence of Segmental and Neural Transducer Modeling: A Proof of Concept</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212163.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-14|PAPER Wed-M-V-4-14 — Protecting Gender and Identity with Disentangled Speech Representations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Protecting Gender and Identity with Disentangled Speech Representations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211962.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-12|PAPER Wed-E-V-3-12 — 4-Bit Quantization of LSTM-Based Speech Recognition Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">4-Bit Quantization of LSTM-Based Speech Recognition Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210886.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-8|PAPER Wed-A-V-4-8 — CNN-Based Processing of Acoustic and Radio Frequency Signals for Speaker Localization from MAVs]]</div>|^<div class="cpauthorindexpersoncardpapertitle">CNN-Based Processing of Acoustic and Radio Frequency Signals for Speaker Localization from MAVs</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210314.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-3-1|PAPER Wed-E-O-3-1 — Extending the Fullband E-Model Towards Background Noise, Bursty Packet Loss, and Conversational Degradations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Extending the Fullband E-Model Towards Background Noise, Bursty Packet Loss, and Conversational Degradations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210019.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-1|PAPER Tue-M-SS-1-1 — The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210633.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-3-1|PAPER Tue-A-V-3-1 — LACOPE: Latency-Constrained Pitch Estimation for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">LACOPE: Latency-Constrained Pitch Estimation for Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210616.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-3-2|PAPER Wed-E-O-3-2 — ORCA-SLANG: An Automatic Multi-Stage Semi-Supervised Deep Learning Framework for Large-Scale Killer Whale Call Type Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ORCA-SLANG: An Automatic Multi-Stage Semi-Supervised Deep Learning Framework for Large-Scale Killer Whale Call Type Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211070.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-13|PAPER Tue-E-V-1-13 — Speaker Anonymisation Using the McAdams Coefficient]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Anonymisation Using the McAdams Coefficient</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211712.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-2-6|PAPER Wed-A-O-2-6 — Adversarial Disentanglement of Speaker Representation for Attribute-Driven Privacy Preservation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adversarial Disentanglement of Speaker Representation for Attribute-Driven Privacy Preservation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211522.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-9|PAPER Fri-M-V-7-9 — Visualizing Classifier Adjacency Relations: A Case Study in Speaker Verification and Voice Anti-Spoofing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Visualizing Classifier Adjacency Relations: A Case Study in Speaker Verification and Voice Anti-Spoofing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210645.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-3-2|PAPER Tue-M-O-3-2 — Fricative Phoneme Detection Using Deep Neural Networks and its Comparison to Traditional Methods]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fricative Phoneme Detection Using Deep Neural Networks and its Comparison to Traditional Methods</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211378.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-7|PAPER Thu-A-V-3-7 — AvaTr: One-Shot Speaker Extraction with Transformers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AvaTr: One-Shot Speaker Extraction with Transformers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211214.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-8|PAPER Wed-M-V-4-8 — Multi-Channel Opus Compression for Far-Field Automatic Speech Recognition with a Fixed Bitrate Budget]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Channel Opus Compression for Far-Field Automatic Speech Recognition with a Fixed Bitrate Budget</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210542.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-7|PAPER Wed-M-V-6-7 — Improving RNN-T ASR Accuracy Using Context Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving RNN-T ASR Accuracy Using Context Audio</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210273.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-5|PAPER Tue-M-SS-1-5 — Visual Transformers for Primates Classification and Covid Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Visual Transformers for Primates Classification and Covid Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211890.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-12|PAPER Wed-A-V-4-12 — On the Design of Deep Priors for Unsupervised Audio Restoration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On the Design of Deep Priors for Unsupervised Audio Restoration</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210717.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-2|PAPER Tue-A-V-4-2 — wav2vec-C: A Self-Supervised Model for Speech Representation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">wav2vec-C: A Self-Supervised Model for Speech Representation Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211909.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-1|PAPER Thu-M-V-4-1 — End-to-End Neural Diarization: From Transformer to Conformer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Neural Diarization: From Transformer to Conformer</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210836.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-7|PAPER Thu-A-V-2-7 — Listen with Intent: Improving Speech Recognition with Audio-to-Intent Front-End]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Listen with Intent: Improving Speech Recognition with Audio-to-Intent Front-End</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211209.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-1|PAPER Fri-A-V-4-1 — Graph-Based Label Propagation for Semi-Supervised Speaker Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Graph-Based Label Propagation for Semi-Supervised Speaker Identification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210003.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-2|PAPER Fri-A-V-4-2 — Fusion of Embeddings Networks for Robust Combination of Text Dependent and Independent Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fusion of Embeddings Networks for Robust Combination of Text Dependent and Independent Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212227.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-3-5|PAPER Wed-E-O-3-5 — Audio Retrieval with Natural Language Queries]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio Retrieval with Natural Language Queries</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211716.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-2|PAPER Fri-M-V-3-2 — LT-LM: A Novel Non-Autoregressive Language Model for Single-Shot Lattice Rescoring]]</div>|^<div class="cpauthorindexpersoncardpapertitle">LT-LM: A Novel Non-Autoregressive Language Model for Single-Shot Lattice Rescoring</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210245.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-1|PAPER Thu-A-V-6-1 — Spoken ObjectNet: A Bias-Controlled Spoken Caption Dataset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spoken ObjectNet: A Bias-Controlled Spoken Caption Dataset</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211796.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-7|PAPER Wed-A-V-1-7 — Investigating the Interplay Between Affective, Phonatory and Motoric Subsystems in Autism Spectrum Disorder Using a Multimodal Dialogue Agent]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Interplay Between Affective, Phonatory and Motoric Subsystems in Autism Spectrum Disorder Using a Multimodal Dialogue Agent</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211973.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-2|PAPER Wed-E-V-5-2 — Speech Denoising with Auditory Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Denoising with Auditory Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211190.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-1-1|PAPER Fri-M-O-1-1 — Self-Attention Channel Combinator Frontend for End-to-End Multichannel Far-Field Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Self-Attention Channel Combinator Frontend for End-to-End Multichannel Far-Field Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210677.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-7|PAPER Tue-A-V-4-7 — Semi-Supervision in ASR: Sequential MixMatch and Factorized TTS-Based Augmentation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semi-Supervision in ASR: Sequential MixMatch and Factorized TTS-Based Augmentation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211312.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-12|PAPER Wed-M-V-2-12 — AVLnet: Learning Audio-Visual Language Representations from Instructional Videos]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AVLnet: Learning Audio-Visual Language Representations from Instructional Videos</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211352.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-8|PAPER Thu-M-V-2-8 — Cascaded Multilingual Audio-Visual Learning from Videos]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cascaded Multilingual Audio-Visual Learning from Videos</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210245.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-1|PAPER Thu-A-V-6-1 — Spoken ObjectNet: A Bias-Controlled Spoken Caption Dataset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spoken ObjectNet: A Bias-Controlled Spoken Caption Dataset</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211936.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-4-1|PAPER Fri-M-V-4-1 — Unsupervised Learning of Disentangled Speech Content and Style Representation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Learning of Disentangled Speech Content and Style Representation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210711.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-SS-1-1|PAPER Tue-E-SS-1-1 — Optimizing an Automatic Creaky Voice Detection Method for Australian English Speaking Females]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Optimizing an Automatic Creaky Voice Detection Method for Australian English Speaking Females</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210729.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-SS-1-2|PAPER Tue-E-SS-1-2 — A Comparison of Acoustic Correlates of Voice Quality Across Different Recording Devices: A Cautionary Tale]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparison of Acoustic Correlates of Voice Quality Across Different Recording Devices: A Cautionary Tale</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211775.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-9|PAPER Tue-E-V-3-9 — SUPERB: Speech Processing Universal PERformance Benchmark]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SUPERB: Speech Processing Universal PERformance Benchmark</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210082.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-1-1|PAPER Wed-M-V-1-1 — End-to-End Language Diarization for Bilingual Code-Switching Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Language Diarization for Bilingual Code-Switching Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210318.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-4|PAPER Tue-E-V-5-4 — Robust Continuous On-Device Personalization for Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Continuous On-Device Personalization for Automatic Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211062.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-2|PAPER Tue-A-SS-2-2 — PANACEA Cough Sound-Based Diagnosis of COVID-19 for the DiCOVA 2021 Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PANACEA Cough Sound-Based Diagnosis of COVID-19 for the DiCOVA 2021 Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210688.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-SS-1-6|PAPER Tue-E-SS-1-6 — Articulatory Coordination for Speech Motor Tracking in Huntington Disease]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Articulatory Coordination for Speech Motor Tracking in Huntington Disease</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211694.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-3-1|PAPER Wed-A-O-3-1 — Automatically Detecting Errors and Disfluencies in Read Speech to Predict Cognitive Impairment in People with Parkinson’s Disease]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatically Detecting Errors and Disfluencies in Read Speech to Predict Cognitive Impairment in People with Parkinson’s Disease</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211312.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-12|PAPER Wed-M-V-2-12 — AVLnet: Learning Audio-Visual Language Representations from Instructional Videos]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AVLnet: Learning Audio-Visual Language Representations from Instructional Videos</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211352.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-8|PAPER Thu-M-V-2-8 — Cascaded Multilingual Audio-Visual Learning from Videos]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cascaded Multilingual Audio-Visual Learning from Videos</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211536.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-4-1|PAPER Tue-M-V-4-1 — User-Initiated Repetition-Based Recovery in Multi-Utterance Dialogue Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">User-Initiated Repetition-Based Recovery in Multi-Utterance Dialogue Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211100.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-6|PAPER Wed-E-V-2-6 — Emotion Carrier Recognition from Personal Narratives]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Emotion Carrier Recognition from Personal Narratives</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210836.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-7|PAPER Thu-A-V-2-7 — Listen with Intent: Improving Speech Recognition with Audio-to-Intent Front-End]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Listen with Intent: Improving Speech Recognition with Audio-to-Intent Front-End</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211159.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-7|PAPER Wed-E-V-2-7 — Non-Verbal Vocalisation and Laughter Detection Using Sequence-to-Sequence Models and Multi-Label Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Non-Verbal Vocalisation and Laughter Detection Using Sequence-to-Sequence Models and Multi-Label Training</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210711.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-SS-1-1|PAPER Tue-E-SS-1-1 — Optimizing an Automatic Creaky Voice Detection Method for Australian English Speaking Females]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Optimizing an Automatic Creaky Voice Detection Method for Australian English Speaking Females</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210729.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-SS-1-2|PAPER Tue-E-SS-1-2 — A Comparison of Acoustic Correlates of Voice Quality Across Different Recording Devices: A Cautionary Tale]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparison of Acoustic Correlates of Voice Quality Across Different Recording Devices: A Cautionary Tale</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211339.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-7|PAPER Wed-E-V-1-7 — MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210441.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-7|PAPER Tue-E-V-6-7 — Normalization Driven Zero-Shot Multi-Speaker Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Normalization Driven Zero-Shot Multi-Speaker Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210206.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-4|PAPER Wed-M-V-6-4 — An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210236.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-4|PAPER Tue-A-V-4-4 — Robust wav2vec 2.0: Analyzing Domain Shift in Self-Supervised Pre-Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust wav2vec 2.0: Analyzing Domain Shift in Self-Supervised Pre-Training</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210190.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-4-2|PAPER Wed-E-V-4-2 — On the Feasibility of the Danish Model of Intonational Transcription: Phonetic Evidence from Jutlandic Danish]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On the Feasibility of the Danish Model of Intonational Transcription: Phonetic Evidence from Jutlandic Danish</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210928.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-5|PAPER Wed-E-V-2-5 — Language or Paralanguage, This is the Problem: Comparing Depressed and Non-Depressed Speakers Through the Analysis of Gated Multimodal Units]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Language or Paralanguage, This is the Problem: Comparing Depressed and Non-Depressed Speakers Through the Analysis of Gated Multimodal Units</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211516.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-10|PAPER Fri-M-V-3-10 — Insights on Neural Representations for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Insights on Neural Representations for End-to-End Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210870.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-SS-1-3|PAPER Tue-E-SS-1-3 — Investigating Voice Function Characteristics of Greek Speakers with Hearing Loss Using Automatic Glottal Source Feature Extraction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Voice Function Characteristics of Greek Speakers with Hearing Loss Using Automatic Glottal Source Feature Extraction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210601.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-7-3|PAPER Tue-M-V-7-3 — Voicing Assimilations by French Speakers of German in Stop-Fricative Sequences]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voicing Assimilations by French Speakers of German in Stop-Fricative Sequences</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218011.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-S&T-1-4|PAPER Wed-A-S&T-1-4 — Expressive Robot Performance Based on Facial Motion Capture]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Expressive Robot Performance Based on Facial Motion Capture</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210223.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-2-3|PAPER Tue-M-O-2-3 — Distortion of Voiced Obstruents for Differential Diagnosis Between Parkinson’s Disease and Multiple System Atrophy]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Distortion of Voiced Obstruents for Differential Diagnosis Between Parkinson’s Disease and Multiple System Atrophy</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211912.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-3|PAPER Wed-A-V-6-3 — Large-Scale Self- and Semi-Supervised Learning for Speech Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Large-Scale Self- and Semi-Supervised Learning for Speech Translation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212027.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-4|PAPER Wed-A-V-6-4 — CoVoST 2 and Massively Multilingual Speech Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">CoVoST 2 and Massively Multilingual Speech Translation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211422.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-6-6|PAPER Thu-M-V-6-6 — Model-Based Exploration of Linking Between Vowel Articulatory Space and Acoustic Space]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Model-Based Exploration of Linking Between Vowel Articulatory Space and Acoustic Space</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211621.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-10|PAPER Thu-M-V-2-10 — End-to-End Audio-Visual Speech Recognition for Overlapping Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Audio-Visual Speech Recognition for Overlapping Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210322.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-4|PAPER Thu-A-V-2-4 — Reducing Streaming ASR Model Delay with Self Alignment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reducing Streaming ASR Model Delay with Self Alignment</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218005.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-S&T-1-5|PAPER Tue-A-S&T-1-5 — The LIUM Human Active Correction Platform for Speaker Diarization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The LIUM Human Active Correction Platform for Speaker Diarization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210560.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-7|PAPER Thu-M-V-4-7 — End-To-End Speaker Segmentation for Overlap-Aware Resegmentation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-To-End Speaker Segmentation for Overlap-Aware Resegmentation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210019.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-1|PAPER Tue-M-SS-1-1 — The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211716.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-2|PAPER Fri-M-V-3-2 — LT-LM: A Novel Non-Autoregressive Language Model for Single-Shot Lattice Rescoring]]</div>|^<div class="cpauthorindexpersoncardpapertitle">LT-LM: A Novel Non-Autoregressive Language Model for Single-Shot Lattice Rescoring</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211553.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-SS-1-1|PAPER Wed-A-SS-1-1 — The ID R&D System Description for Short-Duration Speaker Verification Challenge 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The ID R&D System Description for Short-Duration Speaker Verification Challenge 2021</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210230.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-5|PAPER Tue-M-V-5-5 — IR-GAN: Room Impulse Response Generator for Far-Field Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">IR-GAN: Room Impulse Response Generator for Far-Field Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211062.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-2|PAPER Tue-A-SS-2-2 — PANACEA Cough Sound-Based Diagnosis of COVID-19 for the DiCOVA 2021 Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PANACEA Cough Sound-Based Diagnosis of COVID-19 for the DiCOVA 2021 Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211085.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-1-3|PAPER Wed-E-O-1-3 — Log-Likelihood-Ratio Cost Function as Objective Loss for Speaker Verification Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Log-Likelihood-Ratio Cost Function as Objective Loss for Speaker Verification Systems</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210309.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-O-1-1|PAPER Fri-A-O-1-1 — Unsupervised Representation Learning for Speech Activity Detection in the Fearless Steps Challenge 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Representation Learning for Speech Activity Detection in the Fearless Steps Challenge 2021</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211413.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-9|PAPER Tue-A-V-2-9 — RaSSpeR: Radar-Based Silent Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">RaSSpeR: Radar-Based Silent Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211312.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-12|PAPER Wed-M-V-2-12 — AVLnet: Learning Audio-Visual Language Representations from Instructional Videos]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AVLnet: Learning Audio-Visual Language Representations from Instructional Videos</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218018.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-S&T-1-4|PAPER Thu-M-S&T-1-4 — Digital Einstein Experience: Fast Text-to-Speech for Conversational AI]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Digital Einstein Experience: Fast Text-to-Speech for Conversational AI</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211434.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-2|PAPER Tue-M-V-5-2 — Phoneme Recognition Through Fine Tuning of Phonetic Representations: A Case Study on Luhya Language Varieties]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phoneme Recognition Through Fine Tuning of Phonetic Representations: A Case Study on Luhya Language Varieties</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211191.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-1-5|PAPER Wed-A-O-1-5 — Fast Text-Only Domain Adaptation of RNN-Transducer Prediction Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fast Text-Only Domain Adaptation of RNN-Transducer Prediction Network</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212062.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-1|PAPER Wed-M-V-2-1 — Low Resource ASR: The Surprising Effectiveness of High Resource Transliteration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Low Resource ASR: The Surprising Effectiveness of High Resource Transliteration</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211339.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-7|PAPER Wed-E-V-1-7 — MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210644.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-5|PAPER Thu-A-V-2-5 — Reduce and Reconstruct: ASR for Low-Resource Phonetic Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reduce and Reconstruct: ASR for Low-Resource Phonetic Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211130.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-9|PAPER Wed-E-V-5-9 — Speech Denoising Without Clean Training Data: A Noise2Noise Approach]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Denoising Without Clean Training Data: A Noise2Noise Approach</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210306.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-3|PAPER Wed-M-V-5-3 — Exploring the Potential of Lexical Paraphrases for Mitigating Noise-Induced Comprehension Errors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploring the Potential of Lexical Paraphrases for Mitigating Noise-Induced Comprehension Errors</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210347.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-13|PAPER Tue-E-V-3-13 — Do Sound Event Representations Generalize to Other Audio Tasks? A Case Study in Audio Transfer Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Do Sound Event Representations Generalize to Other Audio Tasks? A Case Study in Audio Transfer Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211111.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-SS-1-6|PAPER Wed-M-SS-1-6 — SRIB-LEAP Submission to Far-Field Multi-Channel Speech Enhancement Challenge for Video Conferencing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SRIB-LEAP Submission to Far-Field Multi-Channel Speech Enhancement Challenge for Video Conferencing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210236.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-4|PAPER Tue-A-V-4-4 — Robust wav2vec 2.0: Analyzing Domain Shift in Self-Supervised Pre-Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust wav2vec 2.0: Analyzing Domain Shift in Self-Supervised Pre-Training</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210978.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-6|PAPER Wed-E-V-1-6 — Dual Script E2E Framework for Multilingual and Code-Switching ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dual Script E2E Framework for Multilingual and Code-Switching ASR</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211375.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-1|PAPER Tue-E-V-3-1 — Estimating Articulatory Movements in Speech Production with Transformer Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Estimating Articulatory Movements in Speech Production with Transformer Networks</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218033.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-S&T-1-8|PAPER Fri-A-S&T-1-8 — Web Interface for Estimating Articulatory Movements in Speech Production from Acoustics and Text]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Web Interface for Estimating Articulatory Movements in Speech Production from Acoustics and Text</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210759.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-5|PAPER Thu-A-SS-2-5 — Comparing Acoustic-Based Approaches for Alzheimer’s Disease Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparing Acoustic-Based Approaches for Alzheimer’s Disease Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210019.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-1|PAPER Tue-M-SS-1-1 — The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211683.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-1-6|PAPER Thu-M-O-1-6 — Comparing CTC and LFMMI for Out-of-Domain Adaptation of wav2vec 2.0 Acoustic Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparing CTC and LFMMI for Out-of-Domain Adaptation of wav2vec 2.0 Acoustic Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211375.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-1|PAPER Tue-E-V-3-1 — Estimating Articulatory Movements in Speech Production with Transformer Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Estimating Articulatory Movements in Speech Production with Transformer Networks</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218033.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-S&T-1-8|PAPER Fri-A-S&T-1-8 — Web Interface for Estimating Articulatory Movements in Speech Production from Acoustics and Text]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Web Interface for Estimating Articulatory Movements in Speech Production from Acoustics and Text</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218004.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-S&T-1-4|PAPER Tue-A-S&T-1-4 — ROXANNE Research Platform: Automate Criminal Investigations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ROXANNE Research Platform: Automate Criminal Investigations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211801.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-2|PAPER Fri-A-SS-2-2 — Investigating the Utility of Multimodal Conversational Technology and Audiovisual Analytic Measures for the Assessment and Monitoring of Amyotrophic Lateral Sclerosis at Scale]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Utility of Multimodal Conversational Technology and Audiovisual Analytic Measures for the Assessment and Monitoring of Amyotrophic Lateral Sclerosis at Scale</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210549.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-2-5|PAPER Tue-M-O-2-5 — EasyCall Corpus: A Dysarthric Speech Dataset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">EasyCall Corpus: A Dysarthric Speech Dataset</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210717.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-2|PAPER Tue-A-V-4-2 — wav2vec-C: A Self-Supervised Model for Speech Representation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">wav2vec-C: A Self-Supervised Model for Speech Representation Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210793.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-2|PAPER Tue-E-V-4-2 — FANS: Fusing ASR and NLU for On-Device SLU]]</div>|^<div class="cpauthorindexpersoncardpapertitle">FANS: Fusing ASR and NLU for On-Device SLU</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211575.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-2|PAPER Wed-E-V-3-2 — Learning a Neural Diff for Speech Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning a Neural Diff for Speech Models</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210836.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-7|PAPER Thu-A-V-2-7 — Listen with Intent: Improving Speech Recognition with Audio-to-Intent Front-End]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Listen with Intent: Improving Speech Recognition with Audio-to-Intent Front-End</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210712.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-7|PAPER Fri-A-V-3-7 — Amortized Neural Networks for Low-Latency Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Amortized Neural Networks for Low-Latency Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210333.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-2-1|PAPER Thu-M-SS-2-1 — Towards an Accent-Robust Approach for ATC Communications Transcription]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards an Accent-Robust Approach for ATC Communications Transcription</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211798.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-4|PAPER Tue-M-SS-1-4 — Transfer Learning and Data Augmentation Techniques to the COVID-19 Identification Tasks in ComParE 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transfer Learning and Data Augmentation Techniques to the COVID-19 Identification Tasks in ComParE 2021</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211774.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-12|PAPER Thu-A-V-5-12 — SC-GlowTTS: An Efficient Zero-Shot Multi-Speaker Text-To-Speech Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SC-GlowTTS: An Efficient Zero-Shot Multi-Speaker Text-To-Speech Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210528.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-7|PAPER Thu-A-V-5-7 — A Learned Conditional Prior for the VAE Acoustic Space of a TTS System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Learned Conditional Prior for the VAE Acoustic Space of a TTS System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211842.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-10|PAPER Tue-A-V-2-10 — Investigating Speech Reconstruction for Laryngectomees for Silent Speech Interfaces]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Speech Reconstruction for Laryngectomees for Silent Speech Interfaces</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210978.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-6|PAPER Wed-E-V-1-6 — Dual Script E2E Framework for Multilingual and Code-Switching ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dual Script E2E Framework for Multilingual and Code-Switching ASR</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211609.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-1|PAPER Wed-E-SS-1-1 — INTERSPEECH 2021 Deep Noise Suppression Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">INTERSPEECH 2021 Deep Noise Suppression Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210654.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-6|PAPER Tue-A-V-4-6 — A Comparison of Supervised and Unsupervised Pre-Training of End-to-End Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparison of Supervised and Unsupervised Pre-Training of End-to-End Models</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210206.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-4|PAPER Wed-M-V-6-4 — An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210204.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-3|PAPER Fri-M-V-6-3 — Personalized Keyphrase Detection Using Speaker and Environment Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Personalized Keyphrase Detection Using Speaker and Environment Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210694.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-5|PAPER Tue-E-V-2-5 — Personalized PercepNet: Real-Time, Low-Complexity Target Voice Separation and Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Personalized PercepNet: Real-Time, Low-Complexity Target Voice Separation and Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212127.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-3-6|PAPER Thu-M-O-3-6 — Training Hybrid Models on Noisy Transliterated Transcripts for Code-Switched Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Training Hybrid Models on Noisy Transliterated Transcripts for Code-Switched Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212062.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-1|PAPER Wed-M-V-2-1 — Low Resource ASR: The Surprising Effectiveness of High Resource Transliteration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Low Resource ASR: The Surprising Effectiveness of High Resource Transliteration</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211339.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-7|PAPER Wed-E-V-1-7 — MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210978.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-6|PAPER Wed-E-V-1-6 — Dual Script E2E Framework for Multilingual and Code-Switching ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dual Script E2E Framework for Multilingual and Code-Switching ASR</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211849.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-7-8|PAPER Thu-M-V-7-8 — Adapting Long Context NLM for ASR Rescoring in Conversational Agents]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adapting Long Context NLM for ASR Rescoring in Conversational Agents</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210164.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-2|PAPER Thu-M-V-3-2 — Group Delay Based Re-Weighted Sparse Recovery Algorithms for Robust and High-Resolution Source Separation in DOA Framework]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Group Delay Based Re-Weighted Sparse Recovery Algorithms for Robust and High-Resolution Source Separation in DOA Framework</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212006.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-15|PAPER Fri-A-SS-2-15 — Analysis and Tuning of a Voice Assistant System for Dysfluent Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis and Tuning of a Voice Assistant System for Dysfluent Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218020.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-S&T-1-6|PAPER Thu-M-S&T-1-6 — Expressive Latvian Speech Synthesis for Dialog Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Expressive Latvian Speech Synthesis for Dialog Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210039.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-1|PAPER Wed-M-V-5-1 — Perception of Standard Arabic Synthetic Speech Rate]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Perception of Standard Arabic Synthetic Speech Rate</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210378.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-2|PAPER Fri-M-V-5-2 — A Multi-Branch Deep Learning Network for Automated Detection of COVID-19]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Multi-Branch Deep Learning Network for Automated Detection of COVID-19</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210299.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-4|PAPER Wed-A-V-4-4 — NISQA: A Deep CNN-Self-Attention Model for Multidimensional Speech Quality Prediction with Crowdsourced Datasets]]</div>|^<div class="cpauthorindexpersoncardpapertitle">NISQA: A Deep CNN-Self-Attention Model for Multidimensional Speech Quality Prediction with Crowdsourced Datasets</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211868.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-1|PAPER Wed-E-V-5-1 — Personalized Speech Enhancement Through Self-Supervised Data Augmentation and Purification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Personalized Speech Enhancement Through Self-Supervised Data Augmentation and Purification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210655.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-7|PAPER Tue-M-V-5-7 — Multi-Channel Transformer Transducer for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Channel Transformer Transducer for Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211188.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-8|PAPER Tue-A-SS-1-8 — Evaluating the Vulnerability of End-to-End Automatic Speech Recognition Models to Membership Inference Attacks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Evaluating the Vulnerability of End-to-End Automatic Speech Recognition Models to Membership Inference Attacks</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210793.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-2|PAPER Tue-E-V-4-2 — FANS: Fusing ASR and NLU for On-Device SLU]]</div>|^<div class="cpauthorindexpersoncardpapertitle">FANS: Fusing ASR and NLU for On-Device SLU</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211787.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-4|PAPER Wed-A-V-2-4 — Phonetically Induced Subwords for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonetically Induced Subwords for End-to-End Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210797.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-4|PAPER Fri-A-V-3-4 — CoDERT: Distilling Encoder Representations with Co-Learning for Transducer-Based Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">CoDERT: Distilling Encoder Representations with Co-Learning for Transducer-Based Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211826.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-9|PAPER Fri-A-V-6-9 — End-to-End Spoken Language Understanding for Generalized Voice Assistants]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Spoken Language Understanding for Generalized Voice Assistants</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210174.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-5|PAPER Tue-M-V-3-5 — Comparison of Remote Experiments Using Crowdsourcing and Laboratory Experiments on Speech Intelligibility]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparison of Remote Experiments Using Crowdsourcing and Laboratory Experiments on Speech Intelligibility</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210437.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-6|PAPER Wed-M-V-6-6 — Streaming End-to-End Speech Recognition for Hybrid RNN-T/Attention Architecture]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Streaming End-to-End Speech Recognition for Hybrid RNN-T/Attention Architecture</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210826.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-5-4|PAPER Thu-M-V-5-4 — Phoneme Duration Modeling Using Speech Rhythm-Based Speaker Embeddings for Multi-Speaker Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phoneme Duration Modeling Using Speech Rhythm-Based Speaker Embeddings for Multi-Speaker Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210175.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-1-2|PAPER Thu-M-O-1-2 — Knowledge Distillation for Streaming Transformer–Transducer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Knowledge Distillation for Streaming Transformer–Transducer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211930.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-SS-1-1|PAPER Fri-M-SS-1-1 — OpenASR20: An Open Challenge for Automatic Speech Recognition of Conversational Telephone Speech in Low-Resource Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">OpenASR20: An Open Challenge for Automatic Speech Recognition of Conversational Telephone Speech in Low-Resource Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210129.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-7-1|PAPER Tue-M-V-7-1 — “See what I mean, huh?” Evaluating Visual Inspection of F₀ Tracking in Nasal Grunts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">“See what I mean, huh?” Evaluating Visual Inspection of F₀ Tracking in Nasal Grunts</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212168.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-1-9|PAPER Thu-A-V-1-9 — Applying TDNN Architectures for Analyzing Duration Dependencies on Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Applying TDNN Architectures for Analyzing Duration Dependencies on Speech Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210096.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-2|PAPER Thu-M-V-2-2 — Talk, Don’t Write: A Study of Direct Speech-Based Image Retrieval]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Talk, Don’t Write: A Study of Direct Speech-Based Image Retrieval</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210662.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-1|PAPER Thu-A-V-3-1 — Online Blind Audio Source Separation Using Recursive Expectation-Maximization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Online Blind Audio Source Separation Using Recursive Expectation-Maximization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211286.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-12|PAPER Fri-M-V-6-12 — Keyword Transformer: A Self-Attention Model for Keyword Spotting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Keyword Transformer: A Self-Attention Model for Keyword Spotting</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211676.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-3-4|PAPER Tue-M-O-3-4 — Phoneme-to-Audio Alignment with Recurrent Neural Networks for Speaking and Singing Voice]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phoneme-to-Audio Alignment with Recurrent Neural Networks for Speaking and Singing Voice</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210573.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-2-1|PAPER Thu-M-O-2-1 — Speaker Attentive Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Attentive Speech Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210174.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-5|PAPER Tue-M-V-3-5 — Comparison of Remote Experiments Using Crowdsourcing and Laboratory Experiments on Speech Intelligibility]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparison of Remote Experiments Using Crowdsourcing and Laboratory Experiments on Speech Intelligibility</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210524.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-4|PAPER Fri-M-V-5-4 — Fake Audio Detection in Resource-Constrained Settings Using Microfeatures]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fake Audio Detection in Resource-Constrained Settings Using Microfeatures</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211582.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-7-6|PAPER Thu-M-V-7-6 — PhonemeBERT: Joint Language Modelling of Phoneme Sequence and ASR Transcript]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PhonemeBERT: Joint Language Modelling of Phoneme Sequence and ASR Transcript</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210299.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-4|PAPER Wed-A-V-4-4 — NISQA: A Deep CNN-Self-Attention Model for Multidimensional Speech Quality Prediction with Crowdsourced Datasets]]</div>|^<div class="cpauthorindexpersoncardpapertitle">NISQA: A Deep CNN-Self-Attention Model for Multidimensional Speech Quality Prediction with Crowdsourced Datasets</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210343.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-5|PAPER Wed-A-V-4-5 — Subjective Evaluation of Noise Suppression Algorithms in Crowdsourcing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Subjective Evaluation of Noise Suppression Algorithms in Crowdsourcing</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211229.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-7|PAPER Wed-E-V-6-7 — Perception of Social Speaker Characteristics in Synthetic Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Perception of Social Speaker Characteristics in Synthetic Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210678.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-1|PAPER Fri-M-V-6-1 — Do Acoustic Word Embeddings Capture Phonological Similarity? An Empirical Study]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Do Acoustic Word Embeddings Capture Phonological Similarity? An Empirical Study</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211720.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-2-4|PAPER Tue-M-O-2-4 — A Study into Pre-Training Strategies for Spoken Language Understanding on Dysarthric Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Study into Pre-Training Strategies for Spoken Language Understanding on Dysarthric Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210915.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-7|PAPER Thu-M-V-1-7 — Identifying Cognitive Impairment Using Sentence Representation Vectors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Identifying Cognitive Impairment Using Sentence Representation Vectors</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211519.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-7|PAPER Thu-A-SS-2-7 — Using the Outputs of Different Automatic Speech Recognition Paradigms for Acoustic- and BERT-Based Alzheimer’s Dementia Detection Through Spontaneous Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Using the Outputs of Different Automatic Speech Recognition Paradigms for Acoustic- and BERT-Based Alzheimer’s Dementia Detection Through Spontaneous Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211404.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-7|PAPER Fri-M-V-7-7 — Attention-Based Convolutional Neural Network for ASV Spoofing Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Attention-Based Convolutional Neural Network for ASV Spoofing Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211718.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-10|PAPER Wed-M-V-5-10 — Towards the Explainability of Multimodal Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards the Explainability of Multimodal Speech Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210723.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-7|PAPER Thu-M-V-2-7 — Automatic Lip-Reading with Hierarchical Pyramidal Convolution and Self-Attention for Image Sequences with No Word Boundaries]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Lip-Reading with Hierarchical Pyramidal Convolution and Self-Attention for Image Sequences with No Word Boundaries</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210117.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-1|PAPER Tue-E-V-4-1 — Data Augmentation for Spoken Language Understanding via Pretrained Language Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Data Augmentation for Spoken Language Understanding via Pretrained Language Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210983.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-1|PAPER Tue-A-SS-1-1 — Privacy-Preserving Voice Anti-Spoofing Using Secure Multi-Party Computation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Privacy-Preserving Voice Anti-Spoofing Using Secure Multi-Party Computation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210080.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-2-3|PAPER Wed-E-O-2-3 — Active Speaker Detection as a Multi-Objective Optimization with Uncertainty-Based Multimodal Fusion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Active Speaker Detection as a Multi-Objective Optimization with Uncertainty-Based Multimodal Fusion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211862.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-4|PAPER Thu-M-V-1-4 — Phonetic Complexity, Speech Accuracy and Intelligibility Assessment of Italian Dysarthric Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonetic Complexity, Speech Accuracy and Intelligibility Assessment of Italian Dysarthric Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211724.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-2-12|PAPER Fri-M-V-2-12 — Context and Co-Text Influence on the Accuracy Production of Italian L2 Non-Native Sounds]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Context and Co-Text Influence on the Accuracy Production of Italian L2 Non-Native Sounds</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212000.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-7|PAPER Thu-A-V-6-7 — AusKidTalk: An Auditory-Visual Corpus of 3- to 12-Year-Old Australian Children’s Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AusKidTalk: An Auditory-Visual Corpus of 3- to 12-Year-Old Australian Children’s Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210182.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-2-1|PAPER Tue-A-O-2-1 — Prosodic Disambiguation Using Chironomic Stylization of Intonation with Native and Non-Native Speakers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prosodic Disambiguation Using Chironomic Stylization of Intonation with Native and Non-Native Speakers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210722.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-1-6|PAPER Fri-A-SS-1-6 — Nonlinear Acoustic Echo Cancellation with Deep Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Nonlinear Acoustic Echo Cancellation with Deep Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211399.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-O-2-2|PAPER Fri-A-O-2-2 — End-to-End Open Vocabulary Keyword Search]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Open Vocabulary Keyword Search</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211496.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-6-7|PAPER Thu-M-V-6-7 — Take a Breath: Respiratory Sounds Improve Recollection in Synthetic Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Take a Breath: Respiratory Sounds Improve Recollection in Synthetic Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211056.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-2-4|PAPER Fri-M-V-2-4 — Revisiting Recall Effects of Filler Particles in German and English]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Revisiting Recall Effects of Filler Particles in German and English</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210069.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-6|PAPER Thu-M-V-1-6 — Assessing Posterior-Based Mispronunciation Detection on Field-Collected Recordings from Child Speech Therapy Sessions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Assessing Posterior-Based Mispronunciation Detection on Field-Collected Recordings from Child Speech Therapy Sessions</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212000.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-7|PAPER Thu-A-V-6-7 — AusKidTalk: An Auditory-Visual Corpus of 3- to 12-Year-Old Australian Children’s Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AusKidTalk: An Auditory-Visual Corpus of 3- to 12-Year-Old Australian Children’s Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211449.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-8|PAPER Thu-A-SS-1-8 — Real-Time End-to-End Monaural Multi-Speaker Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Real-Time End-to-End Monaural Multi-Speaker Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211842.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-10|PAPER Tue-A-V-2-10 — Investigating Speech Reconstruction for Laryngectomees for Silent Speech Interfaces]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Speech Reconstruction for Laryngectomees for Silent Speech Interfaces</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210910.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-7-5|PAPER Tue-M-V-7-5 — Acoustic and Prosodic Correlates of Emotions in Urdu Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic and Prosodic Correlates of Emotions in Urdu Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211776.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-4-12|PAPER Wed-E-V-4-12 — Prosody of Case Markers in Urdu]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prosody of Case Markers in Urdu</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210124.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-2|PAPER Wed-A-V-4-2 — PILOT: Introducing Transformers for Probabilistic Sound Event Localization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PILOT: Introducing Transformers for Probabilistic Sound Event Localization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210033.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-1|PAPER Tue-E-V-1-1 — Unsupervised Bayesian Adaptation of PLDA for Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Bayesian Adaptation of PLDA for Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211650.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-2-6|PAPER Thu-M-SS-2-6 — Modeling the Effect of Military Oxygen Masks on Speech Characteristics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modeling the Effect of Military Oxygen Masks on Speech Characteristics</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210556.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-5|PAPER Wed-M-O-1-5 — //LeBenchmark//: A Reproducible Framework for Assessing Self-Supervised Representation Learning from Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">//LeBenchmark//: A Reproducible Framework for Assessing Self-Supervised Representation Learning from Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218015.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-S&T-1-1|PAPER Thu-M-S&T-1-1 — MoM: Minutes of Meeting Bot]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MoM: Minutes of Meeting Bot</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218019.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-S&T-1-5|PAPER Thu-M-S&T-1-5 — Live Subtitling for BigBlueButton with Open-Source Software]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Live Subtitling for BigBlueButton with Open-Source Software</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211588.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-8|PAPER Thu-A-V-4-8 — Anonymous Speaker Clusters: Making Distinctions Between Anonymised Speech Recordings with Clustering Interface]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Anonymous Speaker Clusters: Making Distinctions Between Anonymised Speech Recordings with Clustering Interface</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211211.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-8|PAPER Fri-A-V-4-8 — Presentation Matters: Evaluating Speaker Identification Tasks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Presentation Matters: Evaluating Speaker Identification Tasks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211746.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-6-8|PAPER Thu-M-V-6-8 — Modeling Sensorimotor Adaptation in Speech Through Alterations to Forward and Inverse Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modeling Sensorimotor Adaptation in Speech Through Alterations to Forward and Inverse Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211814.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-1-3|PAPER Tue-E-O-1-3 — Neural Text Denormalization for Speech Transcripts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Neural Text Denormalization for Speech Transcripts</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210050.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-3|PAPER Wed-M-V-2-3 — Towards Unsupervised Phone and Word Segmentation Using Self-Supervised Vector-Quantized Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Unsupervised Phone and Word Segmentation Using Self-Supervised Vector-Quantized Neural Networks</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211182.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-6|PAPER Wed-M-V-2-6 — Analyzing Speaker Information in Self-Supervised Models to Improve Zero-Resource Speech Processing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analyzing Speaker Information in Self-Supervised Models to Improve Zero-Resource Speech Processing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211777.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-3|PAPER Tue-A-V-4-3 — On the Learning Dynamics of Semi-Supervised Training for ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On the Learning Dynamics of Semi-Supervised Training for ASR</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211560.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-13|PAPER Thu-A-V-3-13 — Deep Audio-Visual Speech Separation Based on Facial Motion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Audio-Visual Speech Separation Based on Facial Motion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210336.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-2-3|PAPER Tue-A-O-2-3 — Extracting Different Levels of Speech Information from EEG Using an LSTM-Based Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Extracting Different Levels of Speech Information from EEG Using an LSTM-Based Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211418.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-3-3|PAPER Fri-M-O-3-3 — A Hands-On Comparison of DNNs for Dialog Separation Using Transfer Learning from Music Source Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Hands-On Comparison of DNNs for Dialog Separation Using Transfer Learning from Music Source Separation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211262.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-6-5|PAPER Thu-M-V-6-5 — Inhalations in Speech: Acoustic and Physiological Characteristics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Inhalations in Speech: Acoustic and Physiological Characteristics</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211496.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-6-7|PAPER Thu-M-V-6-7 — Take a Breath: Respiratory Sounds Improve Recollection in Synthetic Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Take a Breath: Respiratory Sounds Improve Recollection in Synthetic Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211003.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-8|PAPER Fri-M-V-1-8 — Phonetic Distance and Surprisal in Multilingual Priming: Evidence from Slavic]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonetic Distance and Surprisal in Multilingual Priming: Evidence from Slavic</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211056.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-2-4|PAPER Fri-M-V-2-4 — Revisiting Recall Effects of Filler Particles in German and English]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Revisiting Recall Effects of Filler Particles in German and English</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210678.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-1|PAPER Fri-M-V-6-1 — Do Acoustic Word Embeddings Capture Phonological Similarity? An Empirical Study]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Do Acoustic Word Embeddings Capture Phonological Similarity? An Empirical Study</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210781.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-6-2|PAPER Tue-A-V-6-2 — Limited Data Emotional Voice Conversion Leveraging Text-to-Speech: Two-Stage Sequence-to-Sequence Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Limited Data Emotional Voice Conversion Leveraging Text-to-Speech: Two-Stage Sequence-to-Sequence Training</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211236.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-2|PAPER Fri-A-V-5-2 — Reinforcement Learning for Emotional Text-to-Speech Synthesis with Improved Emotion Discriminability]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reinforcement Learning for Emotional Text-to-Speech Synthesis with Improved Emotion Discriminability</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211771.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-3-3|PAPER Tue-E-O-3-3 — Speaking Corona? Human and Machine Recognition of COVID-19 from Voice]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaking Corona? Human and Machine Recognition of COVID-19 from Voice</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211524.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-9|PAPER Wed-M-V-5-9 — Reliable Estimates of Interpretable Cue Effects with Active Learning in Psycholinguistic Research]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reliable Estimates of Interpretable Cue Effects with Active Learning in Psycholinguistic Research</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211172.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-4-9|PAPER Wed-E-V-4-9 — In-Group Advantage in the Perception of Emotions: Evidence from Three Varieties of German]]</div>|^<div class="cpauthorindexpersoncardpapertitle">In-Group Advantage in the Perception of Emotions: Evidence from Three Varieties of German</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210315.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-3|PAPER Fri-M-V-1-3 — Testing Acoustic Voice Quality Classification Across Languages and Speech Styles]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Testing Acoustic Voice Quality Classification Across Languages and Speech Styles</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212079.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-12|PAPER Tue-A-V-1-12 — Improving Weakly Supervised Sound Event Detection with Self-Supervised Auxiliary Tasks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Weakly Supervised Sound Event Detection with Self-Supervised Auxiliary Tasks</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212190.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-11|PAPER Fri-A-V-4-11 — Masked Proxy Loss for Text-Independent Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Masked Proxy Loss for Text-Independent Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211180.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-6|PAPER Wed-M-V-4-6 — Data Quality as Predictor of Voice Anti-Spoofing Generalization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Data Quality as Predictor of Voice Anti-Spoofing Generalization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210677.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-7|PAPER Tue-A-V-4-7 — Semi-Supervision in ASR: Sequential MixMatch and Factorized TTS-Based Augmentation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semi-Supervision in ASR: Sequential MixMatch and Factorized TTS-Based Augmentation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210720.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-11|PAPER Wed-M-V-6-11 — Mixture Model Attention: Flexible Streaming and Non-Streaming Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Mixture Model Attention: Flexible Streaming and Non-Streaming Automatic Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210614.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-6|PAPER Wed-E-V-3-6 — Self-Adaptive Distillation for Multilingual Speech Recognition: Leveraging Student Independence]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Self-Adaptive Distillation for Multilingual Speech Recognition: Leveraging Student Independence</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210648.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-7|PAPER Wed-E-V-3-7 — Regularizing Word Segmentation by Creating Misspellings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Regularizing Word Segmentation by Creating Misspellings</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210676.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-11|PAPER Fri-A-SS-2-11 — Conformer Parrotron: A Faster and Stronger End-to-End Speech Conversion and Recognition Model for Atypical Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Conformer Parrotron: A Faster and Stronger End-to-End Speech Conversion and Recognition Model for Atypical Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211950.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-9|PAPER Tue-E-V-2-9 — Joint Online Multichannel Acoustic Echo Cancellation, Speech Dereverberation and Source Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Joint Online Multichannel Acoustic Echo Cancellation, Speech Dereverberation and Source Separation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211741.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-11|PAPER Wed-M-V-5-11 — Primacy of Mouth over Eyes: Eye Movement Evidence from Audiovisual Mandarin Lexical Tones and Vowels]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Primacy of Mouth over Eyes: Eye Movement Evidence from Audiovisual Mandarin Lexical Tones and Vowels</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210402.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-4|PAPER Fri-A-V-6-4 — Knowledge Distillation from BERT Transformer to Speech Transformer for Intent Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Knowledge Distillation from BERT Transformer to Speech Transformer for Intent Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210138.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-4-3|PAPER Tue-M-V-4-3 — Act-Aware Slot-Value Predicting in Multi-Domain Dialogue State Tracking]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Act-Aware Slot-Value Predicting in Multi-Domain Dialogue State Tracking</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210095.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-5|PAPER Tue-E-V-4-5 — A Context-Aware Hierarchical BERT Fusion Network for Multi-Turn Dialog Act Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Context-Aware Hierarchical BERT Fusion Network for Multi-Turn Dialog Act Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210037.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-1|PAPER Tue-M-V-6-1 — Attention-Based Cross-Modal Fusion for Audio-Visual Voice Activity Detection in Musical Video Streams]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Attention-Based Cross-Modal Fusion for Audio-Visual Voice Activity Detection in Musical Video Streams</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-7|PAPER Fri-A-V-4-7 — Adaptive Margin Circle Loss for Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adaptive Margin Circle Loss for Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210111.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-1-2|PAPER Tue-M-V-1-2 — Bidirectional Multiscale Feature Aggregation for Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bidirectional Multiscale Feature Aggregation for Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210405.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-3|PAPER Tue-E-V-1-3 — Improved Meta-Learning Training for Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improved Meta-Learning Training for Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211176.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-8|PAPER Wed-E-V-2-8 — TDCA-Net: Time-Domain Channel Attention Network for Depression Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">TDCA-Net: Time-Domain Channel Attention Network for Depression Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210502.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-4-5|PAPER Fri-M-V-4-5 — Polyphone Disambiguation in Mandarin Chinese with Semi-Supervised Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Polyphone Disambiguation in Mandarin Chinese with Semi-Supervised Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210721.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-7|PAPER Wed-M-V-5-7 — Relationships Between Perceptual Distinctiveness, Articulatory Complexity and Functional Load in Speech Communication]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Relationships Between Perceptual Distinctiveness, Articulatory Complexity and Functional Load in Speech Communication</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210520.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-4|PAPER Wed-E-V-5-4 — Multi-Stage Progressive Speech Enhancement Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Stage Progressive Speech Enhancement Network</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211983.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-5|PAPER Fri-M-V-3-5 — WeNet: Production Oriented Streaming and Non-Streaming End-to-End Speech Recognition Toolkit]]</div>|^<div class="cpauthorindexpersoncardpapertitle">WeNet: Production Oriented Streaming and Non-Streaming End-to-End Speech Recognition Toolkit</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212136.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-SS-1-8|PAPER Wed-A-SS-1-8 — The SJTU System for Short-Duration Speaker Verification Challenge 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The SJTU System for Short-Duration Speaker Verification Challenge 2021</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211017.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-10|PAPER Tue-A-V-4-10 — Improving RNN-T for Domain Scaling Using Semi-Supervised Training with Neural TTS]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving RNN-T for Domain Scaling Using Semi-Supervised Training with Neural TTS</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211457.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-SS-1-1|PAPER Wed-M-SS-1-1 — A Causal U-Net Based Neural Beamforming Network for Real-Time Multi-Channel Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Causal U-Net Based Neural Beamforming Network for Real-Time Multi-Channel Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211410.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-7|PAPER Wed-E-SS-1-7 — Low-Delay Speech Enhancement Using Perceptually Motivated Target and Loss]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Low-Delay Speech Enhancement Using Perceptually Motivated Target and Loss</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211005.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-5-6|PAPER Tue-A-V-5-6 — A Noise Robust Method for Word-Level Pronunciation Assessment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Noise Robust Method for Word-Level Pronunciation Assessment</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210721.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-7|PAPER Wed-M-V-5-7 — Relationships Between Perceptual Distinctiveness, Articulatory Complexity and Functional Load in Speech Communication]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Relationships Between Perceptual Distinctiveness, Articulatory Complexity and Functional Load in Speech Communication</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210774.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-10|PAPER Wed-A-V-3-10 — End to End Transformer-Based Contextual Speech Recognition Based on Pointer Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End to End Transformer-Based Contextual Speech Recognition Based on Pointer Network</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210581.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-6|PAPER Fri-M-V-1-6 — F₀ Patterns of L2 English Speech by Mandarin Chinese Learners]]</div>|^<div class="cpauthorindexpersoncardpapertitle">F₀ Patterns of L2 English Speech by Mandarin Chinese Learners</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210843.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-7|PAPER Fri-M-V-1-7 — A Neural Network-Based Noise Compensation Method for Pronunciation Assessment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Neural Network-Based Noise Compensation Method for Pronunciation Assessment</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211082.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-9|PAPER Fri-M-V-1-9 — A Preliminary Study on Discourse Prosody Encoding in L1 and L2 English Spontaneous Narratives]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Preliminary Study on Discourse Prosody Encoding in L1 and L2 English Spontaneous Narratives</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210777.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-5|PAPER Fri-A-V-1-5 — Explore wav2vec 2.0 for Mispronunciation Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Explore wav2vec 2.0 for Mispronunciation Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210931.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-7|PAPER Fri-A-V-1-7 — Deep Feature Transfer Learning for Automatic Pronunciation Assessment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Feature Transfer Learning for Automatic Pronunciation Assessment</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211344.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-9|PAPER Fri-A-V-1-9 — A Study on Fine-Tuning wav2vec2.0 Model for the Task of Mispronunciation Detection and Diagnosis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Study on Fine-Tuning wav2vec2.0 Model for the Task of Mispronunciation Detection and Diagnosis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212171.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-1-1|PAPER Thu-M-SS-1-1 — Oriental Language Recognition (OLR) 2020: Summary and Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Oriental Language Recognition (OLR) 2020: Summary and Analysis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212140.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-12|PAPER Fri-A-V-1-12 — “You don’t understand me!”: Comparing ASR Results for L1 and L2 Speakers of Swedish]]</div>|^<div class="cpauthorindexpersoncardpapertitle">“You don’t understand me!”: Comparing ASR Results for L1 and L2 Speakers of Swedish</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210223.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-2-3|PAPER Tue-M-O-2-3 — Distortion of Voiced Obstruents for Differential Diagnosis Between Parkinson’s Disease and Multiple System Atrophy]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Distortion of Voiced Obstruents for Differential Diagnosis Between Parkinson’s Disease and Multiple System Atrophy</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210019.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-1|PAPER Tue-M-SS-1-1 — The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211969.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-9|PAPER Tue-M-SS-1-9 — Multi-Attentive Detection of the Spider Monkey Whinny in the (Actual) Wild]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Attentive Detection of the Spider Monkey Whinny in the (Actual) Wild</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-6|PAPER Tue-A-V-2-6 — Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211253.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-6-4|PAPER Tue-A-V-6-4 — An Improved StarGAN for Emotional Voice Conversion: Enhancing Voice Quality and Data Augmentation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Improved StarGAN for Emotional Voice Conversion: Enhancing Voice Quality and Data Augmentation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211267.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-3|PAPER Tue-A-SS-2-3 — Recognising Covid-19 from Coughing Using Ensembles of SVMs and LSTMs with Handcrafted and Deep Audio Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Recognising Covid-19 from Coughing Using Ensembles of SVMs and LSTMs with Handcrafted and Deep Audio Features</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210811.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-7|PAPER Tue-A-SS-2-7 — The DiCOVA 2021 Challenge — An Encoder-Decoder Approach for COVID-19 Recognition from Coughing Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The DiCOVA 2021 Challenge — An Encoder-Decoder Approach for COVID-19 Recognition from Coughing Audio</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211052.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-9|PAPER Tue-A-SS-2-9 — Cough-Based COVID-19 Detection with Contextual Attention Convolutional Neural Networks and Gender Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cough-Based COVID-19 Detection with Contextual Attention Convolutional Neural Networks and Gender Information</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211771.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-3-3|PAPER Tue-E-O-3-3 — Speaking Corona? Human and Machine Recognition of COVID-19 from Voice]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaking Corona? Human and Machine Recognition of COVID-19 from Voice</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211360.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-9|PAPER Thu-M-V-2-9 — LiRA: Learning Visual Speech Representations from Audio Through Self-Supervision]]</div>|^<div class="cpauthorindexpersoncardpapertitle">LiRA: Learning Visual Speech Representations from Audio Through Self-Supervision</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211123.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-5-8|PAPER Thu-M-V-5-8 — A Prototypical Network Approach for Evaluating Generated Emotional Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Prototypical Network Approach for Evaluating Generated Emotional Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210630.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-5|PAPER Fri-M-V-5-5 — Coughing-Based Recognition of Covid-19 with Spatial Attentive ConvLSTM Recurrent Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Coughing-Based Recognition of Covid-19 with Spatial Attentive ConvLSTM Recurrent Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210206.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-4|PAPER Wed-M-V-6-4 — An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210690.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-8|PAPER Fri-M-V-3-8 — Residual Energy-Based Models for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Residual Energy-Based Models for End-to-End Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211335.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-9|PAPER Tue-M-V-6-9 — End-to-End Transformer-Based Open-Vocabulary Keyword Spotting with Location-Guided Local Attention]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Transformer-Based Open-Vocabulary Keyword Spotting with Location-Guided Local Attention</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210481.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-2|PAPER Tue-E-V-2-2 — TeCANet: Temporal-Contextual Attention Network for Environment-Aware Speech Dereverberation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">TeCANet: Temporal-Contextual Attention Network for Environment-Aware Speech Dereverberation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210661.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-8|PAPER Wed-A-V-2-8 — Token-Level Supervised Contrastive Learning for Punctuation Restoration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Token-Level Supervised Contrastive Learning for Punctuation Restoration</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210570.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-4|PAPER Tue-E-V-2-4 — MIMO Self-Attentive RNN Beamformer for Multi-Speaker Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MIMO Self-Attentive RNN Beamformer for Multi-Speaker Speech Separation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211280.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-1-5|PAPER Wed-M-V-1-5 — Exploring wav2vec 2.0 on Speaker Verification and Language Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploring wav2vec 2.0 on Speaker Verification and Language Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210356.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-1-3|PAPER Tue-M-V-1-3 — Improving Time Delay Neural Network Based Speaker Recognition with Convolutional Block and Feature Aggregation Methods]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Time Delay Neural Network Based Speaker Recognition with Convolutional Block and Feature Aggregation Methods</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211341.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-6|PAPER Tue-E-V-3-6 — An Attribute-Aligned Strategy for Learning Speech Representation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Attribute-Aligned Strategy for Learning Speech Representation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210662.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-1|PAPER Thu-A-V-3-1 — Online Blind Audio Source Separation Using Recursive Expectation-Maximization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Online Blind Audio Source Separation Using Recursive Expectation-Maximization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211357.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-2-4|PAPER Tue-E-O-2-4 — Cue Interaction in the Perception of Prosodic Prominence: The Role of Voice Quality]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cue Interaction in the Perception of Prosodic Prominence: The Role of Voice Quality</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210584.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-6|PAPER Fri-A-V-5-6 — Adaptive Text to Speech for Spontaneous Style]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adaptive Text to Speech for Spontaneous Style</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211399.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-O-2-2|PAPER Fri-A-O-2-2 — End-to-End Open Vocabulary Keyword Search]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Open Vocabulary Keyword Search</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210149.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-2|PAPER Thu-M-V-4-2 — Three-Class Overlapped Speech Detection Using a Convolutional Recurrent Neural Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Three-Class Overlapped Speech Detection Using a Convolutional Recurrent Neural Network</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210448.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-5|PAPER Thu-M-V-4-5 — Adapting Speaker Embeddings for Speaker Diarisation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adapting Speaker Embeddings for Speaker Diarisation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212041.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-6|PAPER Thu-A-V-6-6 — Look Who’s Talking: Active Speaker Detection in the Wild]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Look Who’s Talking: Active Speaker Detection in the Wild</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211016.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-7|PAPER Wed-A-V-5-7 — UnivNet: A Neural Vocoder with Multi-Resolution Spectrogram Discriminators for High-Fidelity Waveform Generation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">UnivNet: A Neural Vocoder with Multi-Resolution Spectrogram Discriminators for High-Fidelity Waveform Generation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211860.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-4|PAPER Wed-M-O-1-4 — SPGISpeech: 5,000 Hours of Transcribed Financial Audio for Fully Formatted End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SPGISpeech: 5,000 Hours of Transcribed Financial Audio for Fully Formatted End-to-End Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211599.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-8|PAPER Wed-E-V-6-8 — Hi-Fi Multi-Speaker English TTS Dataset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Hi-Fi Multi-Speaker English TTS Dataset</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211770.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-10|PAPER Thu-A-SS-1-10 — TalkNet: Non-Autoregressive Depth-Wise Separable Convolutional Model for Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">TalkNet: Non-Autoregressive Depth-Wise Separable Convolutional Model for Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211571.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-13|PAPER Fri-A-V-1-13 — NeMo Inverse Text Normalization: From Development to Production]]</div>|^<div class="cpauthorindexpersoncardpapertitle">NeMo Inverse Text Normalization: From Development to Production</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218024.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-S&T-1-3|PAPER Fri-A-S&T-1-3 — NeMo (Inverse) Text Normalization: From Development to Production]]</div>|^<div class="cpauthorindexpersoncardpapertitle">NeMo (Inverse) Text Normalization: From Development to Production</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210245.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-1|PAPER Thu-A-V-6-1 — Spoken ObjectNet: A Bias-Controlled Spoken Caption Dataset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spoken ObjectNet: A Bias-Controlled Spoken Caption Dataset</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211708.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-3|PAPER Wed-A-V-2-3 — Incorporating External POS Tagger for Punctuation Restoration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Incorporating External POS Tagger for Punctuation Restoration</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211477.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-2|PAPER Wed-A-V-3-2 — Domain-Aware Self-Attention for Multi-Domain Neural Machine Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Domain-Aware Self-Attention for Multi-Domain Neural Machine Translation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210086.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-2|PAPER Fri-M-V-1-2 — Detection of Lexical Stress Errors in Non-Native (L2) English with Data Augmentation and Attention]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detection of Lexical Stress Errors in Non-Native (L2) English with Data Augmentation and Attention</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210038.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-1|PAPER Fri-A-V-1-1 — Weakly-Supervised Word-Level Pronunciation Error Detection in Non-Native English Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Weakly-Supervised Word-Level Pronunciation Error Detection in Non-Native English Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210304.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-4-4|PAPER Wed-E-V-4-4 — ProsoBeast Prosody Annotation Tool]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ProsoBeast Prosody Annotation Tool</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211422.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-6-6|PAPER Thu-M-V-6-6 — Model-Based Exploration of Linking Between Vowel Articulatory Space and Acoustic Space]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Model-Based Exploration of Linking Between Vowel Articulatory Space and Acoustic Space</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211570.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-SS-1-2|PAPER Wed-A-SS-1-2 — Integrating Frequency Translational Invariance in TDNNs and Frequency Positional Information in 2D ResNets to Enhance Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Integrating Frequency Translational Invariance in TDNNs and Frequency Positional Information in 2D ResNets to Enhance Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210941.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-4|PAPER Thu-A-V-4-4 — ECAPA-TDNN Embeddings for Speaker Diarization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ECAPA-TDNN Embeddings for Speaker Diarization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210441.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-7|PAPER Tue-E-V-6-7 — Normalization Driven Zero-Shot Multi-Speaker Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Normalization Driven Zero-Shot Multi-Speaker Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211312.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-12|PAPER Wed-M-V-2-12 — AVLnet: Learning Audio-Visual Language Representations from Instructional Videos]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AVLnet: Learning Audio-Visual Language Representations from Instructional Videos</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211352.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-8|PAPER Thu-M-V-2-8 — Cascaded Multilingual Audio-Visual Learning from Videos]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cascaded Multilingual Audio-Visual Learning from Videos</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210614.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-6|PAPER Wed-E-V-3-6 — Self-Adaptive Distillation for Multilingual Speech Recognition: Leveraging Student Independence]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Self-Adaptive Distillation for Multilingual Speech Recognition: Leveraging Student Independence</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210797.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-4|PAPER Fri-A-V-3-4 — CoDERT: Distilling Encoder Representations with Co-Learning for Transducer-Based Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">CoDERT: Distilling Encoder Representations with Co-Learning for Transducer-Based Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211460.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-8|PAPER Tue-E-V-4-8 — Integrating Dialog History into End-to-End Spoken Language Understanding Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Integrating Dialog History into End-to-End Spoken Language Understanding Systems</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211312.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-12|PAPER Wed-M-V-2-12 — AVLnet: Learning Audio-Visual Language Representations from Instructional Videos]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AVLnet: Learning Audio-Visual Language Representations from Instructional Videos</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210587.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-9|PAPER Wed-M-V-6-9 — Reducing Exposure Bias in Training Recurrent Neural Network Transducers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reducing Exposure Bias in Training Recurrent Neural Network Transducers</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211656.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-11|PAPER Wed-A-V-2-11 — Improving Customization of Neural Transducers by Mitigating Acoustic Mismatch of Synthesized Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Customization of Neural Transducers by Mitigating Acoustic Mismatch of Synthesized Audio</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210211.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-5|PAPER Wed-A-V-3-5 — On the Limit of English Conversational Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On the Limit of English Conversational Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211962.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-12|PAPER Wed-E-V-3-12 — 4-Bit Quantization of LSTM-Based Speech Recognition Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">4-Bit Quantization of LSTM-Based Speech Recognition Models</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211352.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-8|PAPER Thu-M-V-2-8 — Cascaded Multilingual Audio-Visual Learning from Videos]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cascaded Multilingual Audio-Visual Learning from Videos</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211220.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-1|PAPER Thu-A-SS-2-1 — Detecting Cognitive Decline Using Speech Only: The ADReSSo Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detecting Cognitive Decline Using Speech Only: The ADReSSo Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211944.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-12|PAPER Wed-E-V-1-12 — Differentiable Allophone Graphs for Language-Universal Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Differentiable Allophone Graphs for Language-Universal Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210275.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-2-1|PAPER Fri-M-O-2-1 — Alternate Endings: Improving Prosody for Incremental Neural TTS with Predicted Future Text Input]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Alternate Endings: Improving Prosody for Incremental Neural TTS with Predicted Future Text Input</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210267.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-7-2|PAPER Tue-M-V-7-2 — System Performance as a Function of Calibration Methods, Sample Size and Sampling Variability in Likelihood Ratio-Based Forensic Voice Comparison]]</div>|^<div class="cpauthorindexpersoncardpapertitle">System Performance as a Function of Calibration Methods, Sample Size and Sampling Variability in Likelihood Ratio-Based Forensic Voice Comparison</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210631.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-5|PAPER Wed-E-V-1-5 — Using Large Self-Supervised Models for Low-Resource Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Using Large Self-Supervised Models for Low-Resource Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211903.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-4-13|PAPER Wed-E-V-4-13 — Articulatory Characteristics of Icelandic Voiced Fricative Lenition: Gradience, Categoricity, and Speaker/Gesture-Specific Effects]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Articulatory Characteristics of Icelandic Voiced Fricative Lenition: Gradience, Categoricity, and Speaker/Gesture-Specific Effects</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212258.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-1|PAPER Wed-E-V-6-1 — Spectral and Latent Speech Representation Distortion for TTS Evaluation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spectral and Latent Speech Representation Distortion for TTS Evaluation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210383.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-3|PAPER Fri-A-V-3-3 — Broadcasted Residual Learning for Efficient Keyword Spotting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Broadcasted Residual Learning for Efficient Keyword Spotting</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210400.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-8|PAPER Fri-M-V-6-8 — Auxiliary Sequence Labeling Tasks for Disfluency Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Auxiliary Sequence Labeling Tasks for Disfluency Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210252.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-5-1|PAPER Thu-M-V-5-1 — Phrase Break Prediction with Bidirectional Encoder Representations in Japanese Text-to-Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phrase Break Prediction with Bidirectional Encoder Representations in Japanese Text-to-Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210469.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-4|PAPER Thu-A-V-5-4 — Diff-TTS: A Denoising Diffusion Model for Text-to-Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Diff-TTS: A Denoising Diffusion Model for Text-to-Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210465.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-5|PAPER Fri-A-V-5-5 — Expressive Text-to-Speech Using Style Tag]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Expressive Text-to-Speech Using Style Tag</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211310.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-1-6|PAPER Wed-M-V-1-6 — Self-Supervised Phonotactic Representations for Language Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Self-Supervised Phonotactic Representations for Language Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210800.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-5|PAPER Wed-E-V-6-5 — Comparing Speech Enhancement Techniques for Voice Adaptation-Based Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparing Speech Enhancement Techniques for Voice Adaptation-Based Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211094.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-11|PAPER Thu-A-V-6-11 — Towards Automatic Speech to Sign Language Generation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Automatic Speech to Sign Language Generation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211237.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-1-2|PAPER Fri-M-O-1-2 — ETLT 2021: Shared Task on Automatic Speech Recognition for Non-Native Children’s Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ETLT 2021: Shared Task on Automatic Speech Recognition for Non-Native Children’s Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211390.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-8|PAPER Wed-E-V-1-8 — Adapt-and-Adjust: Overcoming the Long-Tail Problem of Multilingual Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adapt-and-Adjust: Overcoming the Long-Tail Problem of Multilingual Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210340.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-6|PAPER Wed-A-V-2-6 — Lookup-Table Recurrent Language Models for Long Tail Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Lookup-Table Recurrent Language Models for Long Tail Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211545.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-11|PAPER Fri-M-V-1-11 — L1 Identification from L2 Speech Using Neural Spectrogram Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">L1 Identification from L2 Speech Using Neural Spectrogram Analysis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211225.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-8|PAPER Wed-M-V-5-8 — Human Spoofing Detection Performance on Degraded Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Human Spoofing Detection Performance on Degraded Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211190.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-1-1|PAPER Fri-M-O-1-1 — Self-Attention Channel Combinator Frontend for End-to-End Multichannel Far-Field Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Self-Attention Channel Combinator Frontend for End-to-End Multichannel Far-Field Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210988.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-3-4|PAPER Wed-M-O-3-4 — Time Delay Estimation for Speaker Localization Using CNN-Based Parametrized GCC-PHAT Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Time Delay Estimation for Speaker Localization Using CNN-Based Parametrized GCC-PHAT Features</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210886.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-8|PAPER Wed-A-V-4-8 — CNN-Based Processing of Acoustic and Radio Frequency Signals for Speaker Localization from MAVs]]</div>|^<div class="cpauthorindexpersoncardpapertitle">CNN-Based Processing of Acoustic and Radio Frequency Signals for Speaker Localization from MAVs</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211540.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-SS-1-7|PAPER Tue-E-SS-1-7 — Modeling Dysphonia Severity as a Function of Roughness and Breathiness Ratings in the GRBAS Scale]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modeling Dysphonia Severity as a Function of Roughness and Breathiness Ratings in the GRBAS Scale</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211438.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-2-2|PAPER Thu-M-O-2-2 — Separation of Emotional and Reconstruction Embeddings on Ladder Network to Improve Speech Emotion Recognition Robustness in Noisy Conditions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Separation of Emotional and Reconstruction Embeddings on Ladder Network to Improve Speech Emotion Recognition Robustness in Noisy Conditions</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211234.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-O-1-4|PAPER Fri-A-O-1-4 — Voice Activity Detection with Teacher-Student Domain Emulation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voice Activity Detection with Teacher-Student Domain Emulation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212134.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-8|PAPER Wed-A-V-1-8 — Analysis of Eye Gaze Reasons and Gaze Aversions During Three-Party Conversations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis of Eye Gaze Reasons and Gaze Aversions During Three-Party Conversations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211796.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-7|PAPER Wed-A-V-1-7 — Investigating the Interplay Between Affective, Phonatory and Motoric Subsystems in Autism Spectrum Disorder Using a Multimodal Dialogue Agent]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Interplay Between Affective, Phonatory and Motoric Subsystems in Autism Spectrum Disorder Using a Multimodal Dialogue Agent</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211967.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-11|PAPER Wed-E-V-2-11 — Speech Based Depression Severity Level Classification Using a Multi-Stage Dilated CNN-LSTM Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Based Depression Severity Level Classification Using a Multi-Stage Dilated CNN-LSTM Model</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211960.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-2-8|PAPER Fri-A-V-2-8 — Generalized Dilated CNN Models for Depression Detection Using Inverted Vocal Tract Variables]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Generalized Dilated CNN Models for Depression Detection Using Inverted Vocal Tract Variables</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-6|PAPER Tue-A-V-2-6 — Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210019.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-1|PAPER Tue-M-SS-1-1 — The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210154.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-8|PAPER Tue-M-SS-1-8 — Introducing a Central African Primate Vocalisation Dataset for Automated Species Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Introducing a Central African Primate Vocalisation Dataset for Automated Species Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210286.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-2|PAPER Wed-E-V-6-2 — Detection and Analysis of Attention Errors in Sequence-to-Sequence Text-to-Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detection and Analysis of Attention Errors in Sequence-to-Sequence Text-to-Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211243.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-4|PAPER Thu-A-V-3-4 — Teacher-Student MixIT for Unsupervised and Semi-Supervised Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Teacher-Student MixIT for Unsupervised and Semi-Supervised Speech Separation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211702.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-2|PAPER Tue-M-SS-1-2 — Transfer Learning-Based Cough Representations for Automatic Detection of COVID-19]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transfer Learning-Based Cough Representations for Automatic Detection of COVID-19</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211717.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-9|PAPER Wed-E-V-2-9 — Visual Speech for Obstructive Sleep Apnea Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Visual Speech for Obstructive Sleep Apnea Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210800.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-5|PAPER Wed-E-V-6-5 — Comparing Speech Enhancement Techniques for Voice Adaptation-Based Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparing Speech Enhancement Techniques for Voice Adaptation-Based Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211658.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-2-4|PAPER Wed-E-O-2-4 — It’s Not What You Said, it’s How You Said it: Discriminative Perception of Speech as a Multichannel Communication System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">It’s Not What You Said, it’s How You Said it: Discriminative Perception of Speech as a Multichannel Communication System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212111.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-2|PAPER Thu-M-V-1-2 — Analyzing Short Term Dynamic Speech Features for Understanding Behavioral Traits of Children with Autism Spectrum Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analyzing Short Term Dynamic Speech Features for Understanding Behavioral Traits of Children with Autism Spectrum Disorder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211189.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-1|PAPER Thu-M-V-1-1 — Speech Intelligibility of Dysarthric Speech: Human Scores and Acoustic-Phonetic Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Intelligibility of Dysarthric Speech: Human Scores and Acoustic-Phonetic Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211507.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-SS-1-4|PAPER Tue-E-SS-1-4 — Automated Detection of Voice Disorder in the Saarbrücken Voice Database: Effects of Pathology Subset and Audio Materials]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automated Detection of Voice Disorder in the Saarbrücken Voice Database: Effects of Pathology Subset and Audio Materials</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211091.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-8|PAPER Tue-M-V-6-8 — FastICARL: Fast Incremental Classifier and Representation Learning with Efficient Budget Allocation in Audio Sensing Applications]]</div>|^<div class="cpauthorindexpersoncardpapertitle">FastICARL: Fast Incremental Classifier and Representation Learning with Efficient Budget Allocation in Audio Sensing Applications</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210019.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-1|PAPER Tue-M-SS-1-1 — The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211320.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-9|PAPER Thu-M-V-1-9 — Uncertainty-Aware COVID-19 Detection from Imbalanced Sound Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Uncertainty-Aware COVID-19 Detection from Imbalanced Sound Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211765.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-2-6|PAPER Tue-E-O-2-6 — Automatic Classification of Phonation Types in Spontaneous Speech: Towards a New Workflow for the Characterization of Speakers’ Voice Quality]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Classification of Phonation Types in Spontaneous Speech: Towards a New Workflow for the Characterization of Speakers’ Voice Quality</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210958.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-8|PAPER Tue-M-V-5-8 — Data Augmentation Methods for End-to-End Speech Recognition on Distant-Talk Scenarios]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Data Augmentation Methods for End-to-End Speech Recognition on Distant-Talk Scenarios</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211609.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-1|PAPER Wed-E-SS-1-1 — INTERSPEECH 2021 Deep Noise Suppression Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">INTERSPEECH 2021 Deep Noise Suppression Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211428.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-4|PAPER Fri-M-V-6-4 — Streaming Transformer for Hardware Efficient Voice Trigger Detection and False Trigger Mitigation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Streaming Transformer for Hardware Efficient Voice Trigger Detection and False Trigger Mitigation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211912.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-3|PAPER Wed-A-V-6-3 — Large-Scale Self- and Semi-Supervised Learning for Speech Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Large-Scale Self- and Semi-Supervised Learning for Speech Translation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212027.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-4|PAPER Wed-A-V-6-4 — CoVoST 2 and Massively Multilingual Speech Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">CoVoST 2 and Massively Multilingual Speech Translation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210947.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-7|PAPER Fri-A-V-5-7 — Towards Multi-Scale Style Control for Expressive Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Multi-Scale Style Control for Expressive Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210058.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-3|PAPER Tue-M-V-3-3 — Perceptual Contributions of Vowels and Consonant-Vowel Transitions in Understanding Time-Compressed Mandarin Sentences]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Perceptual Contributions of Vowels and Consonant-Vowel Transitions in Understanding Time-Compressed Mandarin Sentences</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210892.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-4|PAPER Wed-M-V-4-4 — WSRGlow: A Glow-Based Waveform Generative Model for Audio Super-Resolution]]</div>|^<div class="cpauthorindexpersoncardpapertitle">WSRGlow: A Glow-Based Waveform Generative Model for Audio Super-Resolution</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210398.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-SS-1-5|PAPER Wed-A-SS-1-5 — Our Learned Lessons from Cross-Lingual Speaker Verification: The CRMI-DKU System Description for the Short-Duration Speaker Verification Challenge 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Our Learned Lessons from Cross-Lingual Speaker Verification: The CRMI-DKU System Description for the Short-Duration Speaker Verification Challenge 2021</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210684.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-6|PAPER Tue-A-V-1-6 — Event Specific Attention for Polyphonic Sound Event Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Event Specific Attention for Polyphonic Sound Event Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212084.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-11|PAPER Tue-M-V-5-11 — Raw Waveform Encoder with Multi-Scale Globally Attentive Locally Recurrent Networks for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Raw Waveform Encoder with Multi-Scale Globally Attentive Locally Recurrent Networks for End-to-End Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210481.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-2|PAPER Tue-E-V-2-2 — TeCANet: Temporal-Contextual Attention Network for Environment-Aware Speech Dereverberation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">TeCANet: Temporal-Contextual Attention Network for Environment-Aware Speech Dereverberation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211965.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-5|PAPER Thu-A-V-6-5 — GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211743.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-13|PAPER Wed-A-V-3-13 — Transformer-Based ASR Incorporating Time-Reduction Layer and Fine-Tuning with Self-Knowledge Distillation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transformer-Based ASR Incorporating Time-Reduction Layer and Fine-Tuning with Self-Knowledge Distillation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210739.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-9|PAPER Wed-A-V-2-9 — BART Based Semantic Correction for Mandarin Automatic Speech Recognition System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">BART Based Semantic Correction for Mandarin Automatic Speech Recognition System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211983.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-5|PAPER Fri-M-V-3-5 — WeNet: Production Oriented Streaming and Non-Streaming End-to-End Speech Recognition Toolkit]]</div>|^<div class="cpauthorindexpersoncardpapertitle">WeNet: Production Oriented Streaming and Non-Streaming End-to-End Speech Recognition Toolkit</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212198.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-15|PAPER Wed-E-V-3-15 — Variable Frame Rate Acoustic Models Using Minimum Error Reinforcement Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Variable Frame Rate Acoustic Models Using Minimum Error Reinforcement Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210640.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-6|PAPER Tue-A-SS-1-6 — PATE-AAE: Incorporating Adversarial Autoencoder into Private Aggregation of Teacher Ensembles for Spoken Command Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PATE-AAE: Incorporating Adversarial Autoencoder into Private Aggregation of Teacher Ensembles for Spoken Command Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210080.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-2-3|PAPER Wed-E-O-2-3 — Active Speaker Detection as a Multi-Objective Optimization with Uncertainty-Based Multimodal Fusion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Active Speaker Detection as a Multi-Objective Optimization with Uncertainty-Based Multimodal Fusion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212197.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-11|PAPER Tue-A-SS-2-11 — Investigating Feature Selection and Explainability for COVID-19 Diagnostics from Cough Sounds]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Feature Selection and Explainability for COVID-19 Diagnostics from Cough Sounds</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218011.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-S&T-1-4|PAPER Wed-A-S&T-1-4 — Expressive Robot Performance Based on Facial Motion Capture]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Expressive Robot Performance Based on Facial Motion Capture</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210622.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-1-1|PAPER Tue-A-O-1-1 — Leveraging Speaker Attribute Information Using Multi Task Learning for Speaker Verification and Diarization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Leveraging Speaker Attribute Information Using Multi Task Learning for Speaker Verification and Diarization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210717.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-2|PAPER Tue-A-V-4-2 — wav2vec-C: A Self-Supervised Model for Speech Representation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">wav2vec-C: A Self-Supervised Model for Speech Representation Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210792.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-7|PAPER Tue-M-V-6-7 — Voice Activity Detection for Live Speech of Baseball Game Based on Tandem Connection with Speech/Noise Separation Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voice Activity Detection for Live Speech of Baseball Game Based on Tandem Connection with Speech/Noise Separation Model</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210390.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-4|PAPER Wed-E-V-1-4 — Language and Speaker-Independent Feature Transformation for End-to-End Multilingual Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Language and Speaker-Independent Feature Transformation for End-to-End Multilingual Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210003.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-2|PAPER Fri-A-V-4-2 — Fusion of Embeddings Networks for Robust Combination of Text Dependent and Independent Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fusion of Embeddings Networks for Robust Combination of Text Dependent and Independent Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210087.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-9|PAPER Thu-M-V-4-9 — A Thousand Words are Worth More Than One Recording: //Word-Embedding// Based Speaker Change Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Thousand Words are Worth More Than One Recording: //Word-Embedding// Based Speaker Change Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211457.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-SS-1-1|PAPER Wed-M-SS-1-1 — A Causal U-Net Based Neural Beamforming Network for Real-Time Multi-Channel Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Causal U-Net Based Neural Beamforming Network for Real-Time Multi-Channel Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211410.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-7|PAPER Wed-E-SS-1-7 — Low-Delay Speech Enhancement Using Perceptually Motivated Target and Loss]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Low-Delay Speech Enhancement Using Perceptually Motivated Target and Loss</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212128.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-11|PAPER Thu-M-V-2-11 — Audio-Visual Multi-Talker Speech Recognition in a Cocktail Party]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio-Visual Multi-Talker Speech Recognition in a Cocktail Party</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210338.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-3|PAPER Thu-M-V-3-3 — Continuous Speech Separation Using Speaker Inventory for Long Recording]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Continuous Speech Separation Using Speaker Inventory for Long Recording</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212039.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-1|PAPER Thu-A-V-5-1 — Federated Learning with Dynamic Transformer for Text to Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Federated Learning with Dynamic Transformer for Text to Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210771.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-5|PAPER Wed-M-V-3-5 — EfficientSing: A Chinese Singing Voice Synthesis System Using Duration-Free Acoustic Model and HiFi-GAN Vocoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">EfficientSing: A Chinese Singing Voice Synthesis System Using Duration-Free Acoustic Model and HiFi-GAN Vocoder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210852.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-1|PAPER Tue-M-V-2-1 — TacoLPCNet: Fast and Stable TTS by Conditioning LPCNet on Mel Spectrogram Predictions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">TacoLPCNet: Fast and Stable TTS by Conditioning LPCNet on Mel Spectrogram Predictions</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211011.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-5|PAPER Tue-M-V-2-5 — Information Sieve: Content Leakage Reduction in End-to-End Prosody Transfer for Expressive Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Information Sieve: Content Leakage Reduction in End-to-End Prosody Transfer for Expressive Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212266.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-SS-1-7|PAPER Wed-M-SS-1-7 — Real-Time Multi-Channel Speech Enhancement Based on Neural Network Masking with Attention Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Real-Time Multi-Channel Speech Enhancement Based on Neural Network Masking with Attention Model</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212267.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-13|PAPER Wed-A-V-4-13 — Cramér-Rao Lower Bound for DOA Estimation with an Array of Directional Microphones in Reverberant Environments]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cramér-Rao Lower Bound for DOA Estimation with an Array of Directional Microphones in Reverberant Environments</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210582.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-8|PAPER Tue-M-V-3-8 — Improving Perceptual Quality by Phone-Fortified Perceptual Loss Using Wasserstein Distance for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Perceptual Quality by Phone-Fortified Perceptual Loss Using Wasserstein Distance for Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210599.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-9|PAPER Tue-M-V-3-9 — MetricGAN+: An Improved Version of MetricGAN for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MetricGAN+: An Improved Version of MetricGAN for Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210666.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-1-6|PAPER Thu-A-V-1-6 — Acted vs. Improvised: Domain Adaptation for Elicitation Approaches in Audio-Visual Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acted vs. Improvised: Domain Adaptation for Elicitation Approaches in Audio-Visual Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210806.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-9|PAPER Thu-A-V-5-9 — Relational Data Selection for Data Augmentation of Speaker-Dependent Multi-Band MelGAN Vocoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Relational Data Selection for Data Augmentation of Speaker-Dependent Multi-Band MelGAN Vocoder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211775.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-9|PAPER Tue-E-V-3-9 — SUPERB: Speech Processing Universal PERformance Benchmark]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SUPERB: Speech Processing Universal PERformance Benchmark</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210427.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-7|PAPER Wed-A-V-3-7 — Transformer-Based End-to-End Speech Recognition with Residual Gaussian-Based Self-Attention]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transformer-Based End-to-End Speech Recognition with Residual Gaussian-Based Self-Attention</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211104.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-6|PAPER Tue-E-V-5-6 — The TAL System for the INTERSPEECH2021 Shared Task on Automatic Speech Recognition for Non-Native Childrens Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The TAL System for the INTERSPEECH2021 Shared Task on Automatic Speech Recognition for Non-Native Childrens Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211939.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-3-2|PAPER Wed-M-O-3-2 — Universal Speaker Extraction in the Presence and Absence of Target Speakers for Speech of One and Two Talkers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Universal Speaker Extraction in the Presence and Absence of Target Speakers for Speech of One and Two Talkers</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212260.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-12|PAPER Thu-A-V-3-12 — Neural Speaker Extraction with Speaker-Speech Cross-Attention Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Neural Speaker Extraction with Speaker-Speech Cross-Attention Network</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211552.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-3-4|PAPER Fri-M-O-3-4 — GlobalPhone Mix-To-Separate Out of 2: A Multilingual 2000 Speakers Mixtures Database for Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">GlobalPhone Mix-To-Separate Out of 2: A Multilingual 2000 Speakers Mixtures Database for Speech Separation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210794.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-7|PAPER Tue-A-SS-1-7 — Continual Learning for Fake Audio Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Continual Learning for Fake Audio Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210930.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-5|PAPER Wed-M-V-4-5 — Half-Truth: A Partially Fake Audio Detection Dataset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Half-Truth: A Partially Fake Audio Detection Dataset</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210055.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-2|PAPER Fri-A-V-6-2 — Augmenting Slot Values and Contexts for Spoken Language Understanding with Pretrained Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Augmenting Slot Values and Contexts for Spoken Language Understanding with Pretrained Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210238.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-6|PAPER Tue-M-V-3-6 — Know Your Enemy, Know Yourself: A Unified Two-Stage Framework for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Know Your Enemy, Know Yourself: A Unified Two-Stage Framework for Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211137.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-2|PAPER Wed-E-SS-1-2 — A Simultaneous Denoising and Dereverberation Framework with Target Decoupling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Simultaneous Denoising and Dereverberation Framework with Target Decoupling</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212022.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-1-5|PAPER Fri-A-SS-1-5 — Acoustic Echo Cancellation Using Deep Complex Neural Network with Nonlinear Magnitude Compression and Phase Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Echo Cancellation Using Deep Complex Neural Network with Nonlinear Magnitude Compression and Phase Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210117.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-1|PAPER Tue-E-V-4-1 — Data Augmentation for Spoken Language Understanding via Pretrained Language Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Data Augmentation for Spoken Language Understanding via Pretrained Language Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210007.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-2|PAPER Fri-A-V-3-2 — Weakly Supervised Construction of ASR Systems from Massive Video Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Weakly Supervised Construction of ASR Systems from Massive Video Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212250.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-11|PAPER Thu-A-V-3-11 — Robust Speaker Extraction Network Based on Iterative Refined Adaptation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Speaker Extraction Network Based on Iterative Refined Adaptation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210557.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-10|PAPER Tue-E-V-6-10 — Non-Parallel Any-to-Many Voice Conversion by Replacing Speaker Statistics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Non-Parallel Any-to-Many Voice Conversion by Replacing Speaker Statistics</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210559.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-1-4|PAPER Tue-M-V-1-4 — Improving Deep CNN Architectures with Variable-Length Training Samples for Text-Independent Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Deep CNN Architectures with Variable-Length Training Samples for Text-Independent Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210802.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-5-3|PAPER Thu-M-V-5-3 — Rich Prosody Diversity Modelling with Phone-Level Mixture Density Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Rich Prosody Diversity Modelling with Phone-Level Mixture Density Network</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210137.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-1|PAPER Tue-E-V-6-1 — CVC: Contrastive Learning for Non-Parallel Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">CVC: Contrastive Learning for Non-Parallel Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211148.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-6|PAPER Wed-E-V-6-6 — EMOVIE: A Mandarin Emotion Speech Dataset with a Simple Emotional Text-to-Speech Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">EMOVIE: A Mandarin Emotion Speech Dataset with a Simple Emotional Text-to-Speech Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210120.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-4-2|PAPER Tue-M-V-4-2 — Self-Supervised Dialogue Learning for Spoken Conversational Question Answering]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Self-Supervised Dialogue Learning for Spoken Conversational Question Answering</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210548.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-4-6|PAPER Tue-M-V-4-6 — Semantic Transportation Prototypical Network for Few-Shot Intent Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semantic Transportation Prototypical Network for Few-Shot Intent Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210110.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-7-1|PAPER Thu-M-V-7-1 — Contextualized Attention-Based Knowledge Transfer for Spoken Conversational Question Answering]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Contextualized Attention-Based Knowledge Transfer for Spoken Conversational Question Answering</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211341.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-6|PAPER Tue-E-V-3-6 — An Attribute-Aligned Strategy for Learning Speech Representation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Attribute-Aligned Strategy for Learning Speech Representation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210545.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-9|PAPER Wed-A-V-3-9 — Online Compressive Transformer for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Online Compressive Transformer for End-to-End Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210356.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-1-3|PAPER Tue-M-V-1-3 — Improving Time Delay Neural Network Based Speaker Recognition with Convolutional Block and Feature Aggregation Methods]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Time Delay Neural Network Based Speaker Recognition with Convolutional Block and Feature Aggregation Methods</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210358.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-SS-1-4|PAPER Fri-M-SS-1-4 — Systems for Low-Resource Speech Recognition Tasks in Open Automatic Speech Recognition and Formosa Speech Recognition Challenges]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Systems for Low-Resource Speech Recognition Tasks in Open Automatic Speech Recognition and Formosa Speech Recognition Challenges</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211962.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-12|PAPER Wed-E-V-3-12 — 4-Bit Quantization of LSTM-Based Speech Recognition Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">4-Bit Quantization of LSTM-Based Speech Recognition Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212013.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-9|PAPER Wed-E-V-6-9 — Utilizing Self-Supervised Representations for MOS Prediction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Utilizing Self-Supervised Representations for MOS Prediction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210592.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-5|PAPER Tue-M-V-6-5 — Audio-Visual Information Fusion Using Cross-Modal Teacher-Student Learning for Voice Activity Detection in Realistic Environments]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio-Visual Information Fusion Using Cross-Modal Teacher-Student Learning for Voice Activity Detection in Realistic Environments</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210640.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-6|PAPER Tue-A-SS-1-6 — PATE-AAE: Incorporating Adversarial Autoencoder into Private Aggregation of Teacher Ensembles for Spoken Command Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PATE-AAE: Incorporating Adversarial Autoencoder into Private Aggregation of Teacher Ensembles for Spoken Command Classification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210922.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-6|PAPER Wed-E-V-5-6 — A Maximum Likelihood Approach to SNR-Progressive Learning Using Generalized Gaussian Distribution for LSTM-Based Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Maximum Likelihood Approach to SNR-Progressive Learning Using Generalized Gaussian Distribution for LSTM-Based Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210723.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-7|PAPER Thu-M-V-2-7 — Automatic Lip-Reading with Hierarchical Pyramidal Convolution and Self-Attention for Image Sequences with No Word Boundaries]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Lip-Reading with Hierarchical Pyramidal Convolution and Self-Attention for Image Sequences with No Word Boundaries</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210516.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-6|PAPER Thu-M-V-4-6 — Scenario-Dependent Speaker Diarization for DIHARD-III Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Scenario-Dependent Speaker Diarization for DIHARD-III Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210208.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-2|PAPER Tue-E-V-6-2 — A Preliminary Study of a Two-Stage Paradigm for Preserving Speaker Identity in Dysarthric Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Preliminary Study of a Two-Stage Paradigm for Preserving Speaker Identity in Dysarthric Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211929.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-1|PAPER Wed-A-V-2-1 — Semantic Distance: A New Metric for ASR Performance Analysis Towards Spoken Language Understanding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semantic Distance: A New Metric for ASR Performance Analysis Towards Spoken Language Understanding</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211272.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-1|PAPER Wed-A-V-3-1 — Dynamic Encoder Transducer: A Flexible Solution for Trading Off Accuracy for Latency]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dynamic Encoder Transducer: A Flexible Solution for Trading Off Accuracy for Latency</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211975.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-10|PAPER Tue-A-V-1-10 — Optimizing Latency for Online Video Captioning Using Audio-Visual Transformers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Optimizing Latency for Online Video Captioning Using Audio-Visual Transformers</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211643.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-12|PAPER Wed-A-V-3-12 — Advanced Long-Context End-to-End Speech Recognition Using Context-Expanded Transformers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Advanced Long-Context End-to-End Speech Recognition Using Context-Expanded Transformers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211307.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-5|PAPER Tue-E-V-3-5 — Noise Robust Pitch Stylization Using Minimum Mean Absolute Error Criterion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Noise Robust Pitch Stylization Using Minimum Mean Absolute Error Criterion</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211339.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-7|PAPER Wed-E-V-1-7 — MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210019.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-1|PAPER Tue-M-SS-1-1 — The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212000.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-7|PAPER Thu-A-V-6-7 — AusKidTalk: An Auditory-Visual Corpus of 3- to 12-Year-Old Australian Children’s Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AusKidTalk: An Auditory-Visual Corpus of 3- to 12-Year-Old Australian Children’s Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210189.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-3|PAPER Thu-A-V-5-3 — Zero-Shot Text-to-Speech for Text-Based Insertion in Audio Narration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Zero-Shot Text-to-Speech for Text-Based Insertion in Audio Narration</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211463.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-9|PAPER Tue-E-V-4-9 — Coreference Augmentation for Multi-Domain Task-Oriented Dialogue State Tracking]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Coreference Augmentation for Multi-Domain Task-Oriented Dialogue State Tracking</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218015.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-S&T-1-1|PAPER Thu-M-S&T-1-1 — MoM: Minutes of Meeting Bot]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MoM: Minutes of Meeting Bot</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218019.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-S&T-1-5|PAPER Thu-M-S&T-1-5 — Live Subtitling for BigBlueButton with Open-Source Software]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Live Subtitling for BigBlueButton with Open-Source Software</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218011.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-S&T-1-4|PAPER Wed-A-S&T-1-4 — Expressive Robot Performance Based on Facial Motion Capture]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Expressive Robot Performance Based on Facial Motion Capture</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211625.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-4-10|PAPER Wed-E-V-4-10 — The LF Model in the Frequency Domain for Glottal Airflow Modelling Without Aliasing Distortion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The LF Model in the Frequency Domain for Glottal Airflow Modelling Without Aliasing Distortion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210461.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-5|PAPER Wed-M-V-2-5 — Multilingual Transfer of Acoustic Word Embeddings Improves When Training on Languages Related to the Target Zero-Resource Language]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multilingual Transfer of Acoustic Word Embeddings Improves When Training on Languages Related to the Target Zero-Resource Language</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210019.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-1|PAPER Tue-M-SS-1-1 — The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210616.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-3-2|PAPER Wed-E-O-3-2 — ORCA-SLANG: An Automatic Multi-Stage Semi-Supervised Deep Learning Framework for Large-Scale Killer Whale Call Type Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ORCA-SLANG: An Automatic Multi-Stage Semi-Supervised Deep Learning Framework for Large-Scale Killer Whale Call Type Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211741.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-11|PAPER Wed-M-V-5-11 — Primacy of Mouth over Eyes: Eye Movement Evidence from Audiovisual Mandarin Lexical Tones and Vowels]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Primacy of Mouth over Eyes: Eye Movement Evidence from Audiovisual Mandarin Lexical Tones and Vowels</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211655.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-8|PAPER Tue-M-V-2-8 — Transformer-Based Acoustic Modeling for Streaming Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transformer-Based Acoustic Modeling for Streaming Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210347.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-13|PAPER Tue-E-V-3-13 — Do Sound Event Representations Generalize to Other Audio Tasks? A Case Study in Audio Transfer Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Do Sound Event Representations Generalize to Other Audio Tasks? A Case Study in Audio Transfer Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211941.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-12|PAPER Wed-M-V-4-12 — A Two-Stage Approach to Speech Bandwidth Extension]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Two-Stage Approach to Speech Bandwidth Extension</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211566.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-3|PAPER Wed-M-V-6-3 — Contextualized Streaming End-to-End Speech Recognition with Trie-Based Deep Biasing and Shallow Fusion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Contextualized Streaming End-to-End Speech Recognition with Trie-Based Deep Biasing and Shallow Fusion</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211929.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-1|PAPER Wed-A-V-2-1 — Semantic Distance: A New Metric for ASR Performance Analysis Towards Spoken Language Understanding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semantic Distance: A New Metric for ASR Performance Analysis Towards Spoken Language Understanding</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211272.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-1|PAPER Wed-A-V-3-1 — Dynamic Encoder Transducer: A Flexible Solution for Trading Off Accuracy for Latency]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dynamic Encoder Transducer: A Flexible Solution for Trading Off Accuracy for Latency</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211921.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-14|PAPER Wed-A-V-3-14 — Flexi-Transducer: Optimizing Latency, Accuracy and Compute for Multi-Domain On-Device Scenarios]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Flexi-Transducer: Optimizing Latency, Accuracy and Compute for Multi-Domain On-Device Scenarios</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211887.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-6|PAPER Fri-A-V-3-6 — Dissecting User-Perceived Latency of On-Device E2E Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dissecting User-Perceived Latency of On-Device E2E Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211211.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-8|PAPER Fri-A-V-4-8 — Presentation Matters: Evaluating Speaker Identification Tasks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Presentation Matters: Evaluating Speaker Identification Tasks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211177.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-3|PAPER Thu-A-V-3-3 — Graph-PIT: Generalized Permutation Invariant Training for Continuous Separation of Arbitrary Numbers of Speakers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Graph-PIT: Generalized Permutation Invariant Training for Continuous Separation of Arbitrary Numbers of Speakers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210182.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-2-1|PAPER Tue-A-O-2-1 — Prosodic Disambiguation Using Chironomic Stylization of Intonation with Native and Non-Native Speakers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prosodic Disambiguation Using Chironomic Stylization of Intonation with Native and Non-Native Speakers</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210125.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-2-5|PAPER Fri-M-O-2-5 — Prosodic Boundary Prediction Model for Vietnamese Text-To-Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prosodic Boundary Prediction Model for Vietnamese Text-To-Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211767.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-12|PAPER Wed-A-V-2-12 — A Discriminative Entity-Aware Language Model for Virtual Assistants]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Discriminative Entity-Aware Language Model for Virtual Assistants</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212011.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-1-4|PAPER Tue-E-O-1-4 — Fearless Steps Challenge Phase-3 (FSC P3): Advancing SLT for Unseen Channel and Mission Data Across NASA Apollo Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fearless Steps Challenge Phase-3 (FSC P3): Advancing SLT for Unseen Channel and Mission Data Across NASA Apollo Audio</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211611.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-2-2|PAPER Wed-A-O-2-2 — Using Games to Augment Corpora for Language Recognition and Confusability]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Using Games to Augment Corpora for Language Recognition and Confusability</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211208.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-6|PAPER Thu-A-V-4-6 — The Third DIHARD Diarization Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Third DIHARD Diarization Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211583.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-2-3|PAPER Fri-M-O-2-3 — Ctrl-P: Temporal Control of Prosodic Variation for Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ctrl-P: Temporal Control of Prosodic Variation for Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211610.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-2-4|PAPER Fri-M-O-2-4 — ADEPT: A Dataset for Evaluating Prosody Transfer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ADEPT: A Dataset for Evaluating Prosody Transfer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211740.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-6-8|PAPER Tue-A-V-6-8 — An Exemplar Selection Algorithm for Native-Nonnative Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Exemplar Selection Algorithm for Native-Nonnative Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210124.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-2|PAPER Wed-A-V-4-2 — PILOT: Introducing Transformers for Probabilistic Sound Event Localization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PILOT: Introducing Transformers for Probabilistic Sound Event Localization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211774.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-12|PAPER Thu-A-V-5-12 — SC-GlowTTS: An Efficient Zero-Shot Multi-Speaker Text-To-Speech Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SC-GlowTTS: An Efficient Zero-Shot Multi-Speaker Text-To-Speech Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210189.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-3|PAPER Thu-A-V-5-3 — Zero-Shot Text-to-Speech for Text-Based Insertion in Audio Narration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Zero-Shot Text-to-Speech for Text-Based Insertion in Audio Narration</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212028.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-11|PAPER Tue-A-V-1-11 — Variational Information Bottleneck for Effective Low-Resource Audio Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Variational Information Bottleneck for Effective Low-Resource Audio Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211909.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-1|PAPER Thu-M-V-4-1 — End-to-End Neural Diarization: From Transformer to Conformer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Neural Diarization: From Transformer to Conformer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210670.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-2|PAPER Wed-M-V-4-2 — QISTA-Net-Audio: Audio Super-Resolution via Non-Convex ℓ,,q,,-Norm Minimization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">QISTA-Net-Audio: Audio Super-Resolution via Non-Convex ℓ,,q,,-Norm Minimization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210883.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-5-5|PAPER Thu-M-V-5-5 — Fine-Grained Prosody Modeling in Neural Speech Synthesis Using ToBI Representation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fine-Grained Prosody Modeling in Neural Speech Synthesis Using ToBI Representation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210206.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-4|PAPER Wed-M-V-6-4 — An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210637.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-10|PAPER Wed-M-V-6-10 — Bridging the Gap Between Streaming and Non-Streaming ASR Systems by Distilling Ensembles of CTC and RNN-T Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bridging the Gap Between Streaming and Non-Streaming ASR Systems by Distilling Ensembles of CTC and RNN-T Models</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210337.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-3|PAPER Thu-A-SS-1-3 — Pushing the Limits of Non-Autoregressive Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Pushing the Limits of Non-Autoregressive Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210356.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-1-3|PAPER Tue-M-V-1-3 — Improving Time Delay Neural Network Based Speaker Recognition with Convolutional Block and Feature Aggregation Methods]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Time Delay Neural Network Based Speaker Recognition with Convolutional Block and Feature Aggregation Methods</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211356.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-6-7|PAPER Tue-A-V-6-7 — S2VC: A Framework for Any-to-Any Voice Conversion with Self-Supervised Pretrained Representations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">S2VC: A Framework for Any-to-Any Voice Conversion with Self-Supervised Pretrained Representations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210659.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-7|PAPER Wed-A-V-4-7 — MetricNet: Towards Improved Modeling For Non-Intrusive Speech Quality Assessment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MetricNet: Towards Improved Modeling For Non-Intrusive Speech Quality Assessment</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218032.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-S&T-1-7|PAPER Fri-A-S&T-1-7 — Duplex Conversation in Outbound Agent System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Duplex Conversation in Outbound Agent System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211655.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-8|PAPER Tue-M-V-2-8 — Transformer-Based Acoustic Modeling for Streaming Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transformer-Based Acoustic Modeling for Streaming Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211272.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-1|PAPER Wed-A-V-3-1 — Dynamic Encoder Transducer: A Flexible Solution for Trading Off Accuracy for Latency]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dynamic Encoder Transducer: A Flexible Solution for Trading Off Accuracy for Latency</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211921.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-14|PAPER Wed-A-V-3-14 — Flexi-Transducer: Optimizing Latency, Accuracy and Compute for Multi-Domain On-Device Scenarios]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Flexi-Transducer: Optimizing Latency, Accuracy and Compute for Multi-Domain On-Device Scenarios</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211887.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-6|PAPER Fri-A-V-3-6 — Dissecting User-Perceived Latency of On-Device E2E Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dissecting User-Perceived Latency of On-Device E2E Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211122.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-2-5|PAPER Fri-M-V-2-5 — How Reliable Are Phonetic Data Collected Remotely? Comparison of Recording Devices and Environments on Acoustic Measurements]]</div>|^<div class="cpauthorindexpersoncardpapertitle">How Reliable Are Phonetic Data Collected Remotely? Comparison of Recording Devices and Environments on Acoustic Measurements</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210817.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-11|PAPER Fri-M-V-6-11 — Auto-KWS 2021 Challenge: Task, Datasets, and Baselines]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Auto-KWS 2021 Challenge: Task, Datasets, and Baselines</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212000.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-7|PAPER Thu-A-V-6-7 — AusKidTalk: An Auditory-Visual Corpus of 3- to 12-Year-Old Australian Children’s Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AusKidTalk: An Auditory-Visual Corpus of 3- to 12-Year-Old Australian Children’s Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210182.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-2-1|PAPER Tue-A-O-2-1 — Prosodic Disambiguation Using Chironomic Stylization of Intonation with Native and Non-Native Speakers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prosodic Disambiguation Using Chironomic Stylization of Intonation with Native and Non-Native Speakers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211274.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-7|PAPER Tue-M-SS-1-7 — A Deep and Recurrent Architecture for Primate Vocalization Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Deep and Recurrent Architecture for Primate Vocalization Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210273.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-5|PAPER Tue-M-SS-1-5 — Visual Transformers for Primates Classification and Covid Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Visual Transformers for Primates Classification and Covid Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211816.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-8|PAPER Fri-A-V-6-8 — Factorization-Aware Training of Transformers for Natural Language Understanding on the Edge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Factorization-Aware Training of Transformers for Natural Language Understanding on the Edge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210573.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-2-1|PAPER Thu-M-O-2-1 — Speaker Attentive Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Attentive Speech Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211966.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-5|PAPER Fri-M-V-6-5 — Few-Shot Keyword Spotting in Any Language]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Few-Shot Keyword Spotting in Any Language</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210744.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-2|PAPER Wed-A-V-6-2 — Subtitle Translation as Markup Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Subtitle Translation as Markup Translation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212006.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-15|PAPER Fri-A-SS-2-15 — Analysis and Tuning of a Voice Assistant System for Dysfluent Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis and Tuning of a Voice Assistant System for Dysfluent Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211749.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-7|PAPER Tue-A-V-2-7 — An Automatic, Simple Ultrasound Biofeedback Parameter for Distinguishing Accurate and Misarticulated Rhotic Syllables]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Automatic, Simple Ultrasound Biofeedback Parameter for Distinguishing Accurate and Misarticulated Rhotic Syllables</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211090.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-2-2|PAPER Wed-M-O-2-2 — Dialect Features in Heterogeneous and Homogeneous Gheg Speaking Communities]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dialect Features in Heterogeneous and Homogeneous Gheg Speaking Communities</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211176.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-8|PAPER Wed-E-V-2-8 — TDCA-Net: Time-Domain Channel Attention Network for Depression Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">TDCA-Net: Time-Domain Channel Attention Network for Depression Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210338.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-3|PAPER Thu-M-V-3-3 — Continuous Speech Separation Using Speaker Inventory for Long Recording]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Continuous Speech Separation Using Speaker Inventory for Long Recording</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211161.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-2|PAPER Thu-A-V-3-2 — Empirical Analysis of Generalized Iterative Speech Separation Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Empirical Analysis of Generalized Iterative Speech Separation Networks</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211372.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-6|PAPER Thu-A-V-3-6 — Binaural Speech Separation of Moving Speakers With Preserved Spatial Cues]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Binaural Speech Separation of Moving Speakers With Preserved Spatial Cues</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210520.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-4|PAPER Wed-E-V-5-4 — Multi-Stage Progressive Speech Enhancement Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Stage Progressive Speech Enhancement Network</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211841.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-10|PAPER Tue-E-V-3-10 — Synchronising Speech Segments with Musical Beats in Mandarin and English Singing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Synchronising Speech Segments with Musical Beats in Mandarin and English Singing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210502.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-4-5|PAPER Fri-M-V-4-5 — Polyphone Disambiguation in Mandarin Chinese with Semi-Supervised Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Polyphone Disambiguation in Mandarin Chinese with Semi-Supervised Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210006.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-4-1|PAPER Wed-E-V-4-1 — How f0 and Phrase Position Affect Papuan Malay Word Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">How f0 and Phrase Position Affect Papuan Malay Word Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211101.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-2-3|PAPER Tue-E-O-2-3 — Glottal Stops in Upper Sorbian: A Data-Driven Approach]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Glottal Stops in Upper Sorbian: A Data-Driven Approach</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218025.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-S&T-1-4|PAPER Fri-A-S&T-1-4 — Lalilo: A Reading Assistant for Children Featuring Speech Recognition-Based Reading Mistake Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Lalilo: A Reading Assistant for Children Featuring Speech Recognition-Based Reading Mistake Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211736.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-3-2|PAPER Wed-A-O-3-2 — Automatic Extraction of Speech Rhythm Descriptors for Speech Intelligibility Assessment in the Context of Head and Neck Cancers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Extraction of Speech Rhythm Descriptors for Speech Intelligibility Assessment in the Context of Head and Neck Cancers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211908.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-5|PAPER Wed-A-V-2-5 — Revisiting Parity of Human vs. Machine Conversational Speech Transcription]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Revisiting Parity of Human vs. Machine Conversational Speech Transcription</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211969.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-9|PAPER Tue-M-SS-1-9 — Multi-Attentive Detection of the Spider Monkey Whinny in the (Actual) Wild]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Attentive Detection of the Spider Monkey Whinny in the (Actual) Wild</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211297.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-8|PAPER Thu-M-V-1-8 — Parental Spoken Scaffolding and Narrative Skills in Crowd-Sourced Storytelling Samples of Young Children]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Parental Spoken Scaffolding and Narrative Skills in Crowd-Sourced Storytelling Samples of Young Children</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211600.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-8|PAPER Wed-A-V-5-8 — Continuous Wavelet Vocoder-Based Decomposition of Parametric Speech Waveform Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Continuous Wavelet Vocoder-Based Decomposition of Parametric Speech Waveform Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211305.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-5|PAPER Thu-M-V-1-5 — Detection of Consonant Errors in Disordered Speech Based on Consonant-Vowel Segment Embedding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detection of Consonant Errors in Disordered Speech Based on Consonant-Vowel Segment Embedding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210745.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-4|PAPER Fri-A-V-1-4 — Phone-Level Pronunciation Scoring for Spanish Speakers Learning English Using a GOP-DNN System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phone-Level Pronunciation Scoring for Spanish Speakers Learning English Using a GOP-DNN System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210206.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-4|PAPER Wed-M-V-6-4 — An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210658.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-3|PAPER Fri-M-V-3-3 — A Hybrid Seq-2-Seq ASR Design for On-Device and Server Applications]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Hybrid Seq-2-Seq ASR Design for On-Device and Server Applications</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211237.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-1-2|PAPER Fri-M-O-1-2 — ETLT 2021: Shared Task on Automatic Speech Recognition for Non-Native Children’s Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ETLT 2021: Shared Task on Automatic Speech Recognition for Non-Native Children’s Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210330.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-10|PAPER Fri-A-SS-2-10 — A Voice-Activated Switch for Persons with Motor and Speech Impairments: Isolated-Vowel Spotting Using Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Voice-Activated Switch for Persons with Motor and Speech Impairments: Isolated-Vowel Spotting Using Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211775.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-9|PAPER Tue-E-V-3-9 — SUPERB: Speech Processing Universal PERformance Benchmark]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SUPERB: Speech Processing Universal PERformance Benchmark</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210763.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-7|PAPER Thu-M-V-3-7 — Stabilizing Label Assignment for Speech Separation by Self-Supervised Pre-Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Stabilizing Label Assignment for Speech Separation by Self-Supervised Pre-Training</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210189.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-3|PAPER Thu-A-V-5-3 — Zero-Shot Text-to-Speech for Text-Based Insertion in Audio Narration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Zero-Shot Text-to-Speech for Text-Based Insertion in Audio Narration</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210838.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-1|PAPER Fri-A-V-5-1 — STYLER: Style Factor Modeling with Rapidity and Robustness via Speech Decomposition for Expressive and Controllable Neural Text to Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">STYLER: Style Factor Modeling with Rapidity and Robustness via Speech Decomposition for Expressive and Controllable Neural Text to Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211771.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-3-3|PAPER Tue-E-O-3-3 — Speaking Corona? Human and Machine Recognition of COVID-19 from Voice]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaking Corona? Human and Machine Recognition of COVID-19 from Voice</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-13|PAPER Wed-E-V-3-13 — Unified Autoregressive Modeling for Joint End-to-End Multi-Talker Overlapped Speech Recognition and Speaker Attribute Estimation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unified Autoregressive Modeling for Joint End-to-End Multi-Talker Overlapped Speech Recognition and Speaker Attribute Estimation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210853.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-6|PAPER Fri-A-V-1-6 — Lexical Density Analysis of Word Productions in Japanese English Using Acoustic Word Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Lexical Density Analysis of Word Productions in Japanese English Using Acoustic Word Embeddings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210041.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-1|PAPER Wed-A-V-5-1 — GAN Vocoder: Multi-Resolution Discriminator Is All You Need]]</div>|^<div class="cpauthorindexpersoncardpapertitle">GAN Vocoder: Multi-Resolution Discriminator Is All You Need</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211016.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-7|PAPER Wed-A-V-5-7 — UnivNet: A Neural Vocoder with Multi-Resolution Spectrogram Discriminators for High-Fidelity Waveform Generation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">UnivNet: A Neural Vocoder with Multi-Resolution Spectrogram Discriminators for High-Fidelity Waveform Generation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212084.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-11|PAPER Tue-M-V-5-11 — Raw Waveform Encoder with Multi-Scale Globally Attentive Locally Recurrent Networks for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Raw Waveform Encoder with Multi-Scale Globally Attentive Locally Recurrent Networks for End-to-End Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210481.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-2|PAPER Tue-E-V-2-2 — TeCANet: Temporal-Contextual Attention Network for Environment-Aware Speech Dereverberation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">TeCANet: Temporal-Contextual Attention Network for Environment-Aware Speech Dereverberation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210478.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-8|PAPER Wed-A-V-3-8 — SpeechMoE: Scaling to Large Acoustic Models with Dynamic Routing Mixture of Experts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SpeechMoE: Scaling to Large Acoustic Models with Dynamic Routing Mixture of Experts</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210414.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-2|PAPER Wed-A-V-5-2 — Glow-WaveGAN: Learning Speech Representations from GAN-Based Variational Auto-Encoder for High Fidelity Flow-Based Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Glow-WaveGAN: Learning Speech Representations from GAN-Based Variational Auto-Encoder for High Fidelity Flow-Based Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211965.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-5|PAPER Thu-A-V-6-5 — GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210412.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-4|PAPER Fri-A-V-5-4 — Controllable Context-Aware Conversational Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Controllable Context-Aware Conversational Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210482.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-4|PAPER Tue-E-V-1-4 — Variational Information Bottleneck Based Regularization for Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Variational Information Bottleneck Based Regularization for Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210056.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-1-3|PAPER Thu-M-SS-1-3 — Dynamic Multi-Scale Convolution for Dialect Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dynamic Multi-Scale Convolution for Dialect Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211823.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-2-1|PAPER Fri-M-V-2-1 — Leveraging Real-Time MRI for Illuminating Linguistic Velum Action]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Leveraging Real-Time MRI for Illuminating Linguistic Velum Action</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210915.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-7|PAPER Thu-M-V-1-7 — Identifying Cognitive Impairment Using Sentence Representation Vectors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Identifying Cognitive Impairment Using Sentence Representation Vectors</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211519.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-7|PAPER Thu-A-SS-2-7 — Using the Outputs of Different Automatic Speech Recognition Paradigms for Acoustic- and BERT-Based Alzheimer’s Dementia Detection Through Spontaneous Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Using the Outputs of Different Automatic Speech Recognition Paradigms for Acoustic- and BERT-Based Alzheimer’s Dementia Detection Through Spontaneous Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210351.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-5-3|PAPER Tue-A-V-5-3 — Disfluency Detection with Unlabeled Data and Small BERT Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Disfluency Detection with Unlabeled Data and Small BERT Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210605.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-10|PAPER Tue-M-V-3-10 — A Spectro-Temporal Glimpsing Index (STGI) for Speech Intelligibility Prediction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Spectro-Temporal Glimpsing Index (STGI) for Speech Intelligibility Prediction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211438.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-2-2|PAPER Thu-M-O-2-2 — Separation of Emotional and Reconstruction Embeddings on Ladder Network to Improve Speech Emotion Recognition Robustness in Noisy Conditions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Separation of Emotional and Reconstruction Embeddings on Ladder Network to Improve Speech Emotion Recognition Robustness in Noisy Conditions</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211234.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-O-1-4|PAPER Fri-A-O-1-4 — Voice Activity Detection with Teacher-Student Domain Emulation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voice Activity Detection with Teacher-Student Domain Emulation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210351.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-5-3|PAPER Tue-A-V-5-3 — Disfluency Detection with Unlabeled Data and Small BERT Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Disfluency Detection with Unlabeled Data and Small BERT Models</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210822.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-6|PAPER Fri-A-V-4-6 — Chronological Self-Training for Real-Time Speaker Diarization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Chronological Self-Training for Real-Time Speaker Diarization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211555.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-10|PAPER Wed-M-V-4-10 — Improving the Expressiveness of Neural Vocoding with Non-Affine Normalizing Flows]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving the Expressiveness of Neural Vocoding with Non-Affine Normalizing Flows</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210086.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-2|PAPER Fri-M-V-1-2 — Detection of Lexical Stress Errors in Non-Native (L2) English with Data Augmentation and Attention]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detection of Lexical Stress Errors in Non-Native (L2) English with Data Augmentation and Attention</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210038.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-1|PAPER Fri-A-V-1-1 — Weakly-Supervised Word-Level Pronunciation Error Detection in Non-Native English Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Weakly-Supervised Word-Level Pronunciation Error Detection in Non-Native English Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210400.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-8|PAPER Fri-M-V-6-8 — Auxiliary Sequence Labeling Tasks for Disfluency Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Auxiliary Sequence Labeling Tasks for Disfluency Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-6|PAPER Tue-A-V-2-6 — Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211798.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-4|PAPER Tue-M-SS-1-4 — Transfer Learning and Data Augmentation Techniques to the COVID-19 Identification Tasks in ComParE 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transfer Learning and Data Augmentation Techniques to the COVID-19 Identification Tasks in ComParE 2021</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211965.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-5|PAPER Thu-A-V-6-5 — GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211259.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-13|PAPER Thu-A-V-6-13 — speechocean762: An Open-Source Non-Native English Speech Corpus for Pronunciation Assessment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">speechocean762: An Open-Source Non-Native English Speech Corpus for Pronunciation Assessment</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-6|PAPER Tue-A-V-2-6 — Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210546.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-1-2|PAPER Tue-M-O-1-2 — T5G2P: Using Text-to-Text Transfer Transformer for Grapheme-to-Phoneme Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">T5G2P: Using Text-to-Text Transfer Transformer for Grapheme-to-Phoneme Conversion</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218023.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-S&T-1-2|PAPER Fri-A-S&T-1-2 — Save Your Voice: Voice Banking and TTS for Anyone]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Save Your Voice: Voice Banking and TTS for Anyone</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211422.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-6-6|PAPER Thu-M-V-6-6 — Model-Based Exploration of Linking Between Vowel Articulatory Space and Acoustic Space]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Model-Based Exploration of Linking Between Vowel Articulatory Space and Acoustic Space</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211415.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-6|PAPER Thu-A-SS-2-6 — Alzheimer’s Disease Detection from Spontaneous Speech Through Combining Linguistic Complexity and (Dis)Fluency Features with Pretrained Language Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Alzheimer’s Disease Detection from Spontaneous Speech Through Combining Linguistic Complexity and (Dis)Fluency Features with Pretrained Language Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210198.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-1|PAPER Wed-E-V-1-1 — Bootstrap an End-to-End ASR System by Multilingual Training, Transfer Learning, Text-to-Text Mapping and Synthetic Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bootstrap an End-to-End ASR System by Multilingual Training, Transfer Learning, Text-to-Text Mapping and Synthetic Audio</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211159.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-7|PAPER Wed-E-V-2-7 — Non-Verbal Vocalisation and Laughter Detection Using Sequence-to-Sequence Models and Multi-Label Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Non-Verbal Vocalisation and Laughter Detection Using Sequence-to-Sequence Models and Multi-Label Training</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210315.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-3|PAPER Fri-M-V-1-3 — Testing Acoustic Voice Quality Classification Across Languages and Speech Styles]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Testing Acoustic Voice Quality Classification Across Languages and Speech Styles</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210988.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-3-4|PAPER Wed-M-O-3-4 — Time Delay Estimation for Speaker Localization Using CNN-Based Parametrized GCC-PHAT Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Time Delay Estimation for Speaker Localization Using CNN-Based Parametrized GCC-PHAT Features</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210886.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-8|PAPER Wed-A-V-4-8 — CNN-Based Processing of Acoustic and Radio Frequency Signals for Speaker Localization from MAVs]]</div>|^<div class="cpauthorindexpersoncardpapertitle">CNN-Based Processing of Acoustic and Radio Frequency Signals for Speaker Localization from MAVs</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211441.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-5|PAPER Wed-A-V-1-5 — Lexical Entrainment and Intra-Speaker Variability in Cooperative Dialogues]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Lexical Entrainment and Intra-Speaker Variability in Cooperative Dialogues</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210580.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-7|PAPER Tue-E-V-4-7 — Predicting Temporal Performance Drop of Deployed Production Spoken Language Understanding Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Predicting Temporal Performance Drop of Deployed Production Spoken Language Understanding Models</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210335.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-3|PAPER Fri-A-V-6-3 — The Impact of Intent Distribution Mismatch on Semi-Supervised Spoken Language Understanding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Impact of Intent Distribution Mismatch on Semi-Supervised Spoken Language Understanding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211821.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-11|PAPER Tue-M-SS-1-11 — Ensemble-Within-Ensemble Classification for Escalation Prediction from Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ensemble-Within-Ensemble Classification for Escalation Prediction from Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211862.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-4|PAPER Thu-M-V-1-4 — Phonetic Complexity, Speech Accuracy and Intelligibility Assessment of Italian Dysarthric Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonetic Complexity, Speech Accuracy and Intelligibility Assessment of Italian Dysarthric Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211464.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-O-2-3|PAPER Fri-A-O-2-3 — Semantic Sentence Similarity: Size does not Always Matter]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semantic Sentence Similarity: Size does not Always Matter</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210235.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-2|PAPER Tue-E-V-1-2 — The DKU-Duke-Lenovo System Description for the Fearless Steps Challenge Phase III]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The DKU-Duke-Lenovo System Description for the Fearless Steps Challenge Phase III</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210443.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-7|PAPER Wed-A-V-2-7 — Contextual Density Ratio for Language Model Biasing of Sequence to Sequence ASR Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Contextual Density Ratio for Language Model Biasing of Sequence to Sequence ASR Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212006.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-15|PAPER Fri-A-SS-2-15 — Analysis and Tuning of a Voice Assistant System for Dysfluent Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis and Tuning of a Voice Assistant System for Dysfluent Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212249.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-2|PAPER Wed-A-V-1-2 — A Psychology-Driven Computational Analysis of Political Interviews]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Psychology-Driven Computational Analysis of Political Interviews</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210618.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-1|PAPER Fri-A-V-6-1 — Intent Detection and Slot Filling for Vietnamese]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Intent Detection and Slot Filling for Vietnamese</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210353.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-2|PAPER Wed-E-V-2-2 — Robust Laughter Detection in Noisy Environments]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Laughter Detection in Noisy Environments</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210799.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-6|PAPER Tue-A-SS-2-6 — Classification of COVID-19 from Cough Using Autoregressive Predictive Coding Pretraining and Spectral Data Augmentation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Classification of COVID-19 from Cough Using Autoregressive Predictive Coding Pretraining and Spectral Data Augmentation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211783.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-2|PAPER Tue-A-SS-1-2 — Configurable Privacy-Preserving Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Configurable Privacy-Preserving Automatic Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210799.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-6|PAPER Tue-A-SS-2-6 — Classification of COVID-19 from Cough Using Autoregressive Predictive Coding Pretraining and Spectral Data Augmentation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Classification of COVID-19 from Cough Using Autoregressive Predictive Coding Pretraining and Spectral Data Augmentation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211413.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-9|PAPER Tue-A-V-2-9 — RaSSpeR: Radar-Based Silent Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">RaSSpeR: Radar-Based Silent Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211438.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-2-2|PAPER Thu-M-O-2-2 — Separation of Emotional and Reconstruction Embeddings on Ladder Network to Improve Speech Emotion Recognition Robustness in Noisy Conditions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Separation of Emotional and Reconstruction Embeddings on Ladder Network to Improve Speech Emotion Recognition Robustness in Noisy Conditions</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211234.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-O-1-4|PAPER Fri-A-O-1-4 — Voice Activity Detection with Teacher-Student Domain Emulation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voice Activity Detection with Teacher-Student Domain Emulation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211312.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-12|PAPER Wed-M-V-2-12 — AVLnet: Learning Audio-Visual Language Representations from Instructional Videos]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AVLnet: Learning Audio-Visual Language Representations from Instructional Videos</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211352.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-8|PAPER Thu-M-V-2-8 — Cascaded Multilingual Audio-Visual Learning from Videos]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cascaded Multilingual Audio-Visual Learning from Videos</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210587.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-9|PAPER Wed-M-V-6-9 — Reducing Exposure Bias in Training Recurrent Neural Network Transducers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reducing Exposure Bias in Training Recurrent Neural Network Transducers</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211656.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-11|PAPER Wed-A-V-2-11 — Improving Customization of Neural Transducers by Mitigating Acoustic Mismatch of Synthesized Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Customization of Neural Transducers by Mitigating Acoustic Mismatch of Synthesized Audio</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211446.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-11|PAPER Fri-A-V-5-11 — Synthesis of Expressive Speaking Styles with Limited Training Data in a Multi-Speaker, Prosody-Controllable Sequence-to-Sequence Architecture]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Synthesis of Expressive Speaking Styles with Limited Training Data in a Multi-Speaker, Prosody-Controllable Sequence-to-Sequence Architecture</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210103.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-1|PAPER Tue-A-V-1-1 — SpecMix : A Mixed Sample Data Augmentation Method for Training with Time-Frequency Domain Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SpecMix : A Mixed Sample Data Augmentation Method for Training with Time-Frequency Domain Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211891.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-3-4|PAPER Tue-E-O-3-4 — Acoustic-Prosodic, Lexical and Demographic Cues to Persuasiveness in Competitive Debate Speeches]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic-Prosodic, Lexical and Demographic Cues to Persuasiveness in Competitive Debate Speeches</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211796.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-7|PAPER Wed-A-V-1-7 — Investigating the Interplay Between Affective, Phonatory and Motoric Subsystems in Autism Spectrum Disorder Using a Multimodal Dialogue Agent]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Interplay Between Affective, Phonatory and Motoric Subsystems in Autism Spectrum Disorder Using a Multimodal Dialogue Agent</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211801.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-2|PAPER Fri-A-SS-2-2 — Investigating the Utility of Multimodal Conversational Technology and Audiovisual Analytic Measures for the Assessment and Monitoring of Amyotrophic Lateral Sclerosis at Scale]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Utility of Multimodal Conversational Technology and Audiovisual Analytic Measures for the Assessment and Monitoring of Amyotrophic Lateral Sclerosis at Scale</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211207.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-9|PAPER Fri-M-V-3-9 — Multi-Task Learning for End-to-End ASR Word and Utterance Confidence with Deletion Prediction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Task Learning for End-to-End ASR Word and Utterance Confidence with Deletion Prediction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211434.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-2|PAPER Tue-M-V-5-2 — Phoneme Recognition Through Fine Tuning of Phonetic Representations: A Case Study on Luhya Language Varieties]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phoneme Recognition Through Fine Tuning of Phonetic Representations: A Case Study on Luhya Language Varieties</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211944.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-12|PAPER Wed-E-V-1-12 — Differentiable Allophone Graphs for Language-Universal Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Differentiable Allophone Graphs for Language-Universal Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211435.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-3|PAPER Thu-A-V-6-3 — Tusom2021: A Phonetically Transcribed Speech Dataset from an Endangered Language for Universal Phone Recognition Experiments]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Tusom2021: A Phonetically Transcribed Speech Dataset from an Endangered Language for Universal Phone Recognition Experiments</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210206.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-4|PAPER Wed-M-V-6-4 — An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210340.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-6|PAPER Wed-A-V-2-6 — Lookup-Table Recurrent Language Models for Long Tail Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Lookup-Table Recurrent Language Models for Long Tail Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210658.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-3|PAPER Fri-M-V-3-3 — A Hybrid Seq-2-Seq ASR Design for On-Device and Server Applications]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Hybrid Seq-2-Seq ASR Design for On-Device and Server Applications</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211796.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-7|PAPER Wed-A-V-1-7 — Investigating the Interplay Between Affective, Phonatory and Motoric Subsystems in Autism Spectrum Disorder Using a Multimodal Dialogue Agent]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Interplay Between Affective, Phonatory and Motoric Subsystems in Autism Spectrum Disorder Using a Multimodal Dialogue Agent</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211801.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-2|PAPER Fri-A-SS-2-2 — Investigating the Utility of Multimodal Conversational Technology and Audiovisual Analytic Measures for the Assessment and Monitoring of Amyotrophic Lateral Sclerosis at Scale]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Utility of Multimodal Conversational Technology and Audiovisual Analytic Measures for the Assessment and Monitoring of Amyotrophic Lateral Sclerosis at Scale</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218001.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-S&T-1-1|PAPER Tue-A-S&T-1-1 — Application for Detecting Depression, Parkinson’s Disease and Dysphonic Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Application for Detecting Depression, Parkinson’s Disease and Dysphonic Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211067.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-1-4|PAPER Wed-A-O-1-4 — On Sampling-Based Training Criteria for Neural Language Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On Sampling-Based Training Criteria for Neural Language Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211220.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-1|PAPER Thu-A-SS-2-1 — Detecting Cognitive Decline Using Speech Only: The ADReSSo Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detecting Cognitive Decline Using Speech Only: The ADReSSo Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218020.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-S&T-1-6|PAPER Thu-M-S&T-1-6 — Expressive Latvian Speech Synthesis for Dialog Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Expressive Latvian Speech Synthesis for Dialog Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210056.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-1-3|PAPER Thu-M-SS-1-3 — Dynamic Multi-Scale Convolution for Dialect Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dynamic Multi-Scale Convolution for Dialect Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218004.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-S&T-1-4|PAPER Tue-A-S&T-1-4 — ROXANNE Research Platform: Automate Criminal Investigations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ROXANNE Research Platform: Automate Criminal Investigations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211049.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-5-7|PAPER Thu-M-V-5-7 — Applying the Information Bottleneck Principle to Prosodic Representation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Applying the Information Bottleneck Principle to Prosodic Representation Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211129.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-9|PAPER Fri-A-V-5-9 — Fine-Grained Style Modeling, Transfer and Prediction in Text-to-Speech Synthesis via Phone-Level Content-Style Disentanglement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fine-Grained Style Modeling, Transfer and Prediction in Text-to-Speech Synthesis via Phone-Level Content-Style Disentanglement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211000.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-2-5|PAPER Fri-A-V-2-5 — Parametric Distributions to Model Numerical Emotion Labels]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Parametric Distributions to Model Numerical Emotion Labels</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212197.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-11|PAPER Tue-A-SS-2-11 — Investigating Feature Selection and Explainability for COVID-19 Diagnostics from Cough Sounds]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Feature Selection and Explainability for COVID-19 Diagnostics from Cough Sounds</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211877.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-4|PAPER Tue-E-V-4-4 — DEXTER: Deep Encoding of External Knowledge for Named Entity Recognition in Virtual Assistants]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DEXTER: Deep Encoding of External Knowledge for Named Entity Recognition in Virtual Assistants</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211031.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-8|PAPER Tue-A-SS-2-8 — COVID-19 Detection from Spectral Features on the DiCOVA Dataset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">COVID-19 Detection from Spectral Features on the DiCOVA Dataset</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210501.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-5|PAPER Fri-A-V-6-5 — Three-Module Modeling For End-to-End Spoken Language Understanding Using Pre-Trained DNN-HMM-Based Acoustic-Phonetic Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Three-Module Modeling For End-to-End Spoken Language Understanding Using Pre-Trained DNN-HMM-Based Acoustic-Phonetic Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211512.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-3-6|PAPER Tue-A-V-3-6 — A Deep Learning Method to Multi-Channel Active Noise Control]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Deep Learning Method to Multi-Channel Active Noise Control</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211508.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-8|PAPER Tue-E-V-2-8 — A Deep Learning Approach to Multi-Channel and Multi-Microphone Acoustic Echo Cancellation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Deep Learning Approach to Multi-Channel and Multi-Microphone Acoustic Echo Cancellation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211344.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-9|PAPER Fri-A-V-1-9 — A Study on Fine-Tuning wav2vec2.0 Model for the Task of Mispronunciation Detection and Diagnosis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Study on Fine-Tuning wav2vec2.0 Model for the Task of Mispronunciation Detection and Diagnosis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212000.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-7|PAPER Thu-A-V-6-7 — AusKidTalk: An Auditory-Visual Corpus of 3- to 12-Year-Old Australian Children’s Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AusKidTalk: An Auditory-Visual Corpus of 3- to 12-Year-Old Australian Children’s Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211821.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-11|PAPER Tue-M-SS-1-11 — Ensemble-Within-Ensemble Classification for Escalation Prediction from Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ensemble-Within-Ensemble Classification for Escalation Prediction from Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210277.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-1-2|PAPER Wed-M-V-1-2 — Modeling and Training Strategies for Language Recognition Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modeling and Training Strategies for Language Recognition Systems</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210276.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-1-2|PAPER Thu-M-SS-1-2 — Language Recognition on Unknown Conditions: The LORIA-Inria-MULTISPEECH System for AP20-OLR Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Language Recognition on Unknown Conditions: The LORIA-Inria-MULTISPEECH System for AP20-OLR Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211821.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-11|PAPER Tue-M-SS-1-11 — Ensemble-Within-Ensemble Classification for Escalation Prediction from Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ensemble-Within-Ensemble Classification for Escalation Prediction from Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210198.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-1|PAPER Wed-E-V-1-1 — Bootstrap an End-to-End ASR System by Multilingual Training, Transfer Learning, Text-to-Text Mapping and Synthetic Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bootstrap an End-to-End ASR System by Multilingual Training, Transfer Learning, Text-to-Text Mapping and Synthetic Audio</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210567.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-3-2|PAPER Tue-E-O-3-2 — Audio-Visual Recognition of Emotional Engagement of People with Dementia]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio-Visual Recognition of Emotional Engagement of People with Dementia</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210986.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-3-1|PAPER Wed-M-O-3-1 — Auxiliary Loss Function for Target Speech Extraction and Recognition with Weak Supervision Based on Speaker Characteristics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Auxiliary Loss Function for Target Speech Extraction and Recognition with Weak Supervision Based on Speaker Characteristics</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210323.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-1-1|PAPER Wed-E-O-1-1 — Reformulating DOVER-Lap Label Mapping as a Graph Partitioning Problem]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reformulating DOVER-Lap Label Mapping as a Graph Partitioning Problem</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212127.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-3-6|PAPER Thu-M-O-3-6 — Training Hybrid Models on Noisy Transliterated Transcripts for Code-Switched Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Training Hybrid Models on Noisy Transliterated Transcripts for Code-Switched Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210750.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-3|PAPER Thu-A-V-4-3 — Target-Speaker Voice Activity Detection with Improved i-Vector Estimation for Unknown Number of Speaker]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Target-Speaker Voice Activity Detection with Improved i-Vector Estimation for Unknown Number of Speaker</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210897.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-6|PAPER Wed-M-V-3-6 — Cross-Lingual Speaker Adaptation Using Domain Adaptation and Speaker Consistency Loss for Text-To-Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Lingual Speaker Adaptation Using Domain Adaptation and Speaker Consistency Loss for Text-To-Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211583.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-2-3|PAPER Fri-M-O-2-3 — Ctrl-P: Temporal Control of Prosodic Variation for Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ctrl-P: Temporal Control of Prosodic Variation for Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211610.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-2-4|PAPER Fri-M-O-2-4 — ADEPT: A Dataset for Evaluating Prosody Transfer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ADEPT: A Dataset for Evaluating Prosody Transfer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211449.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-8|PAPER Thu-A-SS-1-8 — Real-Time End-to-End Monaural Multi-Speaker Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Real-Time End-to-End Monaural Multi-Speaker Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212161.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-10|PAPER Fri-A-V-4-10 — An Integrated Framework for Two-Pass Personalized Voice Trigger]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Integrated Framework for Two-Pass Personalized Voice Trigger</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211477.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-2|PAPER Wed-A-V-3-2 — Domain-Aware Self-Attention for Multi-Domain Neural Machine Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Domain-Aware Self-Attention for Multi-Domain Neural Machine Translation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211312.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-12|PAPER Wed-M-V-2-12 — AVLnet: Learning Audio-Visual Language Representations from Instructional Videos]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AVLnet: Learning Audio-Visual Language Representations from Instructional Videos</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210717.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-2|PAPER Tue-A-V-4-2 — wav2vec-C: A Self-Supervised Model for Speech Representation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">wav2vec-C: A Self-Supervised Model for Speech Representation Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211983.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-5|PAPER Fri-M-V-3-5 — WeNet: Production Oriented Streaming and Non-Streaming End-to-End Speech Recognition Toolkit]]</div>|^<div class="cpauthorindexpersoncardpapertitle">WeNet: Production Oriented Streaming and Non-Streaming End-to-End Speech Recognition Toolkit</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210206.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-4|PAPER Wed-M-V-6-4 — An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210037.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-1|PAPER Tue-M-V-6-1 — Attention-Based Cross-Modal Fusion for Audio-Visual Voice Activity Detection in Musical Video Streams]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Attention-Based Cross-Modal Fusion for Audio-Visual Voice Activity Detection in Musical Video Streams</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211941.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-12|PAPER Wed-M-V-4-12 — A Two-Stage Approach to Speech Bandwidth Extension]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Two-Stage Approach to Speech Bandwidth Extension</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210556.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-5|PAPER Wed-M-O-1-5 — //LeBenchmark//: A Reproducible Framework for Assessing Self-Supervised Representation Learning from Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">//LeBenchmark//: A Reproducible Framework for Assessing Self-Supervised Representation Learning from Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210306.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-3|PAPER Wed-M-V-5-3 — Exploring the Potential of Lexical Paraphrases for Mitigating Noise-Induced Comprehension Errors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploring the Potential of Lexical Paraphrases for Mitigating Noise-Induced Comprehension Errors</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211619.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-2-5|PAPER Thu-M-SS-2-5 — Boosting of Contextual Information in ASR for Air-Traffic Call-Sign Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Boosting of Contextual Information in ASR for Air-Traffic Call-Sign Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211003.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-8|PAPER Fri-M-V-1-8 — Phonetic Distance and Surprisal in Multilingual Priming: Evidence from Slavic]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonetic Distance and Surprisal in Multilingual Priming: Evidence from Slavic</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210678.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-1|PAPER Fri-M-V-6-1 — Do Acoustic Word Embeddings Capture Phonological Similarity? An Empirical Study]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Do Acoustic Word Embeddings Capture Phonological Similarity? An Empirical Study</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210591.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-13|PAPER Wed-A-V-2-13 — Correcting Automated and Manual Speech Transcription Errors Using Warped Language Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Correcting Automated and Manual Speech Transcription Errors Using Warped Language Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211159.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-7|PAPER Wed-E-V-2-7 — Non-Verbal Vocalisation and Laughter Detection Using Sequence-to-Sequence Models and Multi-Label Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Non-Verbal Vocalisation and Laughter Detection Using Sequence-to-Sequence Models and Multi-Label Training</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210221.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-3|PAPER Tue-E-V-6-3 — One-Shot Voice Conversion with Speaker-Agnostic StarGAN]]</div>|^<div class="cpauthorindexpersoncardpapertitle">One-Shot Voice Conversion with Speaker-Agnostic StarGAN</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212163.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-14|PAPER Wed-M-V-4-14 — Protecting Gender and Identity with Disentangled Speech Representations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Protecting Gender and Identity with Disentangled Speech Representations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210019.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-1|PAPER Tue-M-SS-1-1 — The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210230.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-5|PAPER Tue-M-V-5-5 — IR-GAN: Room Impulse Response Generator for Far-Field Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">IR-GAN: Room Impulse Response Generator for Far-Field Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210374.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-1-4|PAPER Thu-M-SS-1-4 — An End-to-End Dialect Identification System with Transfer Learning from a Multilingual Automatic Speech Recognition Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An End-to-End Dialect Identification System with Transfer Learning from a Multilingual Automatic Speech Recognition Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210204.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-3|PAPER Fri-M-V-6-3 — Personalized Keyphrase Detection Using Speaker and Environment Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Personalized Keyphrase Detection Using Speaker and Environment Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212008.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-11|PAPER Thu-M-V-1-11 — Source and Vocal Tract Cues for Speech-Based Classification of Patients with Parkinson’s Disease and Healthy Subjects]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Source and Vocal Tract Cues for Speech-Based Classification of Patients with Parkinson’s Disease and Healthy Subjects</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211573.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-11|PAPER Wed-M-V-4-11 — Voice Privacy Through x-Vector and CycleGAN-Based Anonymization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voice Privacy Through x-Vector and CycleGAN-Based Anonymization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210660.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-8|PAPER Thu-A-V-5-8 — A Universal Multi-Speaker Multi-Style Text-to-Speech via Disentangled Representation Learning Based on Rényi Divergence Minimization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Universal Multi-Speaker Multi-Style Text-to-Speech via Disentangled Representation Learning Based on Rényi Divergence Minimization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210744.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-2|PAPER Wed-A-V-6-2 — Subtitle Translation as Markup Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Subtitle Translation as Markup Translation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210822.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-6|PAPER Fri-A-V-4-6 — Chronological Self-Training for Real-Time Speaker Diarization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Chronological Self-Training for Real-Time Speaker Diarization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210283.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-5|PAPER Tue-E-V-6-5 — VQMIVC: Vector Quantization and Mutual Information-Based Unsupervised Speech Representation Disentanglement for One-Shot Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">VQMIVC: Vector Quantization and Mutual Information-Based Unsupervised Speech Representation Disentanglement for One-Shot Voice Conversion</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212139.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-10|PAPER Thu-M-V-1-10 — Unsupervised Domain Adaptation for Dysarthric Speech Detection via Domain Adversarial Training and Mutual Information Minimization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Domain Adaptation for Dysarthric Speech Detection via Domain Adversarial Training and Mutual Information Minimization</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210285.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-8|PAPER Fri-A-SS-2-8 — Learning Explicit Prosody Models and Deep Speaker Embeddings for Atypical Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Explicit Prosody Models and Deep Speaker Embeddings for Atypical Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212232.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-2-2|PAPER Wed-E-O-2-2 — Lost in Interpreting: Speech Translation from Source or Interpreter?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Lost in Interpreting: Speech Translation from Source or Interpreter?</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211587.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-12|PAPER Tue-M-SS-1-12 — Analysis by Synthesis: Using an Expressive TTS Model as Feature Extractor for Paralinguistic Speech Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis by Synthesis: Using an Expressive TTS Model as Feature Extractor for Paralinguistic Speech Classification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211538.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-2-2|PAPER Fri-M-O-2-2 — Exploring Emotional Prototypes in a High Dimensional TTS Latent Space]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploring Emotional Prototypes in a High Dimensional TTS Latent Space</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212000.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-7|PAPER Thu-A-V-6-7 — AusKidTalk: An Auditory-Visual Corpus of 3- to 12-Year-Old Australian Children’s Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AusKidTalk: An Auditory-Visual Corpus of 3- to 12-Year-Old Australian Children’s Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211764.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-3-9|PAPER Tue-A-V-3-9 — Explaining Deep Learning Models for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Explaining Deep Learning Models for Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210313.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-1-1|PAPER Wed-A-O-1-1 — BERT-Based Semantic Model for Rescoring N-Best Speech Recognition List]]</div>|^<div class="cpauthorindexpersoncardpapertitle">BERT-Based Semantic Model for Rescoring N-Best Speech Recognition List</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211844.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-12|PAPER Tue-M-V-3-12 — Incorporating Embedding Vectors from a Human Mean-Opinion Score Prediction Model for Monaural Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Incorporating Embedding Vectors from a Human Mean-Opinion Score Prediction Model for Monaural Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212007.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-12|PAPER Wed-A-V-6-12 — Optimally Encoding Inductive Biases into the Transformer Improves End-to-End Speech Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Optimally Encoding Inductive Biases into the Transformer Improves End-to-End Speech Translation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212171.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-1-1|PAPER Thu-M-SS-1-1 — Oriental Language Recognition (OLR) 2020: Summary and Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Oriental Language Recognition (OLR) 2020: Summary and Analysis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211452.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-8|PAPER Fri-M-V-7-8 — Voting for the Right Answer: Adversarial Defense for Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voting for the Right Answer: Adversarial Defense for Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212084.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-11|PAPER Tue-M-V-5-11 — Raw Waveform Encoder with Multi-Scale Globally Attentive Locally Recurrent Networks for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Raw Waveform Encoder with Multi-Scale Globally Attentive Locally Recurrent Networks for End-to-End Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210481.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-2|PAPER Tue-E-V-2-2 — TeCANet: Temporal-Contextual Attention Network for Environment-Aware Speech Dereverberation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">TeCANet: Temporal-Contextual Attention Network for Environment-Aware Speech Dereverberation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210570.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-4|PAPER Tue-E-V-2-4 — MIMO Self-Attentive RNN Beamformer for Multi-Speaker Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MIMO Self-Attentive RNN Beamformer for Multi-Speaker Speech Separation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210478.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-8|PAPER Wed-A-V-3-8 — SpeechMoE: Scaling to Large Acoustic Models with Dynamic Routing Mixture of Experts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SpeechMoE: Scaling to Large Acoustic Models with Dynamic Routing Mixture of Experts</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210659.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-7|PAPER Wed-A-V-4-7 — MetricNet: Towards Improved Modeling For Non-Intrusive Speech Quality Assessment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MetricNet: Towards Improved Modeling For Non-Intrusive Speech Quality Assessment</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210430.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-11|PAPER Thu-M-V-3-11 — Generalized Spatio-Temporal RNN Beamformer for Target Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Generalized Spatio-Temporal RNN Beamformer for Target Speech Separation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210681.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-5|PAPER Fri-A-V-4-5 — Multi-Channel Speaker Verification for Single and Multi-Talker Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Channel Speaker Verification for Single and Multi-Talker Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210602.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-10|PAPER Fri-M-V-6-10 — The 2020 Personalized Voice Trigger Challenge: Open Datasets, Evaluation Metrics, Baseline System and Results]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The 2020 Personalized Voice Trigger Challenge: Open Datasets, Evaluation Metrics, Baseline System and Results</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210300.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-2|PAPER Tue-E-V-3-2 — Unsupervised Multi-Target Domain Adaptation for Acoustic Scene Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Multi-Target Domain Adaptation for Acoustic Scene Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212198.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-15|PAPER Wed-E-V-3-15 — Variable Frame Rate Acoustic Models Using Minimum Error Reinforcement Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Variable Frame Rate Acoustic Models Using Minimum Error Reinforcement Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211308.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-8|PAPER Tue-A-V-1-8 — Shallow Convolution-Augmented Transformer with Differentiable Neural Computer for Low-Complexity Classification of Variable-Length Acoustic Scene]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Shallow Convolution-Augmented Transformer with Differentiable Neural Computer for Low-Complexity Classification of Variable-Length Acoustic Scene</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212127.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-3-6|PAPER Thu-M-O-3-6 — Training Hybrid Models on Noisy Transliterated Transcripts for Code-Switched Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Training Hybrid Models on Noisy Transliterated Transcripts for Code-Switched Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210654.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-6|PAPER Tue-A-V-4-6 — A Comparison of Supervised and Unsupervised Pre-Training of End-to-End Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparison of Supervised and Unsupervised Pre-Training of End-to-End Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210391.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-4|PAPER Wed-M-V-2-4 — Speech SimCLR: Combining Contrastive and Reconstruction Objective for Self-Supervised Speech Representation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech SimCLR: Combining Contrastive and Reconstruction Objective for Self-Supervised Speech Representation Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210520.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-4|PAPER Wed-E-V-5-4 — Multi-Stage Progressive Speech Enhancement Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Stage Progressive Speech Enhancement Network</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210400.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-8|PAPER Fri-M-V-6-8 — Auxiliary Sequence Labeling Tasks for Disfluency Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Auxiliary Sequence Labeling Tasks for Disfluency Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210383.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-3|PAPER Fri-A-V-3-3 — Broadcasted Residual Learning for Efficient Keyword Spotting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Broadcasted Residual Learning for Efficient Keyword Spotting</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210262.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-5|PAPER Tue-A-SS-1-5 — Privacy-Preserving Feature Extraction for Cloud-Based Wake Word Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Privacy-Preserving Feature Extraction for Cloud-Based Wake Word Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210124.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-2|PAPER Wed-A-V-4-2 — PILOT: Introducing Transformers for Probabilistic Sound Event Localization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PILOT: Introducing Transformers for Probabilistic Sound Event Localization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211796.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-7|PAPER Wed-A-V-1-7 — Investigating the Interplay Between Affective, Phonatory and Motoric Subsystems in Autism Spectrum Disorder Using a Multimodal Dialogue Agent]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Interplay Between Affective, Phonatory and Motoric Subsystems in Autism Spectrum Disorder Using a Multimodal Dialogue Agent</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211581.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-2-1|PAPER Tue-M-O-2-1 — Acoustic Indicators of Speech Motor Coordination in Adults With and Without Traumatic Brain Injury]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Indicators of Speech Motor Coordination in Adults With and Without Traumatic Brain Injury</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210011.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-2|PAPER Thu-A-V-6-2 — The Multilingual TEDx Corpus for Speech Recognition and Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Multilingual TEDx Corpus for Speech Recognition and Translation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211712.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-2-6|PAPER Wed-A-O-2-6 — Adversarial Disentanglement of Speaker Representation for Attribute-Driven Privacy Preservation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adversarial Disentanglement of Speaker Representation for Attribute-Driven Privacy Preservation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211566.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-3|PAPER Wed-M-V-6-3 — Contextualized Streaming End-to-End Speech Recognition with Trie-Based Deep Biasing and Shallow Fusion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Contextualized Streaming End-to-End Speech Recognition with Trie-Based Deep Biasing and Shallow Fusion</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211929.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-1|PAPER Wed-A-V-2-1 — Semantic Distance: A New Metric for ASR Performance Analysis Towards Spoken Language Understanding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semantic Distance: A New Metric for ASR Performance Analysis Towards Spoken Language Understanding</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211272.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-1|PAPER Wed-A-V-3-1 — Dynamic Encoder Transducer: A Flexible Solution for Trading Off Accuracy for Latency]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dynamic Encoder Transducer: A Flexible Solution for Trading Off Accuracy for Latency</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211921.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-14|PAPER Wed-A-V-3-14 — Flexi-Transducer: Optimizing Latency, Accuracy and Compute for Multi-Domain On-Device Scenarios]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Flexi-Transducer: Optimizing Latency, Accuracy and Compute for Multi-Domain On-Device Scenarios</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211887.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-6|PAPER Fri-A-V-3-6 — Dissecting User-Perceived Latency of On-Device E2E Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dissecting User-Perceived Latency of On-Device E2E Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211888.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-10|PAPER Tue-E-V-5-10 — Best of Both Worlds: Robust Accented Speech Recognition with Adversarial Transfer Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Best of Both Worlds: Robust Accented Speech Recognition with Adversarial Transfer Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211969.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-9|PAPER Tue-M-SS-1-9 — Multi-Attentive Detection of the Spider Monkey Whinny in the (Actual) Wild]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Attentive Detection of the Spider Monkey Whinny in the (Actual) Wild</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210859.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-5|PAPER Wed-E-V-5-5 — Single-Channel Speech Enhancement Using Learnable Loss Mixup]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Single-Channel Speech Enhancement Using Learnable Loss Mixup</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211190.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-1-1|PAPER Fri-M-O-1-1 — Self-Attention Channel Combinator Frontend for End-to-End Multichannel Far-Field Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Self-Attention Channel Combinator Frontend for End-to-End Multichannel Far-Field Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210788.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-6|PAPER Fri-A-V-6-6 — Speak or Chat with Me: End-to-End Spoken Language Understanding System with Flexible Inputs]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speak or Chat with Me: End-to-End Spoken Language Understanding System with Flexible Inputs</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211798.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-4|PAPER Tue-M-SS-1-4 — Transfer Learning and Data Augmentation Techniques to the COVID-19 Identification Tasks in ComParE 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transfer Learning and Data Augmentation Techniques to the COVID-19 Identification Tasks in ComParE 2021</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211774.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-12|PAPER Thu-A-V-5-12 — SC-GlowTTS: An Efficient Zero-Shot Multi-Speaker Text-To-Speech Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SC-GlowTTS: An Efficient Zero-Shot Multi-Speaker Text-To-Speech Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211085.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-1-3|PAPER Wed-E-O-1-3 — Log-Likelihood-Ratio Cost Function as Objective Loss for Speaker Verification Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Log-Likelihood-Ratio Cost Function as Objective Loss for Speaker Verification Systems</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210309.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-O-1-1|PAPER Fri-A-O-1-1 — Unsupervised Representation Learning for Speech Activity Detection in the Fearless Steps Challenge 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Representation Learning for Speech Activity Detection in the Fearless Steps Challenge 2021</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210337.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-3|PAPER Thu-A-SS-1-3 — Pushing the Limits of Non-Autoregressive Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Pushing the Limits of Non-Autoregressive Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210506.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-9|PAPER Tue-E-V-6-9 — Two-Pathway Style Embedding for Arbitrary Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Two-Pathway Style Embedding for Arbitrary Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212207.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-13|PAPER Wed-E-V-5-13 — SE-Conformer: Time-Domain Speech Enhancement Using Conformer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SE-Conformer: Time-Domain Speech Enhancement Using Conformer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211540.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-SS-1-7|PAPER Tue-E-SS-1-7 — Modeling Dysphonia Severity as a Function of Roughness and Breathiness Ratings in the GRBAS Scale]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modeling Dysphonia Severity as a Function of Roughness and Breathiness Ratings in the GRBAS Scale</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211739.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-2-3|PAPER Thu-M-O-2-3 — M³: MultiModal Masking Applied to Sentiment Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">M³: MultiModal Masking Applied to Sentiment Analysis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211585.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-8|PAPER Wed-M-V-3-8 — Investigating Contributions of Speech and Facial Landmarks for Talking Head Generation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Contributions of Speech and Facial Landmarks for Talking Head Generation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210910.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-7-5|PAPER Tue-M-V-7-5 — Acoustic and Prosodic Correlates of Emotions in Urdu Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic and Prosodic Correlates of Emotions in Urdu Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210206.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-4|PAPER Wed-M-V-6-4 — An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210658.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-3|PAPER Fri-M-V-3-3 — A Hybrid Seq-2-Seq ASR Design for On-Device and Server Applications]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Hybrid Seq-2-Seq ASR Design for On-Device and Server Applications</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211649.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-2-11|PAPER Fri-M-V-2-11 — Developmental Changes of Vowel Acoustics in Adolescents]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Developmental Changes of Vowel Acoustics in Adolescents</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210303.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-1-3|PAPER Thu-A-V-1-3 — Automatic Analysis of the Emotional Content of Speech in Daylong Child-Centered Recordings from a Neonatal Intensive Care Unit]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Analysis of the Emotional Content of Speech in Daylong Child-Centered Recordings from a Neonatal Intensive Care Unit</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212258.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-1|PAPER Wed-E-V-6-1 — Spectral and Latent Speech Representation Distortion for TTS Evaluation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spectral and Latent Speech Representation Distortion for TTS Evaluation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211756.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-3-5|PAPER Thu-M-O-3-5 — Out-of-Vocabulary Words Detection with Attention and CTC Alignments in an End-to-End ASR System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Out-of-Vocabulary Words Detection with Attention and CTC Alignments in an End-to-End ASR System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211524.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-9|PAPER Wed-M-V-5-9 — Reliable Estimates of Interpretable Cue Effects with Active Learning in Psycholinguistic Research]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reliable Estimates of Interpretable Cue Effects with Active Learning in Psycholinguistic Research</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211297.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-8|PAPER Thu-M-V-1-8 — Parental Spoken Scaffolding and Narrative Skills in Crowd-Sourced Storytelling Samples of Young Children]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Parental Spoken Scaffolding and Narrative Skills in Crowd-Sourced Storytelling Samples of Young Children</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211777.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-3|PAPER Tue-A-V-4-3 — On the Learning Dynamics of Semi-Supervised Training for ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On the Learning Dynamics of Semi-Supervised Training for ASR</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211035.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-3-1|PAPER Thu-M-O-3-1 — The CSTR System for Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The CSTR System for Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211572.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-8|PAPER Thu-A-SS-2-8 — Tackling the ADRESSO Challenge 2021: The MUET-RMIT System for Alzheimer’s Dementia Recognition from Spontaneous Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Tackling the ADRESSO Challenge 2021: The MUET-RMIT System for Alzheimer’s Dementia Recognition from Spontaneous Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211821.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-11|PAPER Tue-M-SS-1-11 — Ensemble-Within-Ensemble Classification for Escalation Prediction from Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ensemble-Within-Ensemble Classification for Escalation Prediction from Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211636.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-9|PAPER Thu-A-V-6-9 — Annotation Confidence vs. Training Sample Size: Trade-Off Solution for Partially-Continuous Categorical Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Annotation Confidence vs. Training Sample Size: Trade-Off Solution for Partially-Continuous Categorical Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212000.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-7|PAPER Thu-A-V-6-7 — AusKidTalk: An Auditory-Visual Corpus of 3- to 12-Year-Old Australian Children’s Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AusKidTalk: An Auditory-Visual Corpus of 3- to 12-Year-Old Australian Children’s Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211000.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-2-5|PAPER Fri-A-V-2-5 — Parametric Distributions to Model Numerical Emotion Labels]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Parametric Distributions to Model Numerical Emotion Labels</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211587.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-12|PAPER Tue-M-SS-1-12 — Analysis by Synthesis: Using an Expressive TTS Model as Feature Extractor for Paralinguistic Speech Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis by Synthesis: Using an Expressive TTS Model as Feature Extractor for Paralinguistic Speech Classification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211123.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-5-8|PAPER Thu-M-V-5-8 — A Prototypical Network Approach for Evaluating Generated Emotional Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Prototypical Network Approach for Evaluating Generated Emotional Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211538.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-2-2|PAPER Fri-M-O-2-2 — Exploring Emotional Prototypes in a High Dimensional TTS Latent Space]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploring Emotional Prototypes in a High Dimensional TTS Latent Space</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212000.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-7|PAPER Thu-A-V-6-7 — AusKidTalk: An Auditory-Visual Corpus of 3- to 12-Year-Old Australian Children’s Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AusKidTalk: An Auditory-Visual Corpus of 3- to 12-Year-Old Australian Children’s Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210493.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-3-1|PAPER Fri-M-O-3-1 — Many-Speakers Single Channel Speech Separation with Optimal Permutation Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Many-Speakers Single Channel Speech Separation with Optimal Permutation Training</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210011.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-2|PAPER Thu-A-V-6-2 — The Multilingual TEDx Corpus for Speech Recognition and Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Multilingual TEDx Corpus for Speech Recognition and Translation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211415.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-6|PAPER Thu-A-SS-2-6 — Alzheimer’s Disease Detection from Spontaneous Speech Through Combining Linguistic Complexity and (Dis)Fluency Features with Pretrained Language Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Alzheimer’s Disease Detection from Spontaneous Speech Through Combining Linguistic Complexity and (Dis)Fluency Features with Pretrained Language Models</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211402.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-10|PAPER Fri-A-V-1-10 — The Impact of ASR on the Automatic Analysis of Linguistic Complexity and Sophistication in Spontaneous L2 Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Impact of ASR on the Automatic Analysis of Linguistic Complexity and Sophistication in Spontaneous L2 Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211084.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-2-2|PAPER Tue-M-O-2-2 — On Modeling Glottal Source Information for Phonation Assessment in Parkinson’s Disease]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On Modeling Glottal Source Information for Phonation Assessment in Parkinson’s Disease</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211488.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-3|PAPER Tue-M-SS-1-3 — The Phonetic Footprint of Covid-19?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Phonetic Footprint of Covid-19?</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211540.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-SS-1-7|PAPER Tue-E-SS-1-7 — Modeling Dysphonia Severity as a Function of Roughness and Breathiness Ratings in the GRBAS Scale]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modeling Dysphonia Severity as a Function of Roughness and Breathiness Ratings in the GRBAS Scale</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210616.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-3-2|PAPER Wed-E-O-3-2 — ORCA-SLANG: An Automatic Multi-Stage Semi-Supervised Deep Learning Framework for Large-Scale Killer Whale Call Type Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ORCA-SLANG: An Automatic Multi-Stage Semi-Supervised Deep Learning Framework for Large-Scale Killer Whale Call Type Identification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211589.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-2|PAPER Thu-A-SS-2-2 — Influence of the Interviewer on the Automatic Assessment of Alzheimer’s Disease in the Context of the ADReSSo Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Influence of the Interviewer on the Automatic Assessment of Alzheimer’s Disease in the Context of the ADReSSo Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210199.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-5-10|PAPER Tue-A-V-5-10 — Speaker Transition Patterns in Three-Party Conversation: Evidence from English, Estonian and Swedish]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Transition Patterns in Three-Party Conversation: Evidence from English, Estonian and Swedish</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211052.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-9|PAPER Tue-A-SS-2-9 — Cough-Based COVID-19 Detection with Contextual Attention Convolutional Neural Networks and Gender Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cough-Based COVID-19 Detection with Contextual Attention Convolutional Neural Networks and Gender Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210630.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-5|PAPER Fri-M-V-5-5 — Coughing-Based Recognition of Covid-19 with Spatial Attentive ConvLSTM Recurrent Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Coughing-Based Recognition of Covid-19 with Spatial Attentive ConvLSTM Recurrent Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210688.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-SS-1-6|PAPER Tue-E-SS-1-6 — Articulatory Coordination for Speech Motor Tracking in Huntington Disease]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Articulatory Coordination for Speech Motor Tracking in Huntington Disease</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211694.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-3-1|PAPER Wed-A-O-3-1 — Automatically Detecting Errors and Disfluencies in Read Speech to Predict Cognitive Impairment in People with Parkinson’s Disease]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatically Detecting Errors and Disfluencies in Read Speech to Predict Cognitive Impairment in People with Parkinson’s Disease</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211970.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-SS-1-7|PAPER Fri-M-SS-1-7 — One Size Does Not Fit All in Resource-Constrained ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">One Size Does Not Fit All in Resource-Constrained ASR</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210958.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-8|PAPER Tue-M-V-5-8 — Data Augmentation Methods for End-to-End Speech Recognition on Distant-Talk Scenarios]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Data Augmentation Methods for End-to-End Speech Recognition on Distant-Talk Scenarios</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211837.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-9|PAPER Tue-A-V-1-9 — An Evaluation of Data Augmentation Methods for Sound Scene Geotagging]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Evaluation of Data Augmentation Methods for Sound Scene Geotagging</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211531.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-8|PAPER Thu-A-V-3-8 — Vocal Harmony Separation Using Time-Domain Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Vocal Harmony Separation Using Time-Domain Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211755.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-10|PAPER Wed-M-V-2-10 — The Zero Resource Speech Challenge 2021: Spoken Language Modelling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Zero Resource Speech Challenge 2021: Spoken Language Modelling</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210475.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-6|PAPER Thu-A-V-5-6 — Speech Resynthesis from Discrete Disentangled Self-Supervised Representations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Resynthesis from Discrete Disentangled Self-Supervised Representations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211765.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-2-6|PAPER Tue-E-O-2-6 — Automatic Classification of Phonation Types in Spontaneous Speech: Towards a New Workflow for the Characterization of Speakers’ Voice Quality]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Classification of Phonation Types in Spontaneous Speech: Towards a New Workflow for the Characterization of Speakers’ Voice Quality</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210212.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-8|PAPER Fri-A-V-3-8 — Tied & Reduced RNN-T Decoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Tied & Reduced RNN-T Decoder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211764.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-3-9|PAPER Tue-A-V-3-9 — Explaining Deep Learning Models for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Explaining Deep Learning Models for Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211495.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-1-7|PAPER Wed-M-V-1-7 — E2E-Based Multi-Task Learning Approach to Joint Speech and Accent Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">E2E-Based Multi-Task Learning Approach to Joint Speech and Accent Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212138.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-12|PAPER Fri-M-V-5-12 — Overlapped Speech Detection Based on Spectral and Spatial Feature Fusion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Overlapped Speech Detection Based on Spectral and Spatial Feature Fusion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211585.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-8|PAPER Wed-M-V-3-8 — Investigating Contributions of Speech and Facial Landmarks for Talking Head Generation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Contributions of Speech and Facial Landmarks for Talking Head Generation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211090.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-2-2|PAPER Wed-M-O-2-2 — Dialect Features in Heterogeneous and Homogeneous Gheg Speaking Communities]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dialect Features in Heterogeneous and Homogeneous Gheg Speaking Communities</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212212.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-3|PAPER Fri-A-SS-2-3 — Handling Acoustic Variation in Dysarthric Speech Recognition Systems Through Model Combination]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Handling Acoustic Variation in Dysarthric Speech Recognition Systems Through Model Combination</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210610.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-1-5|PAPER Thu-A-V-1-5 — Stochastic Process Regression for Cross-Cultural Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Stochastic Process Regression for Cross-Cultural Speech Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211774.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-12|PAPER Thu-A-V-5-12 — SC-GlowTTS: An Efficient Zero-Shot Multi-Speaker Text-To-Speech Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SC-GlowTTS: An Efficient Zero-Shot Multi-Speaker Text-To-Speech Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210053.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-3|PAPER Tue-M-V-5-3 — Speech Acoustic Modelling Using Raw Source and Filter Components]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Acoustic Modelling Using Raw Source and Filter Components</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210280.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-3|PAPER Wed-E-V-3-3 — Stochastic Attention Head Removal: A Simple and Effective Method for Improving Transformer Based ASR Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Stochastic Attention Head Removal: A Simple and Effective Method for Improving Transformer Based ASR Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211298.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-2|PAPER Wed-M-V-6-2 — Multiple Softmax Architecture for Streaming Multilingual End-to-End ASR Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multiple Softmax Architecture for Streaming Multilingual End-to-End ASR Systems</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212075.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-14|PAPER Wed-E-V-3-14 — Minimum Word Error Rate Training with Language Model Fusion for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Minimum Word Error Rate Training with Language Model Fusion for End-to-End Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211949.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-10|PAPER Thu-A-V-2-10 — Improving Multilingual Transformer Transducer Models by Reducing Language Confusions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Multilingual Transformer Transducer Models by Reducing Language Confusions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210738.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-2|PAPER Fri-M-V-7-2 — An Initial Investigation for Detecting Partially Spoofed Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Initial Investigation for Detecting Partially Spoofed Audio</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-6|PAPER Tue-A-V-2-6 — Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211801.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-2|PAPER Fri-A-SS-2-2 — Investigating the Utility of Multimodal Conversational Technology and Audiovisual Analytic Measures for the Assessment and Monitoring of Amyotrophic Lateral Sclerosis at Scale]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Utility of Multimodal Conversational Technology and Audiovisual Analytic Measures for the Assessment and Monitoring of Amyotrophic Lateral Sclerosis at Scale</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211767.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-12|PAPER Wed-A-V-2-12 — A Discriminative Entity-Aware Language Model for Virtual Assistants]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Discriminative Entity-Aware Language Model for Virtual Assistants</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211590.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-1-4|PAPER Fri-A-SS-1-4 — Y²-Net FCRN for Acoustic Echo and Noise Suppression]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Y²-Net FCRN for Acoustic Echo and Noise Suppression</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211288.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-3-5|PAPER Wed-A-O-3-5 — Late Fusion of the Available Lexicon and Raw Waveform-Based Acoustic Modeling for Depression and Dementia Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Late Fusion of the Available Lexicon and Raw Waveform-Based Acoustic Modeling for Depression and Dementia Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210341.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-3|PAPER Wed-E-V-6-3 — RyanSpeech: A Corpus for Conversational Text-to-Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">RyanSpeech: A Corpus for Conversational Text-to-Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210333.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-2-1|PAPER Thu-M-SS-2-1 — Towards an Accent-Robust Approach for ATC Communications Transcription]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards an Accent-Robust Approach for ATC Communications Transcription</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211565.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-1-4|PAPER Tue-M-O-1-4 — A Systematic Review and Analysis of Multilingual Data Strategies in Text-to-Speech for Low-Resource Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Systematic Review and Analysis of Multilingual Data Strategies in Text-to-Speech for Low-Resource Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211574.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-3-7|PAPER Tue-A-V-3-7 — Clarity-2021 Challenges: Machine Learning Challenges for Advancing Hearing Aid Processing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Clarity-2021 Challenges: Machine Learning Challenges for Advancing Hearing Aid Processing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210889.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-6|PAPER Tue-E-V-2-6 — Scene-Agnostic Multi-Microphone Speech Dereverberation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Scene-Agnostic Multi-Microphone Speech Dereverberation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211970.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-SS-1-7|PAPER Fri-M-SS-1-7 — One Size Does Not Fit All in Resource-Constrained ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">One Size Does Not Fit All in Resource-Constrained ASR</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211755.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-10|PAPER Wed-M-V-2-10 — The Zero Resource Speech Challenge 2021: Spoken Language Modelling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Zero Resource Speech Challenge 2021: Spoken Language Modelling</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210475.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-6|PAPER Thu-A-V-5-6 — Speech Resynthesis from Discrete Disentangled Self-Supervised Representations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Resynthesis from Discrete Disentangled Self-Supervised Representations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211353.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-13|PAPER Fri-A-SS-2-13 — Automatic Severity Classification of Korean Dysarthric Speech Using Phoneme-Level Pronunciation Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Severity Classification of Korean Dysarthric Speech Using Phoneme-Level Pronunciation Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210885.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-4-2|PAPER Fri-M-V-4-2 — Label Embedding for Chinese Grapheme-to-Phoneme Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Label Embedding for Chinese Grapheme-to-Phoneme Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210400.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-8|PAPER Fri-M-V-6-8 — Auxiliary Sequence Labeling Tasks for Disfluency Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Auxiliary Sequence Labeling Tasks for Disfluency Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211169.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-1|PAPER Wed-E-V-3-1 — Multi-Domain Knowledge Distillation via Uncertainty-Matching for End-to-End ASR Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Domain Knowledge Distillation via Uncertainty-Matching for End-to-End ASR Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210400.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-8|PAPER Fri-M-V-6-8 — Auxiliary Sequence Labeling Tasks for Disfluency Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Auxiliary Sequence Labeling Tasks for Disfluency Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211909.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-1|PAPER Thu-M-V-4-1 — End-to-End Neural Diarization: From Transformer to Conformer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Neural Diarization: From Transformer to Conformer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210976.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-11|PAPER Wed-A-V-5-11 — High-Fidelity Parallel WaveGAN with Multi-Band Harmonic-Plus-Noise Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">High-Fidelity Parallel WaveGAN with Multi-Band Harmonic-Plus-Noise Model</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210188.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-2|PAPER Thu-A-V-5-2 — LiteTTS: A Lightweight Mel-Spectrogram-Free Text-to-Wave Synthesizer Based on Generative Adversarial Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">LiteTTS: A Lightweight Mel-Spectrogram-Free Text-to-Wave Synthesizer Based on Generative Adversarial Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211599.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-8|PAPER Wed-E-V-6-8 — Hi-Fi Multi-Speaker English TTS Dataset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Hi-Fi Multi-Speaker English TTS Dataset</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211571.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-13|PAPER Fri-A-V-1-13 — NeMo Inverse Text Normalization: From Development to Production]]</div>|^<div class="cpauthorindexpersoncardpapertitle">NeMo Inverse Text Normalization: From Development to Production</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218024.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-S&T-1-3|PAPER Fri-A-S&T-1-3 — NeMo (Inverse) Text Normalization: From Development to Production]]</div>|^<div class="cpauthorindexpersoncardpapertitle">NeMo (Inverse) Text Normalization: From Development to Production</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210701.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-4|PAPER Wed-A-V-1-4 — Effects of Voice Type and Task on L2 Learners’ Awareness of Pronunciation Errors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effects of Voice Type and Task on L2 Learners’ Awareness of Pronunciation Errors</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210083.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-1|PAPER Fri-M-V-5-1 — An Agent for Competing with Humans in a Deceptive Game Based on Vocal Cues]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Agent for Competing with Humans in a Deceptive Game Based on Vocal Cues</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218002.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-S&T-1-2|PAPER Tue-A-S&T-1-2 — Beey: More Than a Speech-to-Text Editor]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Beey: More Than a Speech-to-Text Editor</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211755.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-10|PAPER Wed-M-V-2-10 — The Zero Resource Speech Challenge 2021: Spoken Language Modelling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Zero Resource Speech Challenge 2021: Spoken Language Modelling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210289.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-9|PAPER Fri-A-SS-2-9 — Bayesian Parametric and Architectural Domain Adaptation of LF-MMI Trained TDNNs for Elderly and Dysarthric Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bayesian Parametric and Architectural Domain Adaptation of LF-MMI Trained TDNNs for Elderly and Dysarthric Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210556.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-5|PAPER Wed-M-O-1-5 — //LeBenchmark//: A Reproducible Framework for Assessing Self-Supervised Representation Learning from Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">//LeBenchmark//: A Reproducible Framework for Assessing Self-Supervised Representation Learning from Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210676.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-11|PAPER Fri-A-SS-2-11 — Conformer Parrotron: A Faster and Stronger End-to-End Speech Conversion and Recognition Model for Atypical Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Conformer Parrotron: A Faster and Stronger End-to-End Speech Conversion and Recognition Model for Atypical Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-6|PAPER Tue-A-V-2-6 — Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210318.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-4|PAPER Tue-E-V-5-4 — Robust Continuous On-Device Personalization for Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Continuous On-Device Personalization for Automatic Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210487.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-1-4|PAPER Thu-A-V-1-4 — Multimodal Sentiment Analysis with Temporal Modality Attention]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multimodal Sentiment Analysis with Temporal Modality Attention</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211983.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-5|PAPER Fri-M-V-3-5 — WeNet: Production Oriented Streaming and Non-Streaming End-to-End Speech Recognition Toolkit]]</div>|^<div class="cpauthorindexpersoncardpapertitle">WeNet: Production Oriented Streaming and Non-Streaming End-to-End Speech Recognition Toolkit</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210858.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-8|PAPER Thu-M-V-3-8 — Dual-Path Filter Network: Speaker-Aware Modeling for Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dual-Path Filter Network: Speaker-Aware Modeling for Speech Separation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210676.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-11|PAPER Fri-A-SS-2-11 — Conformer Parrotron: A Faster and Stronger End-to-End Speech Conversion and Recognition Model for Atypical Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Conformer Parrotron: A Faster and Stronger End-to-End Speech Conversion and Recognition Model for Atypical Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211220.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-1|PAPER Thu-A-SS-2-1 — Detecting Cognitive Decline Using Speech Only: The ADReSSo Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detecting Cognitive Decline Using Speech Only: The ADReSSo Challenge</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211761.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-2-1|PAPER Fri-A-V-2-1 — Affect Recognition Through Scalogram and Multi-Resolution Cochleagram Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Affect Recognition Through Scalogram and Multi-Resolution Cochleagram Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210462.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-1|PAPER Wed-M-O-1-1 — Golos: Russian Dataset for Speech Research]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Golos: Russian Dataset for Speech Research</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210058.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-3|PAPER Tue-M-V-3-3 — Perceptual Contributions of Vowels and Consonant-Vowel Transitions in Understanding Time-Compressed Mandarin Sentences]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Perceptual Contributions of Vowels and Consonant-Vowel Transitions in Understanding Time-Compressed Mandarin Sentences</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210024.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-2|PAPER Tue-A-V-2-2 — Effect of Carrier Bandwidth on Understanding Mandarin Sentences in Simulated Electric-Acoustic Hearing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effect of Carrier Bandwidth on Understanding Mandarin Sentences in Simulated Electric-Acoustic Hearing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211707.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-1-7|PAPER Tue-M-V-1-7 — Y-Vector: Multiscale Waveform Encoder for Speaker Embedding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Y-Vector: Multiscale Waveform Encoder for Speaker Embedding</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211820.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-11|PAPER Fri-M-V-7-11 — An Empirical Study on Channel Effects for Synthetic Voice Spoofing Countermeasure Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Empirical Study on Channel Effects for Synthetic Voice Spoofing Countermeasure Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212250.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-11|PAPER Thu-A-V-3-11 — Robust Speaker Extraction Network Based on Iterative Refined Adaptation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Speaker Extraction Network Based on Iterative Refined Adaptation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210135.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-SS-1-2|PAPER Wed-M-SS-1-2 — A Partitioned-Block Frequency-Domain Adaptive Kalman Filter for Stereophonic Acoustic Echo Cancellation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Partitioned-Block Frequency-Domain Adaptive Kalman Filter for Stereophonic Acoustic Echo Cancellation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210146.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-SS-1-3|PAPER Wed-M-SS-1-3 — Real-Time Independent Vector Analysis Using Semi-Supervised Nonnegative Matrix Factorization as a Source Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Real-Time Independent Vector Analysis Using Semi-Supervised Nonnegative Matrix Factorization as a Source Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211148.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-6|PAPER Wed-E-V-6-6 — EMOVIE: A Mandarin Emotion Speech Dataset with a Simple Emotional Text-to-Speech Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">EMOVIE: A Mandarin Emotion Speech Dataset with a Simple Emotional Text-to-Speech Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210711.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-SS-1-1|PAPER Tue-E-SS-1-1 — Optimizing an Automatic Creaky Voice Detection Method for Australian English Speaking Females]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Optimizing an Automatic Creaky Voice Detection Method for Australian English Speaking Females</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210729.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-SS-1-2|PAPER Tue-E-SS-1-2 — A Comparison of Acoustic Correlates of Voice Quality Across Different Recording Devices: A Cautionary Tale]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparison of Acoustic Correlates of Voice Quality Across Different Recording Devices: A Cautionary Tale</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212000.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-7|PAPER Thu-A-V-6-7 — AusKidTalk: An Auditory-Visual Corpus of 3- to 12-Year-Old Australian Children’s Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AusKidTalk: An Auditory-Visual Corpus of 3- to 12-Year-Old Australian Children’s Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210567.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-3-2|PAPER Tue-E-O-3-2 — Audio-Visual Recognition of Emotional Engagement of People with Dementia]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio-Visual Recognition of Emotional Engagement of People with Dementia</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211953.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-14|PAPER Wed-M-V-6-14 — Multi-Mode Transformer Transducer with Stochastic Future Context]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Mode Transformer Transducer with Stochastic Future Context</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211134.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-10|PAPER Wed-E-V-5-10 — Improved Speech Enhancement Using a Complex-Domain GAN with Fused Time-Domain and Time-Frequency Domain Constraints]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improved Speech Enhancement Using a Complex-Domain GAN with Fused Time-Domain and Time-Frequency Domain Constraints</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211411.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-11|PAPER Wed-E-V-5-11 — Speech Enhancement with Topology-Enhanced Generative Adversarial Networks (GANs)]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Enhancement with Topology-Enhanced Generative Adversarial Networks (GANs)</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210734.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-11|PAPER Tue-M-V-3-11 — Self-Supervised Learning Based Phone-Fortified Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Self-Supervised Learning Based Phone-Fortified Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210024.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-2|PAPER Tue-A-V-2-2 — Effect of Carrier Bandwidth on Understanding Mandarin Sentences in Simulated Electric-Acoustic Hearing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effect of Carrier Bandwidth on Understanding Mandarin Sentences in Simulated Electric-Acoustic Hearing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210058.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-3|PAPER Tue-M-V-3-3 — Perceptual Contributions of Vowels and Consonant-Vowel Transitions in Understanding Time-Compressed Mandarin Sentences]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Perceptual Contributions of Vowels and Consonant-Vowel Transitions in Understanding Time-Compressed Mandarin Sentences</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210434.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-4-6|PAPER Wed-E-V-4-6 — Targeted and Targetless Neutral Tones in Taiwanese Southern Min]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Targeted and Targetless Neutral Tones in Taiwanese Southern Min</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211326.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-2-6|PAPER Fri-M-V-2-6 — A Cross-Dialectal Comparison of Apical Vowels in Beijing Mandarin, Northeastern Mandarin and Southwestern Mandarin: An EMA and Ultrasound Study]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Cross-Dialectal Comparison of Apical Vowels in Beijing Mandarin, Northeastern Mandarin and Southwestern Mandarin: An EMA and Ultrasound Study</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210655.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-7|PAPER Tue-M-V-5-7 — Multi-Channel Transformer Transducer for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Channel Transformer Transducer for Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210851.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-11|PAPER Thu-A-V-5-11 — Triple M: A Practical Text-to-Speech Synthesis System with Multi-Guidance Attention and Multi-Band Multi-Time LPCNet]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Triple M: A Practical Text-to-Speech Synthesis System with Multi-Guidance Attention and Multi-Band Multi-Time LPCNet</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212002.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-11|PAPER Thu-A-SS-2-11 — Automatic Detection of Alzheimer’s Disease Using Spontaneous Speech Only]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Detection of Alzheimer’s Disease Using Spontaneous Speech Only</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211351.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-6-6|PAPER Tue-A-V-6-6 — Enriching Source Style Transfer in Recognition-Synthesis Based Non-Parallel Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Enriching Source Style Transfer in Recognition-Synthesis Based Non-Parallel Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212197.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-11|PAPER Tue-A-SS-2-11 — Investigating Feature Selection and Explainability for COVID-19 Diagnostics from Cough Sounds]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Feature Selection and Explainability for COVID-19 Diagnostics from Cough Sounds</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211189.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-1|PAPER Thu-M-V-1-1 — Speech Intelligibility of Dysarthric Speech: Human Scores and Acoustic-Phonetic Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Intelligibility of Dysarthric Speech: Human Scores and Acoustic-Phonetic Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210154.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-8|PAPER Tue-M-SS-1-8 — Introducing a Central African Primate Vocalisation Dataset for Automated Species Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Introducing a Central African Primate Vocalisation Dataset for Automated Species Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210333.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-2-1|PAPER Thu-M-SS-2-1 — Towards an Accent-Robust Approach for ATC Communications Transcription]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards an Accent-Robust Approach for ATC Communications Transcription</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211771.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-3-3|PAPER Tue-E-O-3-3 — Speaking Corona? Human and Machine Recognition of COVID-19 from Voice]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaking Corona? Human and Machine Recognition of COVID-19 from Voice</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210291.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-1|PAPER Wed-E-V-2-1 — Automatic Speech Recognition Systems Errors for Objective Sleepiness Detection Through Voice]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Speech Recognition Systems Errors for Objective Sleepiness Detection Through Voice</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211771.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-3-3|PAPER Tue-E-O-3-3 — Speaking Corona? Human and Machine Recognition of COVID-19 from Voice]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaking Corona? Human and Machine Recognition of COVID-19 from Voice</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211923.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-5-8|PAPER Tue-A-V-5-8 — Multimodal Speech Summarization Through Semantic Concept Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multimodal Speech Summarization Through Semantic Concept Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211537.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-10|PAPER Tue-E-V-4-10 — Rethinking End-to-End Evaluation of Decomposable Tasks: A Case Study on Spoken Language Understanding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Rethinking End-to-End Evaluation of Decomposable Tasks: A Case Study on Spoken Language Understanding</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211803.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-10|PAPER Wed-E-V-1-10 — Hierarchical Phone Recognition with Compositional Phonetics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Hierarchical Phone Recognition with Compositional Phonetics</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211944.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-12|PAPER Wed-E-V-1-12 — Differentiable Allophone Graphs for Language-Universal Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Differentiable Allophone Graphs for Language-Universal Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218005.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-S&T-1-5|PAPER Tue-A-S&T-1-5 — The LIUM Human Active Correction Platform for Speaker Diarization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The LIUM Human Active Correction Platform for Speaker Diarization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211903.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-4-13|PAPER Wed-E-V-4-13 — Articulatory Characteristics of Icelandic Voiced Fricative Lenition: Gradience, Categoricity, and Speaker/Gesture-Specific Effects]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Articulatory Characteristics of Icelandic Voiced Fricative Lenition: Gradience, Categoricity, and Speaker/Gesture-Specific Effects</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211413.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-9|PAPER Tue-A-V-2-9 — RaSSpeR: Radar-Based Silent Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">RaSSpeR: Radar-Based Silent Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211702.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-2|PAPER Tue-M-SS-1-2 — Transfer Learning-Based Cough Representations for Automatic Detection of COVID-19]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transfer Learning-Based Cough Representations for Automatic Detection of COVID-19</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211762.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-O-2-5|PAPER Fri-A-O-2-5 — Toward Genre Adapted Closed Captioning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Toward Genre Adapted Closed Captioning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210941.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-4|PAPER Thu-A-V-4-4 — ECAPA-TDNN Embeddings for Speaker Diarization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ECAPA-TDNN Embeddings for Speaker Diarization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210556.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-5|PAPER Wed-M-O-1-5 — //LeBenchmark//: A Reproducible Framework for Assessing Self-Supervised Representation Learning from Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">//LeBenchmark//: A Reproducible Framework for Assessing Self-Supervised Representation Learning from Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211762.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-O-2-5|PAPER Fri-A-O-2-5 — Toward Genre Adapted Closed Captioning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Toward Genre Adapted Closed Captioning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210318.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-4|PAPER Tue-E-V-5-4 — Robust Continuous On-Device Personalization for Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Continuous On-Device Personalization for Automatic Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211101.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-2-3|PAPER Tue-E-O-2-3 — Glottal Stops in Upper Sorbian: A Data-Driven Approach]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Glottal Stops in Upper Sorbian: A Data-Driven Approach</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211407.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-10|PAPER Fri-A-V-5-10 — Improving Performance of Seen and Unseen Speech Style Transfer in End-to-End Neural TTS]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Performance of Seen and Unseen Speech Style Transfer in End-to-End Neural TTS</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210192.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-3-3|PAPER Wed-M-O-3-3 — Using X-Vectors for Speech Activity Detection in Broadcast Streams]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Using X-Vectors for Speech Activity Detection in Broadcast Streams</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218010.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-S&T-1-3|PAPER Wed-A-S&T-1-3 — Autonomous Robot for Measuring Room Impulse Responses]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Autonomous Robot for Measuring Room Impulse Responses</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210085.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-1-2|PAPER Fri-A-SS-1-2 — Acoustic Echo Cancellation with Cross-Domain Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Echo Cancellation with Cross-Domain Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210080.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-2-3|PAPER Wed-E-O-2-3 — Active Speaker Detection as a Multi-Objective Optimization with Uncertainty-Based Multimodal Fusion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Active Speaker Detection as a Multi-Objective Optimization with Uncertainty-Based Multimodal Fusion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211774.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-12|PAPER Thu-A-V-5-12 — SC-GlowTTS: An Efficient Zero-Shot Multi-Speaker Text-To-Speech Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SC-GlowTTS: An Efficient Zero-Shot Multi-Speaker Text-To-Speech Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211449.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-8|PAPER Thu-A-SS-1-8 — Real-Time End-to-End Monaural Multi-Speaker Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Real-Time End-to-End Monaural Multi-Speaker Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212021.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-9|PAPER Fri-A-V-4-9 — Automatic Error Correction for Speaker Embedding Learning with Noisy Labels]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Error Correction for Speaker Embedding Learning with Noisy Labels</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210966.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-7|PAPER Tue-E-V-1-7 — AntVoice Neural Speaker Embedding System for FFSVC 2020]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AntVoice Neural Speaker Embedding System for FFSVC 2020</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211310.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-1-6|PAPER Wed-M-V-1-6 — Self-Supervised Phonotactic Representations for Language Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Self-Supervised Phonotactic Representations for Language Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211173.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-10|PAPER Tue-M-SS-1-10 — Identifying Conflict Escalation and Primates by Using Ensemble X-Vectors and Fisher Vector Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Identifying Conflict Escalation and Primates by Using Ensemble X-Vectors and Fisher Vector Features</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211466.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-3-6|PAPER Wed-A-O-3-6 — Neural Speaker Embeddings for Ultrasound-Based Silent Speech Interfaces]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Neural Speaker Embeddings for Ultrasound-Based Silent Speech Interfaces</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218001.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-S&T-1-1|PAPER Tue-A-S&T-1-1 — Application for Detecting Depression, Parkinson’s Disease and Dysphonic Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Application for Detecting Depression, Parkinson’s Disease and Dysphonic Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210299.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-4|PAPER Wed-A-V-4-4 — NISQA: A Deep CNN-Self-Attention Model for Multidimensional Speech Quality Prediction with Crowdsourced Datasets]]</div>|^<div class="cpauthorindexpersoncardpapertitle">NISQA: A Deep CNN-Self-Attention Model for Multidimensional Speech Quality Prediction with Crowdsourced Datasets</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210314.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-3-1|PAPER Wed-E-O-3-1 — Extending the Fullband E-Model Towards Background Noise, Bursty Packet Loss, and Conversational Degradations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Extending the Fullband E-Model Towards Background Noise, Bursty Packet Loss, and Conversational Degradations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211758.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-10|PAPER Tue-M-V-5-10 — Rethinking Evaluation in ASR: Are Our Models Robust Enough?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Rethinking Evaluation in ASR: Are Our Models Robust Enough?</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210236.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-4|PAPER Tue-A-V-4-4 — Robust wav2vec 2.0: Analyzing Domain Shift in Self-Supervised Pre-Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust wav2vec 2.0: Analyzing Domain Shift in Self-Supervised Pre-Training</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210740.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-8|PAPER Tue-A-V-4-8 — slimIPL: Language-Model-Free Iterative Pseudo-Labeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">slimIPL: Language-Model-Free Iterative Pseudo-Labeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211288.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-3-5|PAPER Wed-A-O-3-5 — Late Fusion of the Available Lexicon and Raw Waveform-Based Acoustic Modeling for Depression and Dementia Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Late Fusion of the Available Lexicon and Raw Waveform-Based Acoustic Modeling for Depression and Dementia Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211656.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-11|PAPER Wed-A-V-2-11 — Improving Customization of Neural Transducers by Mitigating Acoustic Mismatch of Synthesized Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Customization of Neural Transducers by Mitigating Acoustic Mismatch of Synthesized Audio</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212135.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-1|PAPER Wed-A-V-1-1 — Cross-Modal Learning for Audio-Visual Video Parsing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Modal Learning for Audio-Visual Video Parsing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210354.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-10|PAPER Fri-A-V-3-10 — Collaborative Training of Acoustic Encoders for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Collaborative Training of Acoustic Encoders for Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210670.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-2|PAPER Wed-M-V-4-2 — QISTA-Net-Audio: Audio Super-Resolution via Non-Convex ℓ,,q,,-Norm Minimization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">QISTA-Net-Audio: Audio Super-Resolution via Non-Convex ℓ,,q,,-Norm Minimization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211104.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-6|PAPER Tue-E-V-5-6 — The TAL System for the INTERSPEECH2021 Shared Task on Automatic Speech Recognition for Non-Native Childrens Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The TAL System for the INTERSPEECH2021 Shared Task on Automatic Speech Recognition for Non-Native Childrens Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211689.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-7-7|PAPER Thu-M-V-7-7 — Joint Retrieval-Extraction Training for Evidence-Aware Dialog Response Selection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Joint Retrieval-Extraction Training for Evidence-Aware Dialog Response Selection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211801.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-2|PAPER Fri-A-SS-2-2 — Investigating the Utility of Multimodal Conversational Technology and Audiovisual Analytic Measures for the Assessment and Monitoring of Amyotrophic Lateral Sclerosis at Scale]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Utility of Multimodal Conversational Technology and Audiovisual Analytic Measures for the Assessment and Monitoring of Amyotrophic Lateral Sclerosis at Scale</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210677.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-7|PAPER Tue-A-V-4-7 — Semi-Supervision in ASR: Sequential MixMatch and Factorized TTS-Based Augmentation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semi-Supervision in ASR: Sequential MixMatch and Factorized TTS-Based Augmentation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211705.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-11|PAPER Fri-M-V-5-11 — Generalized Spoofing Detection Inspired from Audio Generation Artifacts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Generalized Spoofing Detection Inspired from Audio Generation Artifacts</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210811.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-7|PAPER Tue-A-SS-2-7 — The DiCOVA 2021 Challenge — An Encoder-Decoder Approach for COVID-19 Recognition from Coughing Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The DiCOVA 2021 Challenge — An Encoder-Decoder Approach for COVID-19 Recognition from Coughing Audio</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211573.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-11|PAPER Wed-M-V-4-11 — Voice Privacy Through x-Vector and CycleGAN-Based Anonymization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voice Privacy Through x-Vector and CycleGAN-Based Anonymization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212264.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-11|PAPER Wed-M-V-2-11 — Zero-Shot Federated Learning with New Classes for Audio Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Zero-Shot Federated Learning with New Classes for Audio Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211707.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-1-7|PAPER Tue-M-V-1-7 — Y-Vector: Multiscale Waveform Encoder for Speaker Embedding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Y-Vector: Multiscale Waveform Encoder for Speaker Embedding</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211820.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-11|PAPER Fri-M-V-7-11 — An Empirical Study on Channel Effects for Synthetic Voice Spoofing Countermeasure Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Empirical Study on Channel Effects for Synthetic Voice Spoofing Countermeasure Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210763.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-7|PAPER Thu-M-V-3-7 — Stabilizing Label Assignment for Speech Separation by Self-Supervised Pre-Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Stabilizing Label Assignment for Speech Separation by Self-Supervised Pre-Training</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211390.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-8|PAPER Wed-E-V-1-8 — Adapt-and-Adjust: Overcoming the Long-Tail Problem of Multilingual Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adapt-and-Adjust: Overcoming the Long-Tail Problem of Multilingual Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211860.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-4|PAPER Wed-M-O-1-4 — SPGISpeech: 5,000 Hours of Transcribed Financial Audio for Fully Formatted End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SPGISpeech: 5,000 Hours of Transcribed Financial Audio for Fully Formatted End-to-End Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211932.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-10|PAPER Wed-E-V-2-10 — Analysis of Contextual Voice Changes in Remote Meetings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis of Contextual Voice Changes in Remote Meetings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210870.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-SS-1-3|PAPER Tue-E-SS-1-3 — Investigating Voice Function Characteristics of Greek Speakers with Hearing Loss Using Automatic Glottal Source Feature Extraction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Voice Function Characteristics of Greek Speakers with Hearing Loss Using Automatic Glottal Source Feature Extraction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211460.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-8|PAPER Tue-E-V-4-8 — Integrating Dialog History into End-to-End Spoken Language Understanding Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Integrating Dialog History into End-to-End Spoken Language Understanding Systems</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210587.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-9|PAPER Wed-M-V-6-9 — Reducing Exposure Bias in Training Recurrent Neural Network Transducers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reducing Exposure Bias in Training Recurrent Neural Network Transducers</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211656.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-11|PAPER Wed-A-V-2-11 — Improving Customization of Neural Transducers by Mitigating Acoustic Mismatch of Synthesized Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Customization of Neural Transducers by Mitigating Acoustic Mismatch of Synthesized Audio</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210211.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-5|PAPER Wed-A-V-3-5 — On the Limit of English Conversational Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On the Limit of English Conversational Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211962.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-12|PAPER Wed-E-V-3-12 — 4-Bit Quantization of LSTM-Based Speech Recognition Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">4-Bit Quantization of LSTM-Based Speech Recognition Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210616.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-3-2|PAPER Wed-E-O-3-2 — ORCA-SLANG: An Automatic Multi-Stage Semi-Supervised Deep Learning Framework for Large-Scale Killer Whale Call Type Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ORCA-SLANG: An Automatic Multi-Stage Semi-Supervised Deep Learning Framework for Large-Scale Killer Whale Call Type Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211159.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-7|PAPER Wed-E-V-2-7 — Non-Verbal Vocalisation and Laughter Detection Using Sequence-to-Sequence Models and Multi-Label Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Non-Verbal Vocalisation and Laughter Detection Using Sequence-to-Sequence Models and Multi-Label Training</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210327.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-2|PAPER Wed-M-V-3-2 — Cross-Lingual Low Resource Speaker Adaptation Using Phonological Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Lingual Low Resource Speaker Adaptation Using Phonological Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210228.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-2-2|PAPER Tue-A-O-2-2 — Variation in Perceptual Sensitivity and Compensation for Coarticulation Across Adult and Child Naturally-Produced and TTS Voices]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Variation in Perceptual Sensitivity and Compensation for Coarticulation Across Adult and Child Naturally-Produced and TTS Voices</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211739.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-2-3|PAPER Thu-M-O-2-3 — M³: MultiModal Masking Applied to Sentiment Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">M³: MultiModal Masking Applied to Sentiment Analysis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211969.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-9|PAPER Tue-M-SS-1-9 — Multi-Attentive Detection of the Spider Monkey Whinny in the (Actual) Wild]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Attentive Detection of the Spider Monkey Whinny in the (Actual) Wild</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211253.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-6-4|PAPER Tue-A-V-6-4 — An Improved StarGAN for Emotional Voice Conversion: Enhancing Voice Quality and Data Augmentation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Improved StarGAN for Emotional Voice Conversion: Enhancing Voice Quality and Data Augmentation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210610.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-1-5|PAPER Thu-A-V-1-5 — Stochastic Process Regression for Cross-Cultural Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Stochastic Process Regression for Cross-Cultural Speech Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210327.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-2|PAPER Wed-M-V-3-2 — Cross-Lingual Low Resource Speaker Adaptation Using Phonological Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Lingual Low Resource Speaker Adaptation Using Phonological Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211547.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-1-3|PAPER Tue-M-O-1-3 — Evaluating the Extrapolation Capabilities of Neural Vocoders to Extreme Pitch Values]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Evaluating the Extrapolation Capabilities of Neural Vocoders to Extreme Pitch Values</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210041.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-1|PAPER Wed-A-V-5-1 — GAN Vocoder: Multi-Resolution Discriminator Is All You Need]]</div>|^<div class="cpauthorindexpersoncardpapertitle">GAN Vocoder: Multi-Resolution Discriminator Is All You Need</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211600.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-8|PAPER Wed-A-V-5-8 — Continuous Wavelet Vocoder-Based Decomposition of Parametric Speech Waveform Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Continuous Wavelet Vocoder-Based Decomposition of Parametric Speech Waveform Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211857.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-2-3|PAPER Wed-A-O-2-3 — Fair Voice Biometrics: Impact of Demographic Imbalance on Group Fairness in Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fair Voice Biometrics: Impact of Demographic Imbalance on Group Fairness in Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211857.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-2-3|PAPER Wed-A-O-2-3 — Fair Voice Biometrics: Impact of Demographic Imbalance on Group Fairness in Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fair Voice Biometrics: Impact of Demographic Imbalance on Group Fairness in Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210988.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-3-4|PAPER Wed-M-O-3-4 — Time Delay Estimation for Speaker Localization Using CNN-Based Parametrized GCC-PHAT Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Time Delay Estimation for Speaker Localization Using CNN-Based Parametrized GCC-PHAT Features</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210886.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-8|PAPER Wed-A-V-4-8 — CNN-Based Processing of Acoustic and Radio Frequency Signals for Speaker Localization from MAVs]]</div>|^<div class="cpauthorindexpersoncardpapertitle">CNN-Based Processing of Acoustic and Radio Frequency Signals for Speaker Localization from MAVs</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211857.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-2-3|PAPER Wed-A-O-2-3 — Fair Voice Biometrics: Impact of Demographic Imbalance on Group Fairness in Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fair Voice Biometrics: Impact of Demographic Imbalance on Group Fairness in Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211941.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-12|PAPER Wed-M-V-4-12 — A Two-Stage Approach to Speech Bandwidth Extension]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Two-Stage Approach to Speech Bandwidth Extension</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211566.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-3|PAPER Wed-M-V-6-3 — Contextualized Streaming End-to-End Speech Recognition with Trie-Based Deep Biasing and Shallow Fusion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Contextualized Streaming End-to-End Speech Recognition with Trie-Based Deep Biasing and Shallow Fusion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211758.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-10|PAPER Tue-M-V-5-10 — Rethinking Evaluation in ASR: Are Our Models Robust Enough?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Rethinking Evaluation in ASR: Are Our Models Robust Enough?</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211908.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-5|PAPER Wed-A-V-2-5 — Revisiting Parity of Human vs. Machine Conversational Speech Transcription]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Revisiting Parity of Human vs. Machine Conversational Speech Transcription</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210265.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-11|PAPER Wed-A-V-6-11 — Lexical Modeling of ASR Errors for Robust Speech Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Lexical Modeling of ASR Errors for Robust Speech Translation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211100.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-6|PAPER Wed-E-V-2-6 — Emotion Carrier Recognition from Personal Narratives]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Emotion Carrier Recognition from Personal Narratives</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210591.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-13|PAPER Wed-A-V-2-13 — Correcting Automated and Manual Speech Transcription Errors Using Warped Language Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Correcting Automated and Manual Speech Transcription Errors Using Warped Language Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211905.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-10|PAPER Thu-A-V-6-10 — Europarl-ASR: A Large Corpus of Parliamentary Debates for Streaming ASR Benchmarking and Speech Data Filtering/Verbatimization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Europarl-ASR: A Large Corpus of Parliamentary Debates for Streaming ASR Benchmarking and Speech Data Filtering/Verbatimization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210141.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-1|PAPER Fri-A-V-3-1 — Compressing 1D Time-Channel Separable Convolutions Using Sparse Random Ternary Matrices]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Compressing 1D Time-Channel Separable Convolutions Using Sparse Random Ternary Matrices</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-6|PAPER Tue-A-V-2-6 — Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211574.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-3-7|PAPER Tue-A-V-3-7 — Clarity-2021 Challenges: Machine Learning Challenges for Advancing Hearing Aid Processing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Clarity-2021 Challenges: Machine Learning Challenges for Advancing Hearing Aid Processing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211434.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-2|PAPER Tue-M-V-5-2 — Phoneme Recognition Through Fine Tuning of Phonetic Representations: A Case Study on Luhya Language Varieties]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phoneme Recognition Through Fine Tuning of Phonetic Representations: A Case Study on Luhya Language Varieties</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210098.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-5-2|PAPER Tue-A-V-5-2 — SmallER: Scaling Neural Entity Resolution for Edge Devices]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SmallER: Scaling Neural Entity Resolution for Edge Devices</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211575.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-2|PAPER Wed-E-V-3-2 — Learning a Neural Diff for Speech Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning a Neural Diff for Speech Models</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210797.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-4|PAPER Fri-A-V-3-4 — CoDERT: Distilling Encoder Representations with Co-Learning for Transducer-Based Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">CoDERT: Distilling Encoder Representations with Co-Learning for Transducer-Based Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210712.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-7|PAPER Fri-A-V-3-7 — Amortized Neural Networks for Low-Latency Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Amortized Neural Networks for Low-Latency Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210182.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-2-1|PAPER Tue-A-O-2-1 — Prosodic Disambiguation Using Chironomic Stylization of Intonation with Native and Non-Native Speakers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prosodic Disambiguation Using Chironomic Stylization of Intonation with Native and Non-Native Speakers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210562.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-5-2|PAPER Thu-M-V-5-2 — Improving Multi-Speaker TTS Prosody Variance with a Residual Encoder and Normalizing Flows]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Multi-Speaker TTS Prosody Variance with a Residual Encoder and Normalizing Flows</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210086.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-2|PAPER Fri-M-V-1-2 — Detection of Lexical Stress Errors in Non-Native (L2) English with Data Augmentation and Attention]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detection of Lexical Stress Errors in Non-Native (L2) English with Data Augmentation and Attention</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211465.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-1-1|PAPER Tue-E-O-1-1 — Information Retrieval for ZeroSpeech 2021: The Submission by University of Wroclaw]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Information Retrieval for ZeroSpeech 2021: The Submission by University of Wroclaw</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211544.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-1-2|PAPER Tue-E-O-1-2 — Aligned Contrastive Predictive Coding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Aligned Contrastive Predictive Coding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211965.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-5|PAPER Thu-A-V-6-5 — GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211063.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-SS-1-5|PAPER Fri-M-SS-1-5 — The TNT Team System Descriptions of Cantonese and Mongolian for IARPA OpenASR20]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The TNT Team System Descriptions of Cantonese and Mongolian for IARPA OpenASR20</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211775.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-9|PAPER Tue-E-V-3-9 — SUPERB: Speech Processing Universal PERformance Benchmark]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SUPERB: Speech Processing Universal PERformance Benchmark</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212004.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-2-9|PAPER Fri-A-V-2-9 — Learning Mutual Correlation in Multimodal Transformer for Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Mutual Correlation in Multimodal Transformer for Speech Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211390.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-8|PAPER Wed-E-V-1-8 — Adapt-and-Adjust: Overcoming the Long-Tail Problem of Multilingual Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adapt-and-Adjust: Overcoming the Long-Tail Problem of Multilingual Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211049.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-5-7|PAPER Thu-M-V-5-7 — Applying the Information Bottleneck Principle to Prosodic Representation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Applying the Information Bottleneck Principle to Prosodic Representation Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210584.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-6|PAPER Fri-A-V-5-6 — Adaptive Text to Speech for Spontaneous Style]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adaptive Text to Speech for Spontaneous Style</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210412.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-4|PAPER Fri-A-V-5-4 — Controllable Context-Aware Conversational Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Controllable Context-Aware Conversational Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210701.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-4|PAPER Wed-A-V-1-4 — Effects of Voice Type and Task on L2 Learners’ Awareness of Pronunciation Errors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effects of Voice Type and Task on L2 Learners’ Awareness of Pronunciation Errors</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210069.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-6|PAPER Thu-M-V-1-6 — Assessing Posterior-Based Mispronunciation Detection on Field-Collected Recordings from Child Speech Therapy Sessions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Assessing Posterior-Based Mispronunciation Detection on Field-Collected Recordings from Child Speech Therapy Sessions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218017.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-S&T-1-3|PAPER Thu-M-S&T-1-3 — The INGENIOUS Multilingual Operations App]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INGENIOUS Multilingual Operations App</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211063.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-SS-1-5|PAPER Fri-M-SS-1-5 — The TNT Team System Descriptions of Cantonese and Mongolian for IARPA OpenASR20]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The TNT Team System Descriptions of Cantonese and Mongolian for IARPA OpenASR20</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210378.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-2|PAPER Fri-M-V-5-2 — A Multi-Branch Deep Learning Network for Automated Detection of COVID-19]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Multi-Branch Deep Learning Network for Automated Detection of COVID-19</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211137.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-2|PAPER Wed-E-SS-1-2 — A Simultaneous Denoising and Dereverberation Framework with Target Decoupling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Simultaneous Denoising and Dereverberation Framework with Target Decoupling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210964.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-9|PAPER Tue-M-V-5-9 — Leveraging Phone Mask Training for Phonetic-Reduction-Robust E2E Uyghur Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Leveraging Phone Mask Training for Phonetic-Reduction-Robust E2E Uyghur Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211965.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-5|PAPER Thu-A-V-6-5 — GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211884.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-9|PAPER Tue-E-V-5-9 — Rapid Speaker Adaptation for Conformer Transducer: Attention and Bias Are All You Need]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Rapid Speaker Adaptation for Conformer Transducer: Attention and Bias Are All You Need</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212075.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-14|PAPER Wed-E-V-3-14 — Minimum Word Error Rate Training with Language Model Fusion for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Minimum Word Error Rate Training with Language Model Fusion for End-to-End Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210102.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-2|PAPER Thu-A-V-2-2 — Large-Scale Pre-Training of End-to-End Multi-Talker ASR for Meeting Transcription with Single Distant Microphone]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Large-Scale Pre-Training of End-to-End Multi-Talker ASR for Meeting Transcription with Single Distant Microphone</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210101.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-2|PAPER Fri-A-V-1-2 — End-to-End Speaker-Attributed ASR with Transformer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Speaker-Attributed ASR with Transformer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211741.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-11|PAPER Wed-M-V-5-11 — Primacy of Mouth over Eyes: Eye Movement Evidence from Audiovisual Mandarin Lexical Tones and Vowels]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Primacy of Mouth over Eyes: Eye Movement Evidence from Audiovisual Mandarin Lexical Tones and Vowels</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210636.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-6|PAPER Fri-M-V-5-6 — Knowledge Distillation for Singing Voice Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Knowledge Distillation for Singing Voice Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210103.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-1|PAPER Tue-A-V-1-1 — SpecMix : A Mixed Sample Data Augmentation Method for Training with Time-Frequency Domain Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SpecMix : A Mixed Sample Data Augmentation Method for Training with Time-Frequency Domain Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210239.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-1|PAPER Wed-M-V-3-1 — N-Singer: A Non-Autoregressive Korean Singing Voice Synthesis System for Pronunciation Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">N-Singer: A Non-Autoregressive Korean Singing Voice Synthesis System for Pronunciation Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210041.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-1|PAPER Wed-A-V-5-1 — GAN Vocoder: Multi-Resolution Discriminator Is All You Need]]</div>|^<div class="cpauthorindexpersoncardpapertitle">GAN Vocoder: Multi-Resolution Discriminator Is All You Need</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212191.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-4|PAPER Tue-A-SS-2-4 — Detecting COVID-19 from Audio Recording of Coughs Using Random Forests and Support Vector Machines]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detecting COVID-19 from Audio Recording of Coughs Using Random Forests and Support Vector Machines</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210041.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-1|PAPER Wed-A-V-5-1 — GAN Vocoder: Multi-Resolution Discriminator Is All You Need]]</div>|^<div class="cpauthorindexpersoncardpapertitle">GAN Vocoder: Multi-Resolution Discriminator Is All You Need</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210661.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-8|PAPER Wed-A-V-2-8 — Token-Level Supervised Contrastive Learning for Punctuation Restoration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Token-Level Supervised Contrastive Learning for Punctuation Restoration</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211690.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-8|PAPER Tue-E-V-3-8 — Unsupervised Training of a DNN-Based Formant Tracker]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Training of a DNN-Based Formant Tracker</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210556.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-5|PAPER Wed-M-O-1-5 — //LeBenchmark//: A Reproducible Framework for Assessing Self-Supervised Representation Learning from Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">//LeBenchmark//: A Reproducible Framework for Assessing Self-Supervised Representation Learning from Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210608.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-2-1|PAPER Wed-E-O-2-1 — Impact of Encoding and Segmentation Strategies on End-to-End Simultaneous Speech Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Impact of Encoding and Segmentation Strategies on End-to-End Simultaneous Speech Translation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212000.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-7|PAPER Thu-A-V-6-7 — AusKidTalk: An Auditory-Visual Corpus of 3- to 12-Year-Old Australian Children’s Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AusKidTalk: An Auditory-Visual Corpus of 3- to 12-Year-Old Australian Children’s Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210691.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-3|PAPER Fri-A-V-1-3 — Understanding Medical Conversations: Rich Transcription, Confidence Scores & Information Extraction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Understanding Medical Conversations: Rich Transcription, Confidence Scores & Information Extraction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210889.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-6|PAPER Tue-E-V-2-6 — Scene-Agnostic Multi-Microphone Speech Dereverberation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Scene-Agnostic Multi-Microphone Speech Dereverberation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211351.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-6-6|PAPER Tue-A-V-6-6 — Enriching Source Style Transfer in Recognition-Synthesis Based Non-Parallel Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Enriching Source Style Transfer in Recognition-Synthesis Based Non-Parallel Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211452.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-8|PAPER Fri-M-V-7-8 — Voting for the Right Answer: Adversarial Defense for Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voting for the Right Answer: Adversarial Defense for Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210807.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-1-5|PAPER Thu-M-SS-1-5 — Language Recognition Based on Unsupervised Pretrained Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Language Recognition Based on Unsupervised Pretrained Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211495.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-1-7|PAPER Wed-M-V-1-7 — E2E-Based Multi-Task Learning Approach to Joint Speech and Accent Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">E2E-Based Multi-Task Learning Approach to Joint Speech and Accent Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210501.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-5|PAPER Fri-A-V-6-5 — Three-Module Modeling For End-to-End Spoken Language Understanding Using Pre-Trained DNN-HMM-Based Acoustic-Phonetic Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Three-Module Modeling For End-to-End Spoken Language Understanding Using Pre-Trained DNN-HMM-Based Acoustic-Phonetic Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210648.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-7|PAPER Wed-E-V-3-7 — Regularizing Word Segmentation by Creating Misspellings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Regularizing Word Segmentation by Creating Misspellings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210994.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-7-5|PAPER Thu-M-V-7-5 — Human-to-Human Conversation Dataset for Learning Fine-Grained Turn-Taking Action]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Human-to-Human Conversation Dataset for Learning Fine-Grained Turn-Taking Action</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211351.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-6-6|PAPER Tue-A-V-6-6 — Enriching Source Style Transfer in Recognition-Synthesis Based Non-Parallel Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Enriching Source Style Transfer in Recognition-Synthesis Based Non-Parallel Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210055.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-2|PAPER Fri-A-V-6-2 — Augmenting Slot Values and Contexts for Spoken Language Understanding with Pretrained Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Augmenting Slot Values and Contexts for Spoken Language Understanding with Pretrained Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211087.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-4-3|PAPER Fri-M-V-4-3 — PDF: Polyphone Disambiguation in Chinese by Using FLAT]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PDF: Polyphone Disambiguation in Chinese by Using FLAT</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210474.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-3|PAPER Wed-M-V-3-3 — Improve Cross-Lingual Text-To-Speech Synthesis on Monolingual Corpora with Pitch Contour Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improve Cross-Lingual Text-To-Speech Synthesis on Monolingual Corpora with Pitch Contour Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212210.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-1-9|PAPER Tue-M-V-1-9 — Serialized Multi-Layer Multi-Head Attention for Neural Speaker Embedding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Serialized Multi-Layer Multi-Head Attention for Neural Speaker Embedding</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210046.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-2|PAPER Tue-M-V-3-2 — Temporal Convolutional Network with Frequency Dimension Adaptive Attention for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Temporal Convolutional Network with Frequency Dimension Adaptive Attention for Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210905.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-9|PAPER Tue-A-V-4-9 — Phonetically Motivated Self-Supervised Speech Representation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonetically Motivated Self-Supervised Speech Representation Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210781.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-6-2|PAPER Tue-A-V-6-2 — Limited Data Emotional Voice Conversion Leveraging Text-to-Speech: Two-Stage Sequence-to-Sequence Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Limited Data Emotional Voice Conversion Leveraging Text-to-Speech: Two-Stage Sequence-to-Sequence Training</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210497.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-5|PAPER Tue-A-SS-2-5 — Diagnosis of COVID-19 Using Auditory Acoustic Cues]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Diagnosis of COVID-19 Using Auditory Acoustic Cues</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211980.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-12|PAPER Tue-E-V-1-12 — Multi-Level Transfer Learning from Near-Field to Far-Field Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Level Transfer Learning from Near-Field to Far-Field Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210687.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-11|PAPER Tue-E-V-6-11 — Cross-Lingual Voice Conversion with a Cycle Consistency Loss on Linguistic Representation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Lingual Voice Conversion with a Cycle Consistency Loss on Linguistic Representation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211939.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-3-2|PAPER Wed-M-O-3-2 — Universal Speaker Extraction in the Presence and Absence of Target Speakers for Speech of One and Two Talkers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Universal Speaker Extraction in the Presence and Absence of Target Speakers for Speech of One and Two Talkers</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212260.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-12|PAPER Thu-A-V-3-12 — Neural Speaker Extraction with Speaker-Speech Cross-Attention Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Neural Speaker Extraction with Speaker-Speech Cross-Attention Network</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211552.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-3-4|PAPER Fri-M-O-3-4 — GlobalPhone Mix-To-Separate Out of 2: A Multilingual 2000 Speakers Mixtures Database for Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">GlobalPhone Mix-To-Separate Out of 2: A Multilingual 2000 Speakers Mixtures Database for Speech Separation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211236.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-2|PAPER Fri-A-V-5-2 — Reinforcement Learning for Emotional Text-to-Speech Synthesis with Improved Emotion Discriminability]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reinforcement Learning for Emotional Text-to-Speech Synthesis with Improved Emotion Discriminability</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210402.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-4|PAPER Fri-A-V-6-4 — Knowledge Distillation from BERT Transformer to Speech Transformer for Intent Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Knowledge Distillation from BERT Transformer to Speech Transformer for Intent Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210200.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-4|PAPER Tue-M-V-6-4 — Multi-Channel VAD for Transcription of Group Discussion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Channel VAD for Transcription of Group Discussion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210338.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-3|PAPER Thu-M-V-3-3 — Continuous Speech Separation Using Speaker Inventory for Long Recording]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Continuous Speech Separation Using Speaker Inventory for Long Recording</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211783.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-2|PAPER Tue-A-SS-1-2 — Configurable Privacy-Preserving Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Configurable Privacy-Preserving Automatic Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211816.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-8|PAPER Fri-A-V-6-8 — Factorization-Aware Training of Transformers for Natural Language Understanding on the Edge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Factorization-Aware Training of Transformers for Natural Language Understanding on the Edge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210322.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-4|PAPER Thu-A-V-2-4 — Reducing Streaming ASR Model Delay with Self Alignment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reducing Streaming ASR Model Delay with Self Alignment</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210866.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-2|PAPER Tue-M-V-2-2 — FastPitchFormant: Source-Filter Based Decomposed Modeling for Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">FastPitchFormant: Source-Filter Based Decomposed Modeling for Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210239.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-1|PAPER Wed-M-V-3-1 — N-Singer: A Non-Autoregressive Korean Singing Voice Synthesis System for Pronunciation Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">N-Singer: A Non-Autoregressive Korean Singing Voice Synthesis System for Pronunciation Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210592.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-5|PAPER Tue-M-V-6-5 — Audio-Visual Information Fusion Using Cross-Modal Teacher-Student Learning for Voice Activity Detection in Realistic Environments]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio-Visual Information Fusion Using Cross-Modal Teacher-Student Learning for Voice Activity Detection in Realistic Environments</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210723.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-7|PAPER Thu-M-V-2-7 — Automatic Lip-Reading with Hierarchical Pyramidal Convolution and Self-Attention for Image Sequences with No Word Boundaries]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Lip-Reading with Hierarchical Pyramidal Convolution and Self-Attention for Image Sequences with No Word Boundaries</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210556.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-5|PAPER Wed-M-O-1-5 — //LeBenchmark//: A Reproducible Framework for Assessing Self-Supervised Representation Learning from Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">//LeBenchmark//: A Reproducible Framework for Assessing Self-Supervised Representation Learning from Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210158.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-1-2|PAPER Thu-A-V-1-2 — Learning Fine-Grained Cross Modality Excitement for Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Fine-Grained Cross Modality Excitement for Speech Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211921.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-14|PAPER Wed-A-V-3-14 — Flexi-Transducer: Optimizing Latency, Accuracy and Compute for Multi-Domain On-Device Scenarios]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Flexi-Transducer: Optimizing Latency, Accuracy and Compute for Multi-Domain On-Device Scenarios</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211887.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-6|PAPER Fri-A-V-3-6 — Dissecting User-Perceived Latency of On-Device E2E Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dissecting User-Perceived Latency of On-Device E2E Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210137.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-1|PAPER Tue-E-V-6-1 — CVC: Contrastive Learning for Non-Parallel Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">CVC: Contrastive Learning for Non-Parallel Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210458.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-9|PAPER Fri-M-V-6-9 — Energy-Friendly Keyword Spotting System Using Add-Based Convolution]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Energy-Friendly Keyword Spotting System Using Add-Based Convolution</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211134.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-10|PAPER Wed-E-V-5-10 — Improved Speech Enhancement Using a Complex-Domain GAN with Fused Time-Domain and Time-Frequency Domain Constraints]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improved Speech Enhancement Using a Complex-Domain GAN with Fused Time-Domain and Time-Frequency Domain Constraints</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211241.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-1-3|PAPER Fri-M-O-1-3 — Age-Invariant Training for End-to-End Child Speech Recognition Using Adversarial Multi-Task Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Age-Invariant Training for End-to-End Child Speech Recognition Using Adversarial Multi-Task Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210452.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-2-1|PAPER Tue-E-O-2-1 — Voice Quality in Verbal Irony: Electroglottographic Analyses of Ironic Utterances in Standard Austrian German]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voice Quality in Verbal Irony: Electroglottographic Analyses of Ironic Utterances in Standard Austrian German</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210099.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-5|PAPER Fri-A-SS-2-5 — Speaking with a KN95 Face Mask: ASR Performance and Speaker Compensation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaking with a KN95 Face Mask: ASR Performance and Speaker Compensation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210711.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-SS-1-1|PAPER Tue-E-SS-1-1 — Optimizing an Automatic Creaky Voice Detection Method for Australian English Speaking Females]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Optimizing an Automatic Creaky Voice Detection Method for Australian English Speaking Females</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211609.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-1|PAPER Wed-E-SS-1-1 — INTERSPEECH 2021 Deep Noise Suppression Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">INTERSPEECH 2021 Deep Noise Suppression Challenge</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211870.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-1-1|PAPER Fri-A-SS-1-1 — INTERSPEECH 2021 Acoustic Echo Cancellation Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">INTERSPEECH 2021 Acoustic Echo Cancellation Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211191.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-1-5|PAPER Wed-A-O-1-5 — Fast Text-Only Domain Adaptation of RNN-Transducer Prediction Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fast Text-Only Domain Adaptation of RNN-Transducer Prediction Network</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211846.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-2-13|PAPER Fri-M-V-2-13 — A New Vowel Normalization for Sociophonetics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A New Vowel Normalization for Sociophonetics</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210103.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-1|PAPER Tue-A-V-1-1 — SpecMix : A Mixed Sample Data Augmentation Method for Training with Time-Frequency Domain Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SpecMix : A Mixed Sample Data Augmentation Method for Training with Time-Frequency Domain Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210964.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-9|PAPER Tue-M-V-5-9 — Leveraging Phone Mask Training for Phonetic-Reduction-Robust E2E Uyghur Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Leveraging Phone Mask Training for Phonetic-Reduction-Robust E2E Uyghur Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211495.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-1-7|PAPER Wed-M-V-1-7 — E2E-Based Multi-Task Learning Approach to Joint Speech and Accent Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">E2E-Based Multi-Task Learning Approach to Joint Speech and Accent Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210504.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-5|PAPER Thu-M-V-3-5 — End-to-End Speech Separation Using Orthogonal Representation in Complex and Real Time-Frequency Domain]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Speech Separation Using Orthogonal Representation in Complex and Real Time-Frequency Domain</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211042.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-6|PAPER Wed-E-SS-1-6 — DBNet: A Dual-Branch Network Architecture Processing on Spectrum and Waveform for Single-Channel Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DBNet: A Dual-Branch Network Architecture Processing on Spectrum and Waveform for Single-Channel Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210630.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-5|PAPER Fri-M-V-5-5 — Coughing-Based Recognition of Covid-19 with Spatial Attentive ConvLSTM Recurrent Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Coughing-Based Recognition of Covid-19 with Spatial Attentive ConvLSTM Recurrent Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210894.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-9|PAPER Wed-E-V-3-9 — Emitting Word Timings with HMM-Free End-to-End System in Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Emitting Word Timings with HMM-Free End-to-End System in Automatic Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210658.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-3|PAPER Fri-M-V-3-3 — A Hybrid Seq-2-Seq ASR Design for On-Device and Server Applications]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Hybrid Seq-2-Seq ASR Design for On-Device and Server Applications</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211512.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-3-6|PAPER Tue-A-V-3-6 — A Deep Learning Method to Multi-Channel Active Noise Control]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Deep Learning Method to Multi-Channel Active Noise Control</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211508.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-8|PAPER Tue-E-V-2-8 — A Deep Learning Approach to Multi-Channel and Multi-Microphone Acoustic Echo Cancellation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Deep Learning Approach to Multi-Channel and Multi-Microphone Acoustic Echo Cancellation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210259.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-7|PAPER Tue-M-V-3-7 — Speech Enhancement with Weakly Labelled Data from AudioSet]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Enhancement with Weakly Labelled Data from AudioSet</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210883.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-5-5|PAPER Thu-M-V-5-5 — Fine-Grained Prosody Modeling in Neural Speech Synthesis Using ToBI Representation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fine-Grained Prosody Modeling in Neural Speech Synthesis Using ToBI Representation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210666.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-1-6|PAPER Thu-A-V-1-6 — Acted vs. Improvised: Domain Adaptation for Elicitation Approaches in Audio-Visual Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acted vs. Improvised: Domain Adaptation for Elicitation Approaches in Audio-Visual Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211154.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-1-8|PAPER Thu-A-V-1-8 — Graph Isomorphism Network for Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Graph Isomorphism Network for Speech Emotion Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210718.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-2-2|PAPER Fri-A-V-2-2 — A Speech Emotion Recognition Framework for Better Discrimination of Confusions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Speech Emotion Recognition Framework for Better Discrimination of Confusions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210794.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-7|PAPER Tue-A-SS-1-7 — Continual Learning for Fake Audio Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Continual Learning for Fake Audio Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210930.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-5|PAPER Wed-M-V-4-5 — Half-Truth: A Partially Fake Audio Detection Dataset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Half-Truth: A Partially Fake Audio Detection Dataset</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211370.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-4-8|PAPER Tue-M-V-4-8 — Leveraging ASR N-Best in Deep Entity Retrieval]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Leveraging ASR N-Best in Deep Entity Retrieval</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210883.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-5-5|PAPER Thu-M-V-5-5 — Fine-Grained Prosody Modeling in Neural Speech Synthesis Using ToBI Representation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fine-Grained Prosody Modeling in Neural Speech Synthesis Using ToBI Representation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210474.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-3|PAPER Wed-M-V-3-3 — Improve Cross-Lingual Text-To-Speech Synthesis on Monolingual Corpora with Pitch Contour Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improve Cross-Lingual Text-To-Speech Synthesis on Monolingual Corpora with Pitch Contour Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211265.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-7|PAPER Wed-M-V-3-7 — Incorporating Cross-Speaker Style Transfer for Multi-Language Text-to-Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Incorporating Cross-Speaker Style Transfer for Multi-Language Text-to-Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211192.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-4-9|PAPER Fri-M-V-4-9 — LinearSpeech: Parallel Text-to-Speech with Linear Complexity]]</div>|^<div class="cpauthorindexpersoncardpapertitle">LinearSpeech: Parallel Text-to-Speech with Linear Complexity</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211796.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-7|PAPER Wed-A-V-1-7 — Investigating the Interplay Between Affective, Phonatory and Motoric Subsystems in Autism Spectrum Disorder Using a Multimodal Dialogue Agent]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Interplay Between Affective, Phonatory and Motoric Subsystems in Autism Spectrum Disorder Using a Multimodal Dialogue Agent</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211801.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-2|PAPER Fri-A-SS-2-2 — Investigating the Utility of Multimodal Conversational Technology and Audiovisual Analytic Measures for the Assessment and Monitoring of Amyotrophic Lateral Sclerosis at Scale]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Utility of Multimodal Conversational Technology and Audiovisual Analytic Measures for the Assessment and Monitoring of Amyotrophic Lateral Sclerosis at Scale</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211578.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-9|PAPER Wed-E-V-1-9 — SRI-B End-to-End System for Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SRI-B End-to-End System for Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211756.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-3-5|PAPER Thu-M-O-3-5 — Out-of-Vocabulary Words Detection with Attention and CTC Alignments in an End-to-End ASR System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Out-of-Vocabulary Words Detection with Attention and CTC Alignments in an End-to-End ASR System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210836.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-7|PAPER Thu-A-V-2-7 — Listen with Intent: Improving Speech Recognition with Audio-to-Intent Front-End]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Listen with Intent: Improving Speech Recognition with Audio-to-Intent Front-End</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211609.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-1|PAPER Wed-E-SS-1-1 — INTERSPEECH 2021 Deep Noise Suppression Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">INTERSPEECH 2021 Deep Noise Suppression Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210684.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-6|PAPER Tue-A-V-1-6 — Event Specific Attention for Polyphonic Sound Event Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Event Specific Attention for Polyphonic Sound Event Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210935.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-2-3|PAPER Thu-M-SS-2-3 — Robust Command Recognition for Lithuanian Air Traffic Control Tower Utterances]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Command Recognition for Lithuanian Air Traffic Control Tower Utterances</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210322.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-4|PAPER Thu-A-V-2-4 — Reducing Streaming ASR Model Delay with Self Alignment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reducing Streaming ASR Model Delay with Self Alignment</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211395.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-13|PAPER Fri-M-V-6-13 — Teaching Keyword Spotters to Spot New Keywords with Limited Examples]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Teaching Keyword Spotters to Spot New Keywords with Limited Examples</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210874.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-7-4|PAPER Thu-M-V-7-4 — Timing Generating Networks: Neural Network Based Precise Turn-Taking Timing Prediction in Multiparty Conversation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Timing Generating Networks: Neural Network Based Precise Turn-Taking Timing Prediction in Multiparty Conversation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211881.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-O-1-6|PAPER Thu-A-O-1-6 — Changes in Glottal Source Parameter Values with Light to Moderate Physical Load]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Changes in Glottal Source Parameter Values with Light to Moderate Physical Load</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211932.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-10|PAPER Wed-E-V-2-10 — Analysis of Contextual Voice Changes in Remote Meetings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis of Contextual Voice Changes in Remote Meetings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211522.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-9|PAPER Fri-M-V-7-9 — Visualizing Classifier Adjacency Relations: A Case Study in Speaker Verification and Voice Anti-Spoofing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Visualizing Classifier Adjacency Relations: A Case Study in Speaker Verification and Voice Anti-Spoofing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210149.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-2|PAPER Thu-M-V-4-2 — Three-Class Overlapped Speech Detection Using a Convolutional Recurrent Neural Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Three-Class Overlapped Speech Detection Using a Convolutional Recurrent Neural Network</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210448.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-5|PAPER Thu-M-V-4-5 — Adapting Speaker Embeddings for Speaker Diarisation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adapting Speaker Embeddings for Speaker Diarisation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212041.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-6|PAPER Thu-A-V-6-6 — Look Who’s Talking: Active Speaker Detection in the Wild]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Look Who’s Talking: Active Speaker Detection in the Wild</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211404.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-7|PAPER Fri-M-V-7-7 — Attention-Based Convolutional Neural Network for ASV Spoofing Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Attention-Based Convolutional Neural Network for ASV Spoofing Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^{{$:/causal/NO-PDF Marker}}|^<div class="cpauthorindexpersoncardpapercode">[[Tue-Survey|PAPER Tue-Survey — Towards Automatic Speech Recognition for People with Atypical Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Automatic Speech Recognition for People with Atypical Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210915.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-7|PAPER Thu-M-V-1-7 — Identifying Cognitive Impairment Using Sentence Representation Vectors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Identifying Cognitive Impairment Using Sentence Representation Vectors</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211297.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-8|PAPER Thu-M-V-1-8 — Parental Spoken Scaffolding and Narrative Skills in Crowd-Sourced Storytelling Samples of Young Children]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Parental Spoken Scaffolding and Narrative Skills in Crowd-Sourced Storytelling Samples of Young Children</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211519.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-7|PAPER Thu-A-SS-2-7 — Using the Outputs of Different Automatic Speech Recognition Paradigms for Acoustic- and BERT-Based Alzheimer’s Dementia Detection Through Spontaneous Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Using the Outputs of Different Automatic Speech Recognition Paradigms for Acoustic- and BERT-Based Alzheimer’s Dementia Detection Through Spontaneous Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211461.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-7|PAPER Tue-M-V-2-7 — Parallel Tacotron 2: A Non-Autoregressive Neural TTS Model with Differentiable Duration Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Parallel Tacotron 2: A Non-Autoregressive Neural TTS Model with Differentiable Duration Modeling</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211757.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-9|PAPER Tue-M-V-2-9 — PnG BERT: Augmented BERT on Phonemes and Graphemes for Neural TTS]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PnG BERT: Augmented BERT on Phonemes and Graphemes for Neural TTS</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210677.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-7|PAPER Tue-A-V-4-7 — Semi-Supervision in ASR: Sequential MixMatch and Factorized TTS-Based Augmentation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semi-Supervision in ASR: Sequential MixMatch and Factorized TTS-Based Augmentation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211897.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-11|PAPER Thu-A-SS-1-11 — WaveGrad 2: Iterative Refinement for Text-to-Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">WaveGrad 2: Iterative Refinement for Text-to-Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210935.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-2-3|PAPER Thu-M-SS-2-3 — Robust Command Recognition for Lithuanian Air Traffic Control Tower Utterances]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Command Recognition for Lithuanian Air Traffic Control Tower Utterances</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211056.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-2-4|PAPER Fri-M-V-2-4 — Revisiting Recall Effects of Filler Particles in German and English]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Revisiting Recall Effects of Filler Particles in German and English</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211977.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-11|PAPER Tue-M-V-6-11 — A Lightweight Framework for Online Voice Activity Detection in the Wild]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Lightweight Framework for Online Voice Activity Detection in the Wild</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211837.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-9|PAPER Tue-A-V-1-9 — An Evaluation of Data Augmentation Methods for Sound Scene Geotagging]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Evaluation of Data Augmentation Methods for Sound Scene Geotagging</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211990.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-6-9|PAPER Tue-A-V-6-9 — Adversarially Learning Disentangled Speech Representations for Robust Multi-Factor Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adversarially Learning Disentangled Speech Representations for Robust Multi-Factor Voice Conversion</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210283.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-5|PAPER Tue-E-V-6-5 — VQMIVC: Vector Quantization and Mutual Information-Based Unsupervised Speech Representation Disentanglement for One-Shot Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">VQMIVC: Vector Quantization and Mutual Information-Based Unsupervised Speech Representation Disentanglement for One-Shot Voice Conversion</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212139.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-10|PAPER Thu-M-V-1-10 — Unsupervised Domain Adaptation for Dysarthric Speech Detection via Domain Adversarial Training and Mutual Information Minimization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Domain Adaptation for Dysarthric Speech Detection via Domain Adversarial Training and Mutual Information Minimization</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212121.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-13|PAPER Thu-A-SS-1-13 — VAENAR-TTS: Variational Auto-Encoder Based Non-AutoRegressive Text-to-Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">VAENAR-TTS: Variational Auto-Encoder Based Non-AutoRegressive Text-to-Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211467.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-10|PAPER Fri-M-V-1-10 — Transformer Based End-to-End Mispronunciation Detection and Diagnosis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transformer Based End-to-End Mispronunciation Detection and Diagnosis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212125.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-12|PAPER Fri-M-V-7-12 — Channel-Wise Gated Res2Net: Towards Robust Detection of Synthetic Speech Attacks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Channel-Wise Gated Res2Net: Towards Robust Detection of Synthetic Speech Attacks</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210947.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-7|PAPER Fri-A-V-5-7 — Towards Multi-Scale Style Control for Expressive Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Multi-Scale Style Control for Expressive Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210060.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-4|PAPER Fri-A-SS-2-4 — Spectro-Temporal Deep Features for Disordered Speech Assessment and Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spectro-Temporal Deep Features for Disordered Speech Assessment and Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210168.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-6|PAPER Fri-A-SS-2-6 — Adversarial Data Augmentation for Disordered Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adversarial Data Augmentation for Disordered Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210285.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-8|PAPER Fri-A-SS-2-8 — Learning Explicit Prosody Models and Deep Speaker Embeddings for Atypical Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Explicit Prosody Models and Deep Speaker Embeddings for Atypical Voice Conversion</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210289.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-9|PAPER Fri-A-SS-2-9 — Bayesian Parametric and Architectural Domain Adaptation of LF-MMI Trained TDNNs for Elderly and Dysarthric Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bayesian Parametric and Architectural Domain Adaptation of LF-MMI Trained TDNNs for Elderly and Dysarthric Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211052.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-9|PAPER Tue-A-SS-2-9 — Cough-Based COVID-19 Detection with Contextual Attention Convolutional Neural Networks and Gender Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cough-Based COVID-19 Detection with Contextual Attention Convolutional Neural Networks and Gender Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210616.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-3-2|PAPER Wed-E-O-3-2 — ORCA-SLANG: An Automatic Multi-Stage Semi-Supervised Deep Learning Framework for Large-Scale Killer Whale Call Type Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ORCA-SLANG: An Automatic Multi-Stage Semi-Supervised Deep Learning Framework for Large-Scale Killer Whale Call Type Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210140.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-2|PAPER Tue-A-V-1-2 — SpecAugment++: A Hidden Space Data Augmentation Method for Acoustic Scene Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SpecAugment++: A Hidden Space Data Augmentation Method for Acoustic Scene Classification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210481.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-2|PAPER Tue-E-V-2-2 — TeCANet: Temporal-Contextual Attention Network for Environment-Aware Speech Dereverberation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">TeCANet: Temporal-Contextual Attention Network for Environment-Aware Speech Dereverberation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210300.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-2|PAPER Tue-E-V-3-2 — Unsupervised Multi-Target Domain Adaptation for Acoustic Scene Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Multi-Target Domain Adaptation for Acoustic Scene Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211189.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-1|PAPER Thu-M-V-1-1 — Speech Intelligibility of Dysarthric Speech: Human Scores and Acoustic-Phonetic Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Intelligibility of Dysarthric Speech: Human Scores and Acoustic-Phonetic Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210978.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-6|PAPER Wed-E-V-1-6 — Dual Script E2E Framework for Multilingual and Code-Switching ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dual Script E2E Framework for Multilingual and Code-Switching ASR</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211573.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-11|PAPER Wed-M-V-4-11 — Voice Privacy Through x-Vector and CycleGAN-Based Anonymization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voice Privacy Through x-Vector and CycleGAN-Based Anonymization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210220.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-3|PAPER Wed-E-V-5-3 — Human Listening and Live Captioning: Multi-Task Training for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Human Listening and Live Captioning: Multi-Task Training for Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210983.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-1|PAPER Tue-A-SS-1-1 — Privacy-Preserving Voice Anti-Spoofing Using Secure Multi-Party Computation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Privacy-Preserving Voice Anti-Spoofing Using Secure Multi-Party Computation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210993.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-1-2|PAPER Wed-E-O-1-2 — Graph Attention Networks for Anti-Spoofing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Graph Attention Networks for Anti-Spoofing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210633.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-3-1|PAPER Tue-A-V-3-1 — LACOPE: Latency-Constrained Pitch Estimation for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">LACOPE: Latency-Constrained Pitch Estimation for Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210563.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-5|PAPER Wed-E-V-3-5 — Towards Lifelong Learning of End-to-End ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Lifelong Learning of End-to-End ASR</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210592.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-5|PAPER Tue-M-V-6-5 — Audio-Visual Information Fusion Using Cross-Modal Teacher-Student Learning for Voice Activity Detection in Realistic Environments]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio-Visual Information Fusion Using Cross-Modal Teacher-Student Learning for Voice Activity Detection in Realistic Environments</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211162.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-1|PAPER Tue-E-V-5-1 — Semantic Data Augmentation for End-to-End Mandarin Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semantic Data Augmentation for End-to-End Mandarin Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212044.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-10|PAPER Fri-A-V-6-10 — Bi-Directional Joint Neural Networks for Intent Classification and Slot Filling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bi-Directional Joint Neural Networks for Intent Classification and Slot Filling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210050.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-3|PAPER Wed-M-V-2-3 — Towards Unsupervised Phone and Word Segmentation Using Self-Supervised Vector-Quantized Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Unsupervised Phone and Word Segmentation Using Self-Supervised Vector-Quantized Neural Networks</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210461.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-5|PAPER Wed-M-V-2-5 — Multilingual Transfer of Acoustic Word Embeddings Improves When Training on Languages Related to the Target Zero-Resource Language]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multilingual Transfer of Acoustic Word Embeddings Improves When Training on Languages Related to the Target Zero-Resource Language</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211182.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-6|PAPER Wed-M-V-2-6 — Analyzing Speaker Information in Self-Supervised Models to Improve Zero-Resource Speech Processing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analyzing Speaker Information in Self-Supervised Models to Improve Zero-Resource Speech Processing</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210049.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-1|PAPER Thu-M-V-2-1 — Direct Multimodal Few-Shot Learning of Speech and Images]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Direct Multimodal Few-Shot Learning of Speech and Images</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210435.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-5|PAPER Thu-M-V-2-5 — Attention-Based Keyword Localisation in Speech Using Visual Grounding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Attention-Based Keyword Localisation in Speech Using Visual Grounding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^{{$:/causal/NO-PDF Marker}}|^<div class="cpauthorindexpersoncardpapercode">[[Tue-Keynote|PAPER Tue-Keynote — Forty Years of Speech and Language Processing: From Bayes Decision Rule to Deep Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Forty Years of Speech and Language Processing: From Bayes Decision Rule to Deep Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211067.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-1-4|PAPER Wed-A-O-1-4 — On Sampling-Based Training Criteria for Neural Language Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On Sampling-Based Training Criteria for Neural Language Modeling</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211510.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-3|PAPER Wed-A-V-3-3 — Librispeech Transducer Model with Internal Language Model Prior Correction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Librispeech Transducer Model with Internal Language Model Prior Correction</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211255.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-1-5|PAPER Thu-M-O-1-5 — Investigating Methods to Improve Language Model Integration for Attention-Based Encoder-Decoder ASR Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Methods to Improve Language Model Integration for Attention-Based Encoder-Decoder ASR Models</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211623.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-3-2|PAPER Thu-M-O-3-2 — Acoustic Data-Driven Subword Modeling for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Data-Driven Subword Modeling for End-to-End Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211671.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-3-3|PAPER Thu-M-O-3-3 — Equivalence of Segmental and Neural Transducer Modeling: A Proof of Concept]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Equivalence of Segmental and Neural Transducer Modeling: A Proof of Concept</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211683.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-1-6|PAPER Thu-M-O-1-6 — Comparing CTC and LFMMI for Out-of-Domain Adaptation of wav2vec 2.0 Acoustic Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparing CTC and LFMMI for Out-of-Domain Adaptation of wav2vec 2.0 Acoustic Model</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211778.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-SS-1-2|PAPER Fri-M-SS-1-2 — Multitask Adaptation with Lattice-Free MMI for Multi-Genre Speech Recognition of Low Resource Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multitask Adaptation with Lattice-Free MMI for Multi-Genre Speech Recognition of Low Resource Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210560.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-7|PAPER Thu-M-V-4-7 — End-To-End Speaker Segmentation for Overlap-Aware Resegmentation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-To-End Speaker Segmentation for Overlap-Aware Resegmentation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211843.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-8|PAPER Tue-E-V-5-8 — Zero-Shot Cross-Lingual Phonetic Recognition with External Language Embedding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Zero-Shot Cross-Lingual Phonetic Recognition with External Language Embedding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210082.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-1-1|PAPER Wed-M-V-1-1 — End-to-End Language Diarization for Bilingual Code-Switching Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Language Diarization for Bilingual Code-Switching Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210019.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-1|PAPER Tue-M-SS-1-1 — The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210154.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-8|PAPER Tue-M-SS-1-8 — Introducing a Central African Primate Vocalisation Dataset for Automated Species Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Introducing a Central African Primate Vocalisation Dataset for Automated Species Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210244.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-4|PAPER Tue-E-V-6-4 — Fine-Tuning Pre-Trained Voice Conversion Model for Adding New Target Speakers with Limited Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fine-Tuning Pre-Trained Voice Conversion Model for Adding New Target Speakers with Limited Data</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210906.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-6-3|PAPER Thu-M-V-6-3 — Using Transposed Convolution for Articulatory-to-Acoustic Conversion from Real-Time MRI Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Using Transposed Convolution for Articulatory-to-Acoustic Conversion from Real-Time MRI Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218021.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-S&T-1-1|PAPER Fri-A-S&T-1-1 — Interactive and Real-Time Acoustic Measurement Tools for Speech Data Acquisition and Presentation: Application of an Extended Member of Time Stretched Pulses]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Interactive and Real-Time Acoustic Measurement Tools for Speech Data Acquisition and Presentation: Application of an Extended Member of Time Stretched Pulses</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212073.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-6-9|PAPER Thu-M-V-6-9 — Mixture of Orthogonal Sequences Made from Extended Time-Stretched Pulses Enables Measurement of Involuntary Voice Fundamental Frequency Response to Pitch Perturbation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Mixture of Orthogonal Sequences Made from Extended Time-Stretched Pulses Enables Measurement of Involuntary Voice Fundamental Frequency Response to Pitch Perturbation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218021.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-S&T-1-1|PAPER Fri-A-S&T-1-1 — Interactive and Real-Time Acoustic Measurement Tools for Speech Data Acquisition and Presentation: Application of an Extended Member of Time Stretched Pulses]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Interactive and Real-Time Acoustic Measurement Tools for Speech Data Acquisition and Presentation: Application of an Extended Member of Time Stretched Pulses</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211312.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-12|PAPER Wed-M-V-2-12 — AVLnet: Learning Audio-Visual Language Representations from Instructional Videos]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AVLnet: Learning Audio-Visual Language Representations from Instructional Videos</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211352.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-8|PAPER Thu-M-V-2-8 — Cascaded Multilingual Audio-Visual Learning from Videos]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cascaded Multilingual Audio-Visual Learning from Videos</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210524.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-4|PAPER Fri-M-V-5-4 — Fake Audio Detection in Resource-Constrained Settings Using Microfeatures]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fake Audio Detection in Resource-Constrained Settings Using Microfeatures</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212190.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-11|PAPER Fri-A-V-4-11 — Masked Proxy Loss for Text-Independent Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Masked Proxy Loss for Text-Independent Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218022.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-S&T-1-7|PAPER Thu-M-S&T-1-7 — ViSTAFAE: A Visual Speech-Training Aid with Feedback of Articulatory Efforts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ViSTAFAE: A Visual Speech-Training Aid with Feedback of Articulatory Efforts</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211110.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-12|PAPER Wed-M-V-6-12 — StableEmit: Selection Probability Discount for Reducing Emission Latency of Streaming Monotonic Attention ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">StableEmit: Selection Probability Discount for Reducing Emission Latency of Streaming Monotonic Attention ASR</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211107.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-4|PAPER Fri-M-V-3-4 — VAD-Free Streaming Hybrid CTC/Attention ASR for Unsegmented Recording]]</div>|^<div class="cpauthorindexpersoncardpapertitle">VAD-Free Streaming Hybrid CTC/Attention ASR for Unsegmented Recording</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210492.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-8|PAPER Tue-E-V-6-8 — StarGAN-VC+ASR: StarGAN-Based Non-Parallel Voice Conversion Regularized by Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">StarGAN-VC+ASR: StarGAN-Based Non-Parallel Voice Conversion Regularized by Automatic Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212164.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-12|PAPER Tue-E-V-3-12 — Pitch Contour Separation from Overlapping Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Pitch Contour Separation from Overlapping Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210792.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-7|PAPER Tue-M-V-6-7 — Voice Activity Detection for Live Speech of Baseball Game Based on Tandem Connection with Speech/Noise Separation Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voice Activity Detection for Live Speech of Baseball Game Based on Tandem Connection with Speech/Noise Separation Model</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210390.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-4|PAPER Wed-E-V-1-4 — Language and Speaker-Independent Feature Transformation for End-to-End Multilingual Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Language and Speaker-Independent Feature Transformation for End-to-End Multilingual Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210896.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-3|PAPER Tue-M-V-2-3 — Sequence-to-Sequence Learning for Deep Gaussian Process Based Speech Synthesis Using Self-Attention GP Layer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sequence-to-Sequence Learning for Deep Gaussian Process Based Speech Synthesis Using Self-Attention GP Layer</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210897.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-6|PAPER Wed-M-V-3-6 — Cross-Lingual Speaker Adaptation Using Domain Adaptation and Speaker Consistency Loss for Text-To-Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Lingual Speaker Adaptation Using Domain Adaptation and Speaker Consistency Loss for Text-To-Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210583.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-4|PAPER Wed-A-V-5-4 — Harmonic WaveGAN: GAN-Based Speech Waveform Generation Model with Harmonic Structure Discriminator]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Harmonic WaveGAN: GAN-Based Speech Waveform Generation Model with Harmonic Structure Discriminator</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212253.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-10|PAPER Tue-E-V-2-10 — Should We Always Separate?: Switching Between Enhanced and Observed Signals for Overlapping Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Should We Always Separate?: Switching Between Enhanced and Observed Signals for Overlapping Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210437.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-6|PAPER Wed-M-V-6-6 — Streaming End-to-End Speech Recognition for Hybrid RNN-T/Attention Architecture]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Streaming End-to-End Speech Recognition for Hybrid RNN-T/Attention Architecture</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210929.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-6-4|PAPER Thu-M-V-6-4 — Comparison Between Lumped-Mass Modeling and Flow Simulation of the Reed-Type Artificial Vocal Fold]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparison Between Lumped-Mass Modeling and Flow Simulation of the Reed-Type Artificial Vocal Fold</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211020.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-7|PAPER Wed-A-V-6-7 — Transcribing Paralinguistic Acoustic Cues to Target Language Text in Transformer-Based Speech-to-Text Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transcribing Paralinguistic Acoustic Cues to Target Language Text in Transformer-Based Speech-to-Text Translation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210225.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-4|PAPER Tue-M-V-5-4 — Noise Robust Acoustic Modeling for Single-Channel Speech Recognition Based on a Stream-Wise Transformer Architecture]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Noise Robust Acoustic Modeling for Single-Channel Speech Recognition Based on a Stream-Wise Transformer Architecture</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211169.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-1|PAPER Wed-E-V-3-1 — Multi-Domain Knowledge Distillation via Uncertainty-Matching for End-to-End ASR Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Domain Knowledge Distillation via Uncertainty-Matching for End-to-End ASR Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210672.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-4-8|PAPER Wed-E-V-4-8 — Taiwan Min Nan (Taiwanese) Checked Tones Sound Change]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Taiwan Min Nan (Taiwanese) Checked Tones Sound Change</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218004.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-S&T-1-4|PAPER Tue-A-S&T-1-4 — ROXANNE Research Platform: Automate Criminal Investigations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ROXANNE Research Platform: Automate Criminal Investigations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211536.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-4-1|PAPER Tue-M-V-4-1 — User-Initiated Repetition-Based Recovery in Multi-Utterance Dialogue Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">User-Initiated Repetition-Based Recovery in Multi-Utterance Dialogue Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210229.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-7-2|PAPER Thu-M-V-7-2 — Injecting Descriptive Meta-Information into Pre-Trained Language Models with Hypernetworks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Injecting Descriptive Meta-Information into Pre-Trained Language Models with Hypernetworks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210188.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-2|PAPER Thu-A-V-5-2 — LiteTTS: A Lightweight Mel-Spectrogram-Free Text-to-Wave Synthesizer Based on Generative Adversarial Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">LiteTTS: A Lightweight Mel-Spectrogram-Free Text-to-Wave Synthesizer Based on Generative Adversarial Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211460.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-8|PAPER Tue-E-V-4-8 — Integrating Dialog History into End-to-End Spoken Language Understanding Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Integrating Dialog History into End-to-End Spoken Language Understanding Systems</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210788.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-6|PAPER Fri-A-V-6-6 — Speak or Chat with Me: End-to-End Spoken Language Understanding System with Flexible Inputs]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speak or Chat with Me: End-to-End Spoken Language Understanding System with Flexible Inputs</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210747.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-2|PAPER Thu-A-V-4-2 — Investigation of Spatial-Acoustic Features for Overlapping Speech Detection in Multiparty Meetings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigation of Spatial-Acoustic Features for Overlapping Speech Detection in Multiparty Meetings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210003.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-2|PAPER Fri-A-V-4-2 — Fusion of Embeddings Networks for Robust Combination of Text Dependent and Independent Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fusion of Embeddings Networks for Robust Combination of Text Dependent and Independent Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212210.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-1-9|PAPER Tue-M-V-1-9 — Serialized Multi-Layer Multi-Head Attention for Neural Speaker Embedding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Serialized Multi-Layer Multi-Head Attention for Neural Speaker Embedding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211351.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-6-6|PAPER Tue-A-V-6-6 — Enriching Source Style Transfer in Recognition-Synthesis Based Non-Parallel Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Enriching Source Style Transfer in Recognition-Synthesis Based Non-Parallel Voice Conversion</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212132.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-12|PAPER Tue-E-V-6-12 — Improving Robustness of One-Shot Voice Conversion with Deep Discriminative Speaker Encoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Robustness of One-Shot Voice Conversion with Deep Discriminative Speaker Encoder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210296.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-4|PAPER Wed-E-SS-1-4 — DPCRN: Dual-Path Convolution Recurrent Network for Single Channel Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DPCRN: Dual-Path Convolution Recurrent Network for Single Channel Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210581.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-6|PAPER Fri-M-V-1-6 — F₀ Patterns of L2 English Speech by Mandarin Chinese Learners]]</div>|^<div class="cpauthorindexpersoncardpapertitle">F₀ Patterns of L2 English Speech by Mandarin Chinese Learners</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211216.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-8|PAPER Tue-E-V-1-8 — Gradient Regularization for Noise-Robust Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Gradient Regularization for Noise-Robust Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211689.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-7-7|PAPER Thu-M-V-7-7 — Joint Retrieval-Extraction Training for Evidence-Aware Dialog Response Selection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Joint Retrieval-Extraction Training for Evidence-Aware Dialog Response Selection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210866.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-2|PAPER Tue-M-V-2-2 — FastPitchFormant: Source-Filter Based Decomposed Modeling for Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">FastPitchFormant: Source-Filter Based Decomposed Modeling for Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210239.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-1|PAPER Wed-M-V-3-1 — N-Singer: A Non-Autoregressive Korean Singing Voice Synthesis System for Pronunciation Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">N-Singer: A Non-Autoregressive Korean Singing Voice Synthesis System for Pronunciation Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210971.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-6|PAPER Wed-A-V-5-6 — GANSpeech: Adversarial Training for High-Fidelity Multi-Speaker Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">GANSpeech: Adversarial Training for High-Fidelity Multi-Speaker Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210471.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-5|PAPER Thu-A-V-5-5 — Hierarchical Context-Aware Transformers for Non-Autoregressive Text to Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Hierarchical Context-Aware Transformers for Non-Autoregressive Text to Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211169.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-1|PAPER Wed-E-V-3-1 — Multi-Domain Knowledge Distillation via Uncertainty-Matching for End-to-End ASR Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Domain Knowledge Distillation via Uncertainty-Matching for End-to-End ASR Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210208.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-2|PAPER Tue-E-V-6-2 — A Preliminary Study of a Two-Stage Paradigm for Preserving Speaker Identity in Dysarthric Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Preliminary Study of a Two-Stage Paradigm for Preserving Speaker Identity in Dysarthric Voice Conversion</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210526.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-5|PAPER Wed-A-V-6-5 — AlloST: Low-Resource Speech Translation Without Source Transcription]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AlloST: Low-Resource Speech Translation Without Source Transcription</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210858.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-8|PAPER Thu-M-V-3-8 — Dual-Path Filter Network: Speaker-Aware Modeling for Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dual-Path Filter Network: Speaker-Aware Modeling for Speech Separation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210806.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-9|PAPER Thu-A-V-5-9 — Relational Data Selection for Data Augmentation of Speaker-Dependent Multi-Band MelGAN Vocoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Relational Data Selection for Data Augmentation of Speaker-Dependent Multi-Band MelGAN Vocoder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210220.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-3|PAPER Wed-E-V-5-3 — Human Listening and Live Captioning: Multi-Task Training for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Human Listening and Live Captioning: Multi-Task Training for Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210287.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-3|PAPER Thu-M-V-2-3 — A Fast Discrete Two-Step Learning Hashing for Scalable Cross-Modal Retrieval]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Fast Discrete Two-Step Learning Hashing for Scalable Cross-Modal Retrieval</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210261.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-3|PAPER Thu-M-V-4-3 — Online Speaker Diarization Equipped with Discriminative Modeling and Guided Inference]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Online Speaker Diarization Equipped with Discriminative Modeling and Guided Inference</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211258.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-8|PAPER Fri-A-V-1-8 — Multilingual Speech Evaluation: Case Studies on English, Malay and Tamil]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multilingual Speech Evaluation: Case Studies on English, Malay and Tamil</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218028.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-S&T-1-6|PAPER Fri-A-S&T-1-6 — WittyKiddy: Multilingual Spoken Language Learning for Kids]]</div>|^<div class="cpauthorindexpersoncardpapertitle">WittyKiddy: Multilingual Spoken Language Learning for Kids</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210904.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-4|PAPER Wed-E-V-2-4 — Stacked Recurrent Neural Networks for Speech-Based Inference of Attachment Condition in School Age Children]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Stacked Recurrent Neural Networks for Speech-Based Inference of Attachment Condition in School Age Children</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211720.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-2-4|PAPER Tue-M-O-2-4 — A Study into Pre-Training Strategies for Spoken Language Understanding on Dysarthric Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Study into Pre-Training Strategies for Spoken Language Understanding on Dysarthric Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210336.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-2-3|PAPER Tue-A-O-2-3 — Extracting Different Levels of Speech Information from EEG Using an LSTM-Based Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Extracting Different Levels of Speech Information from EEG Using an LSTM-Based Model</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212180.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-3-3|PAPER Wed-A-O-3-3 — Speech Disorder Classification Using Extended Factorized Hierarchical Variational Auto-Encoders]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Disorder Classification Using Extended Factorized Hierarchical Variational Auto-Encoders</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210695.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-3-3|PAPER Wed-E-O-3-3 — Audiovisual Transfer Learning for Audio Tagging and Sound Event Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audiovisual Transfer Learning for Audio Tagging and Sound Event Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210755.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-4|PAPER Wed-E-V-6-4 — AISHELL-3: A Multi-Speaker Mandarin TTS Corpus]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AISHELL-3: A Multi-Speaker Mandarin TTS Corpus</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211397.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-4|PAPER Thu-A-V-6-4 — AISHELL-4: An Open Source Dataset for Speech Enhancement, Separation, Recognition and Speaker Diarization in Conference Scenario]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AISHELL-4: An Open Source Dataset for Speech Enhancement, Separation, Recognition and Speaker Diarization in Conference Scenario</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212121.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-13|PAPER Thu-A-SS-1-13 — VAENAR-TTS: Variational Auto-Encoder Based Non-AutoRegressive Text-to-Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">VAENAR-TTS: Variational Auto-Encoder Based Non-AutoRegressive Text-to-Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212125.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-12|PAPER Fri-M-V-7-12 — Channel-Wise Gated Res2Net: Towards Robust Detection of Synthetic Speech Attacks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Channel-Wise Gated Res2Net: Towards Robust Detection of Synthetic Speech Attacks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212250.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-11|PAPER Thu-A-V-3-11 — Robust Speaker Extraction Network Based on Iterative Refined Adaptation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Speaker Extraction Network Based on Iterative Refined Adaptation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210776.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-1-3|PAPER Wed-M-V-1-3 — A Weight Moving Average Based Alternate Decoupled Learning Algorithm for Long-Tailed Language Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Weight Moving Average Based Alternate Decoupled Learning Algorithm for Long-Tailed Language Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212250.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-11|PAPER Thu-A-V-3-11 — Robust Speaker Extraction Network Based on Iterative Refined Adaptation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Speaker Extraction Network Based on Iterative Refined Adaptation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212044.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-10|PAPER Fri-A-V-6-10 — Bi-Directional Joint Neural Networks for Intent Classification and Slot Filling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bi-Directional Joint Neural Networks for Intent Classification and Slot Filling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210010.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-1|PAPER Tue-M-V-3-1 — Funnel Deep Complex U-Net for Phase-Aware Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Funnel Deep Complex U-Net for Phase-Aware Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210966.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-7|PAPER Tue-E-V-1-7 — AntVoice Neural Speaker Embedding System for FFSVC 2020]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AntVoice Neural Speaker Embedding System for FFSVC 2020</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212028.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-11|PAPER Tue-A-V-1-11 — Variational Information Bottleneck for Effective Low-Resource Audio Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Variational Information Bottleneck for Effective Low-Resource Audio Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210056.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-1-3|PAPER Thu-M-SS-1-3 — Dynamic Multi-Scale Convolution for Dialect Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dynamic Multi-Scale Convolution for Dialect Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210358.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-SS-1-4|PAPER Fri-M-SS-1-4 — Systems for Low-Resource Speech Recognition Tasks in Open Automatic Speech Recognition and Formosa Speech Recognition Challenges]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Systems for Low-Resource Speech Recognition Tasks in Open Automatic Speech Recognition and Formosa Speech Recognition Challenges</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210526.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-5|PAPER Wed-A-V-6-5 — AlloST: Low-Resource Speech Translation Without Source Transcription]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AlloST: Low-Resource Speech Translation Without Source Transcription</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210858.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-8|PAPER Thu-M-V-3-8 — Dual-Path Filter Network: Speaker-Aware Modeling for Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dual-Path Filter Network: Speaker-Aware Modeling for Speech Separation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210806.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-9|PAPER Thu-A-V-5-9 — Relational Data Selection for Data Augmentation of Speaker-Dependent Multi-Band MelGAN Vocoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Relational Data Selection for Data Augmentation of Speaker-Dependent Multi-Band MelGAN Vocoder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211356.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-6-7|PAPER Tue-A-V-6-7 — S2VC: A Framework for Any-to-Any Voice Conversion with Self-Supervised Pretrained Representations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">S2VC: A Framework for Any-to-Any Voice Conversion with Self-Supervised Pretrained Representations</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211775.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-9|PAPER Tue-E-V-3-9 — SUPERB: Speech Processing Universal PERformance Benchmark]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SUPERB: Speech Processing Universal PERformance Benchmark</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210563.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-5|PAPER Wed-E-V-3-5 — Towards Lifelong Learning of End-to-End ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Lifelong Learning of End-to-End ASR</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212013.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-9|PAPER Wed-E-V-6-9 — Utilizing Self-Supervised Representations for MOS Prediction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Utilizing Self-Supervised Representations for MOS Prediction</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210763.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-7|PAPER Thu-M-V-3-7 — Stabilizing Label Assignment for Speech Separation by Self-Supervised Pre-Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Stabilizing Label Assignment for Speech Separation by Self-Supervised Pre-Training</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210817.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-11|PAPER Fri-M-V-6-11 — Auto-KWS 2021 Challenge: Task, Datasets, and Baselines]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Auto-KWS 2021 Challenge: Task, Datasets, and Baselines</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211452.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-8|PAPER Fri-M-V-7-8 — Voting for the Right Answer: Adversarial Defense for Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voting for the Right Answer: Adversarial Defense for Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212124.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-10|PAPER Wed-E-V-6-10 — KazakhTTS: An Open-Source Kazakh Text-to-Speech Synthesis Dataset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">KazakhTTS: An Open-Source Kazakh Text-to-Speech Synthesis Dataset</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211547.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-1-3|PAPER Tue-M-O-1-3 — Evaluating the Extrapolation Capabilities of Neural Vocoders to Extreme Pitch Values]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Evaluating the Extrapolation Capabilities of Neural Vocoders to Extreme Pitch Values</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210188.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-2|PAPER Thu-A-V-5-2 — LiteTTS: A Lightweight Mel-Spectrogram-Free Text-to-Wave Synthesizer Based on Generative Adversarial Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">LiteTTS: A Lightweight Mel-Spectrogram-Free Text-to-Wave Synthesizer Based on Generative Adversarial Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211142.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-8|PAPER Fri-M-V-5-8 — Open-Set Audio Classification with Limited Training Resources Based on Augmentation Enhanced Variational Auto-Encoder GAN with Detection-Classification Joint Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Open-Set Audio Classification with Limited Training Resources Based on Augmentation Enhanced Variational Auto-Encoder GAN with Detection-Classification Joint Training</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211891.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-3-4|PAPER Tue-E-O-3-4 — Acoustic-Prosodic, Lexical and Demographic Cues to Persuasiveness in Competitive Debate Speeches]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic-Prosodic, Lexical and Demographic Cues to Persuasiveness in Competitive Debate Speeches</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210885.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-4-2|PAPER Fri-M-V-4-2 — Label Embedding for Chinese Grapheme-to-Phoneme Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Label Embedding for Chinese Grapheme-to-Phoneme Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210941.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-4|PAPER Thu-A-V-4-4 — ECAPA-TDNN Embeddings for Speaker Diarization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ECAPA-TDNN Embeddings for Speaker Diarization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212207.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-13|PAPER Wed-E-V-5-13 — SE-Conformer: Time-Domain Speech Enhancement Using Conformer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SE-Conformer: Time-Domain Speech Enhancement Using Conformer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210469.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-4|PAPER Thu-A-V-5-4 — Diff-TTS: A Denoising Diffusion Model for Text-to-Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Diff-TTS: A Denoising Diffusion Model for Text-to-Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210643.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-2|PAPER Wed-M-O-1-2 — Radically Old Way of Computing Spectra: Applications in End-to-End ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Radically Old Way of Computing Spectra: Applications in End-to-End ASR</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210327.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-2|PAPER Wed-M-V-3-2 — Cross-Lingual Low Resource Speaker Adaptation Using Phonological Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Lingual Low Resource Speaker Adaptation Using Phonological Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210788.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-6|PAPER Fri-A-V-6-6 — Speak or Chat with Me: End-to-End Spoken Language Understanding System with Flexible Inputs]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speak or Chat with Me: End-to-End Spoken Language Understanding System with Flexible Inputs</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210072.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-3|PAPER Tue-M-V-6-3 — Noisy Student-Teacher Training for Robust Keyword Spotting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Noisy Student-Teacher Training for Robust Keyword Spotting</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211040.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-12|PAPER Thu-A-V-6-12 — kosp2e: Korean Speech to English Translation Corpus]]</div>|^<div class="cpauthorindexpersoncardpapertitle">kosp2e: Korean Speech to English Translation Corpus</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211178.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-3-4|PAPER Tue-A-V-3-4 — Multiple Sound Source Localization Based on Interchannel Phase Differences in All Frequencies with Spectral Masks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multiple Sound Source Localization Based on Interchannel Phase Differences in All Frequencies with Spectral Masks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211270.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-5-9|PAPER Tue-A-V-5-9 — Enhancing Semantic Understanding with Self-Supervised Methods for Abstractive Dialogue Summarization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Enhancing Semantic Understanding with Self-Supervised Methods for Abstractive Dialogue Summarization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211270.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-5-9|PAPER Tue-A-V-5-9 — Enhancing Semantic Understanding with Self-Supervised Methods for Abstractive Dialogue Summarization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Enhancing Semantic Understanding with Self-Supervised Methods for Abstractive Dialogue Summarization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210831.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-10|PAPER Thu-A-V-5-10 — Reinforce-Aligner: Reinforcement Alignment Search for Robust End-to-End Text-to-Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reinforce-Aligner: Reinforcement Alignment Search for Robust End-to-End Text-to-Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212167.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-2-14|PAPER Fri-M-V-2-14 — The Pacific Expansion: Optimizing Phonetic Transcription of Archival Corpora]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Pacific Expansion: Optimizing Phonetic Transcription of Archival Corpora</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211732.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-O-1-4|PAPER Thu-A-O-1-4 — Importance of Parasagittal Sensor Information in Tongue Motion Capture Through a Diphonic Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Importance of Parasagittal Sensor Information in Tongue Motion Capture Through a Diphonic Analysis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211207.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-9|PAPER Fri-M-V-3-9 — Multi-Task Learning for End-to-End ASR Word and Utterance Confidence with Deletion Prediction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Task Learning for End-to-End ASR Word and Utterance Confidence with Deletion Prediction</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210204.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-3|PAPER Fri-M-V-6-3 — Personalized Keyphrase Detection Using Speaker and Environment Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Personalized Keyphrase Detection Using Speaker and Environment Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210281.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-3|PAPER Tue-A-V-1-3 — An Effective Mutual Mean Teaching Based Domain Adaptation Method for Sound Event Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Effective Mutual Mean Teaching Based Domain Adaptation Method for Sound Event Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210776.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-1-3|PAPER Wed-M-V-1-3 — A Weight Moving Average Based Alternate Decoupled Learning Algorithm for Long-Tailed Language Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Weight Moving Average Based Alternate Decoupled Learning Algorithm for Long-Tailed Language Identification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210819.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-5|PAPER Fri-A-V-3-5 — Extremely Low Footprint End-to-End ASR System for Smart Device]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Extremely Low Footprint End-to-End ASR System for Smart Device</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210245.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-1|PAPER Thu-A-V-6-1 — Spoken ObjectNet: A Bias-Controlled Spoken Caption Dataset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spoken ObjectNet: A Bias-Controlled Spoken Caption Dataset</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211863.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-13|PAPER Wed-A-V-6-13 — Effects of Feature Scaling and Fusion on Sign Language Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effects of Feature Scaling and Fusion on Sign Language Translation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210072.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-3|PAPER Tue-M-V-6-3 — Noisy Student-Teacher Training for Robust Keyword Spotting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Noisy Student-Teacher Training for Robust Keyword Spotting</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210646.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-6|PAPER Tue-E-V-1-6 — SpeakerStew: Scaling to Many Languages with a Triaged Multilingual Text-Dependent and Text-Independent Speaker Verification System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SpeakerStew: Scaling to Many Languages with a Triaged Multilingual Text-Dependent and Text-Independent Speaker Verification System</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210641.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-4|PAPER Fri-A-V-4-4 — Dr-Vectors: Decision Residual Networks and an Improved Loss for Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dr-Vectors: Decision Residual Networks and an Improved Loss for Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211033.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-2-2|PAPER Thu-M-SS-2-2 — Detecting English Speech in the Air Traffic Control Voice Communication]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detecting English Speech in the Air Traffic Control Voice Communication</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211373.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-2-4|PAPER Thu-M-SS-2-4 — Contextual Semi-Supervised Learning: An Approach to Leverage Air-Surveillance and Untranscribed ATC Data in ASR Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Contextual Semi-Supervised Learning: An Approach to Leverage Air-Surveillance and Untranscribed ATC Data in ASR Systems</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211619.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-2-5|PAPER Thu-M-SS-2-5 — Boosting of Contextual Information in ASR for Air-Traffic Call-Sign Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Boosting of Contextual Information in ASR for Air-Traffic Call-Sign Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210524.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-4|PAPER Fri-M-V-5-4 — Fake Audio Detection in Resource-Constrained Settings Using Microfeatures]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fake Audio Detection in Resource-Constrained Settings Using Microfeatures</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211553.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-SS-1-1|PAPER Wed-A-SS-1-1 — The ID R&D System Description for Short-Duration Speaker Verification Challenge 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The ID R&D System Description for Short-Duration Speaker Verification Challenge 2021</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210542.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-7|PAPER Wed-M-V-6-7 — Improving RNN-T ASR Accuracy Using Context Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving RNN-T ASR Accuracy Using Context Audio</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211765.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-2-6|PAPER Tue-E-O-2-6 — Automatic Classification of Phonation Types in Spontaneous Speech: Towards a New Workflow for the Characterization of Speakers’ Voice Quality]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Classification of Phonation Types in Spontaneous Speech: Towards a New Workflow for the Characterization of Speakers’ Voice Quality</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}}
</p></div>
{{Author Index Link Row}}
|cpborderless|k
|cpaidxlinkrowtable|k
|<$link to="AUTHOR LIST — A"><div class="cpaidxlinkrowstyle">A</div></$link>|<$link to="AUTHOR LIST — B"><div class="cpaidxlinkrowstyle">B</div></$link>|<$link to="AUTHOR LIST — C"><div class="cpaidxlinkrowstyle">C</div></$link>|<$link to="AUTHOR LIST — D"><div class="cpaidxlinkrowstyle">D</div></$link>|<$link to="AUTHOR LIST — E"><div class="cpaidxlinkrowstyle">E</div></$link>|<$link to="AUTHOR LIST — F"><div class="cpaidxlinkrowstyle">F</div></$link>|<$link to="AUTHOR LIST — G"><div class="cpaidxlinkrowstyle">G</div></$link>|<$link to="AUTHOR LIST — H"><div class="cpaidxlinkrowstyle">H</div></$link>|<$link to="AUTHOR LIST — I"><div class="cpaidxlinkrowstyle">I</div></$link>|<$link to="AUTHOR LIST — J"><div class="cpaidxlinkrowstyle">J</div></$link>|<$link to="AUTHOR LIST — K"><div class="cpaidxlinkrowstyle">K</div></$link>|<$link to="AUTHOR LIST — L"><div class="cpaidxlinkrowstyle">L</div></$link>|<$link to="AUTHOR LIST — M"><div class="cpaidxlinkrowstyle">M</div></$link>|
|<$link to="AUTHOR LIST — N"><div class="cpaidxlinkrowstyle">N</div></$link>|<$link to="AUTHOR LIST — O"><div class="cpaidxlinkrowstyle">O</div></$link>|<$link to="AUTHOR LIST — P"><div class="cpaidxlinkrowstyle">P</div></$link>|<$link to="AUTHOR LIST — Q"><div class="cpaidxlinkrowstyle">Q</div></$link>|<$link to="AUTHOR LIST — R"><div class="cpaidxlinkrowstyle">R</div></$link>|<$link to="AUTHOR LIST — S"><div class="cpaidxlinkrowstyle">S</div></$link>|<$link to="AUTHOR LIST — T"><div class="cpaidxlinkrowstyle">T</div></$link>|<$link to="AUTHOR LIST — U"><div class="cpaidxlinkrowstyle">U</div></$link>|<$link to="AUTHOR LIST — V"><div class="cpaidxlinkrowstyle">V</div></$link>|<$link to="AUTHOR LIST — W"><div class="cpaidxlinkrowstyle">W</div></$link>|<$link to="AUTHOR LIST — X"><div class="cpaidxlinkrowstyle">X</div></$link>|<$link to="AUTHOR LIST — Y"><div class="cpaidxlinkrowstyle">Y</div></$link>|<$link to="AUTHOR LIST — Z"><div class="cpaidxlinkrowstyle">Z</div></$link>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211801.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-2|PAPER Fri-A-SS-2-2 — Investigating the Utility of Multimodal Conversational Technology and Audiovisual Analytic Measures for the Assessment and Monitoring of Amyotrophic Lateral Sclerosis at Scale]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Utility of Multimodal Conversational Technology and Audiovisual Analytic Measures for the Assessment and Monitoring of Amyotrophic Lateral Sclerosis at Scale</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211354.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-9|PAPER Wed-M-V-4-9 — Effects of Prosodic Variations on Accidental Triggers of a Commercial Voice Assistant]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effects of Prosodic Variations on Accidental Triggers of a Commercial Voice Assistant</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212230.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-2-4|PAPER Wed-M-O-2-4 — Domain-Initial Strengthening in Turkish: Acoustic Cues to Prosodic Hierarchy in Stop Consonants]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Domain-Initial Strengthening in Turkish: Acoustic Cues to Prosodic Hierarchy in Stop Consonants</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211204.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-7|PAPER Wed-M-V-4-7 — Coded Speech Enhancement Using Neural Network-Based Vector-Quantized Residual Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Coded Speech Enhancement Using Neural Network-Based Vector-Quantized Residual Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210939.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-2-2|PAPER Tue-E-O-2-2 — Synchronic Fortition in Five Romance Languages? A Large Corpus-Based Study of Word-Initial Devoicing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Synchronic Fortition in Five Romance Languages? A Large Corpus-Based Study of Word-Initial Devoicing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210277.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-1-2|PAPER Wed-M-V-1-2 — Modeling and Training Strategies for Language Recognition Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modeling and Training Strategies for Language Recognition Systems</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210313.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-1-1|PAPER Wed-A-O-1-1 — BERT-Based Semantic Model for Rescoring N-Best Speech Recognition List]]</div>|^<div class="cpauthorindexpersoncardpapertitle">BERT-Based Semantic Model for Rescoring N-Best Speech Recognition List</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210276.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-1-2|PAPER Thu-M-SS-1-2 — Language Recognition on Unknown Conditions: The LORIA-Inria-MULTISPEECH System for AP20-OLR Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Language Recognition on Unknown Conditions: The LORIA-Inria-MULTISPEECH System for AP20-OLR Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211461.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-7|PAPER Tue-M-V-2-7 — Parallel Tacotron 2: A Non-Autoregressive Neural TTS Model with Differentiable Duration Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Parallel Tacotron 2: A Non-Autoregressive Neural TTS Model with Differentiable Duration Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210614.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-6|PAPER Wed-E-V-3-6 — Self-Adaptive Distillation for Multilingual Speech Recognition: Leveraging Student Independence]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Self-Adaptive Distillation for Multilingual Speech Recognition: Leveraging Student Independence</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211702.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-2|PAPER Tue-M-SS-1-2 — Transfer Learning-Based Cough Representations for Automatic Detection of COVID-19]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transfer Learning-Based Cough Representations for Automatic Detection of COVID-19</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211717.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-9|PAPER Wed-E-V-2-9 — Visual Speech for Obstructive Sleep Apnea Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Visual Speech for Obstructive Sleep Apnea Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212191.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-4|PAPER Tue-A-SS-2-4 — Detecting COVID-19 from Audio Recording of Coughs Using Random Forests and Support Vector Machines]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detecting COVID-19 from Audio Recording of Coughs Using Random Forests and Support Vector Machines</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210722.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-1-6|PAPER Fri-A-SS-1-6 — Nonlinear Acoustic Echo Cancellation with Deep Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Nonlinear Acoustic Echo Cancellation with Deep Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210087.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-9|PAPER Thu-M-V-4-9 — A Thousand Words are Worth More Than One Recording: //Word-Embedding// Based Speaker Change Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Thousand Words are Worth More Than One Recording: //Word-Embedding// Based Speaker Change Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210019.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-1|PAPER Tue-M-SS-1-1 — The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211373.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-2-4|PAPER Thu-M-SS-2-4 — Contextual Semi-Supervised Learning: An Approach to Leverage Air-Surveillance and Untranscribed ATC Data in ASR Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Contextual Semi-Supervised Learning: An Approach to Leverage Air-Surveillance and Untranscribed ATC Data in ASR Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210678.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-1|PAPER Fri-M-V-6-1 — Do Acoustic Word Embeddings Capture Phonological Similarity? An Empirical Study]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Do Acoustic Word Embeddings Capture Phonological Similarity? An Empirical Study</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210027.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-3|PAPER Tue-A-SS-1-3 — Adjunct-Emeritus Distillation for Semi-Supervised Language Model Adaptation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adjunct-Emeritus Distillation for Semi-Supervised Language Model Adaptation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211101.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-2-3|PAPER Tue-E-O-2-3 — Glottal Stops in Upper Sorbian: A Data-Driven Approach]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Glottal Stops in Upper Sorbian: A Data-Driven Approach</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211716.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-2|PAPER Fri-M-V-3-2 — LT-LM: A Novel Non-Autoregressive Language Model for Single-Shot Lattice Rescoring]]</div>|^<div class="cpauthorindexpersoncardpapertitle">LT-LM: A Novel Non-Autoregressive Language Model for Single-Shot Lattice Rescoring</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211716.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-2|PAPER Fri-M-V-3-2 — LT-LM: A Novel Non-Autoregressive Language Model for Single-Shot Lattice Rescoring]]</div>|^<div class="cpauthorindexpersoncardpapertitle">LT-LM: A Novel Non-Autoregressive Language Model for Single-Shot Lattice Rescoring</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210562.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-5-2|PAPER Thu-M-V-5-2 — Improving Multi-Speaker TTS Prosody Variance with a Residual Encoder and Normalizing Flows]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Multi-Speaker TTS Prosody Variance with a Residual Encoder and Normalizing Flows</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210701.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-4|PAPER Wed-A-V-1-4 — Effects of Voice Type and Task on L2 Learners’ Awareness of Pronunciation Errors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effects of Voice Type and Task on L2 Learners’ Awareness of Pronunciation Errors</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210691.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-3|PAPER Fri-A-V-1-3 — Understanding Medical Conversations: Rich Transcription, Confidence Scores & Information Extraction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Understanding Medical Conversations: Rich Transcription, Confidence Scores & Information Extraction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218007.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-S&T-1-7|PAPER Tue-A-S&T-1-7 — Advanced Semi-Blind Speaker Extraction and Tracking Implemented in Experimental Device with Revolving Dense Microphone Array]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Advanced Semi-Blind Speaker Extraction and Tracking Implemented in Experimental Device with Revolving Dense Microphone Array</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218007.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-S&T-1-7|PAPER Tue-A-S&T-1-7 — Advanced Semi-Blind Speaker Extraction and Tracking Implemented in Experimental Device with Revolving Dense Microphone Array]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Advanced Semi-Blind Speaker Extraction and Tracking Implemented in Experimental Device with Revolving Dense Microphone Array</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211084.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-2-2|PAPER Tue-M-O-2-2 — On Modeling Glottal Source Information for Phonation Assessment in Parkinson’s Disease]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On Modeling Glottal Source Information for Phonation Assessment in Parkinson’s Disease</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211488.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-3|PAPER Tue-M-SS-1-3 — The Phonetic Footprint of Covid-19?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Phonetic Footprint of Covid-19?</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211589.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-2|PAPER Thu-A-SS-2-2 — Influence of the Interviewer on the Automatic Assessment of Alzheimer’s Disease in the Context of the ADReSSo Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Influence of the Interviewer on the Automatic Assessment of Alzheimer’s Disease in the Context of the ADReSSo Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211084.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-2-2|PAPER Tue-M-O-2-2 — On Modeling Glottal Source Information for Phonation Assessment in Parkinson’s Disease]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On Modeling Glottal Source Information for Phonation Assessment in Parkinson’s Disease</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211488.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-3|PAPER Tue-M-SS-1-3 — The Phonetic Footprint of Covid-19?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Phonetic Footprint of Covid-19?</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211589.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-2|PAPER Thu-A-SS-2-2 — Influence of the Interviewer on the Automatic Assessment of Alzheimer’s Disease in the Context of the ADReSSo Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Influence of the Interviewer on the Automatic Assessment of Alzheimer’s Disease in the Context of the ADReSSo Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211003.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-8|PAPER Fri-M-V-1-8 — Phonetic Distance and Surprisal in Multilingual Priming: Evidence from Slavic]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonetic Distance and Surprisal in Multilingual Priming: Evidence from Slavic</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211159.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-7|PAPER Wed-E-V-2-7 — Non-Verbal Vocalisation and Laughter Detection Using Sequence-to-Sequence Models and Multi-Label Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Non-Verbal Vocalisation and Laughter Detection Using Sequence-to-Sequence Models and Multi-Label Training</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211796.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-7|PAPER Wed-A-V-1-7 — Investigating the Interplay Between Affective, Phonatory and Motoric Subsystems in Autism Spectrum Disorder Using a Multimodal Dialogue Agent]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Interplay Between Affective, Phonatory and Motoric Subsystems in Autism Spectrum Disorder Using a Multimodal Dialogue Agent</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211801.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-2|PAPER Fri-A-SS-2-2 — Investigating the Utility of Multimodal Conversational Technology and Audiovisual Analytic Measures for the Assessment and Monitoring of Amyotrophic Lateral Sclerosis at Scale]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Utility of Multimodal Conversational Technology and Audiovisual Analytic Measures for the Assessment and Monitoring of Amyotrophic Lateral Sclerosis at Scale</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210378.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-2|PAPER Fri-M-V-5-2 — A Multi-Branch Deep Learning Network for Automated Detection of COVID-19]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Multi-Branch Deep Learning Network for Automated Detection of COVID-19</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210011.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-2|PAPER Thu-A-V-6-2 — The Multilingual TEDx Corpus for Speech Recognition and Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Multilingual TEDx Corpus for Speech Recognition and Translation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211758.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-10|PAPER Tue-M-V-5-10 — Rethinking Evaluation in ASR: Are Our Models Robust Enough?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Rethinking Evaluation in ASR: Are Our Models Robust Enough?</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210236.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-4|PAPER Tue-A-V-4-4 — Robust wav2vec 2.0: Analyzing Domain Shift in Self-Supervised Pre-Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust wav2vec 2.0: Analyzing Domain Shift in Self-Supervised Pre-Training</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210740.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-8|PAPER Tue-A-V-4-8 — slimIPL: Language-Model-Free Iterative Pseudo-Labeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">slimIPL: Language-Model-Free Iterative Pseudo-Labeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212070.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-11|PAPER Tue-E-V-3-11 — FRILL: A Non-Semantic Speech Embedding for Mobile Devices]]</div>|^<div class="cpauthorindexpersoncardpapertitle">FRILL: A Non-Semantic Speech Embedding for Mobile Devices</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211560.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-13|PAPER Thu-A-V-3-13 — Deep Audio-Visual Speech Separation Based on Facial Motion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Audio-Visual Speech Separation Based on Facial Motion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210475.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-6|PAPER Thu-A-V-5-6 — Speech Resynthesis from Discrete Disentangled Self-Supervised Representations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Resynthesis from Discrete Disentangled Self-Supervised Representations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210153.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-4|PAPER Tue-A-SS-1-4 — Communication-Efficient Agnostic Federated Averaging]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Communication-Efficient Agnostic Federated Averaging</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210976.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-11|PAPER Wed-A-V-5-11 — High-Fidelity Parallel WaveGAN with Multi-Band Harmonic-Plus-Noise Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">High-Fidelity Parallel WaveGAN with Multi-Band Harmonic-Plus-Noise Model</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210885.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-4-2|PAPER Fri-M-V-4-2 — Label Embedding for Chinese Grapheme-to-Phoneme Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Label Embedding for Chinese Grapheme-to-Phoneme Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210866.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-2|PAPER Tue-M-V-2-2 — FastPitchFormant: Source-Filter Based Decomposed Modeling for Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">FastPitchFormant: Source-Filter Based Decomposed Modeling for Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210971.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-6|PAPER Wed-A-V-5-6 — GANSpeech: Adversarial Training for High-Fidelity Multi-Speaker Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">GANSpeech: Adversarial Training for High-Fidelity Multi-Speaker Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210471.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-5|PAPER Thu-A-V-5-5 — Hierarchical Context-Aware Transformers for Non-Autoregressive Text to Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Hierarchical Context-Aware Transformers for Non-Autoregressive Text to Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210400.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-8|PAPER Fri-M-V-6-8 — Auxiliary Sequence Labeling Tasks for Disfluency Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Auxiliary Sequence Labeling Tasks for Disfluency Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211850.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-10|PAPER Thu-A-SS-2-10 — Automatic Detection and Assessment of Alzheimer Disease Using Speech and Language Technologies in Low-Resource Scenarios]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Detection and Assessment of Alzheimer Disease Using Speech and Language Technologies in Low-Resource Scenarios</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211016.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-7|PAPER Wed-A-V-5-7 — UnivNet: A Neural Vocoder with Multi-Resolution Spectrogram Discriminators for High-Fidelity Waveform Generation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">UnivNet: A Neural Vocoder with Multi-Resolution Spectrogram Discriminators for High-Fidelity Waveform Generation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210041.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-1|PAPER Wed-A-V-5-1 — GAN Vocoder: Multi-Resolution Discriminator Is All You Need]]</div>|^<div class="cpauthorindexpersoncardpapertitle">GAN Vocoder: Multi-Resolution Discriminator Is All You Need</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211171.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-7|PAPER Thu-A-SS-1-7 — Layer Pruning on Demand with Intermediate CTC]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Layer Pruning on Demand with Intermediate CTC</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211270.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-5-9|PAPER Tue-A-V-5-9 — Enhancing Semantic Understanding with Self-Supervised Methods for Abstractive Dialogue Summarization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Enhancing Semantic Understanding with Self-Supervised Methods for Abstractive Dialogue Summarization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210322.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-4|PAPER Thu-A-V-2-4 — Reducing Streaming ASR Model Delay with Self Alignment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reducing Streaming ASR Model Delay with Self Alignment</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211335.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-9|PAPER Tue-M-V-6-9 — End-to-End Transformer-Based Open-Vocabulary Keyword Spotting with Location-Guided Local Attention]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Transformer-Based Open-Vocabulary Keyword Spotting with Location-Guided Local Attention</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211860.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-4|PAPER Wed-M-O-1-4 — SPGISpeech: 5,000 Hours of Transcribed Financial Audio for Fully Formatted End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SPGISpeech: 5,000 Hours of Transcribed Financial Audio for Fully Formatted End-to-End Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211091.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-8|PAPER Tue-M-V-6-8 — FastICARL: Fast Incremental Classifier and Representation Learning with Efficient Budget Allocation in Audio Sensing Applications]]</div>|^<div class="cpauthorindexpersoncardpapertitle">FastICARL: Fast Incremental Classifier and Representation Learning with Efficient Budget Allocation in Audio Sensing Applications</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210019.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-1|PAPER Tue-M-SS-1-1 — The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211214.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-8|PAPER Wed-M-V-4-8 — Multi-Channel Opus Compression for Far-Field Automatic Speech Recognition with a Fixed Bitrate Budget]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Channel Opus Compression for Far-Field Automatic Speech Recognition with a Fixed Bitrate Budget</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211339.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-7|PAPER Wed-E-V-1-7 — MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210528.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-7|PAPER Thu-A-V-5-7 — A Learned Conditional Prior for the VAE Acoustic Space of a TTS System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Learned Conditional Prior for the VAE Acoustic Space of a TTS System</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210086.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-2|PAPER Fri-M-V-1-2 — Detection of Lexical Stress Errors in Non-Native (L2) English with Data Augmentation and Attention]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detection of Lexical Stress Errors in Non-Native (L2) English with Data Augmentation and Attention</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210038.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-1|PAPER Fri-A-V-1-1 — Weakly-Supervised Word-Level Pronunciation Error Detection in Non-Native English Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Weakly-Supervised Word-Level Pronunciation Error Detection in Non-Native English Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212070.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-11|PAPER Tue-E-V-3-11 — FRILL: A Non-Semantic Speech Embedding for Mobile Devices]]</div>|^<div class="cpauthorindexpersoncardpapertitle">FRILL: A Non-Semantic Speech Embedding for Mobile Devices</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218023.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-S&T-1-2|PAPER Fri-A-S&T-1-2 — Save Your Voice: Voice Banking and TTS for Anyone]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Save Your Voice: Voice Banking and TTS for Anyone</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211801.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-2|PAPER Fri-A-SS-2-2 — Investigating the Utility of Multimodal Conversational Technology and Audiovisual Analytic Measures for the Assessment and Monitoring of Amyotrophic Lateral Sclerosis at Scale]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Utility of Multimodal Conversational Technology and Audiovisual Analytic Measures for the Assessment and Monitoring of Amyotrophic Lateral Sclerosis at Scale</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211611.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-2-2|PAPER Wed-A-O-2-2 — Using Games to Augment Corpora for Language Recognition and Confusability]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Using Games to Augment Corpora for Language Recognition and Confusability</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210698.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-7|PAPER Tue-A-V-1-7 — AST: Audio Spectrogram Transformer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AST: Audio Spectrogram Transformer</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211312.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-12|PAPER Wed-M-V-2-12 — AVLnet: Learning Audio-Visual Language Representations from Instructional Videos]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AVLnet: Learning Audio-Visual Language Representations from Instructional Videos</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211810.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-12|PAPER Thu-M-V-1-12 — CLAC: A Speech Corpus of Healthy English Speakers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">CLAC: A Speech Corpus of Healthy English Speakers</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211352.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-8|PAPER Thu-M-V-2-8 — Cascaded Multilingual Audio-Visual Learning from Videos]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cascaded Multilingual Audio-Visual Learning from Videos</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211689.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-7-7|PAPER Thu-M-V-7-7 — Joint Retrieval-Extraction Training for Evidence-Aware Dialog Response Selection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Joint Retrieval-Extraction Training for Evidence-Aware Dialog Response Selection</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210245.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-1|PAPER Thu-A-V-6-1 — Spoken ObjectNet: A Bias-Controlled Spoken Caption Dataset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spoken ObjectNet: A Bias-Controlled Spoken Caption Dataset</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210349.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-4|PAPER Thu-A-SS-1-4 — Non-Autoregressive Predictive Coding for Learning Speech Representations from Local Dependencies]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Non-Autoregressive Predictive Coding for Learning Speech Representations from Local Dependencies</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210206.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-4|PAPER Wed-M-V-6-4 — An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211969.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-9|PAPER Tue-M-SS-1-9 — Multi-Attentive Detection of the Spider Monkey Whinny in the (Actual) Wild]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Attentive Detection of the Spider Monkey Whinny in the (Actual) Wild</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212230.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-2-4|PAPER Wed-M-O-2-4 — Domain-Initial Strengthening in Turkish: Acoustic Cues to Prosodic Hierarchy in Stop Consonants]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Domain-Initial Strengthening in Turkish: Acoustic Cues to Prosodic Hierarchy in Stop Consonants</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212016.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-1-5|PAPER Tue-A-O-1-5 — ICSpk: Interpretable Complex Speaker Embedding Extractor from Raw Waveform]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ICSpk: Interpretable Complex Speaker Embedding Extractor from Raw Waveform</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210986.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-3-1|PAPER Wed-M-O-3-1 — Auxiliary Loss Function for Target Speech Extraction and Recognition with Weak Supervision Based on Speaker Characteristics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Auxiliary Loss Function for Target Speech Extraction and Recognition with Weak Supervision Based on Speaker Characteristics</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212025.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-1-4|PAPER Wed-E-O-1-4 — Effective Phase Encoding for End-To-End Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effective Phase Encoding for End-To-End Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211756.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-3-5|PAPER Thu-M-O-3-5 — Out-of-Vocabulary Words Detection with Attention and CTC Alignments in an End-to-End ASR System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Out-of-Vocabulary Words Detection with Attention and CTC Alignments in an End-to-End ASR System</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211033.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-2-2|PAPER Thu-M-SS-2-2 — Detecting English Speech in the Air Traffic Control Voice Communication]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detecting English Speech in the Air Traffic Control Voice Communication</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211619.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-2-5|PAPER Thu-M-SS-2-5 — Boosting of Contextual Information in ASR for Air-Traffic Call-Sign Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Boosting of Contextual Information in ASR for Air-Traffic Call-Sign Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211465.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-1-1|PAPER Tue-E-O-1-1 — Information Retrieval for ZeroSpeech 2021: The Submission by University of Wroclaw]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Information Retrieval for ZeroSpeech 2021: The Submission by University of Wroclaw</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211544.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-1-2|PAPER Tue-E-O-1-2 — Aligned Contrastive Predictive Coding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Aligned Contrastive Predictive Coding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211590.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-1-4|PAPER Fri-A-SS-1-4 — Y²-Net FCRN for Acoustic Echo and Noise Suppression]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Y²-Net FCRN for Acoustic Echo and Noise Suppression</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210546.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-1-2|PAPER Tue-M-O-1-2 — T5G2P: Using Text-to-Text Transfer Transformer for Grapheme-to-Phoneme Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">T5G2P: Using Text-to-Text Transfer Transformer for Grapheme-to-Phoneme Conversion</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218009.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-S&T-1-2|PAPER Wed-A-S&T-1-2 — Live TV Subtitling Through Respeaking]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Live TV Subtitling Through Respeaking</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211704.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-O-2-4|PAPER Fri-A-O-2-4 — Spoken Term Detection and Relevance Score Estimation Using Dot-Product of Pronunciation Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spoken Term Detection and Relevance Score Estimation Using Dot-Product of Pronunciation Embeddings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212127.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-3-6|PAPER Thu-M-O-3-6 — Training Hybrid Models on Noisy Transliterated Transcripts for Code-Switched Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Training Hybrid Models on Noisy Transliterated Transcripts for Code-Switched Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211965.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-5|PAPER Thu-A-V-6-5 — GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210248.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-9|PAPER Fri-A-V-3-9 — PQK: Model Compression via Pruning, Quantization, and Knowledge Distillation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PQK: Model Compression via Pruning, Quantization, and Knowledge Distillation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211191.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-1-5|PAPER Wed-A-O-1-5 — Fast Text-Only Domain Adaptation of RNN-Transducer Prediction Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fast Text-Only Domain Adaptation of RNN-Transducer Prediction Network</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211465.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-1-1|PAPER Tue-E-O-1-1 — Information Retrieval for ZeroSpeech 2021: The Submission by University of Wroclaw]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Information Retrieval for ZeroSpeech 2021: The Submission by University of Wroclaw</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211544.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-1-2|PAPER Tue-E-O-1-2 — Aligned Contrastive Predictive Coding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Aligned Contrastive Predictive Coding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211234.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-O-1-4|PAPER Fri-A-O-1-4 — Voice Activity Detection with Teacher-Student Domain Emulation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voice Activity Detection with Teacher-Student Domain Emulation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210717.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-2|PAPER Tue-A-V-4-2 — wav2vec-C: A Self-Supervised Model for Speech Representation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">wav2vec-C: A Self-Supervised Model for Speech Representation Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211188.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-8|PAPER Tue-A-SS-1-8 — Evaluating the Vulnerability of End-to-End Automatic Speech Recognition Models to Membership Inference Attacks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Evaluating the Vulnerability of End-to-End Automatic Speech Recognition Models to Membership Inference Attacks</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211882.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-9|PAPER Tue-A-SS-1-9 — SynthASR: Unlocking Synthetic Data for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SynthASR: Unlocking Synthetic Data for Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211935.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-10|PAPER Tue-E-V-1-10 — Scaling Effect of Self-Supervised Speech Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Scaling Effect of Self-Supervised Speech Models</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211644.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-10|PAPER Wed-E-V-3-10 — Scaling Laws for Acoustic Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Scaling Laws for Acoustic Models</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210562.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-5-2|PAPER Thu-M-V-5-2 — Improving Multi-Speaker TTS Prosody Variance with a Residual Encoder and Normalizing Flows]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Multi-Speaker TTS Prosody Variance with a Residual Encoder and Normalizing Flows</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210836.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-7|PAPER Thu-A-V-2-7 — Listen with Intent: Improving Speech Recognition with Audio-to-Intent Front-End]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Listen with Intent: Improving Speech Recognition with Audio-to-Intent Front-End</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210086.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-2|PAPER Fri-M-V-1-2 — Detection of Lexical Stress Errors in Non-Native (L2) English with Data Augmentation and Attention]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detection of Lexical Stress Errors in Non-Native (L2) English with Data Augmentation and Attention</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210797.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-4|PAPER Fri-A-V-3-4 — CoDERT: Distilling Encoder Representations with Co-Learning for Transducer-Based Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">CoDERT: Distilling Encoder Representations with Co-Learning for Transducer-Based Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210096.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-2|PAPER Thu-M-V-2-2 — Talk, Don’t Write: A Study of Direct Speech-Based Image Retrieval]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Talk, Don’t Write: A Study of Direct Speech-Based Image Retrieval</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211690.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-8|PAPER Tue-E-V-3-8 — Unsupervised Training of a DNN-Based Formant Tracker]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Training of a DNN-Based Formant Tracker</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210646.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-6|PAPER Tue-E-V-1-6 — SpeakerStew: Scaling to Many Languages with a Triaged Multilingual Text-Dependent and Text-Independent Speaker Verification System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SpeakerStew: Scaling to Many Languages with a Triaged Multilingual Text-Dependent and Text-Independent Speaker Verification System</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210641.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-4|PAPER Fri-A-V-4-4 — Dr-Vectors: Decision Residual Networks and an Improved Loss for Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dr-Vectors: Decision Residual Networks and an Improved Loss for Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212203.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-11|PAPER Wed-E-V-6-11 — Confidence Intervals for ASR-Based TTS Evaluation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Confidence Intervals for ASR-Based TTS Evaluation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211877.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-4|PAPER Tue-E-V-4-4 — DEXTER: Deep Encoding of External Knowledge for Named Entity Recognition in Virtual Assistants]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DEXTER: Deep Encoding of External Knowledge for Named Entity Recognition in Virtual Assistants</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211460.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-8|PAPER Tue-E-V-4-8 — Integrating Dialog History into End-to-End Spoken Language Understanding Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Integrating Dialog History into End-to-End Spoken Language Understanding Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212135.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-1|PAPER Wed-A-V-1-1 — Cross-Modal Learning for Audio-Visual Video Parsing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Modal Learning for Audio-Visual Video Parsing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210201.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-10|PAPER Wed-A-V-6-10 — Towards Simultaneous Machine Interpretation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Simultaneous Machine Interpretation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211905.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-10|PAPER Thu-A-V-6-10 — Europarl-ASR: A Large Corpus of Parliamentary Debates for Streaming ASR Benchmarking and Speech Data Filtering/Verbatimization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Europarl-ASR: A Large Corpus of Parliamentary Debates for Streaming ASR Benchmarking and Speech Data Filtering/Verbatimization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210201.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-10|PAPER Wed-A-V-6-10 — Towards Simultaneous Machine Interpretation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Simultaneous Machine Interpretation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211905.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-10|PAPER Thu-A-V-6-10 — Europarl-ASR: A Large Corpus of Parliamentary Debates for Streaming ASR Benchmarking and Speech Data Filtering/Verbatimization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Europarl-ASR: A Large Corpus of Parliamentary Debates for Streaming ASR Benchmarking and Speech Data Filtering/Verbatimization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211566.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-3|PAPER Wed-M-V-6-3 — Contextualized Streaming End-to-End Speech Recognition with Trie-Based Deep Biasing and Shallow Fusion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Contextualized Streaming End-to-End Speech Recognition with Trie-Based Deep Biasing and Shallow Fusion</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211272.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-1|PAPER Wed-A-V-3-1 — Dynamic Encoder Transducer: A Flexible Solution for Trading Off Accuracy for Latency]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dynamic Encoder Transducer: A Flexible Solution for Trading Off Accuracy for Latency</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211921.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-14|PAPER Wed-A-V-3-14 — Flexi-Transducer: Optimizing Latency, Accuracy and Compute for Multi-Domain On-Device Scenarios]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Flexi-Transducer: Optimizing Latency, Accuracy and Compute for Multi-Domain On-Device Scenarios</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211887.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-6|PAPER Fri-A-V-3-6 — Dissecting User-Perceived Latency of On-Device E2E Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dissecting User-Perceived Latency of On-Device E2E Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211657.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-11|PAPER Wed-E-V-3-11 — Leveraging Non-Target Language Resources to Improve ASR Performance in a Target Language]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Leveraging Non-Target Language Resources to Improve ASR Performance in a Target Language</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212135.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-1|PAPER Wed-A-V-1-1 — Cross-Modal Learning for Audio-Visual Video Parsing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Modal Learning for Audio-Visual Video Parsing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211890.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-12|PAPER Wed-A-V-4-12 — On the Design of Deep Priors for Unsupervised Audio Restoration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On the Design of Deep Priors for Unsupervised Audio Restoration</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210745.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-4|PAPER Fri-A-V-1-4 — Phone-Level Pronunciation Scoring for Spanish Speakers Learning English Using a GOP-DNN System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phone-Level Pronunciation Scoring for Spanish Speakers Learning English Using a GOP-DNN System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210975.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-O-1-2|PAPER Thu-A-O-1-2 — Comparison of the Finite Element Method, the Multimodal Method and the Transmission-Line Model for the Computation of Vocal Tract Transfer Functions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparison of the Finite Element Method, the Multimodal Method and the Transmission-Line Model for the Computation of Vocal Tract Transfer Functions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211765.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-2-6|PAPER Tue-E-O-2-6 — Automatic Classification of Phonation Types in Spontaneous Speech: Towards a New Workflow for the Characterization of Speakers’ Voice Quality]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Classification of Phonation Types in Spontaneous Speech: Towards a New Workflow for the Characterization of Speakers’ Voice Quality</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211712.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-2-6|PAPER Wed-A-O-2-6 — Adversarial Disentanglement of Speaker Representation for Attribute-Driven Privacy Preservation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adversarial Disentanglement of Speaker Representation for Attribute-Driven Privacy Preservation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211588.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-8|PAPER Thu-A-V-4-8 — Anonymous Speaker Clusters: Making Distinctions Between Anonymised Speech Recordings with Clustering Interface]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Anonymous Speaker Clusters: Making Distinctions Between Anonymised Speech Recordings with Clustering Interface</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211650.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-2-6|PAPER Thu-M-SS-2-6 — Modeling the Effect of Military Oxygen Masks on Speech Characteristics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modeling the Effect of Military Oxygen Masks on Speech Characteristics</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210291.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-1|PAPER Wed-E-V-2-1 — Automatic Speech Recognition Systems Errors for Objective Sleepiness Detection Through Voice]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Speech Recognition Systems Errors for Objective Sleepiness Detection Through Voice</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211604.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-O-1-5|PAPER Thu-A-O-1-5 — Learning Robust Speech Representation with an Articulatory-Regularized Variational Autoencoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Robust Speech Representation with an Articulatory-Regularized Variational Autoencoder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211769.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-1-4|PAPER Tue-A-O-1-4 — Multi-Task Neural Network for Robust Multiple Speaker Embedding Extraction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Task Neural Network for Robust Multiple Speaker Embedding Extraction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210694.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-5|PAPER Tue-E-V-2-5 — Personalized PercepNet: Real-Time, Low-Complexity Target Voice Separation and Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Personalized PercepNet: Real-Time, Low-Complexity Target Voice Separation and Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211214.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-8|PAPER Wed-M-V-4-8 — Multi-Channel Opus Compression for Far-Field Automatic Speech Recognition with a Fixed Bitrate Budget]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Channel Opus Compression for Far-Field Automatic Speech Recognition with a Fixed Bitrate Budget</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210993.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-1-2|PAPER Wed-E-O-1-2 — Graph Attention Networks for Anti-Spoofing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Graph Attention Networks for Anti-Spoofing</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210149.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-2|PAPER Thu-M-V-4-2 — Three-Class Overlapped Speech Detection Using a Convolutional Recurrent Neural Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Three-Class Overlapped Speech Detection Using a Convolutional Recurrent Neural Network</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210448.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-5|PAPER Thu-M-V-4-5 — Adapting Speaker Embeddings for Speaker Diarisation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adapting Speaker Embeddings for Speaker Diarisation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211370.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-4-8|PAPER Tue-M-V-4-8 — Leveraging ASR N-Best in Deep Entity Retrieval]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Leveraging ASR N-Best in Deep Entity Retrieval</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212006.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-15|PAPER Fri-A-SS-2-15 — Analysis and Tuning of a Voice Assistant System for Dysfluent Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis and Tuning of a Voice Assistant System for Dysfluent Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210759.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-5|PAPER Thu-A-SS-2-5 — Comparing Acoustic-Based Approaches for Alzheimer’s Disease Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparing Acoustic-Based Approaches for Alzheimer’s Disease Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210019.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-1|PAPER Tue-M-SS-1-1 — The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210154.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-8|PAPER Tue-M-SS-1-8 — Introducing a Central African Primate Vocalisation Dataset for Automated Species Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Introducing a Central African Primate Vocalisation Dataset for Automated Species Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211565.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-1-4|PAPER Tue-M-O-1-4 — A Systematic Review and Analysis of Multilingual Data Strategies in Text-to-Speech for Low-Resource Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Systematic Review and Analysis of Multilingual Data Strategies in Text-to-Speech for Low-Resource Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210545.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-9|PAPER Wed-A-V-3-9 — Online Compressive Transformer for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Online Compressive Transformer for End-to-End Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210534.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-7-3|PAPER Thu-M-V-7-3 — Causal Confusion Reduction for Robust Multi-Domain Dialogue Policy]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Causal Confusion Reduction for Robust Multi-Domain Dialogue Policy</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211973.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-2|PAPER Wed-E-V-5-2 — Speech Denoising with Auditory Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Denoising with Auditory Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211417.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-2-5|PAPER Tue-E-O-2-5 — Glottal Sounds in Korebaju]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Glottal Sounds in Korebaju</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211969.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-9|PAPER Tue-M-SS-1-9 — Multi-Attentive Detection of the Spider Monkey Whinny in the (Actual) Wild]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Attentive Detection of the Spider Monkey Whinny in the (Actual) Wild</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210688.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-SS-1-6|PAPER Tue-E-SS-1-6 — Articulatory Coordination for Speech Motor Tracking in Huntington Disease]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Articulatory Coordination for Speech Motor Tracking in Huntington Disease</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211519.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-7|PAPER Thu-A-SS-2-7 — Using the Outputs of Different Automatic Speech Recognition Paradigms for Acoustic- and BERT-Based Alzheimer’s Dementia Detection Through Spontaneous Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Using the Outputs of Different Automatic Speech Recognition Paradigms for Acoustic- and BERT-Based Alzheimer’s Dementia Detection Through Spontaneous Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211519.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-7|PAPER Thu-A-SS-2-7 — Using the Outputs of Different Automatic Speech Recognition Paradigms for Acoustic- and BERT-Based Alzheimer’s Dementia Detection Through Spontaneous Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Using the Outputs of Different Automatic Speech Recognition Paradigms for Acoustic- and BERT-Based Alzheimer’s Dementia Detection Through Spontaneous Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210411.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-3|PAPER Wed-A-V-1-3 — Speech Emotion Recognition Based on Attention Weight Correction Using Word-Level Confidence Measure]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Emotion Recognition Based on Attention Weight Correction Using Word-Level Confidence Measure</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210045.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-8|PAPER Thu-A-V-6-8 — Human-in-the-Loop Efficiency Analysis for Binary Classification in Edyson]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Human-in-the-Loop Efficiency Analysis for Binary Classification in Edyson</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218017.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-S&T-1-3|PAPER Thu-M-S&T-1-3 — The INGENIOUS Multilingual Operations App]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INGENIOUS Multilingual Operations App</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211570.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-SS-1-2|PAPER Wed-A-SS-1-2 — Integrating Frequency Translational Invariance in TDNNs and Frequency Positional Information in 2D ResNets to Enhance Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Integrating Frequency Translational Invariance in TDNNs and Frequency Positional Information in 2D ResNets to Enhance Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210941.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-4|PAPER Thu-A-V-4-4 — ECAPA-TDNN Embeddings for Speaker Diarization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ECAPA-TDNN Embeddings for Speaker Diarization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210021.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-1|PAPER Fri-M-V-1-1 — Cross-Linguistic Perception of the Japanese Singleton/Geminate Contrast: Korean, Mandarin and Mongolian Compared]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Linguistic Perception of the Japanese Singleton/Geminate Contrast: Korean, Mandarin and Mongolian Compared</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211736.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-3-2|PAPER Wed-A-O-3-2 — Automatic Extraction of Speech Rhythm Descriptors for Speech Intelligibility Assessment in the Context of Head and Neck Cancers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Extraction of Speech Rhythm Descriptors for Speech Intelligibility Assessment in the Context of Head and Neck Cancers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210605.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-10|PAPER Tue-M-V-3-10 — A Spectro-Temporal Glimpsing Index (STGI) for Speech Intelligibility Prediction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Spectro-Temporal Glimpsing Index (STGI) for Speech Intelligibility Prediction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210047.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-3|PAPER Tue-E-V-3-3 — Speech Decomposition Based on a Hybrid Speech Model and Optimal Segmentation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Decomposition Based on a Hybrid Speech Model and Optimal Segmentation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210677.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-7|PAPER Tue-A-V-4-7 — Semi-Supervision in ASR: Sequential MixMatch and Factorized TTS-Based Augmentation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semi-Supervision in ASR: Sequential MixMatch and Factorized TTS-Based Augmentation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210648.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-7|PAPER Wed-E-V-3-7 — Regularizing Word Segmentation by Creating Misspellings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Regularizing Word Segmentation by Creating Misspellings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210443.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-7|PAPER Wed-A-V-2-7 — Contextual Density Ratio for Language Model Biasing of Sequence to Sequence ASR Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Contextual Density Ratio for Language Model Biasing of Sequence to Sequence ASR Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211874.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-10|PAPER Tue-M-V-6-10 — Segmental Contrastive Predictive Coding for Unsupervised Word Segmentation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Segmental Contrastive Predictive Coding for Unsupervised Word Segmentation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211163.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-1-2|PAPER Tue-A-O-1-2 — Spine2Net: SpineNet with Res2Net and Time-Squeeze-and-Excitation Blocks for Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spine2Net: SpineNet with Res2Net and Time-Squeeze-and-Excitation Blocks for Speaker Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211502.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-9|PAPER Tue-E-V-1-9 — Deep Feature CycleGANs: Speaker Identity Preserving Non-Parallel Microphone-Telephone Domain Adaptation for Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Feature CycleGANs: Speaker Identity Preserving Non-Parallel Microphone-Telephone Domain Adaptation for Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211906.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-12|PAPER Thu-A-SS-1-12 — Align-Denoise: Single-Pass Non-Autoregressive Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Align-Denoise: Single-Pass Non-Autoregressive Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211850.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-10|PAPER Thu-A-SS-2-10 — Automatic Detection and Assessment of Alzheimer Disease Using Speech and Language Technologies in Low-Resource Scenarios]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Detection and Assessment of Alzheimer Disease Using Speech and Language Technologies in Low-Resource Scenarios</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211759.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-10|PAPER Fri-M-V-7-10 — Representation Learning to Classify and Detect Adversarial Attacks Against Speaker and Speech Recognition Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Representation Learning to Classify and Detect Adversarial Attacks Against Speaker and Speech Recognition Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212008.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-11|PAPER Thu-M-V-1-11 — Source and Vocal Tract Cues for Speech-Based Classification of Patients with Parkinson’s Disease and Healthy Subjects]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Source and Vocal Tract Cues for Speech-Based Classification of Patients with Parkinson’s Disease and Healthy Subjects</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211356.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-6-7|PAPER Tue-A-V-6-7 — S2VC: A Framework for Any-to-Any Voice Conversion with Self-Supervised Pretrained Representations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">S2VC: A Framework for Any-to-Any Voice Conversion with Self-Supervised Pretrained Representations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210845.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-5|PAPER Wed-A-V-5-5 — Fre-GAN: Adversarial Frequency-Consistent Audio Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fre-GAN: Adversarial Frequency-Consistent Audio Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211308.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-8|PAPER Tue-A-V-1-8 — Shallow Convolution-Augmented Transformer with Differentiable Neural Computer for Low-Complexity Classification of Variable-Length Acoustic Scene]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Shallow Convolution-Augmented Transformer with Differentiable Neural Computer for Low-Complexity Classification of Variable-Length Acoustic Scene</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210845.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-5|PAPER Wed-A-V-5-5 — Fre-GAN: Adversarial Frequency-Consistent Audio Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fre-GAN: Adversarial Frequency-Consistent Audio Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210947.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-7|PAPER Fri-A-V-5-7 — Towards Multi-Scale Style Control for Expressive Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Multi-Scale Style Control for Expressive Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210355.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-4|PAPER Wed-E-V-3-4 — Model-Agnostic Fast Adaptive Multi-Objective Balancing Algorithm for Multilingual Automatic Speech Recognition Model Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Model-Agnostic Fast Adaptive Multi-Objective Balancing Algorithm for Multilingual Automatic Speech Recognition Model Training</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212190.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-11|PAPER Fri-A-V-4-11 — Masked Proxy Loss for Text-Independent Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Masked Proxy Loss for Text-Independent Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212004.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-2-9|PAPER Fri-A-V-2-9 — Learning Mutual Correlation in Multimodal Transformer for Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Mutual Correlation in Multimodal Transformer for Speech Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211852.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-2-7|PAPER Fri-A-V-2-7 — Speech Emotion Recognition with Multi-Task Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Emotion Recognition with Multi-Task Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210206.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-4|PAPER Wed-M-V-6-4 — An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210289.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-9|PAPER Fri-A-SS-2-9 — Bayesian Parametric and Architectural Domain Adaptation of LF-MMI Trained TDNNs for Elderly and Dysarthric Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bayesian Parametric and Architectural Domain Adaptation of LF-MMI Trained TDNNs for Elderly and Dysarthric Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210111.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-1-2|PAPER Tue-M-V-1-2 — Bidirectional Multiscale Feature Aggregation for Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bidirectional Multiscale Feature Aggregation for Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210055.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-2|PAPER Fri-A-V-6-2 — Augmenting Slot Values and Contexts for Spoken Language Understanding with Pretrained Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Augmenting Slot Values and Contexts for Spoken Language Understanding with Pretrained Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211583.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-2-3|PAPER Fri-M-O-2-3 — Ctrl-P: Temporal Control of Prosodic Variation for Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ctrl-P: Temporal Control of Prosodic Variation for Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211610.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-2-4|PAPER Fri-M-O-2-4 — ADEPT: A Dataset for Evaluating Prosody Transfer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ADEPT: A Dataset for Evaluating Prosody Transfer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211086.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-SS-1-6|PAPER Fri-M-SS-1-6 — Combining Hybrid and End-to-End Approaches for the OpenASR20 Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Combining Hybrid and End-to-End Approaches for the OpenASR20 Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210570.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-4|PAPER Tue-E-V-2-4 — MIMO Self-Attentive RNN Beamformer for Multi-Speaker Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MIMO Self-Attentive RNN Beamformer for Multi-Speaker Speech Separation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210414.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-2|PAPER Wed-A-V-5-2 — Glow-WaveGAN: Learning Speech Representations from GAN-Based Variational Auto-Encoder for High Fidelity Flow-Based Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Glow-WaveGAN: Learning Speech Representations from GAN-Based Variational Auto-Encoder for High Fidelity Flow-Based Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210412.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-4|PAPER Fri-A-V-5-4 — Controllable Context-Aware Conversational Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Controllable Context-Aware Conversational Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210964.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-9|PAPER Tue-M-V-5-9 — Leveraging Phone Mask Training for Phonetic-Reduction-Robust E2E Uyghur Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Leveraging Phone Mask Training for Phonetic-Reduction-Robust E2E Uyghur Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211063.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-SS-1-5|PAPER Fri-M-SS-1-5 — The TNT Team System Descriptions of Cantonese and Mongolian for IARPA OpenASR20]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The TNT Team System Descriptions of Cantonese and Mongolian for IARPA OpenASR20</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211066.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-4|PAPER Tue-E-V-3-4 — Dropout Regularization for Self-Supervised Learning of Transformer Encoder Speech Representation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dropout Regularization for Self-Supervised Learning of Transformer Encoder Speech Representation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210142.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-1|PAPER Thu-M-V-3-1 — Ultra Fast Speech Separation Model with Teacher Student Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ultra Fast Speech Separation Model with Teacher Student Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210921.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-9|PAPER Thu-M-V-3-9 — Investigation of Practical Aspects of Single Channel Speech Separation for ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigation of Practical Aspects of Single Channel Speech Separation for ASR</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211397.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-4|PAPER Thu-A-V-6-4 — AISHELL-4: An Open Source Dataset for Speech Enhancement, Separation, Recognition and Speaker Diarization in Conference Scenario]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AISHELL-4: An Open Source Dataset for Speech Enhancement, Separation, Recognition and Speaker Diarization in Conference Scenario</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211666.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-11|PAPER Fri-M-V-3-11 — Sequence-Level Confidence Classifier for ASR Utterance Accuracy and Application to Acoustic Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sequence-Level Confidence Classifier for ASR Utterance Accuracy and Application to Acoustic Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211949.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-10|PAPER Thu-A-V-2-10 — Improving Multilingual Transformer Transducer Models by Reducing Language Confusions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Multilingual Transformer Transducer Models by Reducing Language Confusions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211841.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-10|PAPER Tue-E-V-3-10 — Synchronising Speech Segments with Musical Beats in Mandarin and English Singing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Synchronising Speech Segments with Musical Beats in Mandarin and English Singing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211216.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-8|PAPER Tue-E-V-1-8 — Gradient Regularization for Noise-Robust Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Gradient Regularization for Noise-Robust Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210117.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-1|PAPER Tue-E-V-4-1 — Data Augmentation for Spoken Language Understanding via Pretrained Language Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Data Augmentation for Spoken Language Understanding via Pretrained Language Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211242.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-1|PAPER Tue-M-V-5-1 — End-to-End Spelling Correction Conditioned on Acoustic Feature for Code-Switching Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Spelling Correction Conditioned on Acoustic Feature for Code-Switching Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210794.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-7|PAPER Tue-A-SS-1-7 — Continual Learning for Fake Audio Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Continual Learning for Fake Audio Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210930.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-5|PAPER Wed-M-V-4-5 — Half-Truth: A Partially Fake Audio Detection Dataset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Half-Truth: A Partially Fake Audio Detection Dataset</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211367.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-1|PAPER Fri-M-V-3-1 — FSR: Accelerating the Inference Process of Transducer-Based Models by Applying Fast-Skip Regularization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">FSR: Accelerating the Inference Process of Transducer-Based Models by Applying Fast-Skip Regularization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210298.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-SS-1-4|PAPER Wed-M-SS-1-4 — Improving Channel Decorrelation for Multi-Channel Target Speech Extraction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Channel Decorrelation for Multi-Channel Target Speech Extraction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212028.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-11|PAPER Tue-A-V-1-11 — Variational Information Bottleneck for Effective Low-Resource Audio Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Variational Information Bottleneck for Effective Low-Resource Audio Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210375.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-6|PAPER Wed-A-V-4-6 — Reliable Intensity Vector Selection for Multi-Source Direction-of-Arrival Estimation Using a Single Acoustic Vector Sensor]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reliable Intensity Vector Selection for Multi-Source Direction-of-Arrival Estimation Using a Single Acoustic Vector Sensor</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211242.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-1|PAPER Tue-M-V-5-1 — End-to-End Spelling Correction Conditioned on Acoustic Feature for Code-Switching Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Spelling Correction Conditioned on Acoustic Feature for Code-Switching Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210794.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-7|PAPER Tue-A-SS-1-7 — Continual Learning for Fake Audio Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Continual Learning for Fake Audio Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210930.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-5|PAPER Wed-M-V-4-5 — Half-Truth: A Partially Fake Audio Detection Dataset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Half-Truth: A Partially Fake Audio Detection Dataset</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211176.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-8|PAPER Wed-E-V-2-8 — TDCA-Net: Time-Domain Channel Attention Network for Depression Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">TDCA-Net: Time-Domain Channel Attention Network for Depression Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211367.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-1|PAPER Fri-M-V-3-1 — FSR: Accelerating the Inference Process of Transducer-Based Models by Applying Fast-Skip Regularization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">FSR: Accelerating the Inference Process of Transducer-Based Models by Applying Fast-Skip Regularization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210538.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-3|PAPER Tue-E-V-2-3 — Residual Echo and Noise Cancellation with Feature Attention Module and Multi-Domain Loss Function]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Residual Echo and Noise Cancellation with Feature Attention Module and Multi-Domain Loss Function</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210147.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-7|PAPER Fri-M-V-6-7 — A Meta-Learning Approach for User-Defined Spoken Term Classification with Varying Classes and Examples]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Meta-Learning Approach for User-Defined Spoken Term Classification with Varying Classes and Examples</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210440.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-5|PAPER Tue-A-V-2-5 — An Attention Self-Supervised Contrastive Learning Based Three-Stage Model for Hand Shape Feature Representation in Cued Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Attention Self-Supervised Contrastive Learning Based Three-Stage Model for Hand Shape Feature Representation in Cued Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210432.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-4|PAPER Thu-M-V-2-4 — Cross-Modal Knowledge Distillation Method for Automatic Cued Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Modal Knowledge Distillation Method for Automatic Cued Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211162.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-1|PAPER Tue-E-V-5-1 — Semantic Data Augmentation for End-to-End Mandarin Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semantic Data Augmentation for End-to-End Mandarin Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210481.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-2|PAPER Tue-E-V-2-2 — TeCANet: Temporal-Contextual Attention Network for Environment-Aware Speech Dereverberation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">TeCANet: Temporal-Contextual Attention Network for Environment-Aware Speech Dereverberation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210060.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-4|PAPER Fri-A-SS-2-4 — Spectro-Temporal Deep Features for Disordered Speech Assessment and Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spectro-Temporal Deep Features for Disordered Speech Assessment and Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210168.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-6|PAPER Fri-A-SS-2-6 — Adversarial Data Augmentation for Disordered Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adversarial Data Augmentation for Disordered Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210289.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-9|PAPER Fri-A-SS-2-9 — Bayesian Parametric and Architectural Domain Adaptation of LF-MMI Trained TDNNs for Elderly and Dysarthric Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bayesian Parametric and Architectural Domain Adaptation of LF-MMI Trained TDNNs for Elderly and Dysarthric Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211889.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-13|PAPER Tue-M-V-3-13 — Restoring Degraded Speech via a Modified Diffusion Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Restoring Degraded Speech via a Modified Diffusion Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210852.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-1|PAPER Tue-M-V-2-1 — TacoLPCNet: Fast and Stable TTS by Conditioning LPCNet on Mel Spectrogram Predictions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">TacoLPCNet: Fast and Stable TTS by Conditioning LPCNet on Mel Spectrogram Predictions</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210887.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-4-7|PAPER Tue-M-V-4-7 — Domain-Specific Multi-Agent Dialog Policy Learning in Multi-Domain Task-Oriented Scenarios]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Domain-Specific Multi-Agent Dialog Policy Learning in Multi-Domain Task-Oriented Scenarios</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211978.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-11|PAPER Tue-E-V-1-11 — Joint Feature Enhancement and Speaker Recognition with Multi-Objective Task-Oriented Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Joint Feature Enhancement and Speaker Recognition with Multi-Objective Task-Oriented Network</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211133.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-2-6|PAPER Fri-A-V-2-6 — Metric Learning Based Feature Representation with Gated Fusion Model for Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Metric Learning Based Feature Representation with Gated Fusion Model for Speech Emotion Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212067.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-2-10|PAPER Fri-A-V-2-10 — Time-Frequency Representation Learning with Graph Convolutional Network for Dialogue-Level Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Time-Frequency Representation Learning with Graph Convolutional Network for Dialogue-Level Speech Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212016.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-1-5|PAPER Tue-A-O-1-5 — ICSpk: Interpretable Complex Speaker Embedding Extractor from Raw Waveform]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ICSpk: Interpretable Complex Speaker Embedding Extractor from Raw Waveform</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212028.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-11|PAPER Tue-A-V-1-11 — Variational Information Bottleneck for Effective Low-Resource Audio Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Variational Information Bottleneck for Effective Low-Resource Audio Classification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211066.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-4|PAPER Tue-E-V-3-4 — Dropout Regularization for Self-Supervised Learning of Transformer Encoder Speech Representation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dropout Regularization for Self-Supervised Learning of Transformer Encoder Speech Representation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211996.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-9|PAPER Wed-M-V-3-9 — Speech2Video: Cross-Modal Distillation for Speech to Video Generation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech2Video: Cross-Modal Distillation for Speech to Video Generation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212025.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-1-4|PAPER Wed-E-O-1-4 — Effective Phase Encoding for End-To-End Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effective Phase Encoding for End-To-End Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212039.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-1|PAPER Thu-A-V-5-1 — Federated Learning with Dynamic Transformer for Text to Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Federated Learning with Dynamic Transformer for Text to Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212027.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-4|PAPER Wed-A-V-6-4 — CoVoST 2 and Massively Multilingual Speech Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">CoVoST 2 and Massively Multilingual Speech Translation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211775.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-9|PAPER Tue-E-V-3-9 — SUPERB: Speech Processing Universal PERformance Benchmark]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SUPERB: Speech Processing Universal PERformance Benchmark</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211887.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-6|PAPER Fri-A-V-3-6 — Dissecting User-Perceived Latency of On-Device E2E Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dissecting User-Perceived Latency of On-Device E2E Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211154.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-1-8|PAPER Thu-A-V-1-8 — Graph Isomorphism Network for Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Graph Isomorphism Network for Speech Emotion Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210718.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-2-2|PAPER Fri-A-V-2-2 — A Speech Emotion Recognition Framework for Better Discrimination of Confusions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Speech Emotion Recognition Framework for Better Discrimination of Confusions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211133.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-2-6|PAPER Fri-A-V-2-6 — Metric Learning Based Feature Representation with Gated Fusion Model for Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Metric Learning Based Feature Representation with Gated Fusion Model for Speech Emotion Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212067.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-2-10|PAPER Fri-A-V-2-10 — Time-Frequency Representation Learning with Graph Convolutional Network for Dialogue-Level Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Time-Frequency Representation Learning with Graph Convolutional Network for Dialogue-Level Speech Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211965.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-5|PAPER Thu-A-V-6-5 — GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212002.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-11|PAPER Thu-A-SS-2-11 — Automatic Detection of Alzheimer’s Disease Using Spontaneous Speech Only]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Detection of Alzheimer’s Disease Using Spontaneous Speech Only</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211495.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-1-7|PAPER Wed-M-V-1-7 — E2E-Based Multi-Task Learning Approach to Joint Speech and Accent Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">E2E-Based Multi-Task Learning Approach to Joint Speech and Accent Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210010.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-1|PAPER Tue-M-V-3-1 — Funnel Deep Complex U-Net for Phase-Aware Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Funnel Deep Complex U-Net for Phase-Aware Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210520.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-4|PAPER Wed-E-V-5-4 — Multi-Stage Progressive Speech Enhancement Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Stage Progressive Speech Enhancement Network</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210506.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-9|PAPER Tue-E-V-6-9 — Two-Pathway Style Embedding for Arbitrary Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Two-Pathway Style Embedding for Arbitrary Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212039.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-1|PAPER Thu-A-V-5-1 — Federated Learning with Dynamic Transformer for Text to Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Federated Learning with Dynamic Transformer for Text to Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211935.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-10|PAPER Tue-E-V-1-10 — Scaling Effect of Self-Supervised Speech Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Scaling Effect of Self-Supervised Speech Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211990.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-6-9|PAPER Tue-A-V-6-9 — Adversarially Learning Disentangled Speech Representations for Robust Multi-Factor Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adversarially Learning Disentangled Speech Representations for Robust Multi-Factor Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212021.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-9|PAPER Fri-A-V-4-9 — Automatic Error Correction for Speaker Embedding Learning with Noisy Labels]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Error Correction for Speaker Embedding Learning with Noisy Labels</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210965.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-SS-1-7|PAPER Wed-A-SS-1-7 — The Sogou System for Short-Duration Speaker Verification Challenge 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Sogou System for Short-Duration Speaker Verification Challenge 2021</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210067.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-SS-1-3|PAPER Fri-M-SS-1-3 — An Improved Wav2Vec 2.0 Pre-Training Approach Using Enhanced Local Dependency Modeling for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Improved Wav2Vec 2.0 Pre-Training Approach Using Enhanced Local Dependency Modeling for Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212002.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-11|PAPER Thu-A-SS-2-11 — Automatic Detection of Alzheimer’s Disease Using Spontaneous Speech Only]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Detection of Alzheimer’s Disease Using Spontaneous Speech Only</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211169.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-1|PAPER Wed-E-V-3-1 — Multi-Domain Knowledge Distillation via Uncertainty-Matching for End-to-End ASR Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Domain Knowledge Distillation via Uncertainty-Matching for End-to-End ASR Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211384.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-1|PAPER Fri-A-SS-2-1 — Automatic Speech Recognition of Disordered Speech: Personalized Models Outperforming Human Listeners on Short Phrases]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Speech Recognition of Disordered Speech: Personalized Models Outperforming Human Listeners on Short Phrases</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210697.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-12|PAPER Fri-A-SS-2-12 — Disordered Speech Data Collection: Lessons Learned at 1 Million Utterances from Project Euphonia]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Disordered Speech Data Collection: Lessons Learned at 1 Million Utterances from Project Euphonia</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211913.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-14|PAPER Fri-A-SS-2-14 — Comparing Supervised Models and Learned Speech Representations for Classifying Intelligibility of Disordered Speech on Selected Phrases]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparing Supervised Models and Learned Speech Representations for Classifying Intelligibility of Disordered Speech on Selected Phrases</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210874.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-7-4|PAPER Thu-M-V-7-4 — Timing Generating Networks: Neural Network Based Precise Turn-Taking Timing Prediction in Multiparty Conversation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Timing Generating Networks: Neural Network Based Precise Turn-Taking Timing Prediction in Multiparty Conversation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210235.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-2|PAPER Tue-E-V-1-2 — The DKU-Duke-Lenovo System Description for the Fearless Steps Challenge Phase III]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The DKU-Duke-Lenovo System Description for the Fearless Steps Challenge Phase III</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210739.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-9|PAPER Wed-A-V-2-9 — BART Based Semantic Correction for Mandarin Automatic Speech Recognition System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">BART Based Semantic Correction for Mandarin Automatic Speech Recognition System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210057.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-1|PAPER Thu-A-V-2-1 — Cross-Domain Speech Recognition with Unsupervised Character-Level Distribution Matching]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Domain Speech Recognition with Unsupervised Character-Level Distribution Matching</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218023.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-S&T-1-2|PAPER Fri-A-S&T-1-2 — Save Your Voice: Voice Banking and TTS for Anyone]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Save Your Voice: Voice Banking and TTS for Anyone</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210192.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-3-3|PAPER Wed-M-O-3-3 — Using X-Vectors for Speech Activity Detection in Broadcast Streams]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Using X-Vectors for Speech Activity Detection in Broadcast Streams</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211708.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-3|PAPER Wed-A-V-2-3 — Incorporating External POS Tagger for Punctuation Restoration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Incorporating External POS Tagger for Punctuation Restoration</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210024.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-2|PAPER Tue-A-V-2-2 — Effect of Carrier Bandwidth on Understanding Mandarin Sentences in Simulated Electric-Acoustic Hearing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effect of Carrier Bandwidth on Understanding Mandarin Sentences in Simulated Electric-Acoustic Hearing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210019.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-1|PAPER Tue-M-SS-1-1 — The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211320.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-9|PAPER Thu-M-V-1-9 — Uncertainty-Aware COVID-19 Detection from Imbalanced Sound Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Uncertainty-Aware COVID-19 Detection from Imbalanced Sound Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211326.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-2-6|PAPER Fri-M-V-2-6 — A Cross-Dialectal Comparison of Apical Vowels in Beijing Mandarin, Northeastern Mandarin and Southwestern Mandarin: An EMA and Ultrasound Study]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Cross-Dialectal Comparison of Apical Vowels in Beijing Mandarin, Northeastern Mandarin and Southwestern Mandarin: An EMA and Ultrasound Study</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212171.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-1-1|PAPER Thu-M-SS-1-1 — Oriental Language Recognition (OLR) 2020: Summary and Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Oriental Language Recognition (OLR) 2020: Summary and Analysis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212161.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-10|PAPER Fri-A-V-4-10 — An Integrated Framework for Two-Pass Personalized Voice Trigger]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Integrated Framework for Two-Pass Personalized Voice Trigger</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211787.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-4|PAPER Wed-A-V-2-4 — Phonetically Induced Subwords for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonetically Induced Subwords for End-to-End Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210296.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-4|PAPER Wed-E-SS-1-4 — DPCRN: Dual-Path Convolution Recurrent Network for Single Channel Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DPCRN: Dual-Path Convolution Recurrent Network for Single Channel Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211723.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-1-11|PAPER Thu-A-V-1-11 — Leveraging Pre-Trained Language Model for Speech Sentiment Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Leveraging Pre-Trained Language Model for Speech Sentiment Analysis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210682.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-6|PAPER Wed-M-V-5-6 — Effects of Aging and Age-Related Hearing Loss on Talker Discrimination]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effects of Aging and Age-Related Hearing Loss on Talker Discrimination</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210366.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-1|PAPER Tue-E-V-2-1 — Multi-Stream Gated and Pyramidal Temporal Convolutional Neural Networks for Audio-Visual Speech Separation in Multi-Talker Environments]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Stream Gated and Pyramidal Temporal Convolutional Neural Networks for Audio-Visual Speech Separation in Multi-Talker Environments</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212016.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-1-5|PAPER Tue-A-O-1-5 — ICSpk: Interpretable Complex Speaker Embedding Extractor from Raw Waveform]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ICSpk: Interpretable Complex Speaker Embedding Extractor from Raw Waveform</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212028.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-11|PAPER Tue-A-V-1-11 — Variational Information Bottleneck for Effective Low-Resource Audio Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Variational Information Bottleneck for Effective Low-Resource Audio Classification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211066.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-4|PAPER Tue-E-V-3-4 — Dropout Regularization for Self-Supervised Learning of Transformer Encoder Speech Representation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dropout Regularization for Self-Supervised Learning of Transformer Encoder Speech Representation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212053.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-11|PAPER Tue-E-V-5-11 — Extending Pronunciation Dictionary with Automatically Detected Word Mispronunciations to Improve PAII’s System for Interspeech 2021 Non-Native Child English Close Track ASR Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Extending Pronunciation Dictionary with Automatically Detected Word Mispronunciations to Improve PAII’s System for Interspeech 2021 Non-Native Child English Close Track ASR Challenge</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210771.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-5|PAPER Wed-M-V-3-5 — EfficientSing: A Chinese Singing Voice Synthesis System Using Duration-Free Acoustic Model and HiFi-GAN Vocoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">EfficientSing: A Chinese Singing Voice Synthesis System Using Duration-Free Acoustic Model and HiFi-GAN Vocoder</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211996.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-9|PAPER Wed-M-V-3-9 — Speech2Video: Cross-Modal Distillation for Speech to Video Generation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech2Video: Cross-Modal Distillation for Speech to Video Generation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212025.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-1-4|PAPER Wed-E-O-1-4 — Effective Phase Encoding for End-To-End Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effective Phase Encoding for End-To-End Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212039.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-1|PAPER Thu-A-V-5-1 — Federated Learning with Dynamic Transformer for Text to Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Federated Learning with Dynamic Transformer for Text to Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211955.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-1|PAPER Thu-A-SS-1-1 — An Improved Single Step Non-Autoregressive Transformer for Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Improved Single Step Non-Autoregressive Transformer for Automatic Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211232.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-4-4|PAPER Fri-M-V-4-4 — Improving Polyphone Disambiguation for Mandarin Chinese by Combining Mix-Pooling Strategy and Window-Based Attention]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Polyphone Disambiguation for Mandarin Chinese by Combining Mix-Pooling Strategy and Window-Based Attention</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210559.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-1-4|PAPER Tue-M-V-1-4 — Improving Deep CNN Architectures with Variable-Length Training Samples for Text-Independent Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Deep CNN Architectures with Variable-Length Training Samples for Text-Independent Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210807.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-1-5|PAPER Thu-M-SS-1-5 — Language Recognition Based on Unsupervised Pretrained Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Language Recognition Based on Unsupervised Pretrained Models</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211063.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-SS-1-5|PAPER Fri-M-SS-1-5 — The TNT Team System Descriptions of Cantonese and Mongolian for IARPA OpenASR20]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The TNT Team System Descriptions of Cantonese and Mongolian for IARPA OpenASR20</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211990.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-6-9|PAPER Tue-A-V-6-9 — Adversarially Learning Disentangled Speech Representations for Robust Multi-Factor Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adversarially Learning Disentangled Speech Representations for Robust Multi-Factor Voice Conversion</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210947.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-7|PAPER Fri-A-V-5-7 — Towards Multi-Scale Style Control for Expressive Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Multi-Scale Style Control for Expressive Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211397.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-4|PAPER Thu-A-V-6-4 — AISHELL-4: An Open Source Dataset for Speech Enhancement, Separation, Recognition and Speaker Diarization in Conference Scenario]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AISHELL-4: An Open Source Dataset for Speech Enhancement, Separation, Recognition and Speaker Diarization in Conference Scenario</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211063.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-SS-1-5|PAPER Fri-M-SS-1-5 — The TNT Team System Descriptions of Cantonese and Mongolian for IARPA OpenASR20]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The TNT Team System Descriptions of Cantonese and Mongolian for IARPA OpenASR20</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211877.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-4|PAPER Tue-E-V-4-4 — DEXTER: Deep Encoding of External Knowledge for Named Entity Recognition in Virtual Assistants]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DEXTER: Deep Encoding of External Knowledge for Named Entity Recognition in Virtual Assistants</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211148.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-6|PAPER Wed-E-V-6-6 — EMOVIE: A Mandarin Emotion Speech Dataset with a Simple Emotional Text-to-Speech Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">EMOVIE: A Mandarin Emotion Speech Dataset with a Simple Emotional Text-to-Speech Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210817.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-11|PAPER Fri-M-V-6-11 — Auto-KWS 2021 Challenge: Task, Datasets, and Baselines]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Auto-KWS 2021 Challenge: Task, Datasets, and Baselines</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211171.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-7|PAPER Thu-A-SS-1-7 — Layer Pruning on Demand with Intermediate CTC]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Layer Pruning on Demand with Intermediate CTC</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210375.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-6|PAPER Wed-A-V-4-6 — Reliable Intensity Vector Selection for Multi-Source Direction-of-Arrival Estimation Using a Single Acoustic Vector Sensor]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reliable Intensity Vector Selection for Multi-Source Direction-of-Arrival Estimation Using a Single Acoustic Vector Sensor</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211305.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-5|PAPER Thu-M-V-1-5 — Detection of Consonant Errors in Disordered Speech Based on Consonant-Vowel Segment Embedding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detection of Consonant Errors in Disordered Speech Based on Consonant-Vowel Segment Embedding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210586.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-8|PAPER Wed-M-V-6-8 — HMM-Free Encoder Pre-Training for Streaming RNN Transducer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">HMM-Free Encoder Pre-Training for Streaming RNN Transducer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211974.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-3|PAPER Tue-E-V-5-3 — Low Resource German ASR with Untranscribed Data Spoken by Non-Native Children — INTERSPEECH 2021 Shared Task SPAPL System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Low Resource German ASR with Untranscribed Data Spoken by Non-Native Children — INTERSPEECH 2021 Shared Task SPAPL System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210506.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-9|PAPER Tue-E-V-6-9 — Two-Pathway Style Embedding for Arbitrary Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Two-Pathway Style Embedding for Arbitrary Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210971.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-6|PAPER Wed-A-V-5-6 — GANSpeech: Adversarial Training for High-Fidelity Multi-Speaker Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">GANSpeech: Adversarial Training for High-Fidelity Multi-Speaker Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210899.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-SS-1-5|PAPER Wed-M-SS-1-5 — Inplace Gated Convolutional Recurrent Neural Network for Dual-Channel Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Inplace Gated Convolutional Recurrent Neural Network for Dual-Channel Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210586.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-8|PAPER Wed-M-V-6-8 — HMM-Free Encoder Pre-Training for Streaming RNN Transducer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">HMM-Free Encoder Pre-Training for Streaming RNN Transducer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210383.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-3|PAPER Fri-A-V-3-3 — Broadcasted Residual Learning for Efficient Keyword Spotting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Broadcasted Residual Learning for Efficient Keyword Spotting</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210785.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-2-3|PAPER Fri-A-V-2-3 — Speech Emotion Recognition via Multi-Level Cross-Modal Distillation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Emotion Recognition via Multi-Level Cross-Modal Distillation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210098.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-5-2|PAPER Tue-A-V-5-2 — SmallER: Scaling Neural Entity Resolution for Edge Devices]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SmallER: Scaling Neural Entity Resolution for Edge Devices</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210712.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-7|PAPER Fri-A-V-3-7 — Amortized Neural Networks for Low-Latency Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Amortized Neural Networks for Low-Latency Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211344.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-9|PAPER Fri-A-V-1-9 — A Study on Fine-Tuning wav2vec2.0 Model for the Task of Mispronunciation Detection and Diagnosis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Study on Fine-Tuning wav2vec2.0 Model for the Task of Mispronunciation Detection and Diagnosis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210721.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-7|PAPER Wed-M-V-5-7 — Relationships Between Perceptual Distinctiveness, Articulatory Complexity and Functional Load in Speech Communication]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Relationships Between Perceptual Distinctiveness, Articulatory Complexity and Functional Load in Speech Communication</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211082.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-9|PAPER Fri-M-V-1-9 — A Preliminary Study on Discourse Prosody Encoding in L1 and L2 English Spontaneous Narratives]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Preliminary Study on Discourse Prosody Encoding in L1 and L2 English Spontaneous Narratives</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211989.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-3-10|PAPER Tue-A-V-3-10 — Minimum-Norm Differential Beamforming for Linear Array with Directional Microphones]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Minimum-Norm Differential Beamforming for Linear Array with Directional Microphones</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212266.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-SS-1-7|PAPER Wed-M-SS-1-7 — Real-Time Multi-Channel Speech Enhancement Based on Neural Network Masking with Attention Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Real-Time Multi-Channel Speech Enhancement Based on Neural Network Masking with Attention Model</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210747.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-2|PAPER Thu-A-V-4-2 — Investigation of Spatial-Acoustic Features for Overlapping Speech Detection in Multiparty Meetings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigation of Spatial-Acoustic Features for Overlapping Speech Detection in Multiparty Meetings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210056.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-1-3|PAPER Thu-M-SS-1-3 — Dynamic Multi-Scale Convolution for Dialect Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dynamic Multi-Scale Convolution for Dialect Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211017.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-10|PAPER Tue-A-V-4-10 — Improving RNN-T for Domain Scaling Using Semi-Supervised Training with Neural TTS]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving RNN-T for Domain Scaling Using Semi-Supervised Training with Neural TTS</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211884.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-9|PAPER Tue-E-V-5-9 — Rapid Speaker Adaptation for Conformer Transducer: Attention and Bias Are All You Need]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Rapid Speaker Adaptation for Conformer Transducer: Attention and Bias Are All You Need</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211298.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-2|PAPER Wed-M-V-6-2 — Multiple Softmax Architecture for Streaming Multilingual End-to-End ASR Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multiple Softmax Architecture for Streaming Multilingual End-to-End ASR Systems</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210207.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-5|PAPER Wed-M-V-6-5 — Streaming Multi-Talker Speech Recognition with Joint Speaker Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Streaming Multi-Talker Speech Recognition with Joint Speaker Identification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210379.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-2|PAPER Wed-A-V-2-2 — A Light-Weight Contextual Spelling Correction Model for Customizing Transducer-Based Speech Recognition Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Light-Weight Contextual Spelling Correction Model for Customizing Transducer-Based Speech Recognition Systems</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212075.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-14|PAPER Wed-E-V-3-14 — Minimum Word Error Rate Training with Language Model Fusion for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Minimum Word Error Rate Training with Language Model Fusion for End-to-End Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210142.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-1|PAPER Thu-M-V-3-1 — Ultra Fast Speech Separation Model with Teacher Student Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ultra Fast Speech Separation Model with Teacher Student Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210921.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-9|PAPER Thu-M-V-3-9 — Investigation of Practical Aspects of Single Channel Speech Separation for ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigation of Practical Aspects of Single Channel Speech Separation for ASR</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210161.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-3|PAPER Thu-A-V-2-3 — On Minimum Word Error Rate Training of the Hybrid Autoregressive Transducer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On Minimum Word Error Rate Training of the Hybrid Autoregressive Transducer</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211949.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-10|PAPER Thu-A-V-2-10 — Improving Multilingual Transformer Transducer Models by Reducing Language Confusions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Multilingual Transformer Transducer Models by Reducing Language Confusions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212180.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-3-3|PAPER Wed-A-O-3-3 — Speech Disorder Classification Using Extended Factorized Hierarchical Variational Auto-Encoders]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Disorder Classification Using Extended Factorized Hierarchical Variational Auto-Encoders</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211216.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-8|PAPER Tue-E-V-1-8 — Gradient Regularization for Noise-Robust Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Gradient Regularization for Noise-Robust Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210355.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-4|PAPER Wed-E-V-3-4 — Model-Agnostic Fast Adaptive Multi-Objective Balancing Algorithm for Multilingual Automatic Speech Recognition Model Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Model-Agnostic Fast Adaptive Multi-Objective Balancing Algorithm for Multilingual Automatic Speech Recognition Model Training</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210487.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-1-4|PAPER Thu-A-V-1-4 — Multimodal Sentiment Analysis with Temporal Modality Attention]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multimodal Sentiment Analysis with Temporal Modality Attention</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218007.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-S&T-1-7|PAPER Tue-A-S&T-1-7 — Advanced Semi-Blind Speaker Extraction and Tracking Implemented in Experimental Device with Revolving Dense Microphone Array]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Advanced Semi-Blind Speaker Extraction and Tracking Implemented in Experimental Device with Revolving Dense Microphone Array</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210192.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-3-3|PAPER Wed-M-O-3-3 — Using X-Vectors for Speech Activity Detection in Broadcast Streams]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Using X-Vectors for Speech Activity Detection in Broadcast Streams</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211243.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-4|PAPER Thu-A-V-3-4 — Teacher-Student MixIT for Unsupervised and Semi-Supervised Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Teacher-Student MixIT for Unsupervised and Semi-Supervised Speech Separation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218013.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-S&T-1-6|PAPER Wed-A-S&T-1-6 — Addressing Compliance in Call Centers with Entity Extraction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Addressing Compliance in Call Centers with Entity Extraction</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218014.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-S&T-1-7|PAPER Wed-A-S&T-1-7 — Audio Segmentation Based Conversational Silence Detection for Contact Center Calls]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio Segmentation Based Conversational Silence Detection for Contact Center Calls</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211582.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-7-6|PAPER Thu-M-V-7-6 — PhonemeBERT: Joint Language Modelling of Phoneme Sequence and ASR Transcript]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PhonemeBERT: Joint Language Modelling of Phoneme Sequence and ASR Transcript</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218017.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-S&T-1-3|PAPER Thu-M-S&T-1-3 — The INGENIOUS Multilingual Operations App]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INGENIOUS Multilingual Operations App</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210201.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-10|PAPER Wed-A-V-6-10 — Towards Simultaneous Machine Interpretation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Simultaneous Machine Interpretation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211905.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-10|PAPER Thu-A-V-6-10 — Europarl-ASR: A Large Corpus of Parliamentary Debates for Streaming ASR Benchmarking and Speech Data Filtering/Verbatimization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Europarl-ASR: A Large Corpus of Parliamentary Debates for Streaming ASR Benchmarking and Speech Data Filtering/Verbatimization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211539.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-O-1-3|PAPER Thu-A-O-1-3 — Effects of Time Pressure and Spontaneity on Phonotactic Innovations in German Dialogues]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effects of Time Pressure and Spontaneity on Phonotactic Innovations in German Dialogues</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218018.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-S&T-1-4|PAPER Thu-M-S&T-1-4 — Digital Einstein Experience: Fast Text-to-Speech for Conversational AI]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Digital Einstein Experience: Fast Text-to-Speech for Conversational AI</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212000.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-7|PAPER Thu-A-V-6-7 — AusKidTalk: An Auditory-Visual Corpus of 3- to 12-Year-Old Australian Children’s Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AusKidTalk: An Auditory-Visual Corpus of 3- to 12-Year-Old Australian Children’s Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212227.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-3-5|PAPER Wed-E-O-3-5 — Audio Retrieval with Natural Language Queries]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio Retrieval with Natural Language Queries</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211801.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-2|PAPER Fri-A-SS-2-2 — Investigating the Utility of Multimodal Conversational Technology and Audiovisual Analytic Measures for the Assessment and Monitoring of Amyotrophic Lateral Sclerosis at Scale]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Utility of Multimodal Conversational Technology and Audiovisual Analytic Measures for the Assessment and Monitoring of Amyotrophic Lateral Sclerosis at Scale</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211650.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-2-6|PAPER Thu-M-SS-2-6 — Modeling the Effect of Military Oxygen Masks on Speech Characteristics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modeling the Effect of Military Oxygen Masks on Speech Characteristics</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211877.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-4|PAPER Tue-E-V-4-4 — DEXTER: Deep Encoding of External Knowledge for Named Entity Recognition in Virtual Assistants]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DEXTER: Deep Encoding of External Knowledge for Named Entity Recognition in Virtual Assistants</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212070.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-11|PAPER Tue-E-V-3-11 — FRILL: A Non-Semantic Speech Embedding for Mobile Devices]]</div>|^<div class="cpauthorindexpersoncardpapertitle">FRILL: A Non-Semantic Speech Embedding for Mobile Devices</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211913.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-14|PAPER Fri-A-SS-2-14 — Comparing Supervised Models and Learned Speech Representations for Classifying Intelligibility of Disordered Speech on Selected Phrases]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparing Supervised Models and Learned Speech Representations for Classifying Intelligibility of Disordered Speech on Selected Phrases</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210019.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-1|PAPER Tue-M-SS-1-1 — The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210154.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-8|PAPER Tue-M-SS-1-8 — Introducing a Central African Primate Vocalisation Dataset for Automated Species Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Introducing a Central African Primate Vocalisation Dataset for Automated Species Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218011.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-S&T-1-4|PAPER Wed-A-S&T-1-4 — Expressive Robot Performance Based on Facial Motion Capture]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Expressive Robot Performance Based on Facial Motion Capture</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211442.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-1-3|PAPER Tue-A-O-1-3 — Speaker Embeddings by Modeling Channel-Wise Correlations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Embeddings by Modeling Channel-Wise Correlations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210970.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-6|PAPER Wed-A-V-6-6 — Weakly-Supervised Speech-to-Text Mapping with Visually Connected Non-Parallel Speech-Text Data Using Cyclic Partially-Aligned Transformer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Weakly-Supervised Speech-to-Text Mapping with Visually Connected Non-Parallel Speech-Text Data Using Cyclic Partially-Aligned Transformer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210351.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-5-3|PAPER Tue-A-V-5-3 — Disfluency Detection with Unlabeled Data and Small BERT Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Disfluency Detection with Unlabeled Data and Small BERT Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210332.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-3|PAPER Thu-A-SS-2-3 — WavBERT: Exploiting Semantic and Non-Semantic Speech Using Wav2vec and BERT for Dementia Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">WavBERT: Exploiting Semantic and Non-Semantic Speech Using Wav2vec and BERT for Dementia Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211694.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-3-1|PAPER Wed-A-O-3-1 — Automatically Detecting Errors and Disfluencies in Read Speech to Predict Cognitive Impairment in People with Parkinson’s Disease]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatically Detecting Errors and Disfluencies in Read Speech to Predict Cognitive Impairment in People with Parkinson’s Disease</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211370.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-4-8|PAPER Tue-M-V-4-8 — Leveraging ASR N-Best in Deep Entity Retrieval]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Leveraging ASR N-Best in Deep Entity Retrieval</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211574.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-3-7|PAPER Tue-A-V-3-7 — Clarity-2021 Challenges: Machine Learning Challenges for Advancing Hearing Aid Processing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Clarity-2021 Challenges: Machine Learning Challenges for Advancing Hearing Aid Processing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212011.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-1-4|PAPER Tue-E-O-1-4 — Fearless Steps Challenge Phase-3 (FSC P3): Advancing SLT for Unseen Channel and Mission Data Across NASA Apollo Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fearless Steps Challenge Phase-3 (FSC P3): Advancing SLT for Unseen Channel and Mission Data Across NASA Apollo Audio</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210331.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-3-5|PAPER Wed-M-O-3-5 — Real-Time Speaker Counting in a Cocktail Party Scenario Using Attention-Guided Convolutional Neural Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Real-Time Speaker Counting in a Cocktail Party Scenario Using Attention-Guided Convolutional Neural Network</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210021.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-1|PAPER Fri-M-V-1-1 — Cross-Linguistic Perception of the Japanese Singleton/Geminate Contrast: Korean, Mandarin and Mongolian Compared]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Linguistic Perception of the Japanese Singleton/Geminate Contrast: Korean, Mandarin and Mongolian Compared</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210799.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-6|PAPER Tue-A-SS-2-6 — Classification of COVID-19 from Cough Using Autoregressive Predictive Coding Pretraining and Spectral Data Augmentation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Classification of COVID-19 from Cough Using Autoregressive Predictive Coding Pretraining and Spectral Data Augmentation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210701.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-4|PAPER Wed-A-V-1-4 — Effects of Voice Type and Task on L2 Learners’ Awareness of Pronunciation Errors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effects of Voice Type and Task on L2 Learners’ Awareness of Pronunciation Errors</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210591.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-13|PAPER Wed-A-V-2-13 — Correcting Automated and Manual Speech Transcription Errors Using Warped Language Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Correcting Automated and Manual Speech Transcription Errors Using Warped Language Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210338.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-3|PAPER Thu-M-V-3-3 — Continuous Speech Separation Using Speaker Inventory for Long Recording]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Continuous Speech Separation Using Speaker Inventory for Long Recording</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210978.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-6|PAPER Wed-E-V-1-6 — Dual Script E2E Framework for Multilingual and Code-Switching ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dual Script E2E Framework for Multilingual and Code-Switching ASR</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211574.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-3-7|PAPER Tue-A-V-3-7 — Clarity-2021 Challenges: Machine Learning Challenges for Advancing Hearing Aid Processing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Clarity-2021 Challenges: Machine Learning Challenges for Advancing Hearing Aid Processing</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211613.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-3-8|PAPER Tue-A-V-3-8 — Optimising Hearing Aid Fittings for Speech in Noise with a Differentiable Hearing Loss Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Optimising Hearing Aid Fittings for Speech in Noise with a Differentiable Hearing Loss Model</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211297.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-8|PAPER Thu-M-V-1-8 — Parental Spoken Scaffolding and Narrative Skills in Crowd-Sourced Storytelling Samples of Young Children]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Parental Spoken Scaffolding and Narrative Skills in Crowd-Sourced Storytelling Samples of Young Children</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211243.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-4|PAPER Thu-A-V-3-4 — Teacher-Student MixIT for Unsupervised and Semi-Supervised Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Teacher-Student MixIT for Unsupervised and Semi-Supervised Speech Separation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210353.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-2|PAPER Wed-E-V-2-2 — Robust Laughter Detection in Noisy Environments]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Laughter Detection in Noisy Environments</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218011.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-S&T-1-4|PAPER Wed-A-S&T-1-4 — Expressive Robot Performance Based on Facial Motion Capture]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Expressive Robot Performance Based on Facial Motion Capture</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211090.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-2-2|PAPER Wed-M-O-2-2 — Dialect Features in Heterogeneous and Homogeneous Gheg Speaking Communities]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dialect Features in Heterogeneous and Homogeneous Gheg Speaking Communities</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211975.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-10|PAPER Tue-A-V-1-10 — Optimizing Latency for Online Video Captioning Using Audio-Visual Transformers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Optimizing Latency for Online Video Captioning Using Audio-Visual Transformers</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210571.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-5|PAPER Tue-A-V-4-5 — Momentum Pseudo-Labeling for Semi-Supervised Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Momentum Pseudo-Labeling for Semi-Supervised Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211693.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-13|PAPER Wed-M-V-6-13 — Dual Causal/Non-Causal Self-Attention for Streaming End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dual Causal/Non-Causal Self-Attention for Streaming End-to-End Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211643.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-12|PAPER Wed-A-V-3-12 — Advanced Long-Context End-to-End Speech Recognition Using Context-Expanded Transformers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Advanced Long-Context End-to-End Speech Recognition Using Context-Expanded Transformers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211575.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-2|PAPER Wed-E-V-3-2 — Learning a Neural Diff for Speech Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning a Neural Diff for Speech Models</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210712.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-7|PAPER Fri-A-V-3-7 — Amortized Neural Networks for Low-Latency Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Amortized Neural Networks for Low-Latency Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211461.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-7|PAPER Tue-M-V-2-7 — Parallel Tacotron 2: A Non-Autoregressive Neural TTS Model with Differentiable Duration Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Parallel Tacotron 2: A Non-Autoregressive Neural TTS Model with Differentiable Duration Modeling</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211757.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-9|PAPER Tue-M-V-2-9 — PnG BERT: Augmented BERT on Phonemes and Graphemes for Neural TTS]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PnG BERT: Augmented BERT on Phonemes and Graphemes for Neural TTS</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211670.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-5-7|PAPER Tue-A-V-5-7 — Targeted Keyword Filtering for Accelerated Spoken Topic Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Targeted Keyword Filtering for Accelerated Spoken Topic Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211611.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-2-2|PAPER Wed-A-O-2-2 — Using Games to Augment Corpora for Language Recognition and Confusability]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Using Games to Augment Corpora for Language Recognition and Confusability</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210465.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-5|PAPER Fri-A-V-5-5 — Expressive Text-to-Speech Using Style Tag]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Expressive Text-to-Speech Using Style Tag</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211178.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-3-4|PAPER Tue-A-V-3-4 — Multiple Sound Source Localization Based on Interchannel Phase Differences in All Frequencies with Spectral Masks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multiple Sound Source Localization Based on Interchannel Phase Differences in All Frequencies with Spectral Masks</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211204.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-7|PAPER Wed-M-V-4-7 — Coded Speech Enhancement Using Neural Network-Based Vector-Quantized Residual Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Coded Speech Enhancement Using Neural Network-Based Vector-Quantized Residual Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210885.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-4-2|PAPER Fri-M-V-4-2 — Label Embedding for Chinese Grapheme-to-Phoneme Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Label Embedding for Chinese Grapheme-to-Phoneme Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212151.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-13|PAPER Wed-M-V-4-13 — Development of a Psychoacoustic Loss Function for the Deep Neural Network (DNN)-Based Speech Coder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Development of a Psychoacoustic Loss Function for the Deep Neural Network (DNN)-Based Speech Coder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210021.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-1|PAPER Fri-M-V-1-1 — Cross-Linguistic Perception of the Japanese Singleton/Geminate Contrast: Korean, Mandarin and Mongolian Compared]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Linguistic Perception of the Japanese Singleton/Geminate Contrast: Korean, Mandarin and Mongolian Compared</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212151.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-13|PAPER Wed-M-V-4-13 — Development of a Psychoacoustic Loss Function for the Deep Neural Network (DNN)-Based Speech Coder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Development of a Psychoacoustic Loss Function for the Deep Neural Network (DNN)-Based Speech Coder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210149.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-2|PAPER Thu-M-V-4-2 — Three-Class Overlapped Speech Detection Using a Convolutional Recurrent Neural Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Three-Class Overlapped Speech Detection Using a Convolutional Recurrent Neural Network</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210448.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-5|PAPER Thu-M-V-4-5 — Adapting Speaker Embeddings for Speaker Diarisation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adapting Speaker Embeddings for Speaker Diarisation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212041.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-6|PAPER Thu-A-V-6-6 — Look Who’s Talking: Active Speaker Detection in the Wild]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Look Who’s Talking: Active Speaker Detection in the Wild</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210176.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-7|PAPER Fri-M-V-3-7 — Deep Neural Network Calibration for E2E Speech Recognition System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Neural Network Calibration for E2E Speech Recognition System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211435.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-3|PAPER Thu-A-V-6-3 — Tusom2021: A Phonetically Transcribed Speech Dataset from an Endangered Language for Universal Phone Recognition Experiments]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Tusom2021: A Phonetically Transcribed Speech Dataset from an Endangered Language for Universal Phone Recognition Experiments</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211384.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-1|PAPER Fri-A-SS-2-1 — Automatic Speech Recognition of Disordered Speech: Personalized Models Outperforming Human Listeners on Short Phrases]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Speech Recognition of Disordered Speech: Personalized Models Outperforming Human Listeners on Short Phrases</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211801.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-2|PAPER Fri-A-SS-2-2 — Investigating the Utility of Multimodal Conversational Technology and Audiovisual Analytic Measures for the Assessment and Monitoring of Amyotrophic Lateral Sclerosis at Scale]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Utility of Multimodal Conversational Technology and Audiovisual Analytic Measures for the Assessment and Monitoring of Amyotrophic Lateral Sclerosis at Scale</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210099.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-5|PAPER Fri-A-SS-2-5 — Speaking with a KN95 Face Mask: ASR Performance and Speaker Compensation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaking with a KN95 Face Mask: ASR Performance and Speaker Compensation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210330.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-10|PAPER Fri-A-SS-2-10 — A Voice-Activated Switch for Persons with Motor and Speech Impairments: Isolated-Vowel Spotting Using Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Voice-Activated Switch for Persons with Motor and Speech Impairments: Isolated-Vowel Spotting Using Neural Networks</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210697.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-12|PAPER Fri-A-SS-2-12 — Disordered Speech Data Collection: Lessons Learned at 1 Million Utterances from Project Euphonia]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Disordered Speech Data Collection: Lessons Learned at 1 Million Utterances from Project Euphonia</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211913.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-14|PAPER Fri-A-SS-2-14 — Comparing Supervised Models and Learned Speech Representations for Classifying Intelligibility of Disordered Speech on Selected Phrases]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparing Supervised Models and Learned Speech Representations for Classifying Intelligibility of Disordered Speech on Selected Phrases</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211369.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-5|PAPER Thu-A-V-3-5 — Few-Shot Learning of New Sound Classes for Target Sound Extraction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Few-Shot Learning of New Sound Classes for Target Sound Extraction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210201.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-10|PAPER Wed-A-V-6-10 — Towards Simultaneous Machine Interpretation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Simultaneous Machine Interpretation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211905.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-10|PAPER Thu-A-V-6-10 — Europarl-ASR: A Large Corpus of Parliamentary Debates for Streaming ASR Benchmarking and Speech Data Filtering/Verbatimization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Europarl-ASR: A Large Corpus of Parliamentary Debates for Streaming ASR Benchmarking and Speech Data Filtering/Verbatimization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210983.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-1|PAPER Tue-A-SS-1-1 — Privacy-Preserving Voice Anti-Spoofing Using Secure Multi-Party Computation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Privacy-Preserving Voice Anti-Spoofing Using Secure Multi-Party Computation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211536.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-4-1|PAPER Tue-M-V-4-1 — User-Initiated Repetition-Based Recovery in Multi-Utterance Dialogue Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">User-Initiated Repetition-Based Recovery in Multi-Utterance Dialogue Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211241.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-1-3|PAPER Fri-M-O-1-3 — Age-Invariant Training for End-to-End Child Speech Recognition Using Adversarial Multi-Task Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Age-Invariant Training for End-to-End Child Speech Recognition Using Adversarial Multi-Task Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211062.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-2|PAPER Tue-A-SS-2-2 — PANACEA Cough Sound-Based Diagnosis of COVID-19 for the DiCOVA 2021 Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PANACEA Cough Sound-Based Diagnosis of COVID-19 for the DiCOVA 2021 Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211190.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-1-1|PAPER Fri-M-O-1-1 — Self-Attention Channel Combinator Frontend for End-to-End Multichannel Far-Field Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Self-Attention Channel Combinator Frontend for End-to-End Multichannel Far-Field Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212140.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-12|PAPER Fri-A-V-1-12 — “You don’t understand me!”: Comparing ASR Results for L1 and L2 Speakers of Swedish]]</div>|^<div class="cpauthorindexpersoncardpapertitle">“You don’t understand me!”: Comparing ASR Results for L1 and L2 Speakers of Swedish</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210983.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-1|PAPER Tue-A-SS-1-1 — Privacy-Preserving Voice Anti-Spoofing Using Secure Multi-Party Computation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Privacy-Preserving Voice Anti-Spoofing Using Secure Multi-Party Computation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211062.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-2|PAPER Tue-A-SS-2-2 — PANACEA Cough Sound-Based Diagnosis of COVID-19 for the DiCOVA 2021 Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PANACEA Cough Sound-Based Diagnosis of COVID-19 for the DiCOVA 2021 Challenge</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211070.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-13|PAPER Tue-E-V-1-13 — Speaker Anonymisation Using the McAdams Coefficient]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Anonymisation Using the McAdams Coefficient</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210993.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-1-2|PAPER Wed-E-O-1-2 — Graph Attention Networks for Anti-Spoofing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Graph Attention Networks for Anti-Spoofing</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210738.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-2|PAPER Fri-M-V-7-2 — An Initial Investigation for Detecting Partially Spoofed Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Initial Investigation for Detecting Partially Spoofed Audio</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211187.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-13|PAPER Fri-M-V-7-13 — Partially-Connected Differentiable Architecture Search for Deepfake and Spoofing Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Partially-Connected Differentiable Architecture Search for Deepfake and Spoofing Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211173.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-10|PAPER Tue-M-SS-1-10 — Identifying Conflict Escalation and Primates by Using Ensemble X-Vectors and Fisher Vector Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Identifying Conflict Escalation and Primates by Using Ensemble X-Vectors and Fisher Vector Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211814.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-1-3|PAPER Tue-E-O-1-3 — Neural Text Denormalization for Speech Transcripts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Neural Text Denormalization for Speech Transcripts</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218009.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-S&T-1-2|PAPER Wed-A-S&T-1-2 — Live TV Subtitling Through Respeaking]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Live TV Subtitling Through Respeaking</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218009.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-S&T-1-2|PAPER Wed-A-S&T-1-2 — Live TV Subtitling Through Respeaking]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Live TV Subtitling Through Respeaking</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211704.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-O-2-4|PAPER Fri-A-O-2-4 — Spoken Term Detection and Relevance Score Estimation Using Dot-Product of Pronunciation Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spoken Term Detection and Relevance Score Estimation Using Dot-Product of Pronunciation Embeddings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-6|PAPER Tue-A-V-2-6 — Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211826.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-9|PAPER Fri-A-V-6-9 — End-to-End Spoken Language Understanding for Generalized Voice Assistants]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Spoken Language Understanding for Generalized Voice Assistants</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211915.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-9|PAPER Thu-A-V-2-9 — Earnings-21: A Practical Benchmark for ASR in the Wild]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Earnings-21: A Practical Benchmark for ASR in the Wild</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211188.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-8|PAPER Tue-A-SS-1-8 — Evaluating the Vulnerability of End-to-End Automatic Speech Recognition Models to Membership Inference Attacks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Evaluating the Vulnerability of End-to-End Automatic Speech Recognition Models to Membership Inference Attacks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211973.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-2|PAPER Wed-E-V-5-2 — Speech Denoising with Auditory Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Denoising with Auditory Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211966.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-5|PAPER Fri-M-V-6-5 — Few-Shot Keyword Spotting in Any Language]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Few-Shot Keyword Spotting in Any Language</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211915.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-9|PAPER Thu-A-V-2-9 — Earnings-21: A Practical Benchmark for ASR in the Wild]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Earnings-21: A Practical Benchmark for ASR in the Wild</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210711.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-SS-1-1|PAPER Tue-E-SS-1-1 — Optimizing an Automatic Creaky Voice Detection Method for Australian English Speaking Females]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Optimizing an Automatic Creaky Voice Detection Method for Australian English Speaking Females</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210729.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-SS-1-2|PAPER Tue-E-SS-1-2 — A Comparison of Acoustic Correlates of Voice Quality Across Different Recording Devices: A Cautionary Tale]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparison of Acoustic Correlates of Voice Quality Across Different Recording Devices: A Cautionary Tale</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212044.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-10|PAPER Fri-A-V-6-10 — Bi-Directional Joint Neural Networks for Intent Classification and Slot Filling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bi-Directional Joint Neural Networks for Intent Classification and Slot Filling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211090.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-2-2|PAPER Wed-M-O-2-2 — Dialect Features in Heterogeneous and Homogeneous Gheg Speaking Communities]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dialect Features in Heterogeneous and Homogeneous Gheg Speaking Communities</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211418.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-3-3|PAPER Fri-M-O-3-3 — A Hands-On Comparison of DNNs for Dialog Separation Using Transfer Learning from Music Source Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Hands-On Comparison of DNNs for Dialog Separation Using Transfer Learning from Music Source Separation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211941.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-12|PAPER Wed-M-V-4-12 — A Two-Stage Approach to Speech Bandwidth Extension]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Two-Stage Approach to Speech Bandwidth Extension</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210852.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-1|PAPER Tue-M-V-2-1 — TacoLPCNet: Fast and Stable TTS by Conditioning LPCNet on Mel Spectrogram Predictions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">TacoLPCNet: Fast and Stable TTS by Conditioning LPCNet on Mel Spectrogram Predictions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210375.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-6|PAPER Wed-A-V-4-6 — Reliable Intensity Vector Selection for Multi-Source Direction-of-Arrival Estimation Using a Single Acoustic Vector Sensor]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reliable Intensity Vector Selection for Multi-Source Direction-of-Arrival Estimation Using a Single Acoustic Vector Sensor</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211062.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-2|PAPER Tue-A-SS-2-2 — PANACEA Cough Sound-Based Diagnosis of COVID-19 for the DiCOVA 2021 Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PANACEA Cough Sound-Based Diagnosis of COVID-19 for the DiCOVA 2021 Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211912.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-3|PAPER Wed-A-V-6-3 — Large-Scale Self- and Semi-Supervised Learning for Speech Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Large-Scale Self- and Semi-Supervised Learning for Speech Translation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212027.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-4|PAPER Wed-A-V-6-4 — CoVoST 2 and Massively Multilingual Speech Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">CoVoST 2 and Massively Multilingual Speech Translation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218012.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-S&T-1-5|PAPER Wed-A-S&T-1-5 — ThemePro 2.0: Showcasing the Role of Thematic Progression in Engaging Human-Computer Interaction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ThemePro 2.0: Showcasing the Role of Thematic Progression in Engaging Human-Computer Interaction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211619.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-2-5|PAPER Thu-M-SS-2-5 — Boosting of Contextual Information in ASR for Air-Traffic Call-Sign Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Boosting of Contextual Information in ASR for Air-Traffic Call-Sign Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211373.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-2-4|PAPER Thu-M-SS-2-4 — Contextual Semi-Supervised Learning: An Approach to Leverage Air-Surveillance and Untranscribed ATC Data in ASR Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Contextual Semi-Supervised Learning: An Approach to Leverage Air-Surveillance and Untranscribed ATC Data in ASR Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-6|PAPER Tue-A-V-2-6 — Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210580.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-7|PAPER Tue-E-V-4-7 — Predicting Temporal Performance Drop of Deployed Production Spoken Language Understanding Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Predicting Temporal Performance Drop of Deployed Production Spoken Language Understanding Models</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210335.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-3|PAPER Fri-A-V-6-3 — The Impact of Intent Distribution Mismatch on Semi-Supervised Spoken Language Understanding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Impact of Intent Distribution Mismatch on Semi-Supervised Spoken Language Understanding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211191.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-1-5|PAPER Wed-A-O-1-5 — Fast Text-Only Domain Adaptation of RNN-Transducer Prediction Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fast Text-Only Domain Adaptation of RNN-Transducer Prediction Network</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211438.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-2-2|PAPER Thu-M-O-2-2 — Separation of Emotional and Reconstruction Embeddings on Ladder Network to Improve Speech Emotion Recognition Robustness in Noisy Conditions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Separation of Emotional and Reconstruction Embeddings on Ladder Network to Improve Speech Emotion Recognition Robustness in Noisy Conditions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211891.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-3-4|PAPER Tue-E-O-3-4 — Acoustic-Prosodic, Lexical and Demographic Cues to Persuasiveness in Competitive Debate Speeches]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic-Prosodic, Lexical and Demographic Cues to Persuasiveness in Competitive Debate Speeches</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210341.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-3|PAPER Wed-E-V-6-3 — RyanSpeech: A Corpus for Conversational Text-to-Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">RyanSpeech: A Corpus for Conversational Text-to-Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211566.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-3|PAPER Wed-M-V-6-3 — Contextualized Streaming End-to-End Speech Recognition with Trie-Based Deep Biasing and Shallow Fusion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Contextualized Streaming End-to-End Speech Recognition with Trie-Based Deep Biasing and Shallow Fusion</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211272.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-1|PAPER Wed-A-V-3-1 — Dynamic Encoder Transducer: A Flexible Solution for Trading Off Accuracy for Latency]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dynamic Encoder Transducer: A Flexible Solution for Trading Off Accuracy for Latency</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211084.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-2-2|PAPER Tue-M-O-2-2 — On Modeling Glottal Source Information for Phonation Assessment in Parkinson’s Disease]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On Modeling Glottal Source Information for Phonation Assessment in Parkinson’s Disease</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211288.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-3-5|PAPER Wed-A-O-3-5 — Late Fusion of the Available Lexicon and Raw Waveform-Based Acoustic Modeling for Depression and Dementia Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Late Fusion of the Available Lexicon and Raw Waveform-Based Acoustic Modeling for Depression and Dementia Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211526.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-6|PAPER Wed-A-V-1-6 — Detecting Alzheimer’s Disease Using Interactional and Acoustic Features from Spontaneous Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detecting Alzheimer’s Disease Using Interactional and Acoustic Features from Spontaneous Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211633.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-9|PAPER Thu-A-SS-2-9 — Alzheimer’s Dementia Recognition Using Acoustic, Lexical, Disfluency and Speech Pause Features Robust to Noisy Inputs]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Alzheimer’s Dementia Recognition Using Acoustic, Lexical, Disfluency and Speech Pause Features Robust to Noisy Inputs</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210562.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-5-2|PAPER Thu-M-V-5-2 — Improving Multi-Speaker TTS Prosody Variance with a Residual Encoder and Normalizing Flows]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Multi-Speaker TTS Prosody Variance with a Residual Encoder and Normalizing Flows</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211384.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-1|PAPER Fri-A-SS-2-1 — Automatic Speech Recognition of Disordered Speech: Personalized Models Outperforming Human Listeners on Short Phrases]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Speech Recognition of Disordered Speech: Personalized Models Outperforming Human Listeners on Short Phrases</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210697.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-12|PAPER Fri-A-SS-2-12 — Disordered Speech Data Collection: Lessons Learned at 1 Million Utterances from Project Euphonia]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Disordered Speech Data Collection: Lessons Learned at 1 Million Utterances from Project Euphonia</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211403.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-3-4|PAPER Wed-A-O-3-4 — The Impact of Forced-Alignment Errors on Automatic Pronunciation Evaluation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Impact of Forced-Alignment Errors on Automatic Pronunciation Evaluation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211519.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-7|PAPER Thu-A-SS-2-7 — Using the Outputs of Different Automatic Speech Recognition Paradigms for Acoustic- and BERT-Based Alzheimer’s Dementia Detection Through Spontaneous Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Using the Outputs of Different Automatic Speech Recognition Paradigms for Acoustic- and BERT-Based Alzheimer’s Dementia Detection Through Spontaneous Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212000.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-7|PAPER Thu-A-V-6-7 — AusKidTalk: An Auditory-Visual Corpus of 3- to 12-Year-Old Australian Children’s Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AusKidTalk: An Auditory-Visual Corpus of 3- to 12-Year-Old Australian Children’s Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212202.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-1-5|PAPER Fri-M-O-1-5 — Simulating Reading Mistakes for Child Speech Transformer-Based Phone Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Simulating Reading Mistakes for Child Speech Transformer-Based Phone Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210911.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-5|PAPER Thu-A-SS-1-5 — Relaxing the Conditional Independence Assumption of CTC-Based ASR by Conditioning on Intermediate Predictions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Relaxing the Conditional Independence Assumption of CTC-Based ASR by Conditioning on Intermediate Predictions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212002.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-11|PAPER Thu-A-SS-2-11 — Automatic Detection of Alzheimer’s Disease Using Spontaneous Speech Only]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Detection of Alzheimer’s Disease Using Spontaneous Speech Only</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210592.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-5|PAPER Tue-M-V-6-5 — Audio-Visual Information Fusion Using Cross-Modal Teacher-Student Learning for Voice Activity Detection in Realistic Environments]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio-Visual Information Fusion Using Cross-Modal Teacher-Student Learning for Voice Activity Detection in Realistic Environments</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210922.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-6|PAPER Wed-E-V-5-6 — A Maximum Likelihood Approach to SNR-Progressive Learning Using Generalized Gaussian Distribution for LSTM-Based Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Maximum Likelihood Approach to SNR-Progressive Learning Using Generalized Gaussian Distribution for LSTM-Based Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210668.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-8|PAPER Wed-E-SS-1-8 — Lightweight Causal Transformer with Local Self-Attention for Real-Time Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Lightweight Causal Transformer with Local Self-Attention for Real-Time Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210723.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-7|PAPER Thu-M-V-2-7 — Automatic Lip-Reading with Hierarchical Pyramidal Convolution and Self-Attention for Image Sequences with No Word Boundaries]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Lip-Reading with Hierarchical Pyramidal Convolution and Self-Attention for Image Sequences with No Word Boundaries</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210516.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-6|PAPER Thu-M-V-4-6 — Scenario-Dependent Speaker Diarization for DIHARD-III Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Scenario-Dependent Speaker Diarization for DIHARD-III Challenge</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210750.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-3|PAPER Thu-A-V-4-3 — Target-Speaker Voice Activity Detection with Improved i-Vector Estimation for Unknown Number of Speaker]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Target-Speaker Voice Activity Detection with Improved i-Vector Estimation for Unknown Number of Speaker</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211208.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-6|PAPER Thu-A-V-4-6 — The Third DIHARD Diarization Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Third DIHARD Diarization Challenge</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211397.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-4|PAPER Thu-A-V-6-4 — AISHELL-4: An Open Source Dataset for Speech Enhancement, Separation, Recognition and Speaker Diarization in Conference Scenario]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AISHELL-4: An Open Source Dataset for Speech Enhancement, Separation, Recognition and Speaker Diarization in Conference Scenario</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210007.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-2|PAPER Fri-A-V-3-2 — Weakly Supervised Construction of ASR Systems from Massive Video Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Weakly Supervised Construction of ASR Systems from Massive Video Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210771.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-5|PAPER Wed-M-V-3-5 — EfficientSing: A Chinese Singing Voice Synthesis System Using Duration-Free Acoustic Model and HiFi-GAN Vocoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">EfficientSing: A Chinese Singing Voice Synthesis System Using Duration-Free Acoustic Model and HiFi-GAN Vocoder</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211232.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-4-4|PAPER Fri-M-V-4-4 — Improving Polyphone Disambiguation for Mandarin Chinese by Combining Mix-Pooling Strategy and Window-Based Attention]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Polyphone Disambiguation for Mandarin Chinese by Combining Mix-Pooling Strategy and Window-Based Attention</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211842.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-10|PAPER Tue-A-V-2-10 — Investigating Speech Reconstruction for Laryngectomees for Silent Speech Interfaces]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Speech Reconstruction for Laryngectomees for Silent Speech Interfaces</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211801.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-2|PAPER Fri-A-SS-2-2 — Investigating the Utility of Multimodal Conversational Technology and Audiovisual Analytic Measures for the Assessment and Monitoring of Amyotrophic Lateral Sclerosis at Scale]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Utility of Multimodal Conversational Technology and Audiovisual Analytic Measures for the Assessment and Monitoring of Amyotrophic Lateral Sclerosis at Scale</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212084.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-11|PAPER Tue-M-V-5-11 — Raw Waveform Encoder with Multi-Scale Globally Attentive Locally Recurrent Networks for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Raw Waveform Encoder with Multi-Scale Globally Attentive Locally Recurrent Networks for End-to-End Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210146.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-SS-1-3|PAPER Wed-M-SS-1-3 — Real-Time Independent Vector Analysis Using Semi-Supervised Nonnegative Matrix Factorization as a Source Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Real-Time Independent Vector Analysis Using Semi-Supervised Nonnegative Matrix Factorization as a Source Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210819.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-5|PAPER Fri-A-V-3-5 — Extremely Low Footprint End-to-End ASR System for Smart Device]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Extremely Low Footprint End-to-End ASR System for Smart Device</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210586.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-8|PAPER Wed-M-V-6-8 — HMM-Free Encoder Pre-Training for Streaming RNN Transducer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">HMM-Free Encoder Pre-Training for Streaming RNN Transducer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210559.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-1-4|PAPER Tue-M-V-1-4 — Improving Deep CNN Architectures with Variable-Length Training Samples for Text-Independent Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Deep CNN Architectures with Variable-Length Training Samples for Text-Independent Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211965.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-5|PAPER Thu-A-V-6-5 — GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211259.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-13|PAPER Thu-A-V-6-13 — speechocean762: An Open-Source Non-Native English Speech Corpus for Pronunciation Assessment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">speechocean762: An Open-Source Non-Native English Speech Corpus for Pronunciation Assessment</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211803.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-10|PAPER Wed-E-V-1-10 — Hierarchical Phone Recognition with Compositional Phonetics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Hierarchical Phone Recognition with Compositional Phonetics</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210327.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-2|PAPER Wed-M-V-3-2 — Cross-Lingual Low Resource Speaker Adaptation Using Phonological Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Lingual Low Resource Speaker Adaptation Using Phonological Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210586.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-8|PAPER Wed-M-V-6-8 — HMM-Free Encoder Pre-Training for Streaming RNN Transducer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">HMM-Free Encoder Pre-Training for Streaming RNN Transducer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210538.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-3|PAPER Tue-E-V-2-3 — Residual Echo and Noise Cancellation with Feature Attention Module and Multi-Domain Loss Function]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Residual Echo and Noise Cancellation with Feature Attention Module and Multi-Domain Loss Function</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210036.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-1|PAPER Wed-M-V-4-1 — NU-Wave: A Diffusion Probabilistic Model for Neural Audio Upsampling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">NU-Wave: A Diffusion Probabilistic Model for Neural Audio Upsampling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210702.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-1|PAPER Fri-M-V-7-1 — A Comparative Study on Recent Neural Spoofing Countermeasures for Synthetic Speech Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparative Study on Recent Neural Spoofing Countermeasures for Synthetic Speech Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210738.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-2|PAPER Fri-M-V-7-2 — An Initial Investigation for Detecting Partially Spoofed Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Initial Investigation for Detecting Partially Spoofed Audio</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211522.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-9|PAPER Fri-M-V-7-9 — Visualizing Classifier Adjacency Relations: A Case Study in Speaker Verification and Voice Anti-Spoofing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Visualizing Classifier Adjacency Relations: A Case Study in Speaker Verification and Voice Anti-Spoofing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211253.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-6-4|PAPER Tue-A-V-6-4 — An Improved StarGAN for Emotional Voice Conversion: Enhancing Voice Quality and Data Augmentation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Improved StarGAN for Emotional Voice Conversion: Enhancing Voice Quality and Data Augmentation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211232.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-4-4|PAPER Fri-M-V-4-4 — Improving Polyphone Disambiguation for Mandarin Chinese by Combining Mix-Pooling Strategy and Window-Based Attention]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Polyphone Disambiguation for Mandarin Chinese by Combining Mix-Pooling Strategy and Window-Based Attention</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210602.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-10|PAPER Fri-M-V-6-10 — The 2020 Personalized Voice Trigger Challenge: Open Datasets, Evaluation Metrics, Baseline System and Results]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The 2020 Personalized Voice Trigger Challenge: Open Datasets, Evaluation Metrics, Baseline System and Results</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210733.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-1|PAPER Wed-A-V-6-1 — SpecRec: An Alternative Solution for Improving End-to-End Speech-to-Text Translation via Spectrogram Reconstruction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SpecRec: An Alternative Solution for Improving End-to-End Speech-to-Text Translation via Spectrogram Reconstruction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210419.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-6|PAPER Tue-M-V-5-6 — Scaling Sparsemax Based Channel Selection for Speech Recognition with ad-hoc Microphone Arrays]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Scaling Sparsemax Based Channel Selection for Speech Recognition with ad-hoc Microphone Arrays</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211404.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-7|PAPER Fri-M-V-7-7 — Attention-Based Convolutional Neural Network for ASV Spoofing Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Attention-Based Convolutional Neural Network for ASV Spoofing Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211843.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-8|PAPER Tue-E-V-5-8 — Zero-Shot Cross-Lingual Phonetic Recognition with External Language Embedding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Zero-Shot Cross-Lingual Phonetic Recognition with External Language Embedding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211016.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-7|PAPER Wed-A-V-5-7 — UnivNet: A Neural Vocoder with Multi-Resolution Spectrogram Discriminators for High-Fidelity Waveform Generation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">UnivNet: A Neural Vocoder with Multi-Resolution Spectrogram Discriminators for High-Fidelity Waveform Generation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212016.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-1-5|PAPER Tue-A-O-1-5 — ICSpk: Interpretable Complex Speaker Embedding Extractor from Raw Waveform]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ICSpk: Interpretable Complex Speaker Embedding Extractor from Raw Waveform</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212025.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-1-4|PAPER Wed-E-O-1-4 — Effective Phase Encoding for End-To-End Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effective Phase Encoding for End-To-End Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211262.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-6-5|PAPER Thu-M-V-6-5 — Inhalations in Speech: Acoustic and Physiological Characteristics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Inhalations in Speech: Acoustic and Physiological Characteristics</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211496.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-6-7|PAPER Thu-M-V-6-7 — Take a Breath: Respiratory Sounds Improve Recollection in Synthetic Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Take a Breath: Respiratory Sounds Improve Recollection in Synthetic Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211056.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-2-4|PAPER Fri-M-V-2-4 — Revisiting Recall Effects of Filler Particles in German and English]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Revisiting Recall Effects of Filler Particles in German and English</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210082.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-1-1|PAPER Wed-M-V-1-1 — End-to-End Language Diarization for Bilingual Code-Switching Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Language Diarization for Bilingual Code-Switching Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211699.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-7-9|PAPER Tue-M-V-7-9 — Cross-Linguistic Speaker Individuality of Long-Term Formant Distributions: Phonetic and Forensic Perspectives]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Linguistic Speaker Individuality of Long-Term Formant Distributions: Phonetic and Forensic Perspectives</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210184.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-O-1-1|PAPER Thu-A-O-1-1 — Towards the Prediction of the Vocal Tract Shape from the Sequence of Phonemes to be Articulated]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards the Prediction of the Vocal Tract Shape from the Sequence of Phonemes to be Articulated</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211237.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-1-2|PAPER Fri-M-O-1-2 — ETLT 2021: Shared Task on Automatic Speech Recognition for Non-Native Children’s Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ETLT 2021: Shared Task on Automatic Speech Recognition for Non-Native Children’s Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211589.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-2|PAPER Thu-A-SS-2-2 — Influence of the Interviewer on the Automatic Assessment of Alzheimer’s Disease in the Context of the ADReSSo Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Influence of the Interviewer on the Automatic Assessment of Alzheimer’s Disease in the Context of the ADReSSo Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210636.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-6|PAPER Fri-M-V-5-6 — Knowledge Distillation for Singing Voice Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Knowledge Distillation for Singing Voice Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211310.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-1-6|PAPER Wed-M-V-1-6 — Self-Supervised Phonotactic Representations for Language Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Self-Supervised Phonotactic Representations for Language Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212024.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-12|PAPER Thu-A-SS-2-12 — Modular Multi-Modal Attention Network for Alzheimer’s Disease Detection Using Patient Audio and Language Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modular Multi-Modal Attention Network for Alzheimer’s Disease Detection Using Patient Audio and Language Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218022.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-S&T-1-7|PAPER Thu-M-S&T-1-7 — ViSTAFAE: A Visual Speech-Training Aid with Feedback of Articulatory Efforts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ViSTAFAE: A Visual Speech-Training Aid with Feedback of Articulatory Efforts</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211142.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-8|PAPER Fri-M-V-5-8 — Open-Set Audio Classification with Limited Training Resources Based on Augmentation Enhanced Variational Auto-Encoder GAN with Detection-Classification Joint Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Open-Set Audio Classification with Limited Training Resources Based on Augmentation Enhanced Variational Auto-Encoder GAN with Detection-Classification Joint Training</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210296.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-4|PAPER Wed-E-SS-1-4 — DPCRN: Dual-Path Convolution Recurrent Network for Single Channel Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DPCRN: Dual-Path Convolution Recurrent Network for Single Channel Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210261.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-3|PAPER Thu-M-V-4-3 — Online Speaker Diarization Equipped with Discriminative Modeling and Guided Inference]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Online Speaker Diarization Equipped with Discriminative Modeling and Guided Inference</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210504.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-5|PAPER Thu-M-V-3-5 — End-to-End Speech Separation Using Orthogonal Representation in Complex and Real Time-Frequency Domain]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Speech Separation Using Orthogonal Representation in Complex and Real Time-Frequency Domain</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211977.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-11|PAPER Tue-M-V-6-11 — A Lightweight Framework for Online Voice Activity Detection in the Wild]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Lightweight Framework for Online Voice Activity Detection in the Wild</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211080.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-10|PAPER Wed-A-V-2-10 — Class-Based Neural Network Language Model for Second-Pass Rescoring in ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Class-Based Neural Network Language Model for Second-Pass Rescoring in ASR</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210802.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-5-3|PAPER Thu-M-V-5-3 — Rich Prosody Diversity Modelling with Phone-Level Mixture Density Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Rich Prosody Diversity Modelling with Phone-Level Mixture Density Network</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211962.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-12|PAPER Wed-E-V-3-12 — 4-Bit Quantization of LSTM-Based Speech Recognition Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">4-Bit Quantization of LSTM-Based Speech Recognition Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210287.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-3|PAPER Thu-M-V-2-3 — A Fast Discrete Two-Step Learning Hashing for Scalable Cross-Modal Retrieval]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Fast Discrete Two-Step Learning Hashing for Scalable Cross-Modal Retrieval</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211011.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-5|PAPER Tue-M-V-2-5 — Information Sieve: Content Leakage Reduction in End-to-End Prosody Transfer for Expressive Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Information Sieve: Content Leakage Reduction in End-to-End Prosody Transfer for Expressive Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211344.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-9|PAPER Fri-A-V-1-9 — A Study on Fine-Tuning wav2vec2.0 Model for the Task of Mispronunciation Detection and Diagnosis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Study on Fine-Tuning wav2vec2.0 Model for the Task of Mispronunciation Detection and Diagnosis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210966.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-7|PAPER Tue-E-V-1-7 — AntVoice Neural Speaker Embedding System for FFSVC 2020]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AntVoice Neural Speaker Embedding System for FFSVC 2020</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210200.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-4|PAPER Tue-M-V-6-4 — Multi-Channel VAD for Transcription of Group Discussion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Channel VAD for Transcription of Group Discussion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211843.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-8|PAPER Tue-E-V-5-8 — Zero-Shot Cross-Lingual Phonetic Recognition with External Language Embedding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Zero-Shot Cross-Lingual Phonetic Recognition with External Language Embedding</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211973.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-2|PAPER Wed-E-V-5-2 — Speech Denoising with Auditory Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Denoising with Auditory Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211339.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-7|PAPER Wed-E-V-1-7 — MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210594.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-4-7|PAPER Wed-E-V-4-7 — The Interaction of Word Complexity and Word Duration in an Agglutinative Language]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Interaction of Word Complexity and Word Duration in an Agglutinative Language</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210894.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-9|PAPER Wed-E-V-3-9 — Emitting Word Timings with HMM-Free End-to-End System in Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Emitting Word Timings with HMM-Free End-to-End System in Automatic Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211042.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-6|PAPER Wed-E-SS-1-6 — DBNet: A Dual-Branch Network Architecture Processing on Spectrum and Waveform for Single-Channel Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DBNet: A Dual-Branch Network Architecture Processing on Spectrum and Waveform for Single-Channel Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210150.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-4|PAPER Tue-M-V-3-4 — Transfer Learning for Speech Intelligibility Improvement in Noisy Environments]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transfer Learning for Speech Intelligibility Improvement in Noisy Environments</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210164.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-2|PAPER Thu-M-V-3-2 — Group Delay Based Re-Weighted Sparse Recovery Algorithms for Robust and High-Resolution Source Separation in DOA Framework]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Group Delay Based Re-Weighted Sparse Recovery Algorithms for Robust and High-Resolution Source Separation in DOA Framework</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210627.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-1-3|PAPER Wed-A-O-1-3 — Text Augmentation for Language Models in High Error Recognition Scenario]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Text Augmentation for Language Models in High Error Recognition Scenario</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211033.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-2-2|PAPER Thu-M-SS-2-2 — Detecting English Speech in the Air Traffic Control Voice Communication]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detecting English Speech in the Air Traffic Control Voice Communication</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211373.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-2-4|PAPER Thu-M-SS-2-4 — Contextual Semi-Supervised Learning: An Approach to Leverage Air-Surveillance and Untranscribed ATC Data in ASR Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Contextual Semi-Supervised Learning: An Approach to Leverage Air-Surveillance and Untranscribed ATC Data in ASR Systems</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211619.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-2-5|PAPER Thu-M-SS-2-5 — Boosting of Contextual Information in ASR for Air-Traffic Call-Sign Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Boosting of Contextual Information in ASR for Air-Traffic Call-Sign Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^{{$:/causal/NO-PDF Marker}}|^<div class="cpauthorindexpersoncardpapercode">[[Thu-Survey|PAPER Thu-Survey — Learning Speech Models from Multi-Modal Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Speech Models from Multi-Modal Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211870.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-1-1|PAPER Fri-A-SS-1-1 — INTERSPEECH 2021 Acoustic Echo Cancellation Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">INTERSPEECH 2021 Acoustic Echo Cancellation Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211339.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-7|PAPER Wed-E-V-1-7 — MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211312.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-12|PAPER Wed-M-V-2-12 — AVLnet: Learning Audio-Visual Language Representations from Instructional Videos]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AVLnet: Learning Audio-Visual Language Representations from Instructional Videos</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210720.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-11|PAPER Wed-M-V-6-11 — Mixture Model Attention: Flexible Streaming and Non-Streaming Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Mixture Model Attention: Flexible Streaming and Non-Streaming Automatic Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210648.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-7|PAPER Wed-E-V-3-7 — Regularizing Word Segmentation by Creating Misspellings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Regularizing Word Segmentation by Creating Misspellings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210184.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-O-1-1|PAPER Thu-A-O-1-1 — Towards the Prediction of the Vocal Tract Shape from the Sequence of Phonemes to be Articulated]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards the Prediction of the Vocal Tract Shape from the Sequence of Phonemes to be Articulated</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211316.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-O-2-1|PAPER Fri-A-O-2-1 — Device Playback Augmentation with Echo Cancellation for Keyword Spotting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Device Playback Augmentation with Echo Cancellation for Keyword Spotting</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210986.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-3-1|PAPER Wed-M-O-3-1 — Auxiliary Loss Function for Target Speech Extraction and Recognition with Weak Supervision Based on Speaker Characteristics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Auxiliary Loss Function for Target Speech Extraction and Recognition with Weak Supervision Based on Speaker Characteristics</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211524.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-9|PAPER Wed-M-V-5-9 — Reliable Estimates of Interpretable Cue Effects with Active Learning in Psycholinguistic Research]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reliable Estimates of Interpretable Cue Effects with Active Learning in Psycholinguistic Research</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211172.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-4-9|PAPER Wed-E-V-4-9 — In-Group Advantage in the Perception of Emotions: Evidence from Three Varieties of German]]</div>|^<div class="cpauthorindexpersoncardpapertitle">In-Group Advantage in the Perception of Emotions: Evidence from Three Varieties of German</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210315.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-3|PAPER Fri-M-V-1-3 — Testing Acoustic Voice Quality Classification Across Languages and Speech Styles]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Testing Acoustic Voice Quality Classification Across Languages and Speech Styles</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211403.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-3-4|PAPER Wed-A-O-3-4 — The Impact of Forced-Alignment Errors on Automatic Pronunciation Evaluation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Impact of Forced-Alignment Errors on Automatic Pronunciation Evaluation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212000.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-7|PAPER Thu-A-V-6-7 — AusKidTalk: An Auditory-Visual Corpus of 3- to 12-Year-Old Australian Children’s Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AusKidTalk: An Auditory-Visual Corpus of 3- to 12-Year-Old Australian Children’s Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211434.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-2|PAPER Tue-M-V-5-2 — Phoneme Recognition Through Fine Tuning of Phonetic Representations: A Case Study on Luhya Language Varieties]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phoneme Recognition Through Fine Tuning of Phonetic Representations: A Case Study on Luhya Language Varieties</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211435.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-3|PAPER Thu-A-V-6-3 — Tusom2021: A Phonetically Transcribed Speech Dataset from an Endangered Language for Universal Phone Recognition Experiments]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Tusom2021: A Phonetically Transcribed Speech Dataset from an Endangered Language for Universal Phone Recognition Experiments</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211749.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-7|PAPER Tue-A-V-2-7 — An Automatic, Simple Ultrasound Biofeedback Parameter for Distinguishing Accurate and Misarticulated Rhotic Syllables]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Automatic, Simple Ultrasound Biofeedback Parameter for Distinguishing Accurate and Misarticulated Rhotic Syllables</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211403.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-3-4|PAPER Wed-A-O-3-4 — The Impact of Forced-Alignment Errors on Automatic Pronunciation Evaluation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Impact of Forced-Alignment Errors on Automatic Pronunciation Evaluation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-6|PAPER Tue-A-V-2-6 — Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211384.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-1|PAPER Fri-A-SS-2-1 — Automatic Speech Recognition of Disordered Speech: Personalized Models Outperforming Human Listeners on Short Phrases]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Speech Recognition of Disordered Speech: Personalized Models Outperforming Human Listeners on Short Phrases</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210330.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-10|PAPER Fri-A-SS-2-10 — A Voice-Activated Switch for Persons with Motor and Speech Impairments: Isolated-Vowel Spotting Using Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Voice-Activated Switch for Persons with Motor and Speech Impairments: Isolated-Vowel Spotting Using Neural Networks</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210697.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-12|PAPER Fri-A-SS-2-12 — Disordered Speech Data Collection: Lessons Learned at 1 Million Utterances from Project Euphonia]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Disordered Speech Data Collection: Lessons Learned at 1 Million Utterances from Project Euphonia</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211771.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-3-3|PAPER Tue-E-O-3-3 — Speaking Corona? Human and Machine Recognition of COVID-19 from Voice]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaking Corona? Human and Machine Recognition of COVID-19 from Voice</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211864.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-5-1|PAPER Tue-A-V-5-1 — Speaker-Conversation Factorial Designs for Diarization Error Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker-Conversation Factorial Designs for Diarization Error Analysis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211849.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-7-8|PAPER Thu-M-V-7-8 — Adapting Long Context NLM for ASR Rescoring in Conversational Agents]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adapting Long Context NLM for ASR Rescoring in Conversational Agents</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211384.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-1|PAPER Fri-A-SS-2-1 — Automatic Speech Recognition of Disordered Speech: Personalized Models Outperforming Human Listeners on Short Phrases]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Speech Recognition of Disordered Speech: Personalized Models Outperforming Human Listeners on Short Phrases</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210697.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-12|PAPER Fri-A-SS-2-12 — Disordered Speech Data Collection: Lessons Learned at 1 Million Utterances from Project Euphonia]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Disordered Speech Data Collection: Lessons Learned at 1 Million Utterances from Project Euphonia</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211913.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-14|PAPER Fri-A-SS-2-14 — Comparing Supervised Models and Learned Speech Representations for Classifying Intelligibility of Disordered Speech on Selected Phrases]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparing Supervised Models and Learned Speech Representations for Classifying Intelligibility of Disordered Speech on Selected Phrases</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211020.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-7|PAPER Wed-A-V-6-7 — Transcribing Paralinguistic Acoustic Cues to Target Language Text in Transformer-Based Speech-to-Text Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transcribing Paralinguistic Acoustic Cues to Target Language Text in Transformer-Based Speech-to-Text Translation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211105.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-9|PAPER Wed-A-V-6-9 — ASR Posterior-Based Loss for Multi-Task End-to-End Speech Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ASR Posterior-Based Loss for Multi-Task End-to-End Speech Translation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211050.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-9|PAPER Wed-A-V-4-9 — Assessment of von Mises-Bernoulli Deep Neural Network in Sound Source Localization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Assessment of von Mises-Bernoulli Deep Neural Network in Sound Source Localization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211941.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-12|PAPER Wed-M-V-4-12 — A Two-Stage Approach to Speech Bandwidth Extension]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Two-Stage Approach to Speech Bandwidth Extension</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211930.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-SS-1-1|PAPER Fri-M-SS-1-1 — OpenASR20: An Open Challenge for Automatic Speech Recognition of Conversational Telephone Speech in Low-Resource Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">OpenASR20: An Open Challenge for Automatic Speech Recognition of Conversational Telephone Speech in Low-Resource Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210435.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-5|PAPER Thu-M-V-2-5 — Attention-Based Keyword Localisation in Speech Using Visual Grounding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Attention-Based Keyword Localisation in Speech Using Visual Grounding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210208.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-2|PAPER Tue-E-V-6-2 — A Preliminary Study of a Two-Stage Paradigm for Preserving Speaker Identity in Dysarthric Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Preliminary Study of a Two-Stage Paradigm for Preserving Speaker Identity in Dysarthric Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211050.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-9|PAPER Wed-A-V-4-9 — Assessment of von Mises-Bernoulli Deep Neural Network in Sound Source Localization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Assessment of von Mises-Bernoulli Deep Neural Network in Sound Source Localization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210859.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-5|PAPER Wed-E-V-5-5 — Single-Channel Speech Enhancement Using Learnable Loss Mixup]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Single-Channel Speech Enhancement Using Learnable Loss Mixup</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211609.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-1|PAPER Wed-E-SS-1-1 — INTERSPEECH 2021 Deep Noise Suppression Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">INTERSPEECH 2021 Deep Noise Suppression Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210583.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-4|PAPER Wed-A-V-5-4 — Harmonic WaveGAN: GAN-Based Speech Waveform Generation Model with Harmonic Structure Discriminator]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Harmonic WaveGAN: GAN-Based Speech Waveform Generation Model with Harmonic Structure Discriminator</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210861.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-7|PAPER Fri-M-V-5-7 — Age Estimation with Speech-Age Model for Heterogeneous Speech Datasets]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Age Estimation with Speech-Age Model for Heterogeneous Speech Datasets</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210231.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-6-1|PAPER Thu-M-V-6-1 — A Simplified Model for the Vocal Tract of [s] with Inclined Incisors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Simplified Model for the Vocal Tract of [s] with Inclined Incisors</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210742.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-3-2|PAPER Tue-A-V-3-2 — Alpha-Stable Autoregressive Fast Multichannel Nonnegative Matrix Factorization for Joint Speech Enhancement and Dereverberation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Alpha-Stable Autoregressive Fast Multichannel Nonnegative Matrix Factorization for Joint Speech Enhancement and Dereverberation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210165.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-4|PAPER Wed-A-V-3-4 — A Deliberation-Based Joint Acoustic and Text Decoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Deliberation-Based Joint Acoustic and Text Decoder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211259.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-13|PAPER Thu-A-V-6-13 — speechocean762: An Open-Source Non-Native English Speech Corpus for Pronunciation Assessment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">speechocean762: An Open-Source Non-Native English Speech Corpus for Pronunciation Assessment</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211258.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-8|PAPER Fri-A-V-1-8 — Multilingual Speech Evaluation: Case Studies on English, Malay and Tamil]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multilingual Speech Evaluation: Case Studies on English, Malay and Tamil</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218028.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-S&T-1-6|PAPER Fri-A-S&T-1-6 — WittyKiddy: Multilingual Spoken Language Learning for Kids]]</div>|^<div class="cpauthorindexpersoncardpapertitle">WittyKiddy: Multilingual Spoken Language Learning for Kids</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211860.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-4|PAPER Wed-M-O-1-4 — SPGISpeech: 5,000 Hours of Transcribed Financial Audio for Fully Formatted End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SPGISpeech: 5,000 Hours of Transcribed Financial Audio for Fully Formatted End-to-End Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210994.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-7-5|PAPER Thu-M-V-7-5 — Human-to-Human Conversation Dataset for Learning Fine-Grained Turn-Taking Action]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Human-to-Human Conversation Dataset for Learning Fine-Grained Turn-Taking Action</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210174.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-5|PAPER Tue-M-V-3-5 — Comparison of Remote Experiments Using Crowdsourcing and Laboratory Experiments on Speech Intelligibility]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparison of Remote Experiments Using Crowdsourcing and Laboratory Experiments on Speech Intelligibility</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212253.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-10|PAPER Tue-E-V-2-10 — Should We Always Separate?: Switching Between Enhanced and Observed Signals for Overlapping Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Should We Always Separate?: Switching Between Enhanced and Observed Signals for Overlapping Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210124.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-2|PAPER Wed-A-V-4-2 — PILOT: Introducing Transformers for Probabilistic Sound Event Localization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PILOT: Introducing Transformers for Probabilistic Sound Event Localization</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210338.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-3|PAPER Thu-M-V-3-3 — Continuous Speech Separation Using Speaker Inventory for Long Recording]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Continuous Speech Separation Using Speaker Inventory for Long Recording</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211177.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-3|PAPER Thu-A-V-3-3 — Graph-PIT: Generalized Permutation Invariant Training for Continuous Separation of Arbitrary Numbers of Speakers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Graph-PIT: Generalized Permutation Invariant Training for Continuous Separation of Arbitrary Numbers of Speakers</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211369.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-5|PAPER Thu-A-V-3-5 — Few-Shot Learning of New Sound Classes for Target Sound Extraction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Few-Shot Learning of New Sound Classes for Target Sound Extraction</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211004.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-5|PAPER Thu-A-V-4-5 — Advances in Integration of End-to-End Neural and Clustering-Based Diarization for Real Conversational Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Advances in Integration of End-to-End Neural and Clustering-Based Diarization for Real Conversational Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211029.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-7|PAPER Tue-E-V-2-7 — Manifold-Aware Deep Clustering: Maximizing Angles Between Embedding Vectors Based on Regular Simplex]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Manifold-Aware Deep Clustering: Maximizing Angles Between Embedding Vectors Based on Regular Simplex</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212073.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-6-9|PAPER Thu-M-V-6-9 — Mixture of Orthogonal Sequences Made from Extended Time-Stretched Pulses Enables Measurement of Involuntary Voice Fundamental Frequency Response to Pitch Perturbation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Mixture of Orthogonal Sequences Made from Extended Time-Stretched Pulses Enables Measurement of Involuntary Voice Fundamental Frequency Response to Pitch Perturbation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218021.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-S&T-1-1|PAPER Fri-A-S&T-1-1 — Interactive and Real-Time Acoustic Measurement Tools for Speech Data Acquisition and Presentation: Application of an Extended Member of Time Stretched Pulses]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Interactive and Real-Time Acoustic Measurement Tools for Speech Data Acquisition and Presentation: Application of an Extended Member of Time Stretched Pulses</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210174.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-5|PAPER Tue-M-V-3-5 — Comparison of Remote Experiments Using Crowdsourcing and Laboratory Experiments on Speech Intelligibility]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparison of Remote Experiments Using Crowdsourcing and Laboratory Experiments on Speech Intelligibility</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210826.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-5-4|PAPER Thu-M-V-5-4 — Phoneme Duration Modeling Using Speech Rhythm-Based Speaker Embeddings for Multi-Speaker Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phoneme Duration Modeling Using Speech Rhythm-Based Speaker Embeddings for Multi-Speaker Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210221.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-3|PAPER Tue-E-V-6-3 — One-Shot Voice Conversion with Speaker-Agnostic StarGAN]]</div>|^<div class="cpauthorindexpersoncardpapertitle">One-Shot Voice Conversion with Speaker-Agnostic StarGAN</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210384.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-4|PAPER Thu-M-V-4-4 — Semi-Supervised Training with Pseudo-Labeling for End-To-End Neural Diarization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semi-Supervised Training with Pseudo-Labeling for End-To-End Neural Diarization</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210708.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-8|PAPER Thu-M-V-4-8 — Online Streaming End-to-End Neural Diarization Handling Overlapping Speech and Flexible Numbers of Speakers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Online Streaming End-to-End Neural Diarization Handling Overlapping Speech and Flexible Numbers of Speakers</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210809.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-2-4|PAPER Fri-A-V-2-4 — Audio-Visual Speech Emotion Recognition by Disentangling Emotion and Identity Attributes]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio-Visual Speech Emotion Recognition by Disentangling Emotion and Identity Attributes</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211050.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-9|PAPER Wed-A-V-4-9 — Assessment of von Mises-Bernoulli Deep Neural Network in Sound Source Localization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Assessment of von Mises-Bernoulli Deep Neural Network in Sound Source Localization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210411.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-3|PAPER Wed-A-V-1-3 — Speech Emotion Recognition Based on Attention Weight Correction Using Word-Level Confidence Measure]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Emotion Recognition Based on Attention Weight Correction Using Word-Level Confidence Measure</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211208.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-6|PAPER Thu-A-V-4-6 — The Third DIHARD Diarization Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Third DIHARD Diarization Challenge</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211852.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-2-7|PAPER Fri-A-V-2-7 — Speech Emotion Recognition with Multi-Task Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Emotion Recognition with Multi-Task Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210958.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-8|PAPER Tue-M-V-5-8 — Data Augmentation Methods for End-to-End Speech Recognition on Distant-Talk Scenarios]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Data Augmentation Methods for End-to-End Speech Recognition on Distant-Talk Scenarios</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210252.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-5-1|PAPER Thu-M-V-5-1 — Phrase Break Prediction with Bidirectional Encoder Representations in Japanese Text-to-Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phrase Break Prediction with Bidirectional Encoder Representations in Japanese Text-to-Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210838.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-1|PAPER Fri-A-V-5-1 — STYLER: Style Factor Modeling with Rapidity and Robustness via Speech Decomposition for Expressive and Controllable Neural Text to Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">STYLER: Style Factor Modeling with Rapidity and Robustness via Speech Decomposition for Expressive and Controllable Neural Text to Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211186.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-1-4|PAPER Wed-M-V-1-4 — Improving Accent Identification and Accented Speech Recognition Under a Framework of Self-Supervised Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Accent Identification and Accented Speech Recognition Under a Framework of Self-Supervised Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211370.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-4-8|PAPER Tue-M-V-4-8 — Leveraging ASR N-Best in Deep Entity Retrieval]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Leveraging ASR N-Best in Deep Entity Retrieval</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211395.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-13|PAPER Fri-M-V-6-13 — Teaching Keyword Spotters to Spot New Keywords with Limited Examples]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Teaching Keyword Spotters to Spot New Keywords with Limited Examples</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210316.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-4|PAPER Fri-M-V-1-4 — Acquisition of Prosodic Focus Marking by Three- to Six-Year-Old Children Learning Mandarin Chinese]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acquisition of Prosodic Focus Marking by Three- to Six-Year-Old Children Learning Mandarin Chinese</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210892.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-4|PAPER Wed-M-V-4-4 — WSRGlow: A Glow-Based Waveform Generative Model for Audio Super-Resolution]]</div>|^<div class="cpauthorindexpersoncardpapertitle">WSRGlow: A Glow-Based Waveform Generative Model for Audio Super-Resolution</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210387.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-6|PAPER Wed-A-V-3-6 — Deformable TDNN with Adaptive Receptive Fields for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deformable TDNN with Adaptive Receptive Fields for Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210333.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-2-1|PAPER Thu-M-SS-2-1 — Towards an Accent-Robust Approach for ATC Communications Transcription]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards an Accent-Robust Approach for ATC Communications Transcription</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210223.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-2-3|PAPER Tue-M-O-2-3 — Distortion of Voiced Obstruents for Differential Diagnosis Between Parkinson’s Disease and Multiple System Atrophy]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Distortion of Voiced Obstruents for Differential Diagnosis Between Parkinson’s Disease and Multiple System Atrophy</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211844.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-12|PAPER Tue-M-V-3-12 — Incorporating Embedding Vectors from a Human Mean-Opinion Score Prediction Model for Monaural Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Incorporating Embedding Vectors from a Human Mean-Opinion Score Prediction Model for Monaural Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210496.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-6|PAPER Thu-M-V-2-6 — Evaluation of Audio-Visual Alignments in Visually Grounded Speech Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Evaluation of Audio-Visual Alignments in Visually Grounded Speech Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210654.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-6|PAPER Tue-A-V-4-6 — A Comparison of Supervised and Unsupervised Pre-Training of End-to-End Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparison of Supervised and Unsupervised Pre-Training of End-to-End Models</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210318.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-4|PAPER Tue-E-V-5-4 — Robust Continuous On-Device Personalization for Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Continuous On-Device Personalization for Automatic Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211754.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-7-10|PAPER Tue-M-V-7-10 — Sound Change in Spontaneous Bilingual Speech: A Corpus Study on the Cantonese n-l Merger in Cantonese-English Bilinguals]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sound Change in Spontaneous Bilingual Speech: A Corpus Study on the Cantonese n-l Merger in Cantonese-English Bilinguals</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211780.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-4-14|PAPER Wed-E-V-4-14 — Leveraging the Uniformity Framework to Examine Crosslinguistic Similarity for Long-Lag Stops in Spontaneous Cantonese-English Bilingual Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Leveraging the Uniformity Framework to Examine Crosslinguistic Similarity for Long-Lag Stops in Spontaneous Cantonese-English Bilingual Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211067.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-1-4|PAPER Wed-A-O-1-4 — On Sampling-Based Training Criteria for Neural Language Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On Sampling-Based Training Criteria for Neural Language Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211335.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-9|PAPER Tue-M-V-6-9 — End-to-End Transformer-Based Open-Vocabulary Keyword Spotting with Location-Guided Local Attention]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Transformer-Based Open-Vocabulary Keyword Spotting with Location-Guided Local Attention</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210188.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-2|PAPER Thu-A-V-5-2 — LiteTTS: A Lightweight Mel-Spectrogram-Free Text-to-Wave Synthesizer Based on Generative Adversarial Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">LiteTTS: A Lightweight Mel-Spectrogram-Free Text-to-Wave Synthesizer Based on Generative Adversarial Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218018.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-S&T-1-4|PAPER Thu-M-S&T-1-4 — Digital Einstein Experience: Fast Text-to-Speech for Conversational AI]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Digital Einstein Experience: Fast Text-to-Speech for Conversational AI</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211700.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-2-6|PAPER Tue-A-O-2-6 — Models of Reaction Times in Auditory Lexical Decision: RTonset versus RToffset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Models of Reaction Times in Auditory Lexical Decision: RTonset versus RToffset</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210353.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-2|PAPER Wed-E-V-2-2 — Robust Laughter Detection in Noisy Environments]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Laughter Detection in Noisy Environments</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210021.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-1|PAPER Fri-M-V-1-1 — Cross-Linguistic Perception of the Japanese Singleton/Geminate Contrast: Korean, Mandarin and Mongolian Compared]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Linguistic Perception of the Japanese Singleton/Geminate Contrast: Korean, Mandarin and Mongolian Compared</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211807.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-9|PAPER Thu-A-V-4-9 — Speaker Diarization Using Two-Pass Leave-One-Out Gaussian PLDA Clustering of DNN Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Diarization Using Two-Pass Leave-One-Out Gaussian PLDA Clustering of DNN Embeddings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211578.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-9|PAPER Wed-E-V-1-9 — SRI-B End-to-End System for Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SRI-B End-to-End System for Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210069.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-6|PAPER Thu-M-V-1-6 — Assessing Posterior-Based Mispronunciation Detection on Field-Collected Recordings from Child Speech Therapy Sessions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Assessing Posterior-Based Mispronunciation Detection on Field-Collected Recordings from Child Speech Therapy Sessions</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212000.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-7|PAPER Thu-A-V-6-7 — AusKidTalk: An Auditory-Visual Corpus of 3- to 12-Year-Old Australian Children’s Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AusKidTalk: An Auditory-Visual Corpus of 3- to 12-Year-Old Australian Children’s Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218006.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-S&T-1-6|PAPER Tue-A-S&T-1-6 — On-Device Streaming Transformer-Based End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On-Device Streaming Transformer-Based End-to-End Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211775.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-9|PAPER Tue-E-V-3-9 — SUPERB: Speech Processing Universal PERformance Benchmark]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SUPERB: Speech Processing Universal PERformance Benchmark</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210668.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-8|PAPER Wed-E-SS-1-8 — Lightweight Causal Transformer with Local Self-Attention for Real-Time Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Lightweight Causal Transformer with Local Self-Attention for Real-Time Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210231.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-6-1|PAPER Thu-M-V-6-1 — A Simplified Model for the Vocal Tract of [s] with Inclined Incisors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Simplified Model for the Vocal Tract of [s] with Inclined Incisors</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212073.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-6-9|PAPER Thu-M-V-6-9 — Mixture of Orthogonal Sequences Made from Extended Time-Stretched Pulses Enables Measurement of Involuntary Voice Fundamental Frequency Response to Pitch Perturbation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Mixture of Orthogonal Sequences Made from Extended Time-Stretched Pulses Enables Measurement of Involuntary Voice Fundamental Frequency Response to Pitch Perturbation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218021.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-S&T-1-1|PAPER Fri-A-S&T-1-1 — Interactive and Real-Time Acoustic Measurement Tools for Speech Data Acquisition and Presentation: Application of an Extended Member of Time Stretched Pulses]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Interactive and Real-Time Acoustic Measurement Tools for Speech Data Acquisition and Presentation: Application of an Extended Member of Time Stretched Pulses</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212218.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-13|PAPER Tue-A-V-1-13 — Acoustic Event Detection with Classifier Chains]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Event Detection with Classifier Chains</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210809.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-2-4|PAPER Fri-A-V-2-4 — Audio-Visual Speech Emotion Recognition by Disentangling Emotion and Identity Attributes]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio-Visual Speech Emotion Recognition by Disentangling Emotion and Identity Attributes</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212210.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-1-9|PAPER Tue-M-V-1-9 — Serialized Multi-Layer Multi-Head Attention for Neural Speaker Embedding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Serialized Multi-Layer Multi-Head Attention for Neural Speaker Embedding</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211978.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-11|PAPER Tue-E-V-1-11 — Joint Feature Enhancement and Speaker Recognition with Multi-Objective Task-Oriented Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Joint Feature Enhancement and Speaker Recognition with Multi-Objective Task-Oriented Network</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211980.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-12|PAPER Tue-E-V-1-12 — Multi-Level Transfer Learning from Near-Field to Far-Field Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Level Transfer Learning from Near-Field to Far-Field Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211522.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-9|PAPER Fri-M-V-7-9 — Visualizing Classifier Adjacency Relations: A Case Study in Speaker Verification and Voice Anti-Spoofing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Visualizing Classifier Adjacency Relations: A Case Study in Speaker Verification and Voice Anti-Spoofing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211163.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-1-2|PAPER Tue-A-O-1-2 — Spine2Net: SpineNet with Res2Net and Time-Squeeze-and-Excitation Blocks for Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spine2Net: SpineNet with Res2Net and Time-Squeeze-and-Excitation Blocks for Speaker Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211230.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-3-2|PAPER Fri-M-O-3-2 — Combating Reverberation in NTF-Based Speech Separation Using a Sub-Source Weighted Multichannel Wiener Filter and Linear Prediction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Combating Reverberation in NTF-Based Speech Separation Using a Sub-Source Weighted Multichannel Wiener Filter and Linear Prediction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211553.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-SS-1-1|PAPER Wed-A-SS-1-1 — The ID R&D System Description for Short-Duration Speaker Verification Challenge 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The ID R&D System Description for Short-Duration Speaker Verification Challenge 2021</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212191.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-4|PAPER Tue-A-SS-2-4 — Detecting COVID-19 from Audio Recording of Coughs Using Random Forests and Support Vector Machines]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detecting COVID-19 from Audio Recording of Coughs Using Random Forests and Support Vector Machines</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210303.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-1-3|PAPER Thu-A-V-1-3 — Automatic Analysis of the Emotional Content of Speech in Daylong Child-Centered Recordings from a Neonatal Intensive Care Unit]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Analysis of the Emotional Content of Speech in Daylong Child-Centered Recordings from a Neonatal Intensive Care Unit</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210327.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-2|PAPER Wed-M-V-3-2 — Cross-Lingual Low Resource Speaker Adaptation Using Phonological Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Lingual Low Resource Speaker Adaptation Using Phonological Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210023.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-8|PAPER Tue-A-V-2-8 — Silent versus Modal Multi-Speaker Speech Recognition from Ultrasound and Video]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Silent versus Modal Multi-Speaker Speech Recognition from Ultrasound and Video</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212203.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-11|PAPER Wed-E-V-6-11 — Confidence Intervals for ASR-Based TTS Evaluation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Confidence Intervals for ASR-Based TTS Evaluation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210252.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-5-1|PAPER Thu-M-V-5-1 — Phrase Break Prediction with Bidirectional Encoder Representations in Japanese Text-to-Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phrase Break Prediction with Bidirectional Encoder Representations in Japanese Text-to-Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211031.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-8|PAPER Tue-A-SS-2-8 — COVID-19 Detection from Spectral Features on the DiCOVA Dataset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">COVID-19 Detection from Spectral Features on the DiCOVA Dataset</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210742.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-3-2|PAPER Tue-A-V-3-2 — Alpha-Stable Autoregressive Fast Multichannel Nonnegative Matrix Factorization for Joint Speech Enhancement and Dereverberation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Alpha-Stable Autoregressive Fast Multichannel Nonnegative Matrix Factorization for Joint Speech Enhancement and Dereverberation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210244.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-4|PAPER Tue-E-V-6-4 — Fine-Tuning Pre-Trained Voice Conversion Model for Adding New Target Speakers with Limited Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fine-Tuning Pre-Trained Voice Conversion Model for Adding New Target Speakers with Limited Data</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210906.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-6-3|PAPER Thu-M-V-6-3 — Using Transposed Convolution for Articulatory-to-Acoustic Conversion from Real-Time MRI Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Using Transposed Convolution for Articulatory-to-Acoustic Conversion from Real-Time MRI Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210372.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-5|PAPER Fri-M-V-1-5 — Adaptive Listening Difficulty Detection for L2 Learners Through Moderating ASR Resources]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adaptive Listening Difficulty Detection for L2 Learners Through Moderating ASR Resources</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211570.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-SS-1-2|PAPER Wed-A-SS-1-2 — Integrating Frequency Translational Invariance in TDNNs and Frequency Positional Information in 2D ResNets to Enhance Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Integrating Frequency Translational Invariance in TDNNs and Frequency Positional Information in 2D ResNets to Enhance Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210631.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-5|PAPER Wed-E-V-1-5 — Using Large Self-Supervised Models for Low-Resource Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Using Large Self-Supervised Models for Low-Resource Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218014.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-S&T-1-7|PAPER Wed-A-S&T-1-7 — Audio Segmentation Based Conversational Silence Detection for Contact Center Calls]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio Segmentation Based Conversational Silence Detection for Contact Center Calls</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211130.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-9|PAPER Wed-E-V-5-9 — Speech Denoising Without Clean Training Data: A Noise2Noise Approach]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Denoising Without Clean Training Data: A Noise2Noise Approach</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211969.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-9|PAPER Tue-M-SS-1-9 — Multi-Attentive Detection of the Spider Monkey Whinny in the (Actual) Wild]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Attentive Detection of the Spider Monkey Whinny in the (Actual) Wild</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211666.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-11|PAPER Fri-M-V-3-11 — Sequence-Level Confidence Classifier for ASR Utterance Accuracy and Application to Acoustic Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sequence-Level Confidence Classifier for ASR Utterance Accuracy and Application to Acoustic Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211316.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-O-2-1|PAPER Fri-A-O-2-1 — Device Playback Augmentation with Echo Cancellation for Keyword Spotting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Device Playback Augmentation with Echo Cancellation for Keyword Spotting</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212230.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-2-4|PAPER Wed-M-O-2-4 — Domain-Initial Strengthening in Turkish: Acoustic Cues to Prosodic Hierarchy in Stop Consonants]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Domain-Initial Strengthening in Turkish: Acoustic Cues to Prosodic Hierarchy in Stop Consonants</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211467.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-10|PAPER Fri-M-V-1-10 — Transformer Based End-to-End Mispronunciation Detection and Diagnosis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transformer Based End-to-End Mispronunciation Detection and Diagnosis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210781.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-6-2|PAPER Tue-A-V-6-2 — Limited Data Emotional Voice Conversion Leveraging Text-to-Speech: Two-Stage Sequence-to-Sequence Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Limited Data Emotional Voice Conversion Leveraging Text-to-Speech: Two-Stage Sequence-to-Sequence Training</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211775.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-9|PAPER Tue-E-V-3-9 — SUPERB: Speech Processing Universal PERformance Benchmark]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SUPERB: Speech Processing Universal PERformance Benchmark</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210475.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-6|PAPER Thu-A-V-5-6 — Speech Resynthesis from Discrete Disentangled Self-Supervised Representations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Resynthesis from Discrete Disentangled Self-Supervised Representations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210812.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-3|PAPER Wed-M-V-4-3 — X-net: A Joint Scale Down and Scale Up Method for Voice Call]]</div>|^<div class="cpauthorindexpersoncardpapertitle">X-net: A Joint Scale Down and Scale Up Method for Voice Call</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211953.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-14|PAPER Wed-M-V-6-14 — Multi-Mode Transformer Transducer with Stochastic Future Context]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Mode Transformer Transducer with Stochastic Future Context</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218028.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-S&T-1-6|PAPER Fri-A-S&T-1-6 — WittyKiddy: Multilingual Spoken Language Learning for Kids]]</div>|^<div class="cpauthorindexpersoncardpapertitle">WittyKiddy: Multilingual Spoken Language Learning for Kids</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211571.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-13|PAPER Fri-A-V-1-13 — NeMo Inverse Text Normalization: From Development to Production]]</div>|^<div class="cpauthorindexpersoncardpapertitle">NeMo Inverse Text Normalization: From Development to Production</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211953.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-14|PAPER Wed-M-V-6-14 — Multi-Mode Transformer Transducer with Stochastic Future Context]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Mode Transformer Transducer with Stochastic Future Context</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211723.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-1-11|PAPER Thu-A-V-1-11 — Leveraging Pre-Trained Language Model for Speech Sentiment Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Leveraging Pre-Trained Language Model for Speech Sentiment Analysis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211335.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-9|PAPER Tue-M-V-6-9 — End-to-End Transformer-Based Open-Vocabulary Keyword Spotting with Location-Guided Local Attention]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Transformer-Based Open-Vocabulary Keyword Spotting with Location-Guided Local Attention</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210838.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-1|PAPER Fri-A-V-5-1 — STYLER: Style Factor Modeling with Rapidity and Robustness via Speech Decomposition for Expressive and Controllable Neural Text to Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">STYLER: Style Factor Modeling with Rapidity and Robustness via Speech Decomposition for Expressive and Controllable Neural Text to Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211237.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-1-2|PAPER Fri-M-O-1-2 — ETLT 2021: Shared Task on Automatic Speech Recognition for Non-Native Children’s Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ETLT 2021: Shared Task on Automatic Speech Recognition for Non-Native Children’s Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211377.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-7|PAPER Thu-A-V-4-7 — Robust End-to-End Speaker Diarization with Conformer and Additive Margin Penalty]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust End-to-End Speaker Diarization with Conformer and Additive Margin Penalty</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212100.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-10|PAPER Tue-M-V-2-10 — Speed up Training with Variable Length Inputs by Efficient Batching Strategies]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speed up Training with Variable Length Inputs by Efficient Batching Strategies</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211932.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-10|PAPER Wed-E-V-2-10 — Analysis of Contextual Voice Changes in Remote Meetings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis of Contextual Voice Changes in Remote Meetings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210682.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-6|PAPER Wed-M-V-5-6 — Effects of Aging and Age-Related Hearing Loss on Talker Discrimination]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effects of Aging and Age-Related Hearing Loss on Talker Discrimination</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210173.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-7|PAPER Fri-A-SS-2-7 — Variational Auto-Encoder Based Variability Encoding for Dysarthric Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Variational Auto-Encoder Based Variability Encoding for Dysarthric Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210074.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-1|PAPER Tue-A-SS-2-1 — DiCOVA Challenge: Dataset, Task, and Baseline System for COVID-19 Diagnosis Using Acoustics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DiCOVA Challenge: Dataset, Task, and Baseline System for COVID-19 Diagnosis Using Acoustics</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210753.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-4|PAPER Thu-A-SS-2-4 — Alzheimer Disease Recognition Using Speech-Based Embeddings From Pre-Trained Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Alzheimer Disease Recognition Using Speech-Based Embeddings From Pre-Trained Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211241.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-1-3|PAPER Fri-M-O-1-3 — Age-Invariant Training for End-to-End Child Speech Recognition Using Adversarial Multi-Task Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Age-Invariant Training for End-to-End Child Speech Recognition Using Adversarial Multi-Task Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210567.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-3-2|PAPER Tue-E-O-3-2 — Audio-Visual Recognition of Emotional Engagement of People with Dementia]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio-Visual Recognition of Emotional Engagement of People with Dementia</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211173.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-10|PAPER Tue-M-SS-1-10 — Identifying Conflict Escalation and Primates by Using Ensemble X-Vectors and Fisher Vector Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Identifying Conflict Escalation and Primates by Using Ensemble X-Vectors and Fisher Vector Features</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211466.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-3-6|PAPER Wed-A-O-3-6 — Neural Speaker Embeddings for Ultrasound-Based Silent Speech Interfaces]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Neural Speaker Embeddings for Ultrasound-Based Silent Speech Interfaces</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211796.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-7|PAPER Wed-A-V-1-7 — Investigating the Interplay Between Affective, Phonatory and Motoric Subsystems in Autism Spectrum Disorder Using a Multimodal Dialogue Agent]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Interplay Between Affective, Phonatory and Motoric Subsystems in Autism Spectrum Disorder Using a Multimodal Dialogue Agent</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211881.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-O-1-6|PAPER Thu-A-O-1-6 — Changes in Glottal Source Parameter Values with Light to Moderate Physical Load]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Changes in Glottal Source Parameter Values with Light to Moderate Physical Load</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211874.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-10|PAPER Tue-M-V-6-10 — Segmental Contrastive Predictive Coding for Unsupervised Word Segmentation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Segmental Contrastive Predictive Coding for Unsupervised Word Segmentation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211502.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-9|PAPER Tue-E-V-1-9 — Deep Feature CycleGANs: Speaker Identity Preserving Non-Parallel Microphone-Telephone Domain Adaptation for Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Feature CycleGANs: Speaker Identity Preserving Non-Parallel Microphone-Telephone Domain Adaptation for Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211664.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-2|PAPER Wed-M-V-2-2 — Unsupervised Acoustic Unit Discovery by Leveraging a Language-Independent Subword Discriminative Feature Representation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Acoustic Unit Discovery by Leveraging a Language-Independent Subword Discriminative Feature Representation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211906.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-12|PAPER Thu-A-SS-1-12 — Align-Denoise: Single-Pass Non-Autoregressive Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Align-Denoise: Single-Pass Non-Autoregressive Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211850.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-10|PAPER Thu-A-SS-2-10 — Automatic Detection and Assessment of Alzheimer Disease Using Speech and Language Technologies in Low-Resource Scenarios]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Detection and Assessment of Alzheimer Disease Using Speech and Language Technologies in Low-Resource Scenarios</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212006.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-15|PAPER Fri-A-SS-2-15 — Analysis and Tuning of a Voice Assistant System for Dysfluent Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis and Tuning of a Voice Assistant System for Dysfluent Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212249.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-2|PAPER Wed-A-V-1-2 — A Psychology-Driven Computational Analysis of Political Interviews]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Psychology-Driven Computational Analysis of Political Interviews</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210556.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-5|PAPER Wed-M-O-1-5 — //LeBenchmark//: A Reproducible Framework for Assessing Self-Supervised Representation Learning from Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">//LeBenchmark//: A Reproducible Framework for Assessing Self-Supervised Representation Learning from Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210608.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-2-1|PAPER Wed-E-O-2-1 — Impact of Encoding and Segmentation Strategies on End-to-End Simultaneous Speech Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Impact of Encoding and Segmentation Strategies on End-to-End Simultaneous Speech Translation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210275.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-2-1|PAPER Fri-M-O-2-1 — Alternate Endings: Improving Prosody for Incremental Neural TTS with Predicted Future Text Input]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Alternate Endings: Improving Prosody for Incremental Neural TTS with Predicted Future Text Input</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210691.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-3|PAPER Fri-A-V-1-3 — Understanding Medical Conversations: Rich Transcription, Confidence Scores & Information Extraction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Understanding Medical Conversations: Rich Transcription, Confidence Scores & Information Extraction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210256.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-3-1|PAPER Tue-M-O-3-1 — A Benchmark of Dynamical Variational Autoencoders Applied to Speech Spectrogram Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Benchmark of Dynamical Variational Autoencoders Applied to Speech Spectrogram Modeling</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211604.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-O-1-5|PAPER Thu-A-O-1-5 — Learning Robust Speech Representation with an Articulatory-Regularized Variational Autoencoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Robust Speech Representation with an Articulatory-Regularized Variational Autoencoder</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210275.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-2-1|PAPER Fri-M-O-2-1 — Alternate Endings: Improving Prosody for Incremental Neural TTS with Predicted Future Text Input]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Alternate Endings: Improving Prosody for Incremental Neural TTS with Predicted Future Text Input</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210080.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-2-3|PAPER Wed-E-O-2-3 — Active Speaker Detection as a Multi-Objective Optimization with Uncertainty-Based Multimodal Fusion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Active Speaker Detection as a Multi-Objective Optimization with Uncertainty-Based Multimodal Fusion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210262.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-5|PAPER Tue-A-SS-1-5 — Privacy-Preserving Feature Extraction for Cloud-Based Wake Word Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Privacy-Preserving Feature Extraction for Cloud-Based Wake Word Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211182.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-6|PAPER Wed-M-V-2-6 — Analyzing Speaker Information in Self-Supervised Models to Improve Zero-Resource Speech Processing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analyzing Speaker Information in Self-Supervised Models to Improve Zero-Resource Speech Processing</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210049.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-1|PAPER Thu-M-V-2-1 — Direct Multimodal Few-Shot Learning of Speech and Images]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Direct Multimodal Few-Shot Learning of Speech and Images</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210080.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-2-3|PAPER Wed-E-O-2-3 — Active Speaker Detection as a Multi-Objective Optimization with Uncertainty-Based Multimodal Fusion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Active Speaker Detection as a Multi-Objective Optimization with Uncertainty-Based Multimodal Fusion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212028.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-11|PAPER Tue-A-V-1-11 — Variational Information Bottleneck for Effective Low-Resource Audio Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Variational Information Bottleneck for Effective Low-Resource Audio Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210776.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-1-3|PAPER Wed-M-V-1-3 — A Weight Moving Average Based Alternate Decoupled Learning Algorithm for Long-Tailed Language Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Weight Moving Average Based Alternate Decoupled Learning Algorithm for Long-Tailed Language Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211017.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-10|PAPER Tue-A-V-4-10 — Improving RNN-T for Domain Scaling Using Semi-Supervised Training with Neural TTS]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving RNN-T for Domain Scaling Using Semi-Supervised Training with Neural TTS</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210979.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-8|PAPER Fri-A-V-5-8 — Cross-Speaker Style Transfer with Prosody Bottleneck in Neural Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Speaker Style Transfer with Prosody Bottleneck in Neural Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211065.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-8|PAPER Wed-A-V-6-8 — End-to-End Speech Translation via Cross-Modal Progressive Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Speech Translation via Cross-Modal Progressive Training</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210516.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-6|PAPER Thu-M-V-4-6 — Scenario-Dependent Speaker Diarization for DIHARD-III Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Scenario-Dependent Speaker Diarization for DIHARD-III Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211351.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-6-6|PAPER Tue-A-V-6-6 — Enriching Source Style Transfer in Recognition-Synthesis Based Non-Parallel Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Enriching Source Style Transfer in Recognition-Synthesis Based Non-Parallel Voice Conversion</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211980.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-12|PAPER Tue-E-V-1-12 — Multi-Level Transfer Learning from Near-Field to Far-Field Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Level Transfer Learning from Near-Field to Far-Field Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212132.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-12|PAPER Tue-E-V-6-12 — Improving Robustness of One-Shot Voice Conversion with Deep Discriminative Speaker Encoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Robustness of One-Shot Voice Conversion with Deep Discriminative Speaker Encoder</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210414.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-2|PAPER Wed-A-V-5-2 — Glow-WaveGAN: Learning Speech Representations from GAN-Based Variational Auto-Encoder for High Fidelity Flow-Based Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Glow-WaveGAN: Learning Speech Representations from GAN-Based Variational Auto-Encoder for High Fidelity Flow-Based Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211482.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-5|PAPER Wed-E-SS-1-5 — DCCRN+: Channel-Wise Subband DCCRN with SNR Estimation for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DCCRN+: Channel-Wise Subband DCCRN with SNR Estimation for Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211397.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-4|PAPER Thu-A-V-6-4 — AISHELL-4: An Open Source Dataset for Speech Enhancement, Separation, Recognition and Speaker Diarization in Conference Scenario]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AISHELL-4: An Open Source Dataset for Speech Enhancement, Separation, Recognition and Speaker Diarization in Conference Scenario</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212155.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-2|PAPER Thu-A-SS-1-2 — Multi-Speaker ASR Combining Non-Autoregressive Conformer CTC and Conditional Speaker Chain]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Speaker ASR Combining Non-Autoregressive Conformer CTC and Conditional Speaker Chain</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211983.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-5|PAPER Fri-M-V-3-5 — WeNet: Production Oriented Streaming and Non-Streaming End-to-End Speech Recognition Toolkit]]</div>|^<div class="cpauthorindexpersoncardpapertitle">WeNet: Production Oriented Streaming and Non-Streaming End-to-End Speech Recognition Toolkit</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210817.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-11|PAPER Fri-M-V-6-11 — Auto-KWS 2021 Challenge: Task, Datasets, and Baselines]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Auto-KWS 2021 Challenge: Task, Datasets, and Baselines</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210415.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-11|PAPER Fri-A-V-3-11 — Efficient Conformer with Prob-Sparse Attention Mechanism for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Efficient Conformer with Prob-Sparse Attention Mechanism for End-to-End Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210412.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-4|PAPER Fri-A-V-5-4 — Controllable Context-Aware Conversational Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Controllable Context-Aware Conversational Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211407.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-10|PAPER Fri-A-V-5-10 — Improving Performance of Seen and Unseen Speech Style Transfer in End-to-End Neural TTS]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Performance of Seen and Unseen Speech Style Transfer in End-to-End Neural TTS</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211359.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-1-3|PAPER Fri-A-SS-1-3 — F-T-LSTM Based Complex Network for Joint Acoustic Echo Cancellation and Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">F-T-LSTM Based Complex Network for Joint Acoustic Echo Cancellation and Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210082.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-1-1|PAPER Wed-M-V-1-1 — End-to-End Language Diarization for Bilingual Code-Switching Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Language Diarization for Bilingual Code-Switching Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212127.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-3-6|PAPER Thu-M-O-3-6 — Training Hybrid Models on Noisy Transliterated Transcripts for Code-Switched Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Training Hybrid Models on Noisy Transliterated Transcripts for Code-Switched Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210384.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-4|PAPER Thu-M-V-4-4 — Semi-Supervised Training with Pseudo-Labeling for End-To-End Neural Diarization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semi-Supervised Training with Pseudo-Labeling for End-To-End Neural Diarization</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210708.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-8|PAPER Thu-M-V-4-8 — Online Streaming End-to-End Neural Diarization Handling Overlapping Speech and Flexible Numbers of Speakers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Online Streaming End-to-End Neural Diarization Handling Overlapping Speech and Flexible Numbers of Speakers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211404.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-7|PAPER Fri-M-V-7-7 — Attention-Based Convolutional Neural Network for ASV Spoofing Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Attention-Based Convolutional Neural Network for ASV Spoofing Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218002.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-S&T-1-2|PAPER Tue-A-S&T-1-2 — Beey: More Than a Speech-to-Text Editor]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Beey: More Than a Speech-to-Text Editor</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218012.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-S&T-1-5|PAPER Wed-A-S&T-1-5 — ThemePro 2.0: Showcasing the Role of Thematic Progression in Engaging Human-Computer Interaction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ThemePro 2.0: Showcasing the Role of Thematic Progression in Engaging Human-Computer Interaction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210019.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-1|PAPER Tue-M-SS-1-1 — The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210549.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-2-5|PAPER Tue-M-O-2-5 — EasyCall Corpus: A Dysarthric Speech Dataset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">EasyCall Corpus: A Dysarthric Speech Dataset</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210703.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-1-7|PAPER Thu-A-V-1-7 — Emotion Recognition from Speech Using wav2vec 2.0 Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Emotion Recognition from Speech Using wav2vec 2.0 Embeddings</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210753.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-4|PAPER Thu-A-SS-2-4 — Alzheimer Disease Recognition Using Speech-Based Embeddings From Pre-Trained Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Alzheimer Disease Recognition Using Speech-Based Embeddings From Pre-Trained Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212119.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-2-4|PAPER Wed-A-O-2-4 — Knowledge Distillation from Multi-Modality to Single-Modality for Person Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Knowledge Distillation from Multi-Modality to Single-Modality for Person Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210922.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-6|PAPER Wed-E-V-5-6 — A Maximum Likelihood Approach to SNR-Progressive Learning Using Generalized Gaussian Distribution for LSTM-Based Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Maximum Likelihood Approach to SNR-Progressive Learning Using Generalized Gaussian Distribution for LSTM-Based Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210259.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-7|PAPER Tue-M-V-3-7 — Speech Enhancement with Weakly Labelled Data from AudioSet]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Enhancement with Weakly Labelled Data from AudioSet</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210591.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-13|PAPER Wed-A-V-2-13 — Correcting Automated and Manual Speech Transcription Errors Using Warped Language Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Correcting Automated and Manual Speech Transcription Errors Using Warped Language Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210440.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-5|PAPER Tue-A-V-2-5 — An Attention Self-Supervised Contrastive Learning Based Three-Stage Model for Hand Shape Feature Representation in Cued Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Attention Self-Supervised Contrastive Learning Based Three-Stage Model for Hand Shape Feature Representation in Cued Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210432.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-4|PAPER Thu-M-V-2-4 — Cross-Modal Knowledge Distillation Method for Automatic Cued Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Modal Knowledge Distillation Method for Automatic Cued Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210851.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-11|PAPER Thu-A-V-5-11 — Triple M: A Practical Text-to-Speech Synthesis System with Multi-Guidance Attention and Multi-Band Multi-Time LPCNet]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Triple M: A Practical Text-to-Speech Synthesis System with Multi-Guidance Attention and Multi-Band Multi-Time LPCNet</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210851.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-11|PAPER Thu-A-V-5-11 — Triple M: A Practical Text-to-Speech Synthesis System with Multi-Guidance Attention and Multi-Band Multi-Time LPCNet]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Triple M: A Practical Text-to-Speech Synthesis System with Multi-Guidance Attention and Multi-Band Multi-Time LPCNet</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210887.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-4-7|PAPER Tue-M-V-4-7 — Domain-Specific Multi-Agent Dialog Policy Learning in Multi-Domain Task-Oriented Scenarios]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Domain-Specific Multi-Agent Dialog Policy Learning in Multi-Domain Task-Oriented Scenarios</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210136.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-6|PAPER Fri-M-V-6-6 — Text Anchor Based Metric Learning for Small-Footprint Keyword Spotting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Text Anchor Based Metric Learning for Small-Footprint Keyword Spotting</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211980.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-12|PAPER Tue-E-V-1-12 — Multi-Level Transfer Learning from Near-Field to Far-Field Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Level Transfer Learning from Near-Field to Far-Field Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210948.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-6-3|PAPER Tue-A-V-6-3 — Adversarial Voice Conversion Against Neural Spoofing Detectors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adversarial Voice Conversion Against Neural Spoofing Detectors</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210281.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-3|PAPER Tue-A-V-1-3 — An Effective Mutual Mean Teaching Based Domain Adaptation Method for Sound Event Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Effective Mutual Mean Teaching Based Domain Adaptation Method for Sound Event Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210776.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-1-3|PAPER Wed-M-V-1-3 — A Weight Moving Average Based Alternate Decoupled Learning Algorithm for Long-Tailed Language Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Weight Moving Average Based Alternate Decoupled Learning Algorithm for Long-Tailed Language Identification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210723.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-7|PAPER Thu-M-V-2-7 — Automatic Lip-Reading with Hierarchical Pyramidal Convolution and Self-Attention for Image Sequences with No Word Boundaries]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Lip-Reading with Hierarchical Pyramidal Convolution and Self-Attention for Image Sequences with No Word Boundaries</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211092.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-4-7|PAPER Fri-M-V-4-7 — UnitNet-Based Hybrid Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">UnitNet-Based Hybrid Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210067.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-SS-1-3|PAPER Fri-M-SS-1-3 — An Improved Wav2Vec 2.0 Pre-Training Approach Using Enhanced Local Dependency Modeling for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Improved Wav2Vec 2.0 Pre-Training Approach Using Enhanced Local Dependency Modeling for Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211503.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-8|PAPER Wed-M-V-2-8 — Speech Representation Learning Combining Conformer CPC with Deep Cluster for the ZeroSpeech Challenge 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Representation Learning Combining Conformer CPC with Deep Cluster for the ZeroSpeech Challenge 2021</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211840.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-1-1|PAPER Thu-A-V-1-1 — Temporal Context in Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Temporal Context in Speech Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211457.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-SS-1-1|PAPER Wed-M-SS-1-1 — A Causal U-Net Based Neural Beamforming Network for Real-Time Multi-Channel Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Causal U-Net Based Neural Beamforming Network for Real-Time Multi-Channel Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211410.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-7|PAPER Wed-E-SS-1-7 — Low-Delay Speech Enhancement Using Perceptually Motivated Target and Loss]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Low-Delay Speech Enhancement Using Perceptually Motivated Target and Loss</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210818.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-7|PAPER Fri-A-V-6-7 — End-to-End Cross-Lingual Spoken Language Understanding Model with Multilingual Pretraining]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Cross-Lingual Spoken Language Understanding Model with Multilingual Pretraining</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210733.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-1|PAPER Wed-A-V-6-1 — SpecRec: An Alternative Solution for Improving End-to-End Speech-to-Text Translation via Spectrogram Reconstruction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SpecRec: An Alternative Solution for Improving End-to-End Speech-to-Text Translation via Spectrogram Reconstruction</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211852.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-2-7|PAPER Fri-A-V-2-7 — Speech Emotion Recognition with Multi-Task Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Emotion Recognition with Multi-Task Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210207.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-5|PAPER Wed-M-V-6-5 — Streaming Multi-Talker Speech Recognition with Joint Speaker Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Streaming Multi-Talker Speech Recognition with Joint Speaker Identification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212075.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-14|PAPER Wed-E-V-3-14 — Minimum Word Error Rate Training with Language Model Fusion for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Minimum Word Error Rate Training with Language Model Fusion for End-to-End Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210161.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-3|PAPER Thu-A-V-2-3 — On Minimum Word Error Rate Training of the Hybrid Autoregressive Transducer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On Minimum Word Error Rate Training of the Hybrid Autoregressive Transducer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210506.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-9|PAPER Tue-E-V-6-9 — Two-Pathway Style Embedding for Arbitrary Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Two-Pathway Style Embedding for Arbitrary Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210812.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-3|PAPER Wed-M-V-4-3 — X-net: A Joint Scale Down and Scale Up Method for Voice Call]]</div>|^<div class="cpauthorindexpersoncardpapertitle">X-net: A Joint Scale Down and Scale Up Method for Voice Call</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210366.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-1|PAPER Tue-E-V-2-1 — Multi-Stream Gated and Pyramidal Temporal Convolutional Neural Networks for Audio-Visual Speech Separation in Multi-Talker Environments]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Stream Gated and Pyramidal Temporal Convolutional Neural Networks for Audio-Visual Speech Separation in Multi-Talker Environments</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211411.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-11|PAPER Wed-E-V-5-11 — Speech Enhancement with Topology-Enhanced Generative Adversarial Networks (GANs)]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Enhancement with Topology-Enhanced Generative Adversarial Networks (GANs)</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210637.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-10|PAPER Wed-M-V-6-10 — Bridging the Gap Between Streaming and Non-Streaming ASR Systems by Distilling Ensembles of CTC and RNN-T Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bridging the Gap Between Streaming and Non-Streaming ASR Systems by Distilling Ensembles of CTC and RNN-T Models</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211668.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-8|PAPER Thu-A-V-2-8 — Exploring Targeted Universal Adversarial Perturbations to End-to-End ASR Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploring Targeted Universal Adversarial Perturbations to End-to-End ASR Models</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210690.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-8|PAPER Fri-M-V-3-8 — Residual Energy-Based Models for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Residual Energy-Based Models for End-to-End Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211207.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-9|PAPER Fri-M-V-3-9 — Multi-Task Learning for End-to-End ASR Word and Utterance Confidence with Deletion Prediction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Task Learning for End-to-End ASR Word and Utterance Confidence with Deletion Prediction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210481.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-2|PAPER Tue-E-V-2-2 — TeCANet: Temporal-Contextual Attention Network for Environment-Aware Speech Dereverberation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">TeCANet: Temporal-Contextual Attention Network for Environment-Aware Speech Dereverberation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211457.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-SS-1-1|PAPER Wed-M-SS-1-1 — A Causal U-Net Based Neural Beamforming Network for Real-Time Multi-Channel Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Causal U-Net Based Neural Beamforming Network for Real-Time Multi-Channel Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211410.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-7|PAPER Wed-E-SS-1-7 — Low-Delay Speech Enhancement Using Perceptually Motivated Target and Loss]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Low-Delay Speech Enhancement Using Perceptually Motivated Target and Loss</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210366.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-1|PAPER Tue-E-V-2-1 — Multi-Stream Gated and Pyramidal Temporal Convolutional Neural Networks for Audio-Visual Speech Separation in Multi-Talker Environments]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Stream Gated and Pyramidal Temporal Convolutional Neural Networks for Audio-Visual Speech Separation in Multi-Talker Environments</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210285.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-8|PAPER Fri-A-SS-2-8 — Learning Explicit Prosody Models and Deep Speaker Embeddings for Atypical Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Explicit Prosody Models and Deep Speaker Embeddings for Atypical Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211328.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-2-3|PAPER Wed-M-O-2-3 — An Exploration of the Acoustic Space of Rhotics and Laterals in Ruruuli]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Exploration of the Acoustic Space of Rhotics and Laterals in Ruruuli</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212137.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-1-8|PAPER Tue-M-V-1-8 — Phoneme-Aware and Channel-Wise Attentive Learning for Text Dependent Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phoneme-Aware and Channel-Wise Attentive Learning for Text Dependent Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212171.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-1-1|PAPER Thu-M-SS-1-1 — Oriental Language Recognition (OLR) 2020: Summary and Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Oriental Language Recognition (OLR) 2020: Summary and Analysis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211167.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-1-6|PAPER Thu-M-SS-1-6 — Additive Phoneme-Aware Margin Softmax Loss for Language Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Additive Phoneme-Aware Margin Softmax Loss for Language Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211449.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-8|PAPER Thu-A-SS-1-8 — Real-Time End-to-End Monaural Multi-Speaker Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Real-Time End-to-End Monaural Multi-Speaker Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212021.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-9|PAPER Fri-A-V-4-9 — Automatic Error Correction for Speaker Embedding Learning with Noisy Labels]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Error Correction for Speaker Embedding Learning with Noisy Labels</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212161.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-10|PAPER Fri-A-V-4-10 — An Integrated Framework for Two-Pass Personalized Voice Trigger]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Integrated Framework for Two-Pass Personalized Voice Trigger</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211877.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-4|PAPER Tue-E-V-4-4 — DEXTER: Deep Encoding of External Knowledge for Named Entity Recognition in Virtual Assistants]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DEXTER: Deep Encoding of External Knowledge for Named Entity Recognition in Virtual Assistants</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210281.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-3|PAPER Tue-A-V-1-3 — An Effective Mutual Mean Teaching Based Domain Adaptation Method for Sound Event Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Effective Mutual Mean Teaching Based Domain Adaptation Method for Sound Event Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210776.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-1-3|PAPER Wed-M-V-1-3 — A Weight Moving Average Based Alternate Decoupled Learning Algorithm for Long-Tailed Language Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Weight Moving Average Based Alternate Decoupled Learning Algorithm for Long-Tailed Language Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210738.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-2|PAPER Fri-M-V-7-2 — An Initial Investigation for Detecting Partially Spoofed Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Initial Investigation for Detecting Partially Spoofed Audio</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210563.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-5|PAPER Wed-E-V-3-5 — Towards Lifelong Learning of End-to-End ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Lifelong Learning of End-to-End ASR</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211080.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-10|PAPER Wed-A-V-2-10 — Class-Based Neural Network Language Model for Second-Pass Rescoring in ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Class-Based Neural Network Language Model for Second-Pass Rescoring in ASR</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210010.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-1|PAPER Tue-M-V-3-1 — Funnel Deep Complex U-Net for Phase-Aware Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Funnel Deep Complex U-Net for Phase-Aware Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212022.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-1-5|PAPER Fri-A-SS-1-5 — Acoustic Echo Cancellation Using Deep Complex Neural Network with Nonlinear Magnitude Compression and Phase Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Echo Cancellation Using Deep Complex Neural Network with Nonlinear Magnitude Compression and Phase Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211344.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-9|PAPER Fri-A-V-1-9 — A Study on Fine-Tuning wav2vec2.0 Model for the Task of Mispronunciation Detection and Diagnosis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Study on Fine-Tuning wav2vec2.0 Model for the Task of Mispronunciation Detection and Diagnosis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210493.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-3-1|PAPER Fri-M-O-3-1 — Many-Speakers Single Channel Speech Separation with Optimal Permutation Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Many-Speakers Single Channel Speech Separation with Optimal Permutation Training</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211863.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-13|PAPER Wed-A-V-6-13 — Effects of Feature Scaling and Fusion on Sign Language Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effects of Feature Scaling and Fusion on Sign Language Translation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210283.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-5|PAPER Tue-E-V-6-5 — VQMIVC: Vector Quantization and Mutual Information-Based Unsupervised Speech Representation Disentanglement for One-Shot Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">VQMIVC: Vector Quantization and Mutual Information-Based Unsupervised Speech Representation Disentanglement for One-Shot Voice Conversion</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212139.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-10|PAPER Thu-M-V-1-10 — Unsupervised Domain Adaptation for Dysarthric Speech Detection via Domain Adversarial Training and Mutual Information Minimization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Domain Adaptation for Dysarthric Speech Detection via Domain Adversarial Training and Mutual Information Minimization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210330.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-10|PAPER Fri-A-SS-2-10 — A Voice-Activated Switch for Persons with Motor and Speech Impairments: Isolated-Vowel Spotting Using Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Voice-Activated Switch for Persons with Motor and Speech Impairments: Isolated-Vowel Spotting Using Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[A., Arun Kumar|AUTHOR Arun Kumar A.]]|
|[[Abad, Alberto|AUTHOR Alberto Abad]]|
|[[Abari, Kálmán|AUTHOR Kálmán Abari]]|
|[[Abbas, Ammar|AUTHOR Ammar Abbas]]|
|[[Abdelali, Ahmed|AUTHOR Ahmed Abdelali]]|
|[[AbdelHady, Mohamed|AUTHOR Mohamed AbdelHady]]|
|[[Abdullah, Badr M.|AUTHOR Badr M. Abdullah]]|
|[[Abe, Masanobu|AUTHOR Masanobu Abe]]|
|[[Abhishek|AUTHOR Abhishek]]|
|[[Abplanalp, Samuel|AUTHOR Samuel Abplanalp]]|
|[[Abraham, Ajish K.|AUTHOR Ajish K. Abraham]]|
|[[Abrol, Vinayak|AUTHOR Vinayak Abrol]]|
|[[Accou, Bernd|AUTHOR Bernd Accou]]|
|[[Acero, Alex|AUTHOR Alex Acero]]|
|[[Adda-Decker, Martine|AUTHOR Martine Adda-Decker]]|
|[[Adi, Yossi|AUTHOR Yossi Adi]]|
|[[Adya, Saurabh|AUTHOR Saurabh Adya]]|
|[[Afshan, Amber|AUTHOR Amber Afshan]]|
|[[Agrawal, Vikas|AUTHOR Vikas Agrawal]]|
|[[Ahlqvist-Björkroth, Sari|AUTHOR Sari Ahlqvist-Björkroth]]|
|[[Ahmed, Beena|AUTHOR Beena Ahmed]]|
|[[Ahuja, Narendra|AUTHOR Narendra Ahuja]]|
|[[Aichner, Robert|AUTHOR Robert Aichner]]|
|[[Akata, Zeynep|AUTHOR Zeynep Akata]]|
|[[Akbacak, Murat|AUTHOR Murat Akbacak]]|
|[[Akeroyd, Michael|AUTHOR Michael Akeroyd]]|
|[[Akula, Jayaprakash|AUTHOR Jayaprakash Akula]]|
|[[Alameda-Pineda, Xavier|AUTHOR Xavier Alameda-Pineda]]|
|[[Albanie, Samuel|AUTHOR Samuel Albanie]]|
|[[Albesano, Dario|AUTHOR Dario Albesano]]|
|[[Aldhafyan, Rawan|AUTHOR Rawan Aldhafyan]]|
|[[Aldholmi, Yahya|AUTHOR Yahya Aldholmi]]|
|[[Alenin, Alexander|AUTHOR Alexander Alenin]]|
|[[Al-Ghezi, Ragheb|AUTHOR Ragheb Al-Ghezi]]|
|[[Ali, Ahmed|AUTHOR Ahmed Ali]]|
|[[Ali, Ayesha|AUTHOR Ayesha Ali]]|
|[[Ali, Murtiza|AUTHOR Murtiza Ali]]|
|[[Alisamir, Sina|AUTHOR Sina Alisamir]]|
|[[Alison, Laurence|AUTHOR Laurence Alison]]|
|[[Allauzen, Alexandre|AUTHOR Alexandre Allauzen]]|
|[[Allauzen, Cyril|AUTHOR Cyril Allauzen]]|
|[[Aloshban, Nujud|AUTHOR Nujud Aloshban]]|
|[[Aloufi, Ranya|AUTHOR Ranya Aloufi]]|
|[[Alqahtani, Asma|AUTHOR Asma Alqahtani]]|
|[[Al-Radhi, Mohammed Salah|AUTHOR Mohammed Salah Al-Radhi]]|
|[[Alsofyani, Huda|AUTHOR Huda Alsofyani]]|
|[[Aluisio, Sandra Maria|AUTHOR Sandra Maria Aluisio]]|
|[[Alumäe, Tanel|AUTHOR Tanel Alumäe]]|
|[[Alwan, Abeer|AUTHOR Abeer Alwan]]|
|[[Alyuz, Nese|AUTHOR Nese Alyuz]]|
|[[Ambikairajah, Eliathamby|AUTHOR Eliathamby Ambikairajah]]|
|[[Amin, Preet P.|AUTHOR Preet P. Amin]]|
|[[Amiriparian, Shahin|AUTHOR Shahin Amiriparian]]|
|[[An, Keyu|AUTHOR Keyu An]]|
|[[An, Xiaochun|AUTHOR Xiaochun An]]|
|[[Ananthanarayana, Tejaswini|AUTHOR Tejaswini Ananthanarayana]]|
|[[Anastasopoulos, Antonios|AUTHOR Antonios Anastasopoulos]]|
|[[Ando, Atsushi|AUTHOR Atsushi Ando]]|
|[[Ando, Shintaro|AUTHOR Shintaro Ando]]|
|[[André, Elisabeth|AUTHOR Elisabeth André]]|
|[[André-Obrecht, Régine|AUTHOR Régine André-Obrecht]]|
|[[Andrés-Ferrer, Jesús|AUTHOR Jesús Andrés-Ferrer]]|
|[[Andrusenko, Andrei|AUTHOR Andrei Andrusenko]]|
|[[Anidjar, Or Haim|AUTHOR Or Haim Anidjar]]|
|[[Annand, Colin T.|AUTHOR Colin T. Annand]]|
|[[Anvar, Aria|AUTHOR Aria Anvar]]|
|[[Aragón, Efren|AUTHOR Efren Aragón]]|
|[[Arai, Kenichi|AUTHOR Kenichi Arai]]|
|[[Arai, Takayuki|AUTHOR Takayuki Arai]]|
|[[Araki, Shoko|AUTHOR Shoko Araki]]|
|[[Arava, Radhika|AUTHOR Radhika Arava]]|
|[[Arciuli, Joanne|AUTHOR Joanne Arciuli]]|
|[[Arefin, Md. Rifat|AUTHOR Md. Rifat Arefin]]|
|[[Arias-Vergara, T.|AUTHOR T. Arias-Vergara]]|
|[[Arivazhagan, Naveen|AUTHOR Naveen Arivazhagan]]|
|[[Arnela, Marc|AUTHOR Marc Arnela]]|
|[[Arnrich, Bert|AUTHOR Bert Arnrich]]|
|[[Arora, Abhinav|AUTHOR Abhinav Arora]]|
|[[Arora, Ashish|AUTHOR Ashish Arora]]|
|[[Arora, Siddhant|AUTHOR Siddhant Arora]]|
|[[Arsikere, Harish|AUTHOR Harish Arsikere]]|
|[[Asami, Taichi|AUTHOR Taichi Asami]]|
|[[Ashihara, Takanori|AUTHOR Takanori Ashihara]]|
|[[Ashton, Elaine|AUTHOR Elaine Ashton]]|
|[[Aslan, Sinem|AUTHOR Sinem Aslan]]|
|[[Astapov, Sergei|AUTHOR Sergei Astapov]]|
|[[Astésano, Corine|AUTHOR Corine Astésano]]|
|[[Atchayaram, Nalini|AUTHOR Nalini Atchayaram]]|
|[[Audhkhasi, Kartik|AUTHOR Kartik Audhkhasi]]|
|[[Audibert, Nicolas|AUTHOR Nicolas Audibert]]|
|[[Auli, Michael|AUTHOR Michael Auli]]|
|[[Avgustinova, Tania|AUTHOR Tania Avgustinova]]|
|[[Avidov, Gilad|AUTHOR Gilad Avidov]]|
|[[Avila, Anderson R.|AUTHOR Anderson R. Avila]]|
|[[Avila, Flavio|AUTHOR Flavio Avila]]|
|[[Awasthi, Abhijeet|AUTHOR Abhijeet Awasthi]]|
|[[Azaria, Amos|AUTHOR Amos Azaria]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Baas, Matthew|AUTHOR Matthew Baas]]|
|[[BabaAli, Bagher|AUTHOR Bagher BabaAli]]|
|[[Babel, Molly|AUTHOR Molly Babel]]|
|[[Bacchiani, Michiel Adriaan Unico|AUTHOR Michiel Adriaan Unico Bacchiani]]|
|[[Bäckström, Tom|AUTHOR Tom Bäckström]]|
|[[Badino, Leonardo|AUTHOR Leonardo Badino]]|
|[[Bae, Hanbin|AUTHOR Hanbin Bae]]|
|[[Bae, Jae-Sung|AUTHOR Jae-Sung Bae]]|
|[[Baevski, Alexei|AUTHOR Alexei Baevski]]|
|[[Baghel, Shikha|AUTHOR Shikha Baghel]]|
|[[Bai, Ye|AUTHOR Ye Bai]]|
|[[Bailly, Gérard|AUTHOR Gérard Bailly]]|
|[[Baird, Alice|AUTHOR Alice Baird]]|
|[[Bak, Taejun|AUTHOR Taejun Bak]]|
|[[Baker, Elise|AUTHOR Elise Baker]]|
|[[Bakhturina, Evelina|AUTHOR Evelina Bakhturina]]|
|[[Balagopalan, Aparna|AUTHOR Aparna Balagopalan]]|
|[[Balam, Jagadeesh|AUTHOR Jagadeesh Balam]]|
|[[Baldridge, Jason|AUTHOR Jason Baldridge]]|
|[[Balejová, Ewa|AUTHOR Ewa Balejová]]|
|[[Bali, Kalika|AUTHOR Kalika Bali]]|
|[[Ballard, Kirrie J.|AUTHOR Kirrie J. Ballard]]|
|[[Ballier, Nicolas|AUTHOR Nicolas Ballier]]|
|[[Bamman, David|AUTHOR David Bamman]]|
|[[Banbury, Colby|AUTHOR Colby Banbury]]|
|[[Bando, Yoshiaki|AUTHOR Yoshiaki Bando]]|
|[[Bandon, John|AUTHOR John Bandon]]|
|[[Banks-Leite, Cristina|AUTHOR Cristina Banks-Leite]]|
|[[Banno, Hideki|AUTHOR Hideki Banno]]|
|[[Baquero-Arnal, Pau|AUTHOR Pau Baquero-Arnal]]|
|[[Barbier, Florentin|AUTHOR Florentin Barbier]]|
|[[Barbu, Andrei|AUTHOR Andrei Barbu]]|
|[[Barker, Jon|AUTHOR Jon Barker]]|
|[[Barnes, Megan|AUTHOR Megan Barnes]]|
|[[Barra-Chicote, Roberto|AUTHOR Roberto Barra-Chicote]]|
|[[Barriuso, Taylor Anne|AUTHOR Taylor Anne Barriuso]]|
|[[Bartl-Pokorny, Katrin D.|AUTHOR Katrin D. Bartl-Pokorny]]|
|[[Batliner, Anton|AUTHOR Anton Batliner]]|
|[[Batsis, John A.|AUTHOR John A. Batsis]]|
|[[Baumann, Timo|AUTHOR Timo Baumann]]|
|[[Bayerl, S.P.|AUTHOR S.P. Bayerl]]|
|[[Beack, Seungkwon|AUTHOR Seungkwon Beack]]|
|[[Bear, Helen L.|AUTHOR Helen L. Bear]]|
|[[Beaufays, Françoise|AUTHOR Françoise Beaufays]]|
|[[Behlau, Mara|AUTHOR Mara Behlau]]|
|[[Beiser, David|AUTHOR David Beiser]]|
|[[Beliaev, Stanislav|AUTHOR Stanislav Beliaev]]|
|[[Bell, Peter|AUTHOR Peter Bell]]|
|[[Belur, Yamini|AUTHOR Yamini Belur]]|
|[[Ben Amor, Imen|AUTHOR Imen Ben Amor]]|
|[[Benders, Titia|AUTHOR Titia Benders]]|
|[[Beneš, Karel|AUTHOR Karel Beneš]]|
|[[Benetos, Emmanouil|AUTHOR Emmanouil Benetos]]|
|[[Bennasar Vázquez, Jorge|AUTHOR Jorge Bennasar Vázquez]]|
|[[Bensidi-Slimane, Yasmine|AUTHOR Yasmine Bensidi-Slimane]]|
|[[Berdugo, Baruch|AUTHOR Baruch Berdugo]]|
|[[Berg, Axel|AUTHOR Axel Berg]]|
|[[Bergler, Christian|AUTHOR Christian Bergler]]|
|[[Beringer, Grzegorz|AUTHOR Grzegorz Beringer]]|
|[[Berisha, Visar|AUTHOR Visar Berisha]]|
|[[Bernard, Mathieu|AUTHOR Mathieu Bernard]]|
|[[Berry, James D.|AUTHOR James D. Berry]]|
|[[Besacier, Laurent|AUTHOR Laurent Besacier]]|
|[[Beskow, Jonas|AUTHOR Jonas Beskow]]|
|[[Bhandari, Nishchal|AUTHOR Nishchal Bhandari]]|
|[[Bharadwaj, Abhinav|AUTHOR Abhinav Bharadwaj]]|
|[[Bharadwaj, Samarth|AUTHOR Samarth Bharadwaj]]|
|[[Bharaj, Gaurav|AUTHOR Gaurav Bharaj]]|
|[[Bhat, Shrirama|AUTHOR Shrirama Bhat]]|
|[[Bhati, Saurabhchand|AUTHOR Saurabhchand Bhati]]|
|[[Bhattacharjee, Mrinmoy|AUTHOR Mrinmoy Bhattacharjee]]|
|[[Bhattacharjee, Tanuka|AUTHOR Tanuka Bhattacharjee]]|
|[[Bhavsar, Arpan|AUTHOR Arpan Bhavsar]]|
|[[Bhosale, Swapnil|AUTHOR Swapnil Bhosale]]|
|[[Biadsy, Fadi|AUTHOR Fadi Biadsy]]|
|[[Bie, Xiaoyu|AUTHOR Xiaoyu Bie]]|
|[[Biemann, Chris|AUTHOR Chris Biemann]]|
|[[Bigham, Jefferey|AUTHOR Jefferey Bigham]]|
|[[Bilgi, Raghavendra|AUTHOR Raghavendra Bilgi]]|
|[[Billa, Jayadev|AUTHOR Jayadev Billa]]|
|[[Billington, Rosey|AUTHOR Rosey Billington]]|
|[[Birkholz, Peter|AUTHOR Peter Birkholz]]|
|[[Bishop, Somer|AUTHOR Somer Bishop]]|
|[[Bissiri, Maria Paola|AUTHOR Maria Paola Bissiri]]|
|[[Biswas, Ritujoy|AUTHOR Ritujoy Biswas]]|
|[[Black, Alan W.|AUTHOR Alan W. Black]]|
|[[Blackburn, Daniel|AUTHOR Daniel Blackburn]]|
|[[Blandin, Rémi|AUTHOR Rémi Blandin]]|
|[[Blatt, Alexander|AUTHOR Alexander Blatt]]|
|[[Block, Aleese|AUTHOR Aleese Block]]|
|[[Bocklet, T.|AUTHOR T. Bocklet]]|
|[[Bodapati, Sravan|AUTHOR Sravan Bodapati]]|
|[[Bodur, Kubra|AUTHOR Kubra Bodur]]|
|[[Boeddeker, Christoph|AUTHOR Christoph Boeddeker]]|
|[[Boes, Wim|AUTHOR Wim Boes]]|
|[[Boggust, Angie|AUTHOR Angie Boggust]]|
|[[Bojar, Ondřej|AUTHOR Ondřej Bojar]]|
|[[Bonastre, Jean-François|AUTHOR Jean-François Bonastre]]|
|[[Bonneau, Anne|AUTHOR Anne Bonneau]]|
|[[Bönninghoff, Benedikt|AUTHOR Benedikt Bönninghoff]]|
|[[Bonomi, Cyntia|AUTHOR Cyntia Bonomi]]|
|[[Boogmans, Fleur|AUTHOR Fleur Boogmans]]|
|[[Borgström, Bengt J.|AUTHOR Bengt J. Borgström]]|
|[[Borsdorf, Marvin|AUTHOR Marvin Borsdorf]]|
|[[Bose, Deboshree|AUTHOR Deboshree Bose]]|
|[[Botelho, Catarina|AUTHOR Catarina Botelho]]|
|[[Botros, Rami|AUTHOR Rami Botros]]|
|[[Botteldooren, Dick|AUTHOR Dick Botteldooren]]|
|[[Botten, Darren|AUTHOR Darren Botten]]|
|[[Bouafif Mansali, Mariem|AUTHOR Mariem Bouafif Mansali]]|
|[[Bourlard, Hervé|AUTHOR Hervé Bourlard]]|
|[[Bouveyron, Charles|AUTHOR Charles Bouveyron]]|
|[[Boves, Lou|AUTHOR Lou Boves]]|
|[[Boyce, Suzanne|AUTHOR Suzanne Boyce]]|
|[[Boyer, Florian|AUTHOR Florian Boyer]]|
|[[Boyle, David|AUTHOR David Boyle]]|
|[[Bozza, Bruno|AUTHOR Bruno Bozza]]|
|[[Braccia, Arianna|AUTHOR Arianna Braccia]]|
|[[Braga, Otavio|AUTHOR Otavio Braga]]|
|[[Brand, Sophie|AUTHOR Sophie Brand]]|
|[[Branje, Sweeney|AUTHOR Sweeney Branje]]|
|[[Braun, Bettina|AUTHOR Bettina Braun]]|
|[[Braun, Sebastian|AUTHOR Sebastian Braun]]|
|[[Bredin, Hervé|AUTHOR Hervé Bredin]]|
|[[Bremerman, Jacob|AUTHOR Jacob Bremerman]]|
|[[Brenner, Michael P.|AUTHOR Michael P. Brenner]]|
|[[Brickell, Tracey A.|AUTHOR Tracey A. Brickell]]|
|[[Bright, Rebecca|AUTHOR Rebecca Bright]]|
|[[Brossier, Baptiste|AUTHOR Baptiste Brossier]]|
|[[Broughton, Samuel J.|AUTHOR Samuel J. Broughton]]|
|[[Brown, Chloë|AUTHOR Chloë Brown]]|
|[[Bruce, Stuart|AUTHOR Stuart Bruce]]|
|[[Brümmer, Niko|AUTHOR Niko Brümmer]]|
|[[Brungart, Douglas S.|AUTHOR Douglas S. Brungart]]|
|[[Brusco, Pablo|AUTHOR Pablo Brusco]]|
|[[Brutti, Alessio|AUTHOR Alessio Brutti]]|
|[[Bu, Hui|AUTHOR Hui Bu]]|
|[[Bu, Suliang|AUTHOR Suliang Bu]]|
|[[Buciuleac, Catinca|AUTHOR Catinca Buciuleac]]|
|[[Buet, François|AUTHOR François Buet]]|
|[[Bui, Trung H.|AUTHOR Trung H. Bui]]|
|[[Bulyko, Ivan|AUTHOR Ivan Bulyko]]|
|[[Bunnell, H. Timothy|AUTHOR H. Timothy Bunnell]]|
|[[Burget, Lukáš|AUTHOR Lukáš Burget]]|
|[[Burke, William|AUTHOR William Burke]]|
|[[Burnham, Denis|AUTHOR Denis Burnham]]|
|[[Burroni, Francesco|AUTHOR Francesco Burroni]]|
|[[Busso, Carlos|AUTHOR Carlos Busso]]|
|[[Butler, Duncan|AUTHOR Duncan Butler]]|
|[[Bütow, Andreas|AUTHOR Andreas Bütow]]|
|[[Butt, Miriam|AUTHOR Miriam Butt]]|
|[[Byrd, Dani|AUTHOR Dani Byrd]]|
|[[Byun, Joon|AUTHOR Joon Byun]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Cai, Bin|AUTHOR Bin Cai]]|
|[[Cai, Cong|AUTHOR Cong Cai]]|
|[[Cai, Danwei|AUTHOR Danwei Cai]]|
|[[Cai, Shanqing|AUTHOR Shanqing Cai]]|
|[[Cai, Xingyu|AUTHOR Xingyu Cai]]|
|[[Calamaro, Shira|AUTHOR Shira Calamaro]]|
|[[Cámbara, Guillermo|AUTHOR Guillermo Cámbara]]|
|[[Campbell, William|AUTHOR William Campbell]]|
|[[Candido Jr., Arnaldo|AUTHOR Arnaldo Candido Jr.]]|
|[[Canzi, Massimiliano|AUTHOR Massimiliano Canzi]]|
|[[Cao, Beiming|AUTHOR Beiming Cao]]|
|[[Cao, Liangliang|AUTHOR Liangliang Cao]]|
|[[Cao, Miao|AUTHOR Miao Cao]]|
|[[Cao, Songjun|AUTHOR Songjun Cao]]|
|[[Cao, Yiran|AUTHOR Yiran Cao]]|
|[[Cao, Yupeng|AUTHOR Yupeng Cao]]|
|[[Caper, Charlie|AUTHOR Charlie Caper]]|
|[[Carlini, Roberto|AUTHOR Roberto Carlini]]|
|[[Carlozzi, Noelle|AUTHOR Noelle Carlozzi]]|
|[[Casanova, Edresson|AUTHOR Edresson Casanova]]|
|[[Cascioli, Lorenzo|AUTHOR Lorenzo Cascioli]]|
|[[Caseiro, Diamantino|AUTHOR Diamantino Caseiro]]|
|[[Cattiau, Julie|AUTHOR Julie Cattiau]]|
|[[Cattoni, Roldano|AUTHOR Roldano Cattoni]]|
|[[Cavallaro, Andrea|AUTHOR Andrea Cavallaro]]|
|[[Cave, Richard|AUTHOR Richard Cave]]|
|[[Cernak, Milos|AUTHOR Milos Cernak]]|
|[[Černocký, Jan|AUTHOR Jan Černocký]]|
|[[Cerva, Petr|AUTHOR Petr Cerva]]|
|[[Cervone, Alessandra|AUTHOR Alessandra Cervone]]|
|[[Cettolo, Mauro|AUTHOR Mauro Cettolo]]|
|[[Cha, Sujeong|AUTHOR Sujeong Cha]]|
|[[Chae, Gyeongsu|AUTHOR Gyeongsu Chae]]|
|[[Chai, Li|AUTHOR Li Chai]]|
|[[Chai, Shuzhou|AUTHOR Shuzhou Chai]]|
|[[Chakraborty, Rupayan|AUTHOR Rupayan Chakraborty]]|
|[[Chakraborty, Titas|AUTHOR Titas Chakraborty]]|
|[[Chalamandaris, Aimilios|AUTHOR Aimilios Chalamandaris]]|
|[[Chan, Bo-Cheng|AUTHOR Bo-Cheng Chan]]|
|[[Chan, Julian|AUTHOR Julian Chan]]|
|[[Chan, Wai-Yip|AUTHOR Wai-Yip Chan]]|
|[[Chan, William|AUTHOR William Chan]]|
|[[Chanclu, Anaïs|AUTHOR Anaïs Chanclu]]|
|[[Chandorkar, Angad|AUTHOR Angad Chandorkar]]|
|[[Chandra, Vikas|AUTHOR Vikas Chandra]]|
|[[Chandra-Shekar, Meena|AUTHOR Meena Chandra-Shekar]]|
|[[Chang, Feng-Ju|AUTHOR Feng-Ju Chang]]|
|[[Chang, Heng-Jui|AUTHOR Heng-Jui Chang]]|
|[[Chang, Joon-Hyuk|AUTHOR Joon-Hyuk Chang]]|
|[[Chang, Oscar|AUTHOR Oscar Chang]]|
|[[Chang, Peng|AUTHOR Peng Chang]]|
|[[Chang, Shiyu|AUTHOR Shiyu Chang]]|
|[[Chang, Shuo-Yiin|AUTHOR Shuo-Yiin Chang]]|
|[[Chang, Simyung|AUTHOR Simyung Chang]]|
|[[Chang, Wonil|AUTHOR Wonil Chang]]|
|[[Chang, Xuankai|AUTHOR Xuankai Chang]]|
|[[Chang, Yueh-chin|AUTHOR Yueh-chin Chang]]|
|[[Chao, Yang|AUTHOR Yang Chao]]|
|[[Chapman, Kathy|AUTHOR Kathy Chapman]]|
|[[Chau, Duen Horng|AUTHOR Duen Horng Chau]]|
|[[Chaudhari, Gunvant|AUTHOR Gunvant Chaudhari]]|
|[[Chaudhary, Lipisha|AUTHOR Lipisha Chaudhary]]|
|[[Chauhan, Jagmohan|AUTHOR Jagmohan Chauhan]]|
|[[Chehadi, Assmaa|AUTHOR Assmaa Chehadi]]|
|[[Chen, Binbin|AUTHOR Binbin Chen]]|
|[[Chen, Boxing|AUTHOR Boxing Chen]]|
|[[Chen, Brian|AUTHOR Brian Chen]]|
|[[Chen, Chia-Ping|AUTHOR Chia-Ping Chen]]|
|[[Chen, Chia-Yu|AUTHOR Chia-Yu Chen]]|
|[[Chen, Fei|AUTHOR Fei Chen]]|
|[[Chen, Feiyang|AUTHOR Feiyang Chen]]|
|[[Chen, Guoguo|AUTHOR Guoguo Chen]]|
|[[Chen, Haiqing|AUTHOR Haiqing Chen]]|
|[[Chen, Haitao|AUTHOR Haitao Chen]]|
|[[Chen, Hang|AUTHOR Hang Chen]]|
|[[Chen, Hangting|AUTHOR Hangting Chen]]|
|[[Chen, Hongsheng|AUTHOR Hongsheng Chen]]|
|[[Chen, Jing|AUTHOR Jing Chen]]|
|[[Chen, Jingdong|AUTHOR Jingdong Chen]]|
|[[Chen, Jinhui|AUTHOR Jinhui Chen]]|
|[[Chen, Jinkun|AUTHOR Jinkun Chen]]|
|[[Chen, John|AUTHOR John Chen]]|
|[[Chen, Jun|AUTHOR Jun Chen]]|
|[[Chen, Junjie|AUTHOR Junjie Chen]]|
|[[Chen, Junkun|AUTHOR Junkun Chen]]|
|[[Chen, Junqi|AUTHOR Junqi Chen]]|
|[[Chen, Kai|AUTHOR Kai Chen]]|
|[[Chen, Kehan|AUTHOR Kehan Chen]]|
|[[Chen, Lei|AUTHOR Lei Chen]]|
|[[Chen, Li|AUTHOR Li Chen]]|
|[[Chen, Lianwu|AUTHOR Lianwu Chen]]|
|[[Chen, Li-Wei|AUTHOR Li-Wei Chen]]|
|[[Chen, Long|AUTHOR Long Chen]]|
|[[Chen, Mengzhe|AUTHOR Mengzhe Chen]]|
|[[Chen, Minchuan|AUTHOR Minchuan Chen]]|
|[[Chen, Mingqing|AUTHOR Mingqing Chen]]|
|[[Chen, Nancy F.|AUTHOR Nancy F. Chen]]|
|[[Chen, Nanxin|AUTHOR Nanxin Chen]]|
|[[Chen, Nuo|AUTHOR Nuo Chen]]|
|[[Chen, Qian|AUTHOR Qian Chen]]|
|[[Chen, Sanyuan|AUTHOR Sanyuan Chen]]|
|[[Chen, Taijing|AUTHOR Taijing Chen]]|
|[[Chen, Tongzhou|AUTHOR Tongzhou Chen]]|
|[[Chen, Wei|AUTHOR Wei Chen]]|
|[[Chen, Weiguang|AUTHOR Weiguang Chen]]|
|[[Chen, Xi|AUTHOR Xi Chen]]|
|[[Chen, Xianzhao|AUTHOR Xianzhao Chen]]|
|[[Chen, Xiao|AUTHOR Xiao Chen]]|
|[[Chen, Xiaoyu|AUTHOR Xiaoyu Chen]]|
|[[Chen, Xie|AUTHOR Xie Chen]]|
|[[Chen, Xunquan|AUTHOR Xunquan Chen]]|
|[[Chen, Yafeng|AUTHOR Yafeng Chen]]|
|[[Chen, Yangbin|AUTHOR Yangbin Chen]]|
|[[Chen, Yi-Chen|AUTHOR Yi-Chen Chen]]|
|[[Chen, Youzheng|AUTHOR Youzheng Chen]]|
|[[Chen, Yu|AUTHOR Yu Chen]]|
|[[Chen, Yue|AUTHOR Yue Chen]]|
|[[Chen, Zening|AUTHOR Zening Chen]]|
|[[Chen, Zeya|AUTHOR Zeya Chen]]|
|[[Chen, Zhehuai|AUTHOR Zhehuai Chen]]|
|[[Chen, Zhengyang|AUTHOR Zhengyang Chen]]|
|[[Chen, Zhuo|AUTHOR Zhuo Chen]]|
|[[Chen, Ziyi|AUTHOR Ziyi Chen]]|
|[[Cheng, Linjuan|AUTHOR Linjuan Cheng]]|
|[[Cheng, Longbiao|AUTHOR Longbiao Cheng]]|
|[[Cheng, Luyao|AUTHOR Luyao Cheng]]|
|[[Cheng, Mengli|AUTHOR Mengli Cheng]]|
|[[Cheng, Ning|AUTHOR Ning Cheng]]|
|[[Cheng, Xingliang|AUTHOR Xingliang Cheng]]|
|[[Cheng, Yao-Fei|AUTHOR Yao-Fei Cheng]]|
|[[Cheng, Yuan|AUTHOR Yuan Cheng]]|
|[[Cheon, Sung Jun|AUTHOR Sung Jun Cheon]]|
|[[Cheon, Youngju|AUTHOR Youngju Cheon]]|
|[[Cherry, Colin|AUTHOR Colin Cherry]]|
|[[Chestek, David|AUTHOR David Chestek]]|
|[[Chettri, Bhusan|AUTHOR Bhusan Chettri]]|
|[[Chetupalli, Srikanth Raj|AUTHOR Srikanth Raj Chetupalli]]|
|[[Chhipa, Prakash Chandra|AUTHOR Prakash Chandra Chhipa]]|
|[[Chi, Po-Han|AUTHOR Po-Han Chi]]|
|[[Chiba, Yuya|AUTHOR Yuya Chiba]]|
|[[Chien, Chung-Ming|AUTHOR Chung-Ming Chien]]|
|[[Chien, Jen-Tzung|AUTHOR Jen-Tzung Chien]]|
|[[Chingacham, Anupama|AUTHOR Anupama Chingacham]]|
|[[Chiu, Chung-Cheng|AUTHOR Chung-Cheng Chiu]]|
|[[Chlébowski, Aurélie|AUTHOR Aurélie Chlébowski]]|
|[[Chng, Eng Siong|AUTHOR Eng Siong Chng]]|
|[[Cho, Hoon-Young|AUTHOR Hoon-Young Cho]]|
|[[Cho, Hyunchang|AUTHOR Hyunchang Cho]]|
|[[Cho, Jaejin|AUTHOR Jaejin Cho]]|
|[[Cho, Kiho|AUTHOR Kiho Cho]]|
|[[Cho, Sungjae|AUTHOR Sungjae Cho]]|
|[[Cho, Won Ik|AUTHOR Won Ik Cho]]|
|[[Chodorowski, Jacques|AUTHOR Jacques Chodorowski]]|
|[[Choe, Soyeon|AUTHOR Soyeon Choe]]|
|[[Choi, Byoung Jin|AUTHOR Byoung Jin Choi]]|
|[[Choi, Eunbi|AUTHOR Eunbi Choi]]|
|[[Choi, Hyunjin|AUTHOR Hyunjin Choi]]|
|[[Choi, Kwang Pyo|AUTHOR Kwang Pyo Choi]]|
|[[Choi, Min-Seok|AUTHOR Min-Seok Choi]]|
|[[Chojnacka, Roza|AUTHOR Roza Chojnacka]]|
|[[Cholin, Joana|AUTHOR Joana Cholin]]|
|[[Chorowski, Jan|AUTHOR Jan Chorowski]]|
|[[Chouchane, Oubaïda|AUTHOR Oubaïda Chouchane]]|
|[[Choudhary, Samridhi|AUTHOR Samridhi Choudhary]]|
|[[Chowdhury, Shammur Absar|AUTHOR Shammur Absar Chowdhury]]|
|[[Christensen, Heidi|AUTHOR Heidi Christensen]]|
|[[Christensen, Mads Græsbøll|AUTHOR Mads Græsbøll Christensen]]|
|[[Chu, Fang|AUTHOR Fang Chu]]|
|[[Chu, Wei|AUTHOR Wei Chu]]|
|[[Chua, Mason|AUTHOR Mason Chua]]|
|[[Chuang, Shun-Po|AUTHOR Shun-Po Chuang]]|
|[[Chuang, Yung-Sung|AUTHOR Yung-Sung Chuang]]|
|[[Chuangsuwanich, Ekapol|AUTHOR Ekapol Chuangsuwanich]]|
|[[Chukharev-Hudilainen, Evgeny|AUTHOR Evgeny Chukharev-Hudilainen]]|
|[[Chung, Clement|AUTHOR Clement Chung]]|
|[[Chung, Hyunseung|AUTHOR Hyunseung Chung]]|
|[[Chung, Joon Son|AUTHOR Joon Son Chung]]|
|[[Chung, Minhwa|AUTHOR Minhwa Chung]]|
|[[Chung, Soo-Whan|AUTHOR Soo-Whan Chung]]|
|[[Chung, Yu-An|AUTHOR Yu-An Chung]]|
|[[Church, Kenneth|AUTHOR Kenneth Church]]|
|[[Cicuta, Pietro|AUTHOR Pietro Cicuta]]|
|[[Cieri, Christopher|AUTHOR Christopher Cieri]]|
|[[Ciesielski, Grzegorz|AUTHOR Grzegorz Ciesielski]]|
|[[Civera, Jorge|AUTHOR Jorge Civera]]|
|[[Clarke, Georgia|AUTHOR Georgia Clarke]]|
|[[Čmejla, J.|AUTHOR J. Čmejla]]|
|[[Cmejla, Roman|AUTHOR Roman Cmejla]]|
|[[Codina-Filbà, Joan|AUTHOR Joan Codina-Filbà]]|
|[[Cohen, Israel|AUTHOR Israel Cohen]]|
|[[Cohn, Michelle|AUTHOR Michelle Cohn]]|
|[[Coler, Matt|AUTHOR Matt Coler]]|
|[[Collobert, Ronan|AUTHOR Ronan Collobert]]|
|[[Conde, Pauline|AUTHOR Pauline Conde]]|
|[[Condron, Scott|AUTHOR Scott Condron]]|
|[[Cong, Jian|AUTHOR Jian Cong]]|
|[[Conneau, Alexis|AUTHOR Alexis Conneau]]|
|[[Cook, Darren|AUTHOR Darren Cook]]|
|[[Cooke, Martin|AUTHOR Martin Cooke]]|
|[[Cooper, Erica|AUTHOR Erica Cooper]]|
|[[Copet, Jade|AUTHOR Jade Copet]]|
|[[Cordourier Maruri, Hector A.|AUTHOR Hector A. Cordourier Maruri]]|
|[[Cornell, Samuele|AUTHOR Samuele Cornell]]|
|[[Cornish, Andrew|AUTHOR Andrew Cornish]]|
|[[Cox, Felicity|AUTHOR Felicity Cox]]|
|[[Cox, Trevor J.|AUTHOR Trevor J. Cox]]|
|[[Coy, Thomas|AUTHOR Thomas Coy]]|
|[[Cristia, Alejandrina|AUTHOR Alejandrina Cristia]]|
|[[Csapó, Tamás Gábor|AUTHOR Tamás Gábor Csapó]]|
|[[Cucchiarini, Catia|AUTHOR Catia Cucchiarini]]|
|[[Cuesta, Helena|AUTHOR Helena Cuesta]]|
|[[Cui, Chenye|AUTHOR Chenye Cui]]|
|[[Cui, Xia|AUTHOR Xia Cui]]|
|[[Cui, Xiaodong|AUTHOR Xiaodong Cui]]|
|[[Culling, John F.|AUTHOR John F. Culling]]|
|[[Cumani, Sandro|AUTHOR Sandro Cumani]]|
|[[Cumbal, Ronald|AUTHOR Ronald Cumbal]]|
|[[Cummins, Nicholas|AUTHOR Nicholas Cummins]]|
|[[Cunha, Conceição|AUTHOR Conceição Cunha]]|
|[[Curado, Francisco|AUTHOR Francisco Curado]]|
|[[Cutler, Ross|AUTHOR Ross Cutler]]|
|[[Cvetkovic, Zoran|AUTHOR Zoran Cvetkovic]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Dabral, Rishabh|AUTHOR Rishabh Dabral]]|
|[[Dai, Lingfeng|AUTHOR Lingfeng Dai]]|
|[[Dai, Li-Rong|AUTHOR Li-Rong Dai]]|
|[[Dai, Suyang|AUTHOR Suyang Dai]]|
|[[Dai, Xudong|AUTHOR Xudong Dai]]|
|[[d’Alessandro, Christophe|AUTHOR Christophe d’Alessandro]]|
|[[Dalmia, Siddharth|AUTHOR Siddharth Dalmia]]|
|[[Dang, Feng|AUTHOR Feng Dang]]|
|[[Dang, Jianwu|AUTHOR Jianwu Dang]]|
|[[Dang, Ting|AUTHOR Ting Dang]]|
|[[Daniel, Morgane|AUTHOR Morgane Daniel]]|
|[[Dao, Mai Hoang|AUTHOR Mai Hoang Dao]]|
|[[Daoudi, Khalid|AUTHOR Khalid Daoudi]]|
|[[d’Apolito, Sonia|AUTHOR Sonia d’Apolito]]|
|[[Das, Amit|AUTHOR Amit Das]]|
|[[Das, Biswajit|AUTHOR Biswajit Das]]|
|[[Das, Nilaksh|AUTHOR Nilaksh Das]]|
|[[Das, Partha Pratim|AUTHOR Partha Pratim Das]]|
|[[Das, Rohan Kumar|AUTHOR Rohan Kumar Das]]|
|[[Dasgupta, Hirak|AUTHOR Hirak Dasgupta]]|
|[[Dauwels, Justin|AUTHOR Justin Dauwels]]|
|[[David, Robert|AUTHOR Robert David]]|
|[[Dawalatabad, Nauman|AUTHOR Nauman Dawalatabad]]|
|[[Dawe-Lane, Erin|AUTHOR Erin Dawe-Lane]]|
|[[Dayalu, Praveen|AUTHOR Praveen Dayalu]]|
|[[de Bodt, Marc S.|AUTHOR Marc S. de Bodt]]|
|[[Dehak, Najim|AUTHOR Najim Dehak]]|
|[[Dehé, Nicole|AUTHOR Nicole Dehé]]|
|[[de la Fuente, Sofia|AUTHOR Sofia de la Fuente]]|
|[[Delcroix, Marc|AUTHOR Marc Delcroix]]|
|[[Delgado, Héctor|AUTHOR Héctor Delgado]]|
|[[Delpech, Estelle|AUTHOR Estelle Delpech]]|
|[[Del Rio, Miguel|AUTHOR Miguel Del Rio]]|
|[[Delworth, Natalie|AUTHOR Natalie Delworth]]|
|[[Demberg, Vera|AUTHOR Vera Demberg]]|
|[[Demopoulos, Carly|AUTHOR Carly Demopoulos]]|
|[[Demuth, Katherine|AUTHOR Katherine Demuth]]|
|[[Demuynck, Kris|AUTHOR Kris Demuynck]]|
|[[Deng, Chengyun|AUTHOR Chengyun Deng]]|
|[[Deng, Jiajun|AUTHOR Jiajun Deng]]|
|[[Deng, Keqi|AUTHOR Keqi Deng]]|
|[[Deng, Liqun|AUTHOR Liqun Deng]]|
|[[Deng, Wesley|AUTHOR Wesley Deng]]|
|[[Deng, Yan|AUTHOR Yan Deng]]|
|[[Denisenko, Alexander|AUTHOR Alexander Denisenko]]|
|[[de Seyssel, Maureen|AUTHOR Maureen de Seyssel]]|
|[[Deshmukh, Soham|AUTHOR Soham Deshmukh]]|
|[[Deshpande, Gauri|AUTHOR Gauri Deshpande]]|
|[[Desplanques, Brecht|AUTHOR Brecht Desplanques]]|
|[[Dhamyal, Hira|AUTHOR Hira Dhamyal]]|
|[[Dharma Gita, Ariyanidevi|AUTHOR Ariyanidevi Dharma Gita]]|
|[[Dhir, Chandra|AUTHOR Chandra Dhir]]|
|[[DiCocco, Vince|AUTHOR Vince DiCocco]]|
|[[Dighe, Pranay|AUTHOR Pranay Dighe]]|
|[[Dijkstra, Jelske|AUTHOR Jelske Dijkstra]]|
|[[Dimitriadis, Dimitrios|AUTHOR Dimitrios Dimitriadis]]|
|[[Dinarelli, Marco|AUTHOR Marco Dinarelli]]|
|[[Dineley, Judith|AUTHOR Judith Dineley]]|
|[[Ding, Hongwei|AUTHOR Hongwei Ding]]|
|[[Ding, Wenbiao|AUTHOR Wenbiao Ding]]|
|[[Ding, Yi-Yang|AUTHOR Yi-Yang Ding]]|
|[[Dinkel, Heinrich|AUTHOR Heinrich Dinkel]]|
|[[Dipani, Alish|AUTHOR Alish Dipani]]|
|[[Disch, Sascha|AUTHOR Sascha Disch]]|
|[[Diskin-Holdaway, Chloé|AUTHOR Chloé Diskin-Holdaway]]|
|[[Diwan, Anuj|AUTHOR Anuj Diwan]]|
|[[Do, Phat|AUTHOR Phat Do]]|
|[[Do, Quynh|AUTHOR Quynh Do]]|
|[[Dobel, Christian|AUTHOR Christian Dobel]]|
|[[Dobson, Richard J.B.|AUTHOR Richard J.B. Dobson]]|
|[[Doc, Jean-Baptiste|AUTHOR Jean-Baptiste Doc]]|
|[[Doddipatla, Rama|AUTHOR Rama Doddipatla]]|
|[[Dognin, Charles|AUTHOR Charles Dognin]]|
|[[Domínguez, Mónica|AUTHOR Mónica Domínguez]]|
|[[Dong, Joshua|AUTHOR Joshua Dong]]|
|[[Dong, Shuyan|AUTHOR Shuyan Dong]]|
|[[Dong, Yuanjie|AUTHOR Yuanjie Dong]]|
|[[Doshi, Rohan|AUTHOR Rohan Doshi]]|
|[[Dou, Qingyun|AUTHOR Qingyun Dou]]|
|[[Doutre, Thibault|AUTHOR Thibault Doutre]]|
|[[Dovrat, Shaked|AUTHOR Shaked Dovrat]]|
|[[Dovzhenko, Yuliya|AUTHOR Yuliya Dovzhenko]]|
|[[Drenhaus, Heiner|AUTHOR Heiner Drenhaus]]|
|[[Dresvyanskiy, Denis|AUTHOR Denis Dresvyanskiy]]|
|[[Drioli, Carlo|AUTHOR Carlo Drioli]]|
|[[Droppo, Jasha|AUTHOR Jasha Droppo]]|
|[[Drossos, Konstantinos|AUTHOR Konstantinos Drossos]]|
|[[Drude, Lukas|AUTHOR Lukas Drude]]|
|[[Drugman, Thomas|AUTHOR Thomas Drugman]]|
|[[Du, Chenpeng|AUTHOR Chenpeng Du]]|
|[[Du, Hongqiang|AUTHOR Hongqiang Du]]|
|[[Du, Jiayu|AUTHOR Jiayu Du]]|
|[[Du, Jun|AUTHOR Jun Du]]|
|[[Du, Xingjian|AUTHOR Xingjian Du]]|
|[[Duan, Wenying|AUTHOR Wenying Duan]]|
|[[Duan, Zhiyao|AUTHOR Zhiyao Duan]]|
|[[Dubagunta, S. Pavankumar|AUTHOR S. Pavankumar Dubagunta]]|
|[[Dubey, Harishchandra|AUTHOR Harishchandra Dubey]]|
|[[Duckhorn, Frank|AUTHOR Frank Duckhorn]]|
|[[Dugan, Sarah|AUTHOR Sarah Dugan]]|
|[[Dunbar, Ewan|AUTHOR Ewan Dunbar]]|
|[[Dupoux, Emmanuel|AUTHOR Emmanuel Dupoux]]|
|[[Durda, Kevin|AUTHOR Kevin Durda]]|
|[[Duroselle, Raphaël|AUTHOR Raphaël Duroselle]]|
|[[Dvir, Amit|AUTHOR Amit Dvir]]|
|[[Dvoynikova, Anastasia|AUTHOR Anastasia Dvoynikova]]|
|[[Dzikowski, Jarosław|AUTHOR Jarosław Dzikowski]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Eary, Kathryn J.|AUTHOR Kathryn J. Eary]]|
|[[Edler, Bernd|AUTHOR Bernd Edler]]|
|[[Edlund, Jens|AUTHOR Jens Edlund]]|
|[[Edraki, Amin|AUTHOR Amin Edraki]]|
|[[Effendi, Johanes|AUTHOR Johanes Effendi]]|
|[[Egas-López, José Vicente|AUTHOR José Vicente Egas-López]]|
|[[Eghbali, Sepehr|AUTHOR Sepehr Eghbali]]|
|[[Egorova, Ekaterina|AUTHOR Ekaterina Egorova]]|
|[[Ehlert, Hanna|AUTHOR Hanna Ehlert]]|
|[[Ehr, Heiko|AUTHOR Heiko Ehr]]|
|[[Ehrenfors, Johan|AUTHOR Johan Ehrenfors]]|
|[[Einfeldt, Marieke|AUTHOR Marieke Einfeldt]]|
|[[Eisenberg, Aviad|AUTHOR Aviad Eisenberg]]|
|[[Eitel, Megan M.|AUTHOR Megan M. Eitel]]|
|[[El Amouri, Hussein|AUTHOR Hussein El Amouri]]|
|[[Elhilali, Mounya|AUTHOR Mounya Elhilali]]|
|[[Elias, Isaac|AUTHOR Isaac Elias]]|
|[[Elibol, Oguz|AUTHOR Oguz Elibol]]|
|[[Elie, Benjamin|AUTHOR Benjamin Elie]]|
|[[Ellinas, Nikolaos|AUTHOR Nikolaos Ellinas]]|
|[[Elmers, Mikey|AUTHOR Mikey Elmers]]|
|[[El Shafey, Laurent|AUTHOR Laurent El Shafey]]|
|[[Elyasi, Mahsa|AUTHOR Mahsa Elyasi]]|
|[[Emamian, Eshrat S.|AUTHOR Eshrat S. Emamian]]|
|[[Emanuele, Marco|AUTHOR Marco Emanuele]]|
|[[Emond, Jesse|AUTHOR Jesse Emond]]|
|[[Eng, Nicholas|AUTHOR Nicholas Eng]]|
|[[Englert, Marina|AUTHOR Marina Englert]]|
|[[Engwall, Olov|AUTHOR Olov Engwall]]|
|[[Enyedi, Robert|AUTHOR Robert Enyedi]]|
|[[Epps, Julien|AUTHOR Julien Epps]]|
|[[Erdogan, Hakan|AUTHOR Hakan Erdogan]]|
|[[Ermis, Orhan|AUTHOR Orhan Ermis]]|
|[[Ernestus, Mirjam|AUTHOR Mirjam Ernestus]]|
|[[Erzin, Engin|AUTHOR Engin Erzin]]|
|[[Escalante-B., Alberto N.|AUTHOR Alberto N. Escalante-B.]]|
|[[Eshky, Aciel|AUTHOR Aciel Eshky]]|
|[[Eskimez, Sefik Emre|AUTHOR Sefik Emre Eskimez]]|
|[[Espin, Juan M.|AUTHOR Juan M. Espin]]|
|[[Esposito, Anna|AUTHOR Anna Esposito]]|
|[[Espy-Wilson, Carol|AUTHOR Carol Espy-Wilson]]|
|[[Essid, Slim|AUTHOR Slim Essid]]|
|[[Estève, Yannick|AUTHOR Yannick Estève]]|
|[[Estival, Dominique|AUTHOR Dominique Estival]]|
|[[Evain, Solène|AUTHOR Solène Evain]]|
|[[Evans, Nicholas|AUTHOR Nicholas Evans]]|
|[[Eyben, Florian|AUTHOR Florian Eyben]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Fabien, Maël|AUTHOR Maël Fabien]]|
|[[Fadiga, Luciano|AUTHOR Luciano Fadiga]]|
|[[Faerman, Afik|AUTHOR Afik Faerman]]|
|[[Fakhry, Ahmed|AUTHOR Ahmed Fakhry]]|
|[[Falaise, Achille|AUTHOR Achille Falaise]]|
|[[Falavigna, D.|AUTHOR D. Falavigna]]|
|[[Falkowski-Gilski, Przemyslaw|AUTHOR Przemyslaw Falkowski-Gilski]]|
|[[Fallgren, Per|AUTHOR Per Fallgren]]|
|[[Fan, Ruchao|AUTHOR Ruchao Fan]]|
|[[Fan, Zhiyun|AUTHOR Zhiyun Fan]]|
|[[Fang, Lei|AUTHOR Lei Fang]]|
|[[Fang, Qiang|AUTHOR Qiang Fang]]|
|[[Fang, Xin|AUTHOR Xin Fang]]|
|[[Farinas, Jérôme|AUTHOR Jérôme Farinas]]|
|[[Farris, Brian|AUTHOR Brian Farris]]|
|[[Farrús, Mireia|AUTHOR Mireia Farrús]]|
|[[Fasoli, Andrea|AUTHOR Andrea Fasoli]]|
|[[Fazel, Amin|AUTHOR Amin Fazel]]|
|[[Feather, Jenelle|AUTHOR Jenelle Feather]]|
|[[Federico, Marcello|AUTHOR Marcello Federico]]|
|[[Félix, Simon|AUTHOR Simon Félix]]|
|[[Feng, Jinwei|AUTHOR Jinwei Feng]]|
|[[Feng, Shulin|AUTHOR Shulin Feng]]|
|[[Feng, Siyuan|AUTHOR Siyuan Feng]]|
|[[Fenu, Gianni|AUTHOR Gianni Fenu]]|
|[[Feris, Rogerio|AUTHOR Rogerio Feris]]|
|[[Fernandes Jr., Ricardo Corso|AUTHOR Ricardo Corso Fernandes Jr.]]|
|[[Fernandez, Raul|AUTHOR Raul Fernandez]]|
|[[Ferragne, Emmanuel|AUTHOR Emmanuel Ferragne]]|
|[[Ferreira, David|AUTHOR David Ferreira]]|
|[[Ferrer, Carlos A.|AUTHOR Carlos A. Ferrer]]|
|[[Ferrer, Luciana|AUTHOR Luciana Ferrer]]|
|[[Fetaya, Ethan|AUTHOR Ethan Fetaya]]|
|[[Finger, Marcelo|AUTHOR Marcelo Finger]]|
|[[Fingscheidt, Tim|AUTHOR Tim Fingscheidt]]|
|[[Fischer, Tim|AUTHOR Tim Fischer]]|
|[[Fischer, Volker|AUTHOR Volker Fischer]]|
|[[Fiumara, James|AUTHOR James Fiumara]]|
|[[Flucha, Alexandre|AUTHOR Alexandre Flucha]]|
|[[Fogerty, Daniel|AUTHOR Daniel Fogerty]]|
|[[Foglianti, Lorenzo|AUTHOR Lorenzo Foglianti]]|
|[[Fohr, Dominique|AUTHOR Dominique Fohr]]|
|[[Folarin, Amos A.|AUTHOR Amos A. Folarin]]|
|[[Font, Roberto|AUTHOR Roberto Font]]|
|[[Fontaine, Mathieu|AUTHOR Mathieu Fontaine]]|
|[[Foresti, Gian Luca|AUTHOR Gian Luca Foresti]]|
|[[Foubert-Samier, Alexandra|AUTHOR Alexandra Foubert-Samier]]|
|[[Fraenkel, Ernest|AUTHOR Ernest Fraenkel]]|
|[[Fragner, Stefan|AUTHOR Stefan Fragner]]|
|[[Francart, Tom|AUTHOR Tom Francart]]|
|[[Francl, Andrew|AUTHOR Andrew Francl]]|
|[[Frank, Stefan L.|AUTHOR Stefan L. Frank]]|
|[[Franzen, Jan|AUTHOR Jan Franzen]]|
|[[Fraś, Mieszko|AUTHOR Mieszko Fraś]]|
|[[French, Louis M.|AUTHOR Louis M. French]]|
|[[Freyberg, Keenan|AUTHOR Keenan Freyberg]]|
|[[Fritsch, Julian|AUTHOR Julian Fritsch]]|
|[[Fromm, Davida|AUTHOR Davida Fromm]]|
|[[Fry, Laura|AUTHOR Laura Fry]]|
|[[Fu, Kaiqi|AUTHOR Kaiqi Fu]]|
|[[Fu, Qiang|AUTHOR Qiang Fu]]|
|[[Fu, Ruibo|AUTHOR Ruibo Fu]]|
|[[Fu, Szu-Wei|AUTHOR Szu-Wei Fu]]|
|[[Fu, Yanzhe|AUTHOR Yanzhe Fu]]|
|[[Fu, Yihui|AUTHOR Yihui Fu]]|
|[[Fuchs, Susanne|AUTHOR Susanne Fuchs]]|
|[[Fuegen, Christian|AUTHOR Christian Fuegen]]|
|[[Fujie, Shinya|AUTHOR Shinya Fujie]]|
|[[Fujimoto, Masakiyo|AUTHOR Masakiyo Fujimoto]]|
|[[Fujioka, Takuya|AUTHOR Takuya Fujioka]]|
|[[Fujita, Kenichi|AUTHOR Kenichi Fujita]]|
|[[Fujita, Yusuke|AUTHOR Yusuke Fujita]]|
|[[Fujita, Yuya|AUTHOR Yuya Fujita]]|
|[[Fukuda, Takashi|AUTHOR Takashi Fukuda]]|
|[[Fukumori, Takahiro|AUTHOR Takahiro Fukumori]]|
|[[Fulford, Daniel|AUTHOR Daniel Fulford]]|
|[[Fung, Pascale|AUTHOR Pascale Fung]]|
|[[Futamata, Kosuke|AUTHOR Kosuke Futamata]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Gabryś, Adam|AUTHOR Adam Gabryś]]|
|[[Gaddam, Navaneetha|AUTHOR Navaneetha Gaddam]]|
|[[Gadgil, Swapnil|AUTHOR Swapnil Gadgil]]|
|[[Gales, Mark J.F.|AUTHOR Mark J.F. Gales]]|
|[[Gamage, Amila|AUTHOR Amila Gamage]]|
|[[Gamboa Gamboa, Jorge Esteban|AUTHOR Jorge Esteban Gamboa Gamboa]]|
|[[Gamper, Hannes|AUTHOR Hannes Gamper]]|
|[[Gan, Wendong|AUTHOR Wendong Gan]]|
|[[Ganapathy, Sriram|AUTHOR Sriram Ganapathy]]|
|[[Gandhi, Vineet|AUTHOR Vineet Gandhi]]|
|[[Ganhotra, Jatin|AUTHOR Jatin Ganhotra]]|
|[[Gannot, Sharon|AUTHOR Sharon Gannot]]|
|[[Ganzeboom, Mario|AUTHOR Mario Ganzeboom]]|
|[[Gao, Dongji|AUTHOR Dongji Gao]]|
|[[Gao, Fan|AUTHOR Fan Gao]]|
|[[Gao, Heting|AUTHOR Heting Gao]]|
|[[Gao, Jiameng|AUTHOR Jiameng Gao]]|
|[[Gao, Jianfeng|AUTHOR Jianfeng Gao]]|
|[[Gao, Xibin|AUTHOR Xibin Gao]]|
|[[Gao, Yang|AUTHOR Yang Gao]]|
|[[Gao, Yingbo|AUTHOR Yingbo Gao]]|
|[[Gao, Yongyu|AUTHOR Yongyu Gao]]|
|[[Gao, Yuan|AUTHOR Yuan Gao]]|
|[[Gao, Zheng|AUTHOR Zheng Gao]]|
|[[Gao, Zhifu|AUTHOR Zhifu Gao]]|
|[[Garcés Díaz-Munío, Gonçal V.|AUTHOR Gonçal V. Garcés Díaz-Munío]]|
|[[García Perera, Leibny Paola|AUTHOR Leibny Paola García Perera]]|
|[[Gard, David|AUTHOR David Gard]]|
|[[Garg, Shefali|AUTHOR Shefali Garg]]|
|[[Garg, Vineet|AUTHOR Vineet Garg]]|
|[[Garner, Philip N.|AUTHOR Philip N. Garner]]|
|[[Garrison, Jake|AUTHOR Jake Garrison]]|
|[[Gaspers, Judith|AUTHOR Judith Gaspers]]|
|[[Gauder, Lara|AUTHOR Lara Gauder]]|
|[[Gaur, Neeraj|AUTHOR Neeraj Gaur]]|
|[[Gaur, Yashesh|AUTHOR Yashesh Gaur]]|
|[[Gauvain, Jean-Luc|AUTHOR Jean-Luc Gauvain]]|
|[[Gauvain, Jodie|AUTHOR Jodie Gauvain]]|
|[[Gaydecki, Patrick|AUTHOR Patrick Gaydecki]]|
|[[Ge, Chunyu|AUTHOR Chunyu Ge]]|
|[[Ge, Meng|AUTHOR Meng Ge]]|
|[[Ge, Wanying|AUTHOR Wanying Ge]]|
|[[Ge, Zhenhao|AUTHOR Zhenhao Ge]]|
|[[Geislinger, Robert|AUTHOR Robert Geislinger]]|
|[[Gelin, Lucile|AUTHOR Lucile Gelin]]|
|[[Gendrot, Cédric|AUTHOR Cédric Gendrot]]|
|[[Geng, Jianhua|AUTHOR Jianhua Geng]]|
|[[Geng, Mengzhe|AUTHOR Mengzhe Geng]]|
|[[Geng, Wang|AUTHOR Wang Geng]]|
|[[Georges, Marc-Antoine|AUTHOR Marc-Antoine Georges]]|
|[[Georgiou, Efthymios|AUTHOR Efthymios Georgiou]]|
|[[Georgiou, Panayiotis|AUTHOR Panayiotis Georgiou]]|
|[[Georgis, Philip|AUTHOR Philip Georgis]]|
|[[Gerazov, Branislav|AUTHOR Branislav Gerazov]]|
|[[Gerczuk, Maurice|AUTHOR Maurice Gerczuk]]|
|[[German, James S.|AUTHOR James S. German]]|
|[[Gerstenberger, Alexander|AUTHOR Alexander Gerstenberger]]|
|[[Getman, Yaroslav|AUTHOR Yaroslav Getman]]|
|[[Ghahabi, Omid|AUTHOR Omid Ghahabi]]|
|[[Ghahremani, Pegah|AUTHOR Pegah Ghahremani]]|
|[[Ghio, Alain|AUTHOR Alain Ghio]]|
|[[Ghodsi, Mohammadreza|AUTHOR Mohammadreza Ghodsi]]|
|[[Ghosh, Prasanta Kumar|AUTHOR Prasanta Kumar Ghosh]]|
|[[Gibson, Andy|AUTHOR Andy Gibson]]|
|[[Gibson, Mark|AUTHOR Mark Gibson]]|
|[[Giesbrecht, Timo|AUTHOR Timo Giesbrecht]]|
|[[Gili Fivela, Barbara|AUTHOR Barbara Gili Fivela]]|
|[[Giller, Maximilian|AUTHOR Maximilian Giller]]|
|[[Gillick, Jon|AUTHOR Jon Gillick]]|
|[[Gilmartin, Emer|AUTHOR Emer Gilmartin]]|
|[[Giménez Pastor, Adrià|AUTHOR Adrià Giménez Pastor]]|
|[[Gimeno, Pablo|AUTHOR Pablo Gimeno]]|
|[[Ginsburg, Boris|AUTHOR Boris Ginsburg]]|
|[[Giollo, Manuel|AUTHOR Manuel Giollo]]|
|[[Giri, Ritwik|AUTHOR Ritwik Giri]]|
|[[Girin, Laurent|AUTHOR Laurent Girin]]|
|[[Giulietti, Simone|AUTHOR Simone Giulietti]]|
|[[Glass, James|AUTHOR James Glass]]|
|[[Glushko, Aleksandr|AUTHOR Aleksandr Glushko]]|
|[[Gmyr, Robert|AUTHOR Robert Gmyr]]|
|[[Gobl, Christer|AUTHOR Christer Gobl]]|
|[[Goderre, Andrew|AUTHOR Andrew Goderre]]|
|[[Goel, Nagendra|AUTHOR Nagendra Goel]]|
|[[Goel, Srishti|AUTHOR Srishti Goel]]|
|[[Gogineni, Krishnachaitanya|AUTHOR Krishnachaitanya Gogineni]]|
|[[Gok, Alican|AUTHOR Alican Gok]]|
|[[Gölge, Eren|AUTHOR Eren Gölge]]|
|[[Gomez, Angel M.|AUTHOR Angel M. Gomez]]|
|[[Gómez, Emilia|AUTHOR Emilia Gómez]]|
|[[Gomez, Juan Zuluaga|AUTHOR Juan Zuluaga Gomez]]|
|[[Gomez-Alanis, Alejandro|AUTHOR Alejandro Gomez-Alanis]]|
|[[Gong, Cheng|AUTHOR Cheng Gong]]|
|[[Gong, Rong|AUTHOR Rong Gong]]|
|[[Gong, Xun|AUTHOR Xun Gong]]|
|[[Gong, Yifan|AUTHOR Yifan Gong]]|
|[[Gong, Yuan|AUTHOR Yuan Gong]]|
|[[Gonzalez, Rachel|AUTHOR Rachel Gonzalez]]|
|[[González Hautamäki, Rosa|AUTHOR Rosa González Hautamäki]]|
|[[Gonzalez-Lopez, Jose A.|AUTHOR Jose A. Gonzalez-Lopez]]|
|[[Gopal, Vishak|AUTHOR Vishak Gopal]]|
|[[Gopalakrishnan, Kailash|AUTHOR Kailash Gopalakrishnan]]|
|[[Gope, Dipanjan|AUTHOR Dipanjan Gope]]|
|[[Gordon, Matthew|AUTHOR Matthew Gordon]]|
|[[Gorman, Kyle|AUTHOR Kyle Gorman]]|
|[[Gorthi, Rama Krishna Sai S.|AUTHOR Rama Krishna Sai S. Gorthi]]|
|[[Gorthi, Subrahmanyam|AUTHOR Subrahmanyam Gorthi]]|
|[[Gósy, Mária|AUTHOR Mária Gósy]]|
|[[Gosztolya, Gábor|AUTHOR Gábor Gosztolya]]|
|[[Graetzer, Simone|AUTHOR Simone Graetzer]]|
|[[Graham, Calbert|AUTHOR Calbert Graham]]|
|[[Grammenos, Andreas|AUTHOR Andreas Grammenos]]|
|[[Grau, Teresa|AUTHOR Teresa Grau]]|
|[[Green, Jordan R.|AUTHOR Jordan R. Green]]|
|[[Gretter, R.|AUTHOR R. Gretter]]|
|[[Gris, Lucas Rafael Stefanel|AUTHOR Lucas Rafael Stefanel Gris]]|
|[[Grivolla, Jens|AUTHOR Jens Grivolla]]|
|[[Gromoglasov, Volodymyr|AUTHOR Volodymyr Gromoglasov]]|
|[[Grondin, François|AUTHOR François Grondin]]|
|[[Grůber, Martin|AUTHOR Martin Grůber]]|
|[[Gu, Bin|AUTHOR Bin Gu]]|
|[[Gu, Feng|AUTHOR Feng Gu]]|
|[[Gu, Jianjun|AUTHOR Jianjun Gu]]|
|[[Gu, Jiatao|AUTHOR Jiatao Gu]]|
|[[Gu, Nan|AUTHOR Nan Gu]]|
|[[Gu, Rongzhi|AUTHOR Rongzhi Gu]]|
|[[Gu, Yile|AUTHOR Yile Gu]]|
|[[Gudupudi, Leela K.|AUTHOR Leela K. Gudupudi]]|
|[[Gudur, Gautham Krishna|AUTHOR Gautham Krishna Gudur]]|
|[[Guevara-Rukoz, Adriana|AUTHOR Adriana Guevara-Rukoz]]|
|[[Guha, Prithwijit|AUTHOR Prithwijit Guha]]|
|[[Gulati, Anmol|AUTHOR Anmol Gulati]]|
|[[Gully, Amelia J.|AUTHOR Amelia J. Gully]]|
|[[Gunceler, Deniz|AUTHOR Deniz Gunceler]]|
|[[Gundogdu, Batuhan|AUTHOR Batuhan Gundogdu]]|
|[[Guo, Chenkai|AUTHOR Chenkai Guo]]|
|[[Guo, Liang|AUTHOR Liang Guo]]|
|[[Guo, Pengcheng|AUTHOR Pengcheng Guo]]|
|[[Guo, Shaotong|AUTHOR Shaotong Guo]]|
|[[Guo, Wu|AUTHOR Wu Guo]]|
|[[Guo, Zhe-chen|AUTHOR Zhe-chen Guo]]|
|[[Guruju, Sai|AUTHOR Sai Guruju]]|
|[[Gusev, Aleksei|AUTHOR Aleksei Gusev]]|
|[[Gutierrez, Fabian Ritter|AUTHOR Fabian Ritter Gutierrez]]|
|[[Gutierrez, Stephanie|AUTHOR Stephanie Gutierrez]]|
|[[Gutierrez-Osuna, Ricardo|AUTHOR Ricardo Gutierrez-Osuna]]|
|[[Gutz, Sarah E.|AUTHOR Sarah E. Gutz]]|
|[[Guzman, Emmanuel|AUTHOR Emmanuel Guzman]]|
|[[Gwon, Youngjune L.|AUTHOR Youngjune L. Gwon]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Habberstad, Doug|AUTHOR Doug Habberstad]]|
|[[Haddadi, Hamed|AUTHOR Hamed Haddadi]]|
|[[Haeb-Umbach, Reinhold|AUTHOR Reinhold Haeb-Umbach]]|
|[[Hagberg, Nils|AUTHOR Nils Hagberg]]|
|[[Haghani, Parisa|AUTHOR Parisa Haghani]]|
|[[Haidar, Md. Akmal|AUTHOR Md. Akmal Haidar]]|
|[[Haider, Fasih|AUTHOR Fasih Haider]]|
|[[Hain, Thomas|AUTHOR Thomas Hain]]|
|[[Hair, Adam|AUTHOR Adam Hair]]|
|[[Hajaj, Chen|AUTHOR Chen Hajaj]]|
|[[Hajek, John|AUTHOR John Hajek]]|
|[[Hakkani Tür, Dilek|AUTHOR Dilek Hakkani Tür]]|
|[[Hamann, Silke|AUTHOR Silke Hamann]]|
|[[Hamilakis, Nicolas|AUTHOR Nicolas Hamilakis]]|
|[[Han, Ambyera|AUTHOR Ambyera Han]]|
|[[Han, Asriel|AUTHOR Asriel Han]]|
|[[Han, Bing|AUTHOR Bing Han]]|
|[[Han, Cong|AUTHOR Cong Han]]|
|[[Han, David K.|AUTHOR David K. Han]]|
|[[Han, Eunjung|AUTHOR Eunjung Han]]|
|[[Han, Jeong-Im|AUTHOR Jeong-Im Han]]|
|[[Han, Jiangyu|AUTHOR Jiangyu Han]]|
|[[Han, Jing|AUTHOR Jing Han]]|
|[[Han, Jiqing|AUTHOR Jiqing Han]]|
|[[Han, Kyu J.|AUTHOR Kyu J. Han]]|
|[[Han, Mei|AUTHOR Mei Han]]|
|[[Han, Sangwook|AUTHOR Sangwook Han]]|
|[[Han, Seungu|AUTHOR Seungu Han]]|
|[[Han, Soyeon Caren|AUTHOR Soyeon Caren Han]]|
|[[Han, Ting|AUTHOR Ting Han]]|
|[[Han, Wei|AUTHOR Wei Han]]|
|[[Han, Zhuoda|AUTHOR Zhuoda Han]]|
|[[Hancock, Edwin R.|AUTHOR Edwin R. Hancock]]|
|[[Hanley, Terry|AUTHOR Terry Hanley]]|
|[[Hansen, John H.L.|AUTHOR John H.L. Hansen]]|
|[[Hantke, Simone|AUTHOR Simone Hantke]]|
|[[Hanzlíček, Zdeněk|AUTHOR Zdeněk Hanzlíček]]|
|[[Hao, Jie|AUTHOR Jie Hao]]|
|[[Hao, Shuai|AUTHOR Shuai Hao]]|
|[[Hara, Sunao|AUTHOR Sunao Hara]]|
|[[Haro, Josep Maria|AUTHOR Josep Maria Haro]]|
|[[Harrington, Jonathan|AUTHOR Jonathan Harrington]]|
|[[Harris, Jennifer M.|AUTHOR Jennifer M. Harris]]|
|[[Harrison, Peter M.C.|AUTHOR Peter M.C. Harrison]]|
|[[Harrison, Philip|AUTHOR Philip Harrison]]|
|[[Harvill, John|AUTHOR John Harvill]]|
|[[Harwath, David|AUTHOR David Harwath]]|
|[[Hasegawa-Johnson, Mark|AUTHOR Mark Hasegawa-Johnson]]|
|[[Hasthanasombat, Apinan|AUTHOR Apinan Hasthanasombat]]|
|[[Haulcy, R’mani|AUTHOR R’mani Haulcy]]|
|[[Hauptmann, Alexander|AUTHOR Alexander Hauptmann]]|
|[[Haws, David|AUTHOR David Haws]]|
|[[Hayakawa, Tomoaki|AUTHOR Tomoaki Hayakawa]]|
|[[Hayashi, Tomoki|AUTHOR Tomoki Hayashi]]|
|[[Hdez-Díaz, María E.|AUTHOR María E. Hdez-Díaz]]|
|[[He, Di|AUTHOR Di He]]|
|[[He, Lei|AUTHOR Lei He]]|
|[[He, Liang|AUTHOR Liang He]]|
|[[He, Maokui|AUTHOR Maokui He]]|
|[[He, Qing|AUTHOR Qing He]]|
|[[He, Shulin|AUTHOR Shulin He]]|
|[[He, Weipeng|AUTHOR Weipeng He]]|
|[[He, Xiangheng|AUTHOR Xiangheng He]]|
|[[He, Xiaoxi|AUTHOR Xiaoxi He]]|
|[[He, Yanzhang|AUTHOR Yanzhang He]]|
|[[He, Yi|AUTHOR Yi He]]|
|[[He, Yuxuan|AUTHOR Yuxuan He]]|
|[[Hecker, Pascal|AUTHOR Pascal Hecker]]|
|[[Heeringa, Wilbert|AUTHOR Wilbert Heeringa]]|
|[[Hegde, Sindhu B.|AUTHOR Sindhu B. Hegde]]|
|[[Heikinheimo, Hannes|AUTHOR Hannes Heikinheimo]]|
|[[Hejná, Míša|AUTHOR Míša Hejná]]|
|[[Helmke, Hartmut|AUTHOR Hartmut Helmke]]|
|[[Hembise, Corentin|AUTHOR Corentin Hembise]]|
|[[Henderson, Alice|AUTHOR Alice Henderson]]|
|[[Henriques, João F.|AUTHOR João F. Henriques]]|
|[[Heo, Hee-Soo|AUTHOR Hee-Soo Heo]]|
|[[Hermann, Enno|AUTHOR Enno Hermann]]|
|[[Hermansky, Hynek|AUTHOR Hynek Hermansky]]|
|[[Hershey, John R.|AUTHOR John R. Hershey]]|
|[[Heymann, Jahn|AUTHOR Jahn Heymann]]|
|[[Heywood, Rus|AUTHOR Rus Heywood]]|
|[[Hifny, Yasser|AUTHOR Yasser Hifny]]|
|[[Higashinaka, Ryuichiro|AUTHOR Ryuichiro Higashinaka]]|
|[[Higuchi, Yosuke|AUTHOR Yosuke Higuchi]]|
|[[Hildén, Raili|AUTHOR Raili Hildén]]|
|[[Hioka, Yusuke|AUTHOR Yusuke Hioka]]|
|[[Hiramura, Takekatsu|AUTHOR Takekatsu Hiramura]]|
|[[Hiroya, Sadao|AUTHOR Sadao Hiroya]]|
|[[Hirschberg, Julia|AUTHOR Julia Hirschberg]]|
|[[Hoang, Vu|AUTHOR Vu Hoang]]|
|[[Hoi, Steven|AUTHOR Steven Hoi]]|
|[[Hong, Mi|AUTHOR Mi Hong]]|
|[[Hong, Qingyang|AUTHOR Qingyang Hong]]|
|[[Hong, Y.-W. Peter|AUTHOR Y.-W. Peter Hong]]|
|[[Hong, Zhenhou|AUTHOR Zhenhou Hong]]|
|[[Hori, Chiori|AUTHOR Chiori Hori]]|
|[[Hori, Takaaki|AUTHOR Takaaki Hori]]|
|[[Horiguchi, Shota|AUTHOR Shota Horiguchi]]|
|[[Hotopf, Matthew|AUTHOR Matthew Hotopf]]|
|[[Hou, Feng|AUTHOR Feng Hou]]|
|[[Hou, Junfeng|AUTHOR Junfeng Hou]]|
|[[Hou, Wangrui|AUTHOR Wangrui Hou]]|
|[[Hou, Wenxin|AUTHOR Wenxin Hou]]|
|[[Hou, Yuanbo|AUTHOR Yuanbo Hou]]|
|[[Hough, Julian|AUTHOR Julian Hough]]|
|[[Hsieh, Feng-fan|AUTHOR Feng-fan Hsieh]]|
|[[Hsieh, Tsun-An|AUTHOR Tsun-An Hsieh]]|
|[[Hsu, Wei-Ning|AUTHOR Wei-Ning Hsu]]|
|[[Hu, Cheng-Hung|AUTHOR Cheng-Hung Hu]]|
|[[Hu, Chenxu|AUTHOR Chenxu Hu]]|
|[[Hu, Ke|AUTHOR Ke Hu]]|
|[[Hu, Na|AUTHOR Na Hu]]|
|[[Hu, Peng|AUTHOR Peng Hu]]|
|[[Hu, Pengfei|AUTHOR Pengfei Hu]]|
|[[Hu, Qian|AUTHOR Qian Hu]]|
|[[Hu, Shell Xu|AUTHOR Shell Xu Hu]]|
|[[Hu, Shih-Wei|AUTHOR Shih-Wei Hu]]|
|[[Hu, Shoukang|AUTHOR Shoukang Hu]]|
|[[Hu, Vivian|AUTHOR Vivian Hu]]|
|[[Hu, Wenchao|AUTHOR Wenchao Hu]]|
|[[Hu, Xinhui|AUTHOR Xinhui Hu]]|
|[[Hu, Yanxin|AUTHOR Yanxin Hu]]|
|[[Hu, Ying|AUTHOR Ying Hu]]|
|[[Hu, Yu|AUTHOR Yu Hu]]|
|[[Huang, Che-Wei|AUTHOR Che-Wei Huang]]|
|[[Huang, Chien-yu|AUTHOR Chien-yu Huang]]|
|[[Huang, Chongxuan|AUTHOR Chongxuan Huang]]|
|[[Huang, Hao|AUTHOR Hao Huang]]|
|[[Huang, Jing|AUTHOR Jing Huang]]|
|[[Huang, Jinwen|AUTHOR Jinwen Huang]]|
|[[Huang, Jun|AUTHOR Jun Huang]]|
|[[Huang, Junrui|AUTHOR Junrui Huang]]|
|[[Huang, Leichao|AUTHOR Leichao Huang]]|
|[[Huang, Liang|AUTHOR Liang Huang]]|
|[[Huang, Lu|AUTHOR Lu Huang]]|
|[[Huang, Michelle|AUTHOR Michelle Huang]]|
|[[Huang, Qiushi|AUTHOR Qiushi Huang]]|
|[[Huang, Rongjie|AUTHOR Rongjie Huang]]|
|[[Huang, Ruizhe|AUTHOR Ruizhe Huang]]|
|[[Huang, Shen|AUTHOR Shen Huang]]|
|[[Huang, Sung-Feng|AUTHOR Sung-Feng Huang]]|
|[[Huang, Tzu-Hsien|AUTHOR Tzu-Hsien Huang]]|
|[[Huang, Weilong|AUTHOR Weilong Huang]]|
|[[Huang, Wen-Chin|AUTHOR Wen-Chin Huang]]|
|[[Huang, W. Ronny|AUTHOR W. Ronny Huang]]|
|[[Huang, Xing|AUTHOR Xing Huang]]|
|[[Huang, Yan|AUTHOR Yan Huang]]|
|[[Huang, Yinghui|AUTHOR Yinghui Huang]]|
|[[Huang, Yiqing|AUTHOR Yiqing Huang]]|
|[[Huang, Yiteng|AUTHOR Yiteng Huang]]|
|[[Huang, Yu-Han|AUTHOR Yu-Han Huang]]|
|[[Huang, Yukai|AUTHOR Yukai Huang]]|
|[[Huang, Yu-Lin|AUTHOR Yu-Lin Huang]]|
|[[Huang, Zhihua|AUTHOR Zhihua Huang]]|
|[[Huang, Zifang|AUTHOR Zifang Huang]]|
|[[Huang, Zili|AUTHOR Zili Huang]]|
|[[Huckvale, Mark|AUTHOR Mark Huckvale]]|
|[[Hueber, Thomas|AUTHOR Thomas Hueber]]|
|[[Hughes, Vincent|AUTHOR Vincent Hughes]]|
|[[Hui, C.T. Justine|AUTHOR C.T. Justine Hui]]|
|[[Huo, Zhouyuan|AUTHOR Zhouyuan Huo]]|
|[[Hussain, Sarmad|AUTHOR Sarmad Hussain]]|
|[[Hussein, Amir|AUTHOR Amir Hussein]]|
|[[Hustad, Katherine C.|AUTHOR Katherine C. Hustad]]|
|[[Hutin, Mathilde|AUTHOR Mathilde Hutin]]|
|[[Hwang, Dongseong|AUTHOR Dongseong Hwang]]|
|[[Hwang, Geumbyeol|AUTHOR Geumbyeol Hwang]]|
|[[Hwang, Min-Jae|AUTHOR Min-Jae Hwang]]|
|[[Hwang, Soojoong|AUTHOR Soojoong Hwang]]|
|[[Hwang, Sung Ju|AUTHOR Sung Ju Hwang]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Ichikawa, Osamu|AUTHOR Osamu Ichikawa]]|
|[[Ide, Yuta|AUTHOR Yuta Ide]]|
|[[Ihori, Mana|AUTHOR Mana Ihori]]|
|[[Iida, Akiyoshi|AUTHOR Akiyoshi Iida]]|
|[[Ijima, Yusuke|AUTHOR Yusuke Ijima]]|
|[[Ilin, Aleksei|AUTHOR Aleksei Ilin]]|
|[[Illa, Aravind|AUTHOR Aravind Illa]]|
|[[Illina, Irina|AUTHOR Irina Illina]]|
|[[Illium, Steffen|AUTHOR Steffen Illium]]|
|[[Inaam, Rafia|AUTHOR Rafia Inaam]]|
|[[Inaguma, Hirofumi|AUTHOR Hirofumi Inaguma]]|
|[[Inan, Omer T.|AUTHOR Omer T. Inan]]|
|[[Ionescu, Radu Tudor|AUTHOR Radu Tudor Ionescu]]|
|[[Iqbal, Zikra|AUTHOR Zikra Iqbal]]|
|[[Iranzo-Sánchez, Javier|AUTHOR Javier Iranzo-Sánchez]]|
|[[Irino, Toshio|AUTHOR Toshio Irino]]|
|[[Isaieva, Karyna|AUTHOR Karyna Isaieva]]|
|[[Ishi, Carlos Toshinori|AUTHOR Carlos Toshinori Ishi]]|
|[[Ishizuka, Kenkichi|AUTHOR Kenkichi Ishizuka]]|
|[[Isik, Umut|AUTHOR Umut Isik]]|
|[[Ithapu, Vamsi Krishna|AUTHOR Vamsi Krishna Ithapu]]|
|[[Ito, Akinori|AUTHOR Akinori Ito]]|
|[[Ito, Koichiro|AUTHOR Koichiro Ito]]|
|[[Itoyama, Katsutoshi|AUTHOR Katsutoshi Itoyama]]|
|[[Iturralde Zurita, Alvaro|AUTHOR Alvaro Iturralde Zurita]]|
|[[Ivan, Alina|AUTHOR Alina Ivan]]|
|[[Ivry, Amir|AUTHOR Amir Ivry]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Jacobs, Christiaan|AUTHOR Christiaan Jacobs]]|
|[[Jacoby, Nori|AUTHOR Nori Jacoby]]|
|[[Jahchan, Nataly|AUTHOR Nataly Jahchan]]|
|[[Jain, Abhinav|AUTHOR Abhinav Jain]]|
|[[Jain, Mahaveer|AUTHOR Mahaveer Jain]]|
|[[Jaiswal, Saish|AUTHOR Saish Jaiswal]]|
|[[Jakob, Moritz|AUTHOR Moritz Jakob]]|
|[[Jalal, Md. Asif|AUTHOR Md. Asif Jalal]]|
|[[Jalilpour Monesi, Mohammad|AUTHOR Mohammad Jalilpour Monesi]]|
|[[Janaliyeva, Aigerim|AUTHOR Aigerim Janaliyeva]]|
|[[Jang, Inseon|AUTHOR Inseon Jang]]|
|[[Jang, Won|AUTHOR Won Jang]]|
|[[Jansen, Anne|AUTHOR Anne Jansen]]|
|[[Janský, J.|AUTHOR J. Janský]]|
|[[Jaramillo, Alfredo Esquivel|AUTHOR Alfredo Esquivel Jaramillo]]|
|[[Jatteau, Adèle|AUTHOR Adèle Jatteau]]|
|[[Jawahar, C.V.|AUTHOR C.V. Jawahar]]|
|[[Jayasuriya, Suren|AUTHOR Suren Jayasuriya]]|
|[[Jayesh, M.K.|AUTHOR M.K. Jayesh]]|
|[[Jensen, Jesper|AUTHOR Jesper Jensen]]|
|[[Jeong, Kihyuk|AUTHOR Kihyuk Jeong]]|
|[[Jeong, Myeonghun|AUTHOR Myeonghun Jeong]]|
|[[Jęśko, Waldemar|AUTHOR Waldemar Jęśko]]|
|[[Jespersen, Anna Bothe|AUTHOR Anna Bothe Jespersen]]|
|[[Jessen, Michael|AUTHOR Michael Jessen]]|
|[[Jetté, Miguel|AUTHOR Miguel Jetté]]|
|[[Ji, Youna|AUTHOR Youna Ji]]|
|[[Jia, Jia|AUTHOR Jia Jia]]|
|[[Jia, Jie|AUTHOR Jie Jia]]|
|[[Jia, Yan|AUTHOR Yan Jia]]|
|[[Jia, Ye|AUTHOR Ye Jia]]|
|[[Jiang, Dongcheng|AUTHOR Dongcheng Jiang]]|
|[[Jiang, Dongwei|AUTHOR Dongwei Jiang]]|
|[[Jiang, Fei|AUTHOR Fei Jiang]]|
|[[Jiang, Liyang|AUTHOR Liyang Jiang]]|
|[[Jiang, Pan-Pan|AUTHOR Pan-Pan Jiang]]|
|[[Jiang, Xinyi|AUTHOR Xinyi Jiang]]|
|[[Jiang, Yidi|AUTHOR Yidi Jiang]]|
|[[Jiao, Yunlong|AUTHOR Yunlong Jiao]]|
|[[Jimerson, Robbie|AUTHOR Robbie Jimerson]]|
|[[Jin, Chunxiang|AUTHOR Chunxiang Jin]]|
|[[Jin, Mingjie|AUTHOR Mingjie Jin]]|
|[[Jin, Qin|AUTHOR Qin Jin]]|
|[[Jin, Zengrui|AUTHOR Zengrui Jin]]|
|[[Jing, Zijun|AUTHOR Zijun Jing]]|
|[[Jo, Jaechoon|AUTHOR Jaechoon Jo]]|
|[[Joe, Seongho|AUTHOR Seongho Joe]]|
|[[Joglekar, Aditya|AUTHOR Aditya Joglekar]]|
|[[Joglekar, Sachin|AUTHOR Sachin Joglekar]]|
|[[Johnson, Khia A.|AUTHOR Khia A. Johnson]]|
|[[Johnson, Moris|AUTHOR Moris Johnson]]|
|[[Joly, Arnaud|AUTHOR Arnaud Joly]]|
|[[Jones, Llion|AUTHOR Llion Jones]]|
|[[Jones, Matthew|AUTHOR Matthew Jones]]|
|[[Joo, Young-Sun|AUTHOR Young-Sun Joo]]|
|[[Jorge, Javier|AUTHOR Javier Jorge]]|
|[[Joshi, Dhiraj|AUTHOR Dhiraj Joshi]]|
|[[Joshi, Sachindra|AUTHOR Sachindra Joshi]]|
|[[Joshi, Sonal|AUTHOR Sonal Joshi]]|
|[[Joshi, Vikas|AUTHOR Vikas Joshi]]|
|[[Jouvet, Denis|AUTHOR Denis Jouvet]]|
|[[Ju, Chelsea J.-T.|AUTHOR Chelsea J.-T. Ju]]|
|[[Juan, Alfons|AUTHOR Alfons Juan]]|
|[[Juang, Biing-Hwang|AUTHOR Biing-Hwang Juang]]|
|[[Jung, Hyun|AUTHOR Hyun Jung]]|
|[[Jung, Jee-weon|AUTHOR Jee-weon Jung]]|
|[[Jv, Yukai|AUTHOR Yukai Jv]]|
|[[Jyothi, Preethi|AUTHOR Preethi Jyothi]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Kaandorp, Casper S.|AUTHOR Casper S. Kaandorp]]|
|[[Kachare, Pramod H.|AUTHOR Pramod H. Kachare]]|
|[[Kachkovskaia, Tatiana|AUTHOR Tatiana Kachkovskaia]]|
|[[Kafentzis, George P.|AUTHOR George P. Kafentzis]]|
|[[Kahn, Jacob|AUTHOR Jacob Kahn]]|
|[[Kajarekar, Sachin|AUTHOR Sachin Kajarekar]]|
|[[Kakegawa, Naoto|AUTHOR Naoto Kakegawa]]|
|[[Kaland, Constantijn|AUTHOR Constantijn Kaland]]|
|[[Kalgaonkar, Kaustubh|AUTHOR Kaustubh Kalgaonkar]]|
|[[Kalinli, Ozlem|AUTHOR Ozlem Kalinli]]|
|[[Kalluri, Shareef Babu|AUTHOR Shareef Babu Kalluri]]|
|[[Kamble, Madhu R.|AUTHOR Madhu R. Kamble]]|
|[[Kameoka, Hirokazu|AUTHOR Hirokazu Kameoka]]|
|[[Kamo, Naoyuki|AUTHOR Naoyuki Kamo]]|
|[[Kamper, Herman|AUTHOR Herman Kamper]]|
|[[Kanda, Naoyuki|AUTHOR Naoyuki Kanda]]|
|[[Kang, Haimei|AUTHOR Haimei Kang]]|
|[[Kang, Hong-Goo|AUTHOR Hong-Goo Kang]]|
|[[Kang, Jian|AUTHOR Jian Kang]]|
|[[Kang, Jingu|AUTHOR Jingu Kang]]|
|[[Kang, Shiyin|AUTHOR Shiyin Kang]]|
|[[Kang, Tae Gyoon|AUTHOR Tae Gyoon Kang]]|
|[[Kang, Woo Hyun|AUTHOR Woo Hyun Kang]]|
|[[Kang, Yueteng|AUTHOR Yueteng Kang]]|
|[[Kansara, Seemran|AUTHOR Seemran Kansara]]|
|[[Kantharaju, Pavan|AUTHOR Pavan Kantharaju]]|
|[[Kao, Wei-Tsung|AUTHOR Wei-Tsung Kao]]|
|[[Kapia, Enkeleida|AUTHOR Enkeleida Kapia]]|
|[[Kapoor, Parul|AUTHOR Parul Kapoor]]|
|[[Karanasou, Penny|AUTHOR Penny Karanasou]]|
|[[Karas, Vincent|AUTHOR Vincent Karas]]|
|[[Karita, Shigeki|AUTHOR Shigeki Karita]]|
|[[Karlapati, Sri|AUTHOR Sri Karlapati]]|
|[[Karpov, Alexey|AUTHOR Alexey Karpov]]|
|[[Karpov, Nikolay|AUTHOR Nikolay Karpov]]|
|[[Karra, Kiran|AUTHOR Kiran Karra]]|
|[[Kashino, Makio|AUTHOR Makio Kashino]]|
|[[Kashiwagi, Yosuke|AUTHOR Yosuke Kashiwagi]]|
|[[Kashyap, Madhav Mahesh|AUTHOR Madhav Mahesh Kashyap]]|
|[[Kaszuba-Miotke, Katarzyna|AUTHOR Katarzyna Kaszuba-Miotke]]|
|[[Kataria, Saurabh|AUTHOR Saurabh Kataria]]|
|[[Katayama, Hayato|AUTHOR Hayato Katayama]]|
|[[Katsurada, Kouichi|AUTHOR Kouichi Katsurada]]|
|[[Katz, Boris|AUTHOR Boris Katz]]|
|[[Kaushik, Lakshmish|AUTHOR Lakshmish Kaushik]]|
|[[Kaushik, Vishesh|AUTHOR Vishesh Kaushik]]|
|[[Kawahara, Hideki|AUTHOR Hideki Kawahara]]|
|[[Kawahara, Tatsuya|AUTHOR Tatsuya Kawahara]]|
|[[Kawai, Hisashi|AUTHOR Hisashi Kawai]]|
|[[Kaya, Heysem|AUTHOR Heysem Kaya]]|
|[[Kazak, Ekaterina|AUTHOR Ekaterina Kazak]]|
|[[Ke, Dengfeng|AUTHOR Dengfeng Ke]]|
|[[Ke, Yuxuan|AUTHOR Yuxuan Ke]]|
|[[Keesing, Aaron|AUTHOR Aaron Keesing]]|
|[[Keller, Alexander|AUTHOR Alexander Keller]]|
|[[Kelly, Barbara|AUTHOR Barbara Kelly]]|
|[[Keren, Gil|AUTHOR Gil Keren]]|
|[[Kershenbaum, Benji|AUTHOR Benji Kershenbaum]]|
|[[Kerz, Elma|AUTHOR Elma Kerz]]|
|[[Kesim, Ege|AUTHOR Ege Kesim]]|
|[[Kesiraju, Santosh|AUTHOR Santosh Kesiraju]]|
|[[Khare, Shreya|AUTHOR Shreya Khare]]|
|[[Kharitonov, Eugene|AUTHOR Eugene Kharitonov]]|
|[[Khassanov, Yerbolat|AUTHOR Yerbolat Khassanov]]|
|[[Khelif, Khaled|AUTHOR Khaled Khelif]]|
|[[Khokhlov, Yuri|AUTHOR Yuri Khokhlov]]|
|[[Khong, Andy W.H.|AUTHOR Andy W.H. Khong]]|
|[[Khorrami, Khazar|AUTHOR Khazar Khorrami]]|
|[[Khosravani, Abbas|AUTHOR Abbas Khosravani]]|
|[[Khudanpur, Sanjeev|AUTHOR Sanjeev Khudanpur]]|
|[[Kilgour, Kevin|AUTHOR Kevin Kilgour]]|
|[[Kilpikoski, Juho|AUTHOR Juho Kilpikoski]]|
|[[Kim, Bongwan|AUTHOR Bongwan Kim]]|
|[[Kim, Byeonggeun|AUTHOR Byeonggeun Kim]]|
|[[Kim, Daeyoung|AUTHOR Daeyoung Kim]]|
|[[Kim, Dalhyun|AUTHOR Dalhyun Kim]]|
|[[Kim, Eesung|AUTHOR Eesung Kim]]|
|[[Kim, Eunggyun|AUTHOR Eunggyun Kim]]|
|[[Kim, Eunhwa|AUTHOR Eunhwa Kim]]|
|[[Kim, Gwantae|AUTHOR Gwantae Kim]]|
|[[Kim, Ho-Gyeong|AUTHOR Ho-Gyeong Kim]]|
|[[Kim, Hwa-Yeon|AUTHOR Hwa-Yeon Kim]]|
|[[Kim, Hyeongju|AUTHOR Hyeongju Kim]]|
|[[Kim, Jae-Min|AUTHOR Jae-Min Kim]]|
|[[Kim, Jaeyoung|AUTHOR Jaeyoung Kim]]|
|[[Kim, Jangho|AUTHOR Jangho Kim]]|
|[[Kim, Ji-Hoon|AUTHOR Ji-Hoon Kim]]|
|[[Kim, Ji-Hwan|AUTHOR Ji-Hwan Kim]]|
|[[Kim, Jong-Hwan|AUTHOR Jong-Hwan Kim]]|
|[[Kim, Jong Jin|AUTHOR Jong Jin Kim]]|
|[[Kim, Joo-Yeon|AUTHOR Joo-Yeon Kim]]|
|[[Kim, Juntae|AUTHOR Juntae Kim]]|
|[[Kim, Kwangyoun|AUTHOR Kwangyoun Kim]]|
|[[Kim, Kyuhong|AUTHOR Kyuhong Kim]]|
|[[Kim, Minchan|AUTHOR Minchan Kim]]|
|[[Kim, Minje|AUTHOR Minje Kim]]|
|[[Kim, Nam Soo|AUTHOR Nam Soo Kim]]|
|[[Kim, Seok Min|AUTHOR Seok Min Kim]]|
|[[Kim, Seong-Hu|AUTHOR Seong-Hu Kim]]|
|[[Kim, So Hyun|AUTHOR So Hyun Kim]]|
|[[Kim, Sunhee|AUTHOR Sunhee Kim]]|
|[[Kim, Sunwoo|AUTHOR Sunwoo Kim]]|
|[[Kim, Suyoun|AUTHOR Suyoun Kim]]|
|[[Kim, Tae-Woo|AUTHOR Tae-Woo Kim]]|
|[[Kim, Ui-Hyun|AUTHOR Ui-Hyun Kim]]|
|[[Kim, Yelin|AUTHOR Yelin Kim]]|
|[[Kim, You Jin|AUTHOR You Jin Kim]]|
|[[Kim, Young-Ik|AUTHOR Young-Ik Kim]]|
|[[Kim, Young-Kyung|AUTHOR Young-Kyung Kim]]|
|[[King, Brian|AUTHOR Brian King]]|
|[[King, Jeff|AUTHOR Jeff King]]|
|[[King, Simon|AUTHOR Simon King]]|
|[[Kingsbury, Brian|AUTHOR Brian Kingsbury]]|
|[[Kinnunen, Tomi|AUTHOR Tomi Kinnunen]]|
|[[Kinoshita, Keisuke|AUTHOR Keisuke Kinoshita]]|
|[[Kirchhoff, Katrin|AUTHOR Katrin Kirchhoff]]|
|[[Kishiyama, Takeshi|AUTHOR Takeshi Kishiyama]]|
|[[Kiss, Gábor|AUTHOR Gábor Kiss]]|
|[[Klabbers, Esther|AUTHOR Esther Klabbers]]|
|[[Klakow, Dietrich|AUTHOR Dietrich Klakow]]|
|[[Kleinert, Matthias|AUTHOR Matthias Kleinert]]|
|[[Klejch, Ondřej|AUTHOR Ondřej Klejch]]|
|[[Klementiev, Anita|AUTHOR Anita Klementiev]]|
|[[Klimkov, Viacheslav|AUTHOR Viacheslav Klimkov]]|
|[[Klinke, Piotr|AUTHOR Piotr Klinke]]|
|[[Klumpp, P.|AUTHOR P. Klumpp]]|
|[[Knill, K.|AUTHOR K. Knill]]|
|[[Ko, Byeongil|AUTHOR Byeongil Ko]]|
|[[Ko, Hanseok|AUTHOR Hanseok Ko]]|
|[[Ko, Tom|AUTHOR Tom Ko]]|
|[[Ko, Yuka|AUTHOR Yuka Ko]]|
|[[Kobayashi, Akio|AUTHOR Akio Kobayashi]]|
|[[Kobayashi, Kazuhiro|AUTHOR Kazuhiro Kobayashi]]|
|[[Kobayashi, Tetsunori|AUTHOR Tetsunori Kobayashi]]|
|[[Kocharov, Daniil|AUTHOR Daniil Kocharov]]|
|[[Kocour, Martin|AUTHOR Martin Kocour]]|
|[[Koehler, Thilo|AUTHOR Thilo Koehler]]|
|[[Koenig, Laura L.|AUTHOR Laura L. Koenig]]|
|[[Koepke, A. Sophia|AUTHOR A. Sophia Koepke]]|
|[[Koh, Yun Sing|AUTHOR Yun Sing Koh]]|
|[[Koishida, Kazuhito|AUTHOR Kazuhito Koishida]]|
|[[Kojima, Atsushi|AUTHOR Atsushi Kojima]]|
|[[Kojima, Ryosuke|AUTHOR Ryosuke Kojima]]|
|[[Koldovský, Z.|AUTHOR Z. Koldovský]]|
|[[Kolossa, Dorothea|AUTHOR Dorothea Kolossa]]|
|[[Komatani, Kazunori|AUTHOR Kazunori Komatani]]|
|[[Komatsu, Tatsuya|AUTHOR Tatsuya Komatsu]]|
|[[Kong, Jiaming|AUTHOR Jiaming Kong]]|
|[[Kong, Qiuqiang|AUTHOR Qiuqiang Kong]]|
|[[Kong, Tianlong|AUTHOR Tianlong Kong]]|
|[[Kong, Yuxiang|AUTHOR Yuxiang Kong]]|
|[[Kongthaworn, Thananchai|AUTHOR Thananchai Kongthaworn]]|
|[[Koot, Amparo C.|AUTHOR Amparo C. Koot]]|
|[[Kopparapu, Sunil Kumar|AUTHOR Sunil Kumar Kopparapu]]|
|[[Koppelmann, Timm|AUTHOR Timm Koppelmann]]|
|[[Korenevskaya, Mariya|AUTHOR Mariya Korenevskaya]]|
|[[Korenevsky, Maxim|AUTHOR Maxim Korenevsky]]|
|[[Koriyama, Tomoki|AUTHOR Tomoki Koriyama]]|
|[[Korzekwa, Daniel|AUTHOR Daniel Korzekwa]]|
|[[Kosgi, Saiteja|AUTHOR Saiteja Kosgi]]|
|[[Koshizuka, Takeshi|AUTHOR Takeshi Koshizuka]]|
|[[Kostek, Bozena|AUTHOR Bozena Kostek]]|
|[[Kothare, Hardik|AUTHOR Hardik Kothare]]|
|[[Kotov, Denis|AUTHOR Denis Kotov]]|
|[[Koul, Ashwani|AUTHOR Ashwani Koul]]|
|[[Kounovský, T.|AUTHOR T. Kounovský]]|
|[[Kovács, György|AUTHOR György Kovács]]|
|[[Kowalczyk, Konrad|AUTHOR Konrad Kowalczyk]]|
|[[Kraljevski, Ivan|AUTHOR Ivan Kraljevski]]|
|[[Krikun, Maxim|AUTHOR Maxim Krikun]]|
|[[Krishnamohan, Venkat|AUTHOR Venkat Krishnamohan]]|
|[[Krishnan, Aravind|AUTHOR Aravind Krishnan]]|
|[[Krishnan, Prashant|AUTHOR Prashant Krishnan]]|
|[[Krishnaswamy, Arvindh|AUTHOR Arvindh Krishnaswamy]]|
|[[Krug, Paul Konstantin|AUTHOR Paul Konstantin Krug]]|
|[[Kubo, Yotaro|AUTHOR Yotaro Kubo]]|
|[[Kuchaiev, Oleksii|AUTHOR Oleksii Kuchaiev]]|
|[[Kuchinsky, Stefanie E.|AUTHOR Stefanie E. Kuchinsky]]|
|[[Kucsko, Georg|AUTHOR Georg Kucsko]]|
|[[Kudera, Jacek|AUTHOR Jacek Kudera]]|
|[[Kuehne, Hilde|AUTHOR Hilde Kuehne]]|
|[[Kuhnert, Barbara|AUTHOR Barbara Kuhnert]]|
|[[Kumar, Aiswarya Vinod|AUTHOR Aiswarya Vinod Kumar]]|
|[[Kumar, Anurag|AUTHOR Anurag Kumar]]|
|[[Kumar, Ayush|AUTHOR Ayush Kumar]]|
|[[Kumar, C. Shiva|AUTHOR C. Shiva Kumar]]|
|[[Kumar, Kshitiz|AUTHOR Kshitiz Kumar]]|
|[[Kumar, Mari Ganesh|AUTHOR Mari Ganesh Kumar]]|
|[[Kumar, Neeraj|AUTHOR Neeraj Kumar]]|
|[[Kumar, Puneet|AUTHOR Puneet Kumar]]|
|[[Kumar, Rohit|AUTHOR Rohit Kumar]]|
|[[Kumar, Saket|AUTHOR Saket Kumar]]|
|[[Kumar, Shankar|AUTHOR Shankar Kumar]]|
|[[Kumar, Shashi|AUTHOR Shashi Kumar]]|
|[[Kumatani, Kenichi|AUTHOR Kenichi Kumatani]]|
|[[Kumawat, Pooja|AUTHOR Pooja Kumawat]]|
|[[Kumm, Jochen|AUTHOR Jochen Kumm]]|
|[[Kunz, Timo P.|AUTHOR Timo P. Kunz]]|
|[[Kunzmann, Siegfried|AUTHOR Siegfried Kunzmann]]|
|[[Kuo, Cheng-Hao|AUTHOR Cheng-Hao Kuo]]|
|[[Kuo, Hong-Kwang J.|AUTHOR Hong-Kwang J. Kuo]]|
|[[Kurata, Gakuto|AUTHOR Gakuto Kurata]]|
|[[Kuriakose, Jom|AUTHOR Jom Kuriakose]]|
|[[Kurimo, Mikko|AUTHOR Mikko Kurimo]]|
|[[Küster, Dennis|AUTHOR Dennis Küster]]|
|[[Kuznetsova, Anastasia|AUTHOR Anastasia Kuznetsova]]|
|[[Kwak, Nojun|AUTHOR Nojun Kwak]]|
|[[Kwon, Yoohwan|AUTHOR Yoohwan Kwon]]|
|[[Kwon, Young D.|AUTHOR Young D. Kwon]]|
|[[Kwon, Youngki|AUTHOR Youngki Kwon]]|
|[[Ky, Nguyen Hoang|AUTHOR Nguyen Hoang Ky]]|
|[[Kynych, Frantisek|AUTHOR Frantisek Kynych]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Laali, Majid|AUTHOR Majid Laali]]|
|[[Lachiri, Zied|AUTHOR Zied Lachiri]]|
|[[Ladewig, Marilyn A.|AUTHOR Marilyn A. Ladewig]]|
|[[Lahiri, Rimita|AUTHOR Rimita Lahiri]]|
|[[Lai, Catherine|AUTHOR Catherine Lai]]|
|[[Lai, Cheng-I Jeff|AUTHOR Cheng-I Jeff Lai]]|
|[[Laínez, José|AUTHOR José Laínez]]|
|[[Lakhotia, Kushal|AUTHOR Kushal Lakhotia]]|
|[[Lalhminghlui, Wendy|AUTHOR Wendy Lalhminghlui]]|
|[[Lall, Brejesh|AUTHOR Brejesh Lall]]|
|[[Lalwani, Garima|AUTHOR Garima Lalwani]]|
|[[Lam, Max W.Y.|AUTHOR Max W.Y. Lam]]|
|[[Lam, Tsz Kin|AUTHOR Tsz Kin Lam]]|
|[[Lamba, Jatin|AUTHOR Jatin Lamba]]|
|[[Lamel, Lori|AUTHOR Lori Lamel]]|
|[[Lammert, Adam|AUTHOR Adam Lammert]]|
|[[Lan, Tian|AUTHOR Tian Lan]]|
|[[Łańcucki, Adrian|AUTHOR Adrian Łańcucki]]|
|[[Lange, Rael T.|AUTHOR Rael T. Lange]]|
|[[Lapidot, Itshak|AUTHOR Itshak Lapidot]]|
|[[Laprie, Yves|AUTHOR Yves Laprie]]|
|[[Laptev, Aleksandr|AUTHOR Aleksandr Laptev]]|
|[[Larcher, Anthony|AUTHOR Anthony Larcher]]|
|[[Lardy, Thomas|AUTHOR Thomas Lardy]]|
|[[Larrouy-Maestri, Pauline|AUTHOR Pauline Larrouy-Maestri]]|
|[[Laurent, Antoine|AUTHOR Antoine Laurent]]|
|[[Lavelle, Grace|AUTHOR Grace Lavelle]]|
|[[Lavrukhin, Vitaly|AUTHOR Vitaly Lavrukhin]]|
|[[Lawson, Jenna|AUTHOR Jenna Lawson]]|
|[[Lazaridis, Alexandros|AUTHOR Alexandros Lazaridis]]|
|[[Le, Duc|AUTHOR Duc Le]]|
|[[Le, Hang|AUTHOR Hang Le]]|
|[[Le, Xiaohuai|AUTHOR Xiaohuai Le]]|
|[[Lea, Colin|AUTHOR Colin Lea]]|
|[[Leal, Isabel|AUTHOR Isabel Leal]]|
|[[Lech, Margaret|AUTHOR Margaret Lech]]|
|[[Leclere, Justine|AUTHOR Justine Leclere]]|
|[[Lecouteux, Benjamin|AUTHOR Benjamin Lecouteux]]|
|[[Lee, Ann|AUTHOR Ann Lee]]|
|[[Lee, Bong-Jin|AUTHOR Bong-Jin Lee]]|
|[[Lee, Chi-Chun|AUTHOR Chi-Chun Lee]]|
|[[Lee, Chin-Hui|AUTHOR Chin-Hui Lee]]|
|[[Lee, Chul|AUTHOR Chul Lee]]|
|[[Lee, Chwee Beng|AUTHOR Chwee Beng Lee]]|
|[[Lee, Daniel|AUTHOR Daniel Lee]]|
|[[Lee, Donghyun|AUTHOR Donghyun Lee]]|
|[[Lee, Dongyub|AUTHOR Dongyub Lee]]|
|[[Lee, Gyeong-Hoon|AUTHOR Gyeong-Hoon Lee]]|
|[[Lee, Hoshik|AUTHOR Hoshik Lee]]|
|[[Lee, Hung-Shin|AUTHOR Hung-Shin Lee]]|
|[[Lee, Hung-yi|AUTHOR Hung-yi Lee]]|
|[[Lee, Hyunjae|AUTHOR Hyunjae Lee]]|
|[[Lee, Jaesong|AUTHOR Jaesong Lee]]|
|[[Lee, Jaeyun|AUTHOR Jaeyun Lee]]|
|[[Lee, Ji-Hyun|AUTHOR Ji-Hyun Lee]]|
|[[Lee, Jihyun|AUTHOR Jihyun Lee]]|
|[[Lee, Jinkyu|AUTHOR Jinkyu Lee]]|
|[[Lee, Junhyeok|AUTHOR Junhyeok Lee]]|
|[[Lee, Keon|AUTHOR Keon Lee]]|
|[[Lee, Kong Aik|AUTHOR Kong Aik Lee]]|
|[[Lee, Ko-tik|AUTHOR Ko-tik Lee]]|
|[[Lee, Lin-shan|AUTHOR Lin-shan Lee]]|
|[[Lee, Lukas|AUTHOR Lukas Lee]]|
|[[Lee, Minjae|AUTHOR Minjae Lee]]|
|[[Lee, Min-Ji|AUTHOR Min-Ji Lee]]|
|[[Lee, Min-Joong|AUTHOR Min-Joong Lee]]|
|[[Lee, Mun-Hak|AUTHOR Mun-Hak Lee]]|
|[[Lee, Sang-Hoon|AUTHOR Sang-Hoon Lee]]|
|[[Lee, Seong-Whan|AUTHOR Seong-Whan Lee]]|
|[[Lee, Soo-Young|AUTHOR Soo-Young Lee]]|
|[[Lee, Tan|AUTHOR Tan Lee]]|
|[[Leem, Seong-Gyun|AUTHOR Seong-Gyun Leem]]|
|[[Lefter, Iulia|AUTHOR Iulia Lefter]]|
|[[Leglaive, Simon|AUTHOR Simon Leglaive]]|
|[[Lehnen, Patrick|AUTHOR Patrick Lehnen]]|
|[[Lei, Ming|AUTHOR Ming Lei]]|
|[[Lei, Xiaoning|AUTHOR Xiaoning Lei]]|
|[[Lei, Xin|AUTHOR Xin Lei]]|
|[[Leightley, Daniel|AUTHOR Daniel Leightley]]|
|[[Le Moine, Clément|AUTHOR Clément Le Moine]]|
|[[Leong, Chi-Hang|AUTHOR Chi-Hang Leong]]|
|[[Leong, C.W.|AUTHOR C.W. Leong]]|
|[[Leow, Chee Siang|AUTHOR Chee Siang Leow]]|
|[[Le Roux, Jonathan|AUTHOR Jonathan Le Roux]]|
|[[Le-The, Quoc-Nam|AUTHOR Quoc-Nam Le-The]]|
|[[Leung, Tsun-Yat|AUTHOR Tsun-Yat Leung]]|
|[[Leung, Wai-Kim|AUTHOR Wai-Kim Leung]]|
|[[Levis, John|AUTHOR John Levis]]|
|[[Levitan, Sarah Ita|AUTHOR Sarah Ita Levitan]]|
|[[Levow, Gina-Anne|AUTHOR Gina-Anne Levow]]|
|[[Leykum, Hannah|AUTHOR Hannah Leykum]]|
|[[Li, Andong|AUTHOR Andong Li]]|
|[[Li, Bo|AUTHOR Bo Li]]|
|[[Li, Bohan|AUTHOR Bohan Li]]|
|[[Li, Chenda|AUTHOR Chenda Li]]|
|[[Li, Chengfei|AUTHOR Chengfei Li]]|
|[[Li, Guangzhi|AUTHOR Guangzhi Li]]|
|[[Li, Hai|AUTHOR Hai Li]]|
|[[Li, Haizhou|AUTHOR Haizhou Li]]|
|[[Li, Hang|AUTHOR Hang Li]]|
|[[Li, Hao|AUTHOR Hao Li]]|
|[[Li, Haoqi|AUTHOR Haoqi Li]]|
|[[Li, Huichun|AUTHOR Huichun Li]]|
|[[Li, Jiahang|AUTHOR Jiahang Li]]|
|[[Li, Jianchen|AUTHOR Jianchen Li]]|
|[[Li, Jinfeng|AUTHOR Jinfeng Li]]|
|[[Li, Jing|AUTHOR Jing Li]]|
|[[Li, Jingbei|AUTHOR Jingbei Li]]|
|[[Li, JingWei|AUTHOR JingWei Li]]|
|[[Li, Jingyu|AUTHOR Jingyu Li]]|
|[[Li, Jinyu|AUTHOR Jinyu Li]]|
|[[Li, Juan|AUTHOR Juan Li]]|
|[[Li, Juncheng|AUTHOR Juncheng Li]]|
|[[Li, Junfeng|AUTHOR Junfeng Li]]|
|[[Li, Junjie|AUTHOR Junjie Li]]|
|[[Li, Ke|AUTHOR Ke Li]]|
|[[Li, Kun|AUTHOR Kun Li]]|
|[[Li, Lei|AUTHOR Lei Li]]|
|[[Li, Li Erran|AUTHOR Li Erran Li]]|
|[[Li, Lin|AUTHOR Lin Li]]|
|[[Li, Meng|AUTHOR Meng Li]]|
|[[Li, Ming|AUTHOR Ming Li]]|
|[[Li, Ping|AUTHOR Ping Li]]|
|[[Li, Qiujia|AUTHOR Qiujia Li]]|
|[[Li, Ruichen|AUTHOR Ruichen Li]]|
|[[Li, Ruirui|AUTHOR Ruirui Li]]|
|[[Li, Sarah R.|AUTHOR Sarah R. Li]]|
|[[Li, Shang-Wen|AUTHOR Shang-Wen Li]]|
|[[Li, Sheng|AUTHOR Sheng Li]]|
|[[Li, Song|AUTHOR Song Li]]|
|[[Li, Tao|AUTHOR Tao Li]]|
|[[Li, Tingle|AUTHOR Tingle Li]]|
|[[Li, Wei|AUTHOR Wei Li]]|
|[[Li, Wubo|AUTHOR Wubo Li]]|
|[[Li, Xiang|AUTHOR Xiang Li]]|
|[[Li, Xiangang|AUTHOR Xiangang Li]]|
|[[Li, Xiangrui|AUTHOR Xiangrui Li]]|
|[[Li, Xiaodong|AUTHOR Xiaodong Li]]|
|[[Li, Xiaofei|AUTHOR Xiaofei Li]]|
|[[Li, Xiaoqi|AUTHOR Xiaoqi Li]]|
|[[Li, Xinhui|AUTHOR Xinhui Li]]|
|[[Li, Xinjian|AUTHOR Xinjian Li]]|
|[[Li, Xiyun|AUTHOR Xiyun Li]]|
|[[Li, Xu|AUTHOR Xu Li]]|
|[[Li, Xuewei|AUTHOR Xuewei Li]]|
|[[Li, Yaxing|AUTHOR Yaxing Li]]|
|[[Li, Yinghao Aaron|AUTHOR Yinghao Aaron Li]]|
|[[Li, Yuepeng|AUTHOR Yuepeng Li]]|
|[[Li, Zezhong|AUTHOR Zezhong Li]]|
|[[Li, Zheng|AUTHOR Zheng Li]]|
|[[Li, Zhengyang|AUTHOR Zhengyang Li]]|
|[[Li, Zhu|AUTHOR Zhu Li]]|
|[[Lian, Jiachen|AUTHOR Jiachen Lian]]|
|[[Lian, Jie|AUTHOR Jie Lian]]|
|[[Liang, Chengdong|AUTHOR Chengdong Liang]]|
|[[Liang, Qiao|AUTHOR Qiao Liang]]|
|[[Liang, Xia|AUTHOR Xia Liang]]|
|[[Liang, Xiaohui|AUTHOR Xiaohui Liang]]|
|[[Liao, Dexin|AUTHOR Dexin Liao]]|
|[[Liberatore, Christopher|AUTHOR Christopher Liberatore]]|
|[[Liberman, Mark|AUTHOR Mark Liberman]]|
|[[Liebling, Daniel J.|AUTHOR Daniel J. Liebling]]|
|[[Likhomanenko, Tatiana|AUTHOR Tatiana Likhomanenko]]|
|[[Lilley, Jason|AUTHOR Jason Lilley]]|
|[[Lillianfeld, Lisie|AUTHOR Lisie Lillianfeld]]|
|[[Lim, Dan|AUTHOR Dan Lim]]|
|[[Lin, Binghuai|AUTHOR Binghuai Lin]]|
|[[Lin, Gang-Xuan|AUTHOR Gang-Xuan Lin]]|
|[[Lin, Guan-Ting|AUTHOR Guan-Ting Lin]]|
|[[Lin, Haitao|AUTHOR Haitao Lin]]|
|[[Lin, Haopeng|AUTHOR Haopeng Lin]]|
|[[Lin, Hung-Pang|AUTHOR Hung-Pang Lin]]|
|[[Lin, Jheng-hao|AUTHOR Jheng-hao Lin]]|
|[[Lin, Ju|AUTHOR Ju Lin]]|
|[[Lin, Pingyuan|AUTHOR Pingyuan Lin]]|
|[[Lin, Qingjian|AUTHOR Qingjian Lin]]|
|[[Lin, Shilun|AUTHOR Shilun Lin]]|
|[[Lin, Shoufeng|AUTHOR Shoufeng Lin]]|
|[[Lin, Yist Y.|AUTHOR Yist Y. Lin]]|
|[[Lin, Yue|AUTHOR Yue Lin]]|
|[[Lin, Zhouhan|AUTHOR Zhouhan Lin]]|
|[[Ling, Hefei|AUTHOR Hefei Ling]]|
|[[Ling, Zhen-Hua|AUTHOR Zhen-Hua Ling]]|
|[[Linnhoff-Popien, Claudia|AUTHOR Claudia Linnhoff-Popien]]|
|[[Lippa, Sara M.|AUTHOR Sara M. Lippa]]|
|[[Liscombe, Jackson|AUTHOR Jackson Liscombe]]|
|[[Liss, Julie|AUTHOR Julie Liss]]|
|[[Liu, Alexander H.|AUTHOR Alexander H. Liu]]|
|[[Liu, Andy T.|AUTHOR Andy T. Liu]]|
|[[Liu, Bin|AUTHOR Bin Liu]]|
|[[Liu, Bing|AUTHOR Bing Liu]]|
|[[Liu, Ching-Feng|AUTHOR Ching-Feng Liu]]|
|[[Liu, Da-Rong|AUTHOR Da-Rong Liu]]|
|[[Liu, Haohe|AUTHOR Haohe Liu]]|
|[[Liu, Hexin|AUTHOR Hexin Liu]]|
|[[Liu, Jiawang|AUTHOR Jiawang Liu]]|
|[[Liu, Jiaxing|AUTHOR Jiaxing Liu]]|
|[[Liu, Jie|AUTHOR Jie Liu]]|
|[[Liu, Jing|AUTHOR Jing Liu]]|
|[[Liu, Jinglin|AUTHOR Jinglin Liu]]|
|[[Liu, Jinjiang|AUTHOR Jinjiang Liu]]|
|[[Liu, Kai|AUTHOR Kai Liu]]|
|[[Liu, Li|AUTHOR Li Liu]]|
|[[Liu, Li-Juan|AUTHOR Li-Juan Liu]]|
|[[Liu, Lin|AUTHOR Lin Liu]]|
|[[Liu, Meng|AUTHOR Meng Liu]]|
|[[Liu, Min|AUTHOR Min Liu]]|
|[[Liu, Qi|AUTHOR Qi Liu]]|
|[[Liu, Qiao|AUTHOR Qiao Liu]]|
|[[Liu, Qing-Feng|AUTHOR Qing-Feng Liu]]|
|[[Liu, Roger Cheng-yen|AUTHOR Roger Cheng-yen Liu]]|
|[[Liu, Rongliang|AUTHOR Rongliang Liu]]|
|[[Liu, Rui|AUTHOR Rui Liu]]|
|[[Liu, Shansong|AUTHOR Shansong Liu]]|
|[[Liu, Shichao|AUTHOR Shichao Liu]]|
|[[Liu, Shujie|AUTHOR Shujie Liu]]|
|[[Liu, Shuo|AUTHOR Shuo Liu]]|
|[[Liu, Songxiang|AUTHOR Songxiang Liu]]|
|[[Liu, Tie-Yan|AUTHOR Tie-Yan Liu]]|
|[[Liu, Wenzhe|AUTHOR Wenzhe Liu]]|
|[[Liu, Xiangyu|AUTHOR Xiangyu Liu]]|
|[[Liu, Xubo|AUTHOR Xubo Liu]]|
|[[Liu, Xuefei|AUTHOR Xuefei Liu]]|
|[[Liu, Xunying|AUTHOR Xunying Liu]]|
|[[Liu, Yang|AUTHOR Yang Liu]]|
|[[Liu, Yan|AUTHOR Yan Liu]]|
|[[Liu, Yanqing|AUTHOR Yanqing Liu]]|
|[[Liu, Yichen|AUTHOR Yichen Liu]]|
|[[Liu, Yi Chieh|AUTHOR Yi Chieh Liu]]|
|[[Liu, Yi-Fen|AUTHOR Yi-Fen Liu]]|
|[[Liu, Yufei|AUTHOR Yufei Liu]]|
|[[Liu, Yulan|AUTHOR Yulan Liu]]|
|[[Liu, Zhang|AUTHOR Zhang Liu]]|
|[[Liu, Zhengchen|AUTHOR Zhengchen Liu]]|
|[[Liu, Zhengxi|AUTHOR Zhengxi Liu]]|
|[[Liu, Zirui|AUTHOR Zirui Liu]]|
|[[Liu, Zitao|AUTHOR Zitao Liu]]|
|[[Livescu, Karen|AUTHOR Karen Livescu]]|
|[[Lleida, Eduardo|AUTHOR Eduardo Lleida]]|
|[[Lo, Justin J.H.|AUTHOR Justin J.H. Lo]]|
|[[Locqueville, Grégoire|AUTHOR Grégoire Locqueville]]|
|[[Lohrenz, Timo|AUTHOR Timo Lohrenz]]|
|[[Loide, Markus|AUTHOR Markus Loide]]|
|[[Long, Siqu|AUTHOR Siqu Long]]|
|[[Long, Yanhua|AUTHOR Yanhua Long]]|
|[[Loose, Zdeněk|AUTHOR Zdeněk Loose]]|
|[[Łopatka, Kuba|AUTHOR Kuba Łopatka]]|
|[[Lopes, José|AUTHOR José Lopes]]|
|[[Lopez Moreno, Ignacio|AUTHOR Ignacio Lopez Moreno]]|
|[[Lord, Catherine|AUTHOR Catherine Lord]]|
|[[Lorenzo-Trueba, Jaime|AUTHOR Jaime Lorenzo-Trueba]]|
|[[Lou, Xin|AUTHOR Xin Lou]]|
|[[Loweimi, Erfan|AUTHOR Erfan Loweimi]]|
|[[Lu, Chung-Li|AUTHOR Chung-Li Lu]]|
|[[Lu, Chun-Shien|AUTHOR Chun-Shien Lu]]|
|[[Lu, Han|AUTHOR Han Lu]]|
|[[Lu, Hui|AUTHOR Hui Lu]]|
|[[Lu, Jing|AUTHOR Jing Lu]]|
|[[Lu, Li|AUTHOR Li Lu]]|
|[[Lu, Liang|AUTHOR Liang Lu]]|
|[[Lu, Xugang|AUTHOR Xugang Lu]]|
|[[Lu, Yen-Ju|AUTHOR Yen-Ju Lu]]|
|[[Lu, Yiting|AUTHOR Yiting Lu]]|
|[[Lu, Yizhou|AUTHOR Yizhou Lu]]|
|[[Lu, Zhiyun|AUTHOR Zhiyun Lu]]|
|[[Luckenbaugh, Jarrod|AUTHOR Jarrod Luckenbaugh]]|
|[[Lüdtke, Ulrike|AUTHOR Ulrike Lüdtke]]|
|[[Ludusan, Bogdan|AUTHOR Bogdan Ludusan]]|
|[[Lulich, Steven M.|AUTHOR Steven M. Lulich]]|
|[[Lumban Tobing, Patrick|AUTHOR Patrick Lumban Tobing]]|
|[[Luo, Chong|AUTHOR Chong Luo]]|
|[[Luo, Hongyin|AUTHOR Hongyin Luo]]|
|[[Luo, Jian|AUTHOR Jian Luo]]|
|[[Luo, Liuping|AUTHOR Liuping Luo]]|
|[[Luo, Xiaoxue|AUTHOR Xiaoxue Luo]]|
|[[Luo, Yi|AUTHOR Yi Luo]]|
|[[Luo, Yiyu|AUTHOR Yiyu Luo]]|
|[[Luo, Zhaojie|AUTHOR Zhaojie Luo]]|
|[[Luong, Manh|AUTHOR Manh Luong]]|
|[[Lupea, David|AUTHOR David Lupea]]|
|[[Luu, Chau|AUTHOR Chau Luu]]|
|[[Luz, Saturnino|AUTHOR Saturnino Luz]]|
|[[Lv, Shubo|AUTHOR Shubo Lv]]|
|[[Lv, Zhiqiang|AUTHOR Zhiqiang Lv]]|
|[[Lyu, Kexin|AUTHOR Kexin Lyu]]|
|[[Lyu, Shao-ren|AUTHOR Shao-ren Lyu]]|
|[[Lyu, Yilan|AUTHOR Yilan Lyu]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[M., Gurunath Reddy|AUTHOR Gurunath Reddy M.]]|
|[[Ma, Guodong|AUTHOR Guodong Ma]]|
|[[Ma, Haoxin|AUTHOR Haoxin Ma]]|
|[[Ma, Jun|AUTHOR Jun Ma]]|
|[[Ma, Kaili|AUTHOR Kaili Ma]]|
|[[Ma, Long|AUTHOR Long Ma]]|
|[[Ma, Lu|AUTHOR Lu Ma]]|
|[[Ma, Mingbo|AUTHOR Mingbo Ma]]|
|[[Ma, Ning|AUTHOR Ning Ma]]|
|[[Ma, Pingchuan|AUTHOR Pingchuan Ma]]|
|[[Ma, Shiqian|AUTHOR Shiqian Ma]]|
|[[Ma, Yong|AUTHOR Yong Ma]]|
|[[Ma, Youxuan|AUTHOR Youxuan Ma]]|
|[[Ma, Zejun|AUTHOR Zejun Ma]]|
|[[Ma, Zhizhong|AUTHOR Zhizhong Ma]]|
|[[Maas, Roland|AUTHOR Roland Maas]]|
|[[MacDonald, Robert L.|AUTHOR Robert L. MacDonald]]|
|[[Macháček, Dominik|AUTHOR Dominik Macháček]]|
|[[Maciejewski, Matthew|AUTHOR Matthew Maciejewski]]|
|[[Macoskey, Jonathan|AUTHOR Jonathan Macoskey]]|
|[[MacWhinney, Brian|AUTHOR Brian MacWhinney]]|
|[[Madhavi, Maulik|AUTHOR Maulik Madhavi]]|
|[[Madikeri, Srikanth|AUTHOR Srikanth Madikeri]]|
|[[Madsen, Julia|AUTHOR Julia Madsen]]|
|[[Maekaku, Takashi|AUTHOR Takashi Maekaku]]|
|[[Magimai-Doss, Mathew|AUTHOR Mathew Magimai-Doss]]|
|[[Mahadeokar, Jay|AUTHOR Jay Mahadeokar]]|
|[[Mahoor, Mohammad H.|AUTHOR Mohammad H. Mahoor]]|
|[[Mahr, Tristan J.|AUTHOR Tristan J. Mahr]]|
|[[Maier, Andreas|AUTHOR Andreas Maier]]|
|[[Mainar, Pablo|AUTHOR Pablo Mainar]]|
|[[Majumdar, Somshubra|AUTHOR Somshubra Majumdar]]|
|[[Mak, Man-Wai|AUTHOR Man-Wai Mak]]|
|[[Makarov, Rostislav|AUTHOR Rostislav Makarov]]|
|[[Makino, Shoji|AUTHOR Shoji Makino]]|
|[[Makishima, Naoki|AUTHOR Naoki Makishima]]|
|[[Malek, Jiri|AUTHOR Jiri Malek]]|
|[[Malik, John|AUTHOR John Malik]]|
|[[Mallela, Jhansi|AUTHOR Jhansi Mallela]]|
|[[Mallidi, Sri Harish|AUTHOR Sri Harish Mallidi]]|
|[[Mallol-Ragolta, Adria|AUTHOR Adria Mallol-Ragolta]]|
|[[Mamontov, Danila|AUTHOR Danila Mamontov]]|
|[[Mane, Vishal|AUTHOR Vishal Mane]]|
|[[Maniati, Georgia|AUTHOR Georgia Maniati]]|
|[[Manocha, Dinesh|AUTHOR Dinesh Manocha]]|
|[[Manohar, N.|AUTHOR N. Manohar]]|
|[[Manohara, Krishnamoorthy|AUTHOR Krishnamoorthy Manohara]]|
|[[Mansbach, Noa|AUTHOR Noa Mansbach]]|
|[[Mansfield, Courtney|AUTHOR Courtney Mansfield]]|
|[[Mao, Hongda|AUTHOR Hongda Mao]]|
|[[Marcinek, Ĺuboš|AUTHOR Ĺuboš Marcinek]]|
|[[Markó, Alexandra|AUTHOR Alexandra Markó]]|
|[[Markopoulos, Konstantinos|AUTHOR Konstantinos Markopoulos]]|
|[[Markuson, Sara|AUTHOR Sara Markuson]]|
|[[Marlo, Michael R.|AUTHOR Michael R. Marlo]]|
|[[Maron, Haggai|AUTHOR Haggai Maron]]|
|[[Marras, Mirko|AUTHOR Mirko Marras]]|
|[[Martin, Rainer|AUTHOR Rainer Martin]]|
|[[Martin, Vincent P.|AUTHOR Vincent P. Martin]]|
|[[Martucci, Giuseppe|AUTHOR Giuseppe Martucci]]|
|[[Marxer, Ricard|AUTHOR Ricard Marxer]]|
|[[Masaki, Shungo|AUTHOR Shungo Masaki]]|
|[[Mascolo, Cecilia|AUTHOR Cecilia Mascolo]]|
|[[Maskell, Simon|AUTHOR Simon Maskell]]|
|[[Mast, T. Douglas|AUTHOR T. Douglas Mast]]|
|[[Masumura, Ryo|AUTHOR Ryo Masumura]]|
|[[Matassoni, Marco|AUTHOR Marco Matassoni]]|
|[[Matcham, Faith|AUTHOR Faith Matcham]]|
|[[Mateju, Lukas|AUTHOR Lukas Mateju]]|
|[[Mathad, Vikram C.|AUTHOR Vikram C. Mathad]]|
|[[Mathews, Rajiv|AUTHOR Rajiv Mathews]]|
|[[Matoušek, Jindřich|AUTHOR Jindřich Matoušek]]|
|[[Matrouf, Driss|AUTHOR Driss Matrouf]]|
|[[Matsui, Toshie|AUTHOR Toshie Matsui]]|
|[[Matthews, Iain|AUTHOR Iain Matthews]]|
|[[Mau, Ted|AUTHOR Ted Mau]]|
|[[Mavandadi, Sepand|AUTHOR Sepand Mavandadi]]|
|[[Mazumder, Mark|AUTHOR Mark Mazumder]]|
|[[McCree, Alan|AUTHOR Alan McCree]]|
|[[McDermott, Josh H.|AUTHOR Josh H. McDermott]]|
|[[McGowan, Ross|AUTHOR Ross McGowan]]|
|[[McGraw, Ian|AUTHOR Ian McGraw]]|
|[[McKean, Cristina|AUTHOR Cristina McKean]]|
|[[McKenna, Joseph P.|AUTHOR Joseph P. McKenna]]|
|[[McLoughlin, Ian|AUTHOR Ian McLoughlin]]|
|[[McNamara, Quinten|AUTHOR Quinten McNamara]]|
|[[Mdhaffar, Salima|AUTHOR Salima Mdhaffar]]|
|[[Md. Salleh, Siti Umairah|AUTHOR Siti Umairah Md. Salleh]]|
|[[Medda, Giacomo|AUTHOR Giacomo Medda]]|
|[[Medennikov, Ivan|AUTHOR Ivan Medennikov]]|
|[[Medina, Salvador|AUTHOR Salvador Medina]]|
|[[Meewis, Floor|AUTHOR Floor Meewis]]|
|[[Meghawat, Aaksha|AUTHOR Aaksha Meghawat]]|
|[[Mehmood, Hadi|AUTHOR Hadi Mehmood]]|
|[[Mehrish, Ambuj|AUTHOR Ambuj Mehrish]]|
|[[Mehta, Rupesh R.|AUTHOR Rupesh R. Mehta]]|
|[[Meignier, Sylvain|AUTHOR Sylvain Meignier]]|
|[[Meissner, Wassilios G.|AUTHOR Wassilios G. Meissner]]|
|[[Meister, Einar|AUTHOR Einar Meister]]|
|[[Meister, Lya|AUTHOR Lya Meister]]|
|[[Méli, Adrien|AUTHOR Adrien Méli]]|
|[[Meloni, Giacomo|AUTHOR Giacomo Meloni]]|
|[[Meng, Hao|AUTHOR Hao Meng]]|
|[[Meng, Helen|AUTHOR Helen Meng]]|
|[[Meng, Li|AUTHOR Li Meng]]|
|[[Meng, Yixiong|AUTHOR Yixiong Meng]]|
|[[Meng, Zhong|AUTHOR Zhong Meng]]|
|[[Menshikova, Alla|AUTHOR Alla Menshikova]]|
|[[Merboldt, André|AUTHOR André Merboldt]]|
|[[Merkx, Danny|AUTHOR Danny Merkx]]|
|[[Mertes, Silvan|AUTHOR Silvan Mertes]]|
|[[Mesgarani, Nima|AUTHOR Nima Mesgarani]]|
|[[Meshgi, Kourosh|AUTHOR Kourosh Meshgi]]|
|[[Metze, Florian|AUTHOR Florian Metze]]|
|[[Meunier, Christine|AUTHOR Christine Meunier]]|
|[[Meyer, Josh|AUTHOR Josh Meyer]]|
|[[Miao, Chenfeng|AUTHOR Chenfeng Miao]]|
|[[Miao, Xiaoxiao|AUTHOR Xiaoxiao Miao]]|
|[[Michael, Thilo|AUTHOR Thilo Michael]]|
|[[Michel, Wilfried|AUTHOR Wilfried Michel]]|
|[[Miguel, Antonio|AUTHOR Antonio Miguel]]|
|[[Mikolajczyk, Krystian|AUTHOR Krystian Mikolajczyk]]|
|[[Mikolov, Tomáš|AUTHOR Tomáš Mikolov]]|
|[[Milanović, Ljubomir|AUTHOR Ljubomir Milanović]]|
|[[Milde, Benjamin|AUTHOR Benjamin Milde]]|
|[[Milhé de Saint Victor, Solange|AUTHOR Solange Milhé de Saint Victor]]|
|[[Milling, Manuel|AUTHOR Manuel Milling]]|
|[[Millman, Rebecca|AUTHOR Rebecca Millman]]|
|[[Minematsu, Nobuaki|AUTHOR Nobuaki Minematsu]]|
|[[Miner, Jennifer Ann|AUTHOR Jennifer Ann Miner]]|
|[[Mingote, Victoria|AUTHOR Victoria Mingote]]|
|[[Minker, Wolfgang|AUTHOR Wolfgang Minker]]|
|[[Minkin, Fedor|AUTHOR Fedor Minkin]]|
|[[Mira, Rodrigo|AUTHOR Rodrigo Mira]]|
|[[Mirheidari, Bahman|AUTHOR Bahman Mirheidari]]|
|[[Mirzaei, Maryam Sadat|AUTHOR Maryam Sadat Mirzaei]]|
|[[Mirzakhmetov, Almas|AUTHOR Almas Mirzakhmetov]]|
|[[Misra, A.|AUTHOR A. Misra]]|
|[[Misra, Ananya|AUTHOR Ananya Misra]]|
|[[Mitra, Vikramjit|AUTHOR Vikramjit Mitra]]|
|[[Mitrofanov, Anton|AUTHOR Anton Mitrofanov]]|
|[[Mittag, Gabriel|AUTHOR Gabriel Mittag]]|
|[[Mittal, Ashish|AUTHOR Ashish Mittal]]|
|[[Mittal, Deepak|AUTHOR Deepak Mittal]]|
|[[Miyazaki, Koichi|AUTHOR Koichi Miyazaki]]|
|[[Mizumachi, Mitsunori|AUTHOR Mitsunori Mizumachi]]|
|[[Mizuta, Kazuki|AUTHOR Kazuki Mizuta]]|
|[[Möbius, Bernd|AUTHOR Bernd Möbius]]|
|[[Moell, Birger|AUTHOR Birger Moell]]|
|[[Mohamed, Abdelrahman|AUTHOR Abdelrahman Mohamed]]|
|[[Mohamed, Thahir|AUTHOR Thahir Mohamed]]|
|[[Mohammadamini, Mohammad|AUTHOR Mohammad Mohammadamini]]|
|[[Mohan, Devang S. Ram|AUTHOR Devang S. Ram Mohan]]|
|[[Mohri, Mehryar|AUTHOR Mehryar Mohri]]|
|[[Moinet, Alexis|AUTHOR Alexis Moinet]]|
|[[Mok, Peggy|AUTHOR Peggy Mok]]|
|[[Mokhosi, Refuoe|AUTHOR Refuoe Mokhosi]]|
|[[Möller, Sebastian|AUTHOR Sebastian Möller]]|
|[[Moniz, Joel Ruben Antony|AUTHOR Joel Ruben Antony Moniz]]|
|[[Moore, Roger K.|AUTHOR Roger K. Moore]]|
|[[Morais, Edmilson|AUTHOR Edmilson Morais]]|
|[[Mordido, Gonçalo|AUTHOR Gonçalo Mordido]]|
|[[Moreno, Pedro J.|AUTHOR Pedro J. Moreno]]|
|[[Morfi, Veronica|AUTHOR Veronica Morfi]]|
|[[Morgenshtern, Veniamin I.|AUTHOR Veniamin I. Morgenshtern]]|
|[[Mori, Hiroki|AUTHOR Hiroki Mori]]|
|[[Morimoto, Yoshiya|AUTHOR Yoshiya Morimoto]]|
|[[Morise, Masanori|AUTHOR Masanori Morise]]|
|[[Moritz, Niko|AUTHOR Niko Moritz]]|
|[[Moriya, Takafumi|AUTHOR Takafumi Moriya]]|
|[[Moro-Velázquez, Laureano|AUTHOR Laureano Moro-Velázquez]]|
|[[Morris, Ethan|AUTHOR Ethan Morris]]|
|[[Morse-Kopp, Daniela|AUTHOR Daniela Morse-Kopp]]|
|[[Mortensen, David R.|AUTHOR David R. Mortensen]]|
|[[Mosbach, Marius|AUTHOR Marius Mosbach]]|
|[[Motlicek, Petr|AUTHOR Petr Motlicek]]|
|[[Mouchtaris, Athanasios|AUTHOR Athanasios Mouchtaris]]|
|[[Mower Provost, Emily|AUTHOR Emily Mower Provost]]|
|[[Mu, Tingting|AUTHOR Tingting Mu]]|
|[[Mueller, Markus|AUTHOR Markus Mueller]]|
|[[Muguli, Ananya|AUTHOR Ananya Muguli]]|
|[[Muhlack, Beeke|AUTHOR Beeke Muhlack]]|
|[[Mukherjee, Sankar|AUTHOR Sankar Mukherjee]]|
|[[Mukhopadhyay, Rudrabha|AUTHOR Rudrabha Mukhopadhyay]]|
|[[Mulder, Kimberley|AUTHOR Kimberley Mulder]]|
|[[Mulimani, Manjunath|AUTHOR Manjunath Mulimani]]|
|[[Müller, Markus|AUTHOR Markus Müller]]|
|[[Müller, Nicolas Michael|AUTHOR Nicolas Michael Müller]]|
|[[Müller, Robert|AUTHOR Robert Müller]]|
|[[Mumtaz, Benazir|AUTHOR Benazir Mumtaz]]|
|[[Muniyappa, Thejaswi|AUTHOR Thejaswi Muniyappa]]|
|[[Munkhdalai, Tsendsuren|AUTHOR Tsendsuren Munkhdalai]]|
|[[Murad, Noah B.|AUTHOR Noah B. Murad]]|
|[[Muralidharan, Deepak|AUTHOR Deepak Muralidharan]]|
|[[Murauskas, Šarūnas|AUTHOR Šarūnas Murauskas]]|
|[[Murthy, Aditya|AUTHOR Aditya Murthy]]|
|[[Murthy, Hema A.|AUTHOR Hema A. Murthy]]|
|[[Murty, K. Sri Rama|AUTHOR K. Sri Rama Murty]]|
|[[Mussakhojayeva, Saida|AUTHOR Saida Mussakhojayeva]]|
|[[Muxika, Oihane|AUTHOR Oihane Muxika]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[N., Krishna D.|AUTHOR Krishna D. N.]]|
|[[Na, Hwidong|AUTHOR Hwidong Na]]|
|[[Na, Yueyue|AUTHOR Yueyue Na]]|
|[[Nachman, Lama|AUTHOR Lama Nachman]]|
|[[Nachmani, Eliya|AUTHOR Eliya Nachmani]]|
|[[Naderi, Babak|AUTHOR Babak Naderi]]|
|[[Nagamatsu, Kenji|AUTHOR Kenji Nagamatsu]]|
|[[Nagano, Mizuki|AUTHOR Mizuki Nagano]]|
|[[Nagaraja, Varun|AUTHOR Varun Nagaraja]]|
|[[Naijo, Satsuki|AUTHOR Satsuki Naijo]]|
|[[Nair, Arun|AUTHOR Arun Nair]]|
|[[Nakadai, Kazuhiro|AUTHOR Kazuhiro Nakadai]]|
|[[Nakagome, Yu|AUTHOR Yu Nakagome]]|
|[[Nakamura, Satoshi|AUTHOR Satoshi Nakamura]]|
|[[Nakamura, Taiki|AUTHOR Taiki Nakamura]]|
|[[Nakano, Kaito|AUTHOR Kaito Nakano]]|
|[[Nakano, Teppei|AUTHOR Teppei Nakano]]|
|[[Nakatani, Tomohiro|AUTHOR Tomohiro Nakatani]]|
|[[Nakayama, Takahiro|AUTHOR Takahiro Nakayama]]|
|[[Nalamalapu, Anil Kumar|AUTHOR Anil Kumar Nalamalapu]]|
|[[Nam, Gyuhyeon|AUTHOR Gyuhyeon Nam]]|
|[[Namazifar, Mahdi|AUTHOR Mahdi Namazifar]]|
|[[Namboodiri, Vinay|AUTHOR Vinay Namboodiri]]|
|[[Namyalo, Saudah|AUTHOR Saudah Namyalo]]|
|[[Nanavati, Jai|AUTHOR Jai Nanavati]]|
|[[Nanavati, Raoul|AUTHOR Raoul Nanavati]]|
|[[Nanda, Viral|AUTHOR Viral Nanda]]|
|[[Nandi, Ritika|AUTHOR Ritika Nandi]]|
|[[Naowarat, Burin|AUTHOR Burin Naowarat]]|
|[[Narang, Ankur|AUTHOR Ankur Narang]]|
|[[Narayan, Vaibhav A.|AUTHOR Vaibhav A. Narayan]]|
|[[Narayanan, Arun|AUTHOR Arun Narayanan]]|
|[[Narayanan, Shrikanth S.|AUTHOR Shrikanth S. Narayanan]]|
|[[Narayanan Sundararaman, Mukuntha|AUTHOR Mukuntha Narayanan Sundararaman]]|
|[[Narayanaswamy, Vivek Sivaraman|AUTHOR Vivek Sivaraman Narayanaswamy]]|
|[[Nareddula, Santhan Kumar Reddy|AUTHOR Santhan Kumar Reddy Nareddula]]|
|[[Narisetty, Chaitanya|AUTHOR Chaitanya Narisetty]]|
|[[Nasir, Md.|AUTHOR Md. Nasir]]|
|[[Nasreen, Shamila|AUTHOR Shamila Nasreen]]|
|[[Nataraj, K.S.|AUTHOR K.S. Nataraj]]|
|[[Natarajan, S.|AUTHOR S. Natarajan]]|
|[[Nathwani, Karan|AUTHOR Karan Nathwani]]|
|[[Nautsch, Andreas|AUTHOR Andreas Nautsch]]|
|[[Navar, Indu|AUTHOR Indu Navar]]|
|[[Nayem, Khandokar Md.|AUTHOR Khandokar Md. Nayem]]|
|[[Naylor, Graham|AUTHOR Graham Naylor]]|
|[[Nechanský, Tomáš|AUTHOR Tomáš Nechanský]]|
|[[Negri, Matteo|AUTHOR Matteo Negri]]|
|[[Neiterman, Evgeny Hershkovitch|AUTHOR Evgeny Hershkovitch Neiterman]]|
|[[Nelson, Philip C.|AUTHOR Philip C. Nelson]]|
|[[Nelus, Alexandru|AUTHOR Alexandru Nelus]]|
|[[Németh, Géza|AUTHOR Géza Németh]]|
|[[Ness, Steven R.|AUTHOR Steven R. Ness]]|
|[[Nessler, Natalia|AUTHOR Natalia Nessler]]|
|[[Neubig, Graham|AUTHOR Graham Neubig]]|
|[[Neumann, Michael|AUTHOR Michael Neumann]]|
|[[Ney, Hermann|AUTHOR Hermann Ney]]|
|[[Ng, Cymie Wing-Yee|AUTHOR Cymie Wing-Yee Ng]]|
|[[Ng, Edwin G.|AUTHOR Edwin G. Ng]]|
|[[Ng, Sara|AUTHOR Sara Ng]]|
|[[Ng, Si-Ioi|AUTHOR Si-Ioi Ng]]|
|[[Nguyen, Dat Quoc|AUTHOR Dat Quoc Nguyen]]|
|[[Nguyen, Ha|AUTHOR Ha Nguyen]]|
|[[Nguyen, Hoang H.|AUTHOR Hoang H. Nguyen]]|
|[[Nguyen, Hoang Long|AUTHOR Hoang Long Nguyen]]|
|[[Nguyen, Huu-Kim|AUTHOR Huu-Kim Nguyen]]|
|[[Nguyen, Huyen|AUTHOR Huyen Nguyen]]|
|[[Nguyen, Manh Hung|AUTHOR Manh Hung Nguyen]]|
|[[Nguyen, Thai-Son|AUTHOR Thai-Son Nguyen]]|
|[[Nguyen, Tuan-Nam|AUTHOR Tuan-Nam Nguyen]]|
|[[Nguyen, Tu Anh|AUTHOR Tu Anh Nguyen]]|
|[[Nguyen, Viet-Nhat|AUTHOR Viet-Nhat Nguyen]]|
|[[Ni, Hao|AUTHOR Hao Ni]]|
|[[Ni, Junrui|AUTHOR Junrui Ni]]|
|[[Ni, Shikang|AUTHOR Shikang Ni]]|
|[[Nicmanis, Dāvis|AUTHOR Dāvis Nicmanis]]|
|[[Nicolson, Aaron|AUTHOR Aaron Nicolson]]|
|[[Nie, Yuting|AUTHOR Yuting Nie]]|
|[[Niedermeier, Andreas|AUTHOR Andreas Niedermeier]]|
|[[Nielsen, Jesper Kjær|AUTHOR Jesper Kjær Nielsen]]|
|[[Nigmatulina, Iuliia|AUTHOR Iuliia Nigmatulina]]|
|[[Nikolaidou, Konstantina|AUTHOR Konstantina Nikolaidou]]|
|[[Nishida, Kenji|AUTHOR Kenji Nishida]]|
|[[Nishizaki, Hiromitsu|AUTHOR Hiromitsu Nishizaki]]|
|[[Niu, Mingyue|AUTHOR Mingyue Niu]]|
|[[Niu, Shu-Tong|AUTHOR Shu-Tong Niu]]|
|[[Noé, Paul-Gauthier|AUTHOR Paul-Gauthier Noé]]|
|[[Nonaka, Yuto|AUTHOR Yuto Nonaka]]|
|[[Norel, Raquel|AUTHOR Raquel Norel]]|
|[[Noroozi, Vahid|AUTHOR Vahid Noroozi]]|
|[[Norouzi, Mohammad|AUTHOR Mohammad Norouzi]]|
|[[Nortje, Leanne|AUTHOR Leanne Nortje]]|
|[[Nose, Takashi|AUTHOR Takashi Nose]]|
|[[Nöth, Elmar|AUTHOR Elmar Nöth]]|
|[[Novak, Josef|AUTHOR Josef Novak]]|
|[[Novikova, Jekaterina|AUTHOR Jekaterina Novikova]]|
|[[Novitasari, Sashi|AUTHOR Sashi Novitasari]]|
|[[Novoselov, Sergey|AUTHOR Sergey Novoselov]]|
|[[Novotney, Scott|AUTHOR Scott Novotney]]|
|[[Novotný, Ondřej|AUTHOR Ondřej Novotný]]|
|[[Nozaki, Jumon|AUTHOR Jumon Nozaki]]|
|[[Nozaki, Kazunori|AUTHOR Kazunori Nozaki]]|
|[[Nugraha, Aditya Arie|AUTHOR Aditya Arie Nugraha]]|
|[[Nwogu, Ifeoma|AUTHOR Ifeoma Nwogu]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Oard, Douglas W.|AUTHOR Douglas W. Oard]]|
|[[Obin, Nicolas|AUTHOR Nicolas Obin]]|
|[[O’Brien, Benjamin|AUTHOR Benjamin O’Brien]]|
|[[Obyat, Abdelrahman|AUTHOR Abdelrahman Obyat]]|
|[[Ochiai, Tsubasa|AUTHOR Tsubasa Ochiai]]|
|[[O’Connor, Mark|AUTHOR Mark O’Connor]]|
|[[Odobez, Jean-Marc|AUTHOR Jean-Marc Odobez]]|
|[[Oetzmann, Carolin|AUTHOR Carolin Oetzmann]]|
|[[Ogawa, Atsunori|AUTHOR Atsunori Ogawa]]|
|[[Ogawa, Tetsuji|AUTHOR Tetsuji Ogawa]]|
|[[Oh, Miran|AUTHOR Miran Oh]]|
|[[Oh, Yoo Rhee|AUTHOR Yoo Rhee Oh]]|
|[[Ohmura, Hidefumi|AUTHOR Hidefumi Ohmura]]|
|[[Ohneiser, Oliver|AUTHOR Oliver Ohneiser]]|
|[[Ohta, Mayumi|AUTHOR Mayumi Ohta]]|
|[[Okamura, Daiki|AUTHOR Daiki Okamura]]|
|[[Okhotnikov, Anton|AUTHOR Anton Okhotnikov]]|
|[[Olaleye, Kayode|AUTHOR Kayode Olaleye]]|
|[[Oliveira, Frederico Santos de|AUTHOR Frederico Santos de Oliveira]]|
|[[Ollerenshaw, Anna|AUTHOR Anna Ollerenshaw]]|
|[[Omachi, Motoi|AUTHOR Motoi Omachi]]|
|[[O’Malley, Ronan|AUTHOR Ronan O’Malley]]|
|[[Omologo, Maurizio|AUTHOR Maurizio Omologo]]|
|[[Omote, Masanori|AUTHOR Masanori Omote]]|
|[[Oncescu, Andreea-Maria|AUTHOR Andreea-Maria Oncescu]]|
|[[O’Neill, Patrick K.|AUTHOR Patrick K. O’Neill]]|
|[[Önen, Melek|AUTHOR Melek Önen]]|
|[[Onnela, Jukka-Pekka|AUTHOR Jukka-Pekka Onnela]]|
|[[Oostermeijer, Koen|AUTHOR Koen Oostermeijer]]|
|[[Opala, Mateusz|AUTHOR Mateusz Opala]]|
|[[Orihashi, Shota|AUTHOR Shota Orihashi]]|
|[[Orozco-Arroyave, J.R.|AUTHOR J.R. Orozco-Arroyave]]|
|[[Ortega, Alfonso|AUTHOR Alfonso Ortega]]|
|[[Ostapenko, Alissa|AUTHOR Alissa Ostapenko]]|
|[[Ostendorf, Mari|AUTHOR Mari Ostendorf]]|
|[[Ostermann, Jörn|AUTHOR Jörn Ostermann]]|
|[[Ottl, Sandra|AUTHOR Sandra Ottl]]|
|[[Ou, Wenjie|AUTHOR Wenjie Ou]]|
|[[Ou, Zhijian|AUTHOR Zhijian Ou]]|
|[[Ouyang, Beibei|AUTHOR Beibei Ouyang]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Padfield, Dirk|AUTHOR Dirk Padfield]]|
|[[Pahlavan Nodeh, Maryam|AUTHOR Maryam Pahlavan Nodeh]]|
|[[Palakapilly, Joseph|AUTHOR Joseph Palakapilly]]|
|[[Palaskar, Shruti|AUTHOR Shruti Palaskar]]|
|[[Palaz, Dimitri|AUTHOR Dimitri Palaz]]|
|[[Palekar, Ashwini|AUTHOR Ashwini Palekar]]|
|[[Palmer, Ian|AUTHOR Ian Palmer]]|
|[[Pan, Changjie|AUTHOR Changjie Pan]]|
|[[Pan, Ho-hsien|AUTHOR Ho-hsien Pan]]|
|[[Pan, Jing|AUTHOR Jing Pan]]|
|[[Pan, Jingjing|AUTHOR Jingjing Pan]]|
|[[Pan, Shifeng|AUTHOR Shifeng Pan]]|
|[[Pan, Yilin|AUTHOR Yilin Pan]]|
|[[Pan, Yiqian|AUTHOR Yiqian Pan]]|
|[[Panariello, Michele|AUTHOR Michele Panariello]]|
|[[Panda, Rameswar|AUTHOR Rameswar Panda]]|
|[[Pandey, Abhishek|AUTHOR Abhishek Pandey]]|
|[[Pandey, Prem C.|AUTHOR Prem C. Pandey]]|
|[[Pang, Ruoming|AUTHOR Ruoming Pang]]|
|[[Pantazis, Yannis|AUTHOR Yannis Pantazis]]|
|[[Pantic, Maja|AUTHOR Maja Pantic]]|
|[[Papadourakis, Vasileios|AUTHOR Vasileios Papadourakis]]|
|[[Pappagari, Raghavendra|AUTHOR Raghavendra Pappagari]]|
|[[Parada-Cabaleiro, Emilia|AUTHOR Emilia Parada-Cabaleiro]]|
|[[Paraskevopoulos, Georgios|AUTHOR Georgios Paraskevopoulos]]|
|[[Parcollet, Titouan|AUTHOR Titouan Parcollet]]|
|[[Parida, Shantipriya|AUTHOR Shantipriya Parida]]|
|[[Park, Byeongseon|AUTHOR Byeongseon Park]]|
|[[Park, Hyoungmin|AUTHOR Hyoungmin Park]]|
|[[Park, Hyun-Jin|AUTHOR Hyun-Jin Park]]|
|[[Park, Kiyoung|AUTHOR Kiyoung Park]]|
|[[Park, Kyumin|AUTHOR Kyumin Park]]|
|[[Park, Sung-Un|AUTHOR Sung-Un Park]]|
|[[Park, Yong-Hwa|AUTHOR Yong-Hwa Park]]|
|[[Park, Youngcheol|AUTHOR Youngcheol Park]]|
|[[Park, Youngo|AUTHOR Youngo Park]]|
|[[Parnamaa, Tanel|AUTHOR Tanel Parnamaa]]|
|[[Parrell, Benjamin|AUTHOR Benjamin Parrell]]|
|[[Parry, Jack|AUTHOR Jack Parry]]|
|[[Patel, Rita R.|AUTHOR Rita R. Patel]]|
|[[Patel, Shwetak|AUTHOR Shwetak Patel]]|
|[[Pathak, Sheetal K.|AUTHOR Sheetal K. Pathak]]|
|[[Patil, Hemant A.|AUTHOR Hemant A. Patil]]|
|[[Patil, Vaishali|AUTHOR Vaishali Patil]]|
|[[Patino, Jose|AUTHOR Jose Patino]]|
|[[Patrocinio, Danilo|AUTHOR Danilo Patrocinio]]|
|[[Pattee, Gary L.|AUTHOR Gary L. Pattee]]|
|[[Paul, Dipjyoti|AUTHOR Dipjyoti Paul]]|
|[[Paul, Soumava|AUTHOR Soumava Paul]]|
|[[Paulus, Jouni|AUTHOR Jouni Paulus]]|
|[[Pautler, David|AUTHOR David Pautler]]|
|[[Pavy-Le Traon, Anne|AUTHOR Anne Pavy-Le Traon]]|
|[[Pede, Silvia|AUTHOR Silvia Pede]]|
|[[Peinado, Antonio M.|AUTHOR Antonio M. Peinado]]|
|[[Peiró-Lilja, Alex|AUTHOR Alex Peiró-Lilja]]|
|[[Peirolo, Morgane|AUTHOR Morgane Peirolo]]|
|[[Pelecanos, Jason|AUTHOR Jason Pelecanos]]|
|[[Pelemans, Joris|AUTHOR Joris Pelemans]]|
|[[Pellegrini, Thomas|AUTHOR Thomas Pellegrini]]|
|[[Peñarrubia-María, Maria Teresa|AUTHOR Maria Teresa Peñarrubia-María]]|
|[[Peng, Baolin|AUTHOR Baolin Peng]]|
|[[Peng, Junyi|AUTHOR Junyi Peng]]|
|[[Peng, Linkai|AUTHOR Linkai Peng]]|
|[[Peng, Renhua|AUTHOR Renhua Peng]]|
|[[Peng, Wei|AUTHOR Wei Peng]]|
|[[Peng, Yiyuan|AUTHOR Yiyuan Peng]]|
|[[Peng, Yizhou|AUTHOR Yizhou Peng]]|
|[[Peng, Yu-Huai|AUTHOR Yu-Huai Peng]]|
|[[Peng, Zhendong|AUTHOR Zhendong Peng]]|
|[[Peng, Zhiyuan|AUTHOR Zhiyuan Peng]]|
|[[Penney, Joshua|AUTHOR Joshua Penney]]|
|[[Peperkamp, Sharon|AUTHOR Sharon Peperkamp]]|
|[[Pepino, Leonardo|AUTHOR Leonardo Pepino]]|
|[[Peplinski, Jacob|AUTHOR Jacob Peplinski]]|
|[[Perepu, Satheesh Kumar|AUTHOR Satheesh Kumar Perepu]]|
|[[Perez, Matthew|AUTHOR Matthew Perez]]|
|[[Pérez-González-de-Martos, Alejandro|AUTHOR Alejandro Pérez-González-de-Martos]]|
|[[Pérez-Toro, P.A.|AUTHOR P.A. Pérez-Toro]]|
|[[Pérez Zarazaga, Pablo|AUTHOR Pablo Pérez Zarazaga]]|
|[[Pernkopf, Franz|AUTHOR Franz Pernkopf]]|
|[[Perrotin, Olivier|AUTHOR Olivier Perrotin]]|
|[[Peterson, Kay|AUTHOR Kay Peterson]]|
|[[Petridis, Stavros|AUTHOR Stavros Petridis]]|
|[[Peyser, Cal|AUTHOR Cal Peyser]]|
|[[Pfeifenberger, Lukas|AUTHOR Lukas Pfeifenberger]]|
|[[Pham, Ngoc-Quan|AUTHOR Ngoc-Quan Pham]]|
|[[Pham, Van Tung|AUTHOR Van Tung Pham]]|
|[[Philip, Pierre|AUTHOR Pierre Philip]]|
|[[Phung, My|AUTHOR My Phung]]|
|[[Picheny, Michael|AUTHOR Michael Picheny]]|
|[[Picone, Jordan|AUTHOR Jordan Picone]]|
|[[Pilati, Laurent|AUTHOR Laurent Pilati]]|
|[[Pillot-Loiseau, Claire|AUTHOR Claire Pillot-Loiseau]]|
|[[Pino, Juan|AUTHOR Juan Pino]]|
|[[Pinquier, Julien|AUTHOR Julien Pinquier]]|
|[[Pinto, Lancelot|AUTHOR Lancelot Pinto]]|
|[[Pirogova, Elena|AUTHOR Elena Pirogova]]|
|[[Pitkow, Xaq|AUTHOR Xaq Pitkow]]|
|[[Plakal, Manoj|AUTHOR Manoj Plakal]]|
|[[Plantinga, Peter|AUTHOR Peter Plantinga]]|
|[[Plaut, Florian|AUTHOR Florian Plaut]]|
|[[Podluzhny, Ivan|AUTHOR Ivan Podluzhny]]|
|[[Pokorny, Florian B.|AUTHOR Florian B. Pokorny]]|
|[[Polyak, Adam|AUTHOR Adam Polyak]]|
|[[Ponti, Moacir Antonelli|AUTHOR Moacir Antonelli Ponti]]|
|[[Poon, Josiah|AUTHOR Josiah Poon]]|
|[[Poorjam, Amir H.|AUTHOR Amir H. Poorjam]]|
|[[Popien, Claudia-Linnhoff|AUTHOR Claudia-Linnhoff Popien]]|
|[[Porter, Eszter|AUTHOR Eszter Porter]]|
|[[Portera, Saige|AUTHOR Saige Portera]]|
|[[Portet, François|AUTHOR François Portet]]|
|[[Post, Matt|AUTHOR Matt Post]]|
|[[Potamianos, Alexandros|AUTHOR Alexandros Potamianos]]|
|[[Potdar, Nihal|AUTHOR Nihal Potdar]]|
|[[Potharaju, Srividya Pranavi|AUTHOR Srividya Pranavi Potharaju]]|
|[[Pouplier, Marianne|AUTHOR Marianne Pouplier]]|
|[[Poupon, Nicolas|AUTHOR Nicolas Poupon]]|
|[[Pouthier, Baptiste|AUTHOR Baptiste Pouthier]]|
|[[Povey, Daniel|AUTHOR Daniel Povey]]|
|[[Prabhavalkar, Rohit|AUTHOR Rohit Prabhavalkar]]|
|[[Prajapati, Gauri P.|AUTHOR Gauri P. Prajapati]]|
|[[Prakash, Anusha|AUTHOR Anusha Prakash]]|
|[[Prandoni, Paolo|AUTHOR Paolo Prandoni]]|
|[[Prasad, Amrutha|AUTHOR Amrutha Prasad]]|
|[[Prasad, Lodagala V.S.V. Durga|AUTHOR Lodagala V.S.V. Durga Prasad]]|
|[[Prasad, Manasa|AUTHOR Manasa Prasad]]|
|[[Prasad, RaviShankar|AUTHOR RaviShankar Prasad]]|
|[[Prasanna, S.R. Mahadeva|AUTHOR S.R. Mahadeva Prasanna]]|
|[[Pratap, Vineel|AUTHOR Vineel Pratap]]|
|[[Pražák, Aleš|AUTHOR Aleš Pražák]]|
|[[Precioso, Frederic|AUTHOR Frederic Precioso]]|
|[[Preet, Supreet|AUTHOR Supreet Preet]]|
|[[Proctor, Michael|AUTHOR Michael Proctor]]|
|[[Prokopalo, Yevhenii|AUTHOR Yevhenii Prokopalo]]|
|[[Prom-on, Santitham|AUTHOR Santitham Prom-on]]|
|[[Prud’hommeaux, Emily|AUTHOR Emily Prud’hommeaux]]|
|[[Psutka, Josef|AUTHOR Josef Psutka]]|
|[[Psutka, Josef V.|AUTHOR Josef V. Psutka]]|
|[[Pu, Jie|AUTHOR Jie Pu]]|
|[[Pucher, Michael|AUTHOR Michael Pucher]]|
|[[Puertolas, Adrien|AUTHOR Adrien Puertolas]]|
|[[Pugliatti, Maura|AUTHOR Maura Pugliatti]]|
|[[Pulman, Stephen|AUTHOR Stephen Pulman]]|
|[[Purin, Marju|AUTHOR Marju Purin]]|
|[[Purushothaman, Anurenjan|AUTHOR Anurenjan Purushothaman]]|
|[[Purver, Matthew|AUTHOR Matthew Purver]]|
|[[Pusateri, Ernest|AUTHOR Ernest Pusateri]]|
|[[Pusz, Piotr|AUTHOR Piotr Pusz]]|
|[[Putze, Felix|AUTHOR Felix Putze]]|
|[[Pylkkönen, Janne|AUTHOR Janne Pylkkönen]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Qazi, Ihsan Ayyub|AUTHOR Ihsan Ayyub Qazi]]|
|[[Qendro, Lorena|AUTHOR Lorena Qendro]]|
|[[Qi, Jiajun|AUTHOR Jiajun Qi]]|
|[[Qi, Jinzi|AUTHOR Jinzi Qi]]|
|[[Qian, Fan|AUTHOR Fan Qian]]|
|[[Qian, Kaizhi|AUTHOR Kaizhi Qian]]|
|[[Qian, Yanmin|AUTHOR Yanmin Qian]]|
|[[Qian, Yuxin|AUTHOR Yuxin Qian]]|
|[[Qiao, Yu|AUTHOR Yu Qiao]]|
|[[Qin, James|AUTHOR James Qin]]|
|[[Qin, Tao|AUTHOR Tao Qin]]|
|[[Qin, Xiaoyi|AUTHOR Xiaoyi Qin]]|
|[[Qin, Ying|AUTHOR Ying Qin]]|
|[[Qiu, David|AUTHOR David Qiu]]|
|[[Qiu, Yuanhang|AUTHOR Yuanhang Qiu]]|
|[[Qu, Xiaoyang|AUTHOR Xiaoyang Qu]]|
|[[Quatieri, Thomas F.|AUTHOR Thomas F. Quatieri]]|
|[[Quillen, Carl|AUTHOR Carl Quillen]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[R., Nirmala|AUTHOR Nirmala R.]]|
|[[Radfar, Martin|AUTHOR Martin Radfar]]|
|[[Radová, Vlasta|AUTHOR Vlasta Radová]]|
|[[Raghavan, Srinivasa|AUTHOR Srinivasa Raghavan]]|
|[[Raj, Bhiksha|AUTHOR Bhiksha Raj]]|
|[[Raj, Desh|AUTHOR Desh Raj]]|
|[[Raj, R.G. Prithvi|AUTHOR R.G. Prithvi Raj]]|
|[[Rajpuria, Akash|AUTHOR Akash Rajpuria]]|
|[[Raju, Anirudh|AUTHOR Anirudh Raju]]|
|[[Rallabandi, Sai Sirisha|AUTHOR Sai Sirisha Rallabandi]]|
|[[Ramabhadran, Bhuvana|AUTHOR Bhuvana Ramabhadran]]|
|[[Ramakrishnan, Ganesh|AUTHOR Ganesh Ramakrishnan]]|
|[[Raman, Balasubramanian|AUTHOR Balasubramanian Raman]]|
|[[Ramanarayanan, Vikram|AUTHOR Vikram Ramanarayanan]]|
|[[Ramesh, G.|AUTHOR G. Ramesh]]|
|[[Ramírez-de-la-Rosa, Gabriela|AUTHOR Gabriela Ramírez-de-la-Rosa]]|
|[[Ramoji, Shreyas|AUTHOR Shreyas Ramoji]]|
|[[Ranjan, Yatharth|AUTHOR Yatharth Ranjan]]|
|[[Rao, Hong|AUTHOR Hong Rao]]|
|[[Rao, K. Sreenivasa|AUTHOR K. Sreenivasa Rao]]|
|[[Rao, Milind|AUTHOR Milind Rao]]|
|[[Rao, Preeti|AUTHOR Preeti Rao]]|
|[[Rao, Wei|AUTHOR Wei Rao]]|
|[[Räsänen, Okko|AUTHOR Okko Räsänen]]|
|[[Rascol, Olivier|AUTHOR Olivier Rascol]]|
|[[Rashid, Zulqarnain|AUTHOR Zulqarnain Rashid]]|
|[[Rastrow, Ariya|AUTHOR Ariya Rastrow]]|
|[[Rath, Shakti P.|AUTHOR Shakti P. Rath]]|
|[[Rathod, Akshada|AUTHOR Akshada Rathod]]|
|[[Ratnarajah, Anton|AUTHOR Anton Ratnarajah]]|
|[[Ravanelli, Mirco|AUTHOR Mirco Ravanelli]]|
|[[Ravichandran, Venkatesh|AUTHOR Venkatesh Ravichandran]]|
|[[Ray, Swayambhu Nath|AUTHOR Swayambhu Nath Ray]]|
|[[Raza, Agha Ali|AUTHOR Agha Ali Raza]]|
|[[Reddi, Vijay Janapa|AUTHOR Vijay Janapa Reddi]]|
|[[Reddy, Chandan K.A.|AUTHOR Chandan K.A. Reddy]]|
|[[Reddy, Pradeep|AUTHOR Pradeep Reddy]]|
|[[Rehman, Ivana|AUTHOR Ivana Rehman]]|
|[[Reichel, Uwe|AUTHOR Uwe Reichel]]|
|[[Remus, Steffen|AUTHOR Steffen Remus]]|
|[[Ren, Xinlei|AUTHOR Xinlei Ren]]|
|[[Ren, Yi|AUTHOR Yi Ren]]|
|[[Ren, Zhao|AUTHOR Zhao Ren]]|
|[[Ren, Zongze|AUTHOR Zongze Ren]]|
|[[Renals, Steve|AUTHOR Steve Renals]]|
|[[Renkens, Vincent|AUTHOR Vincent Renkens]]|
|[[Řezáčková, Markéta|AUTHOR Markéta Řezáčková]]|
|[[Rezagholizadeh, Mehdi|AUTHOR Mehdi Rezagholizadeh]]|
|[[Ribeiro, Manuel Sam|AUTHOR Manuel Sam Ribeiro]]|
|[[Ribeiro, Vinicius|AUTHOR Vinicius Ribeiro]]|
|[[Riccardi, Giuseppe|AUTHOR Giuseppe Riccardi]]|
|[[Richmond, Korin|AUTHOR Korin Richmond]]|
|[[Richter, Richard|AUTHOR Richard Richter]]|
|[[Riedhammer, K.|AUTHOR K. Riedhammer]]|
|[[Riera, Pablo|AUTHOR Pablo Riera]]|
|[[Riezler, Stefan|AUTHOR Stefan Riezler]]|
|[[Rigal, Rémi|AUTHOR Rémi Rigal]]|
|[[Rikhye, Rajeev|AUTHOR Rajeev Rikhye]]|
|[[Riley, Michael|AUTHOR Michael Riley]]|
|[[Riley, Michael A.|AUTHOR Michael A. Riley]]|
|[[Rilliard, Albert|AUTHOR Albert Rilliard]]|
|[[Ringeval, Fabien|AUTHOR Fabien Ringeval]]|
|[[Ristea, Nicolae-Cătălin|AUTHOR Nicolae-Cătălin Ristea]]|
|[[Ritwik, Kotra Venkata Sai|AUTHOR Kotra Venkata Sai Ritwik]]|
|[[Riverin-Coutlée, Josiane|AUTHOR Josiane Riverin-Coutlée]]|
|[[Rivière, Morgane|AUTHOR Morgane Rivière]]|
|[[Rizos, Georgios|AUTHOR Georgios Rizos]]|
|[[Ro, Jae|AUTHOR Jae Ro]]|
|[[Roberts, Angela|AUTHOR Angela Roberts]]|
|[[Rocholl, Johann C.|AUTHOR Johann C. Rocholl]]|
|[[Roebel, Axel|AUTHOR Axel Roebel]]|
|[[Roesler, Oliver|AUTHOR Oliver Roesler]]|
|[[Rohanian, Morteza|AUTHOR Morteza Rohanian]]|
|[[Rohdin, Johan|AUTHOR Johan Rohdin]]|
|[[Rohmatillah, Mahdin|AUTHOR Mahdin Rohmatillah]]|
|[[Rolland, Thomas|AUTHOR Thomas Rolland]]|
|[[Rom, Hassan|AUTHOR Hassan Rom]]|
|[[Romana, Amrit|AUTHOR Amrit Romana]]|
|[[Romanenko, Aleksei|AUTHOR Aleksei Romanenko]]|
|[[Ronanki, Srikanth|AUTHOR Srikanth Ronanki]]|
|[[Rondon, Pat|AUTHOR Pat Rondon]]|
|[[Rose, Richard|AUTHOR Richard Rose]]|
|[[Roselló, Nahuel|AUTHOR Nahuel Roselló]]|
|[[Rosenberg, Andrew|AUTHOR Andrew Rosenberg]]|
|[[Rosenkranz, Tobias|AUTHOR Tobias Rosenkranz]]|
|[[Rosindell, James|AUTHOR James Rosindell]]|
|[[Rossato, Solange|AUTHOR Solange Rossato]]|
|[[Roth, Julian|AUTHOR Julian Roth]]|
|[[Roth, Robert M.|AUTHOR Robert M. Roth]]|
|[[Rothkrantz, Leon J.M.|AUTHOR Leon J.M. Rothkrantz]]|
|[[Rouas, Jean-Luc|AUTHOR Jean-Luc Rouas]]|
|[[Rouditchenko, Andrew|AUTHOR Andrew Rouditchenko]]|
|[[Rouhe, Aku|AUTHOR Aku Rouhe]]|
|[[Routray, Aurobinda|AUTHOR Aurobinda Routray]]|
|[[Rowe, Hannah P.|AUTHOR Hannah P. Rowe]]|
|[[Rownicka, Joanna|AUTHOR Joanna Rownicka]]|
|[[Roy, Anwesha|AUTHOR Anwesha Roy]]|
|[[Rozé, Patricia|AUTHOR Patricia Rozé]]|
|[[Rozkovec, M.|AUTHOR M. Rozkovec]]|
|[[Rudnicky, Alexander|AUTHOR Alexander Rudnicky]]|
|[[Rumberg, Lars|AUTHOR Lars Rumberg]]|
|[[Ruzi, Rukiye|AUTHOR Rukiye Ruzi]]|
|[[Ryant, Neville|AUTHOR Neville Ryant]]|
|[[Rybach, David|AUTHOR David Rybach]]|
|[[Rybicka, Magdalena|AUTHOR Magdalena Rybicka]]|
|[[Rychlikowski, Paweł|AUTHOR Paweł Rychlikowski]]|
|[[Ryokai, Kimiko|AUTHOR Kimiko Ryokai]]|
|[[Ryumina, Elena|AUTHOR Elena Ryumina]]|
|[[Ryzhova, Margarita|AUTHOR Margarita Ryzhova]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Saabas, Ando|AUTHOR Ando Saabas]]|
|[[Sabzi Shahrebabaki, Abdolreza|AUTHOR Abdolreza Sabzi Shahrebabaki]]|
|[[Saddler, Mark R.|AUTHOR Mark R. Saddler]]|
|[[Sadhu, Samik|AUTHOR Samik Sadhu]]|
|[[Sadjadi, Seyed Omid|AUTHOR Seyed Omid Sadjadi]]|
|[[Saebi, Mandana|AUTHOR Mandana Saebi]]|
|[[Saeboe, Lilja|AUTHOR Lilja Saeboe]]|
|[[Saghir, Hamidreza|AUTHOR Hamidreza Saghir]]|
|[[Sahidullah, Md.|AUTHOR Md. Sahidullah]]|
|[[Sailor, Hardik|AUTHOR Hardik Sailor]]|
|[[Sainath, Tara N.|AUTHOR Tara N. Sainath]]|
|[[Saito, Daisuke|AUTHOR Daisuke Saito]]|
|[[Saito, Susumu|AUTHOR Susumu Saito]]|
|[[Saito, Yuki|AUTHOR Yuki Saito]]|
|[[Sak, Hasim|AUTHOR Hasim Sak]]|
|[[Sakakibara, Ken-Ichi|AUTHOR Ken-Ichi Sakakibara]]|
|[[Sakallah, Alaa|AUTHOR Alaa Sakallah]]|
|[[Sakamoto, Shoki|AUTHOR Shoki Sakamoto]]|
|[[Sakti, Sakriani|AUTHOR Sakriani Sakti]]|
|[[Sakuma, Jin|AUTHOR Jin Sakuma]]|
|[[Salakhutdinov, Ruslan|AUTHOR Ruslan Salakhutdinov]]|
|[[Salesky, Elizabeth|AUTHOR Elizabeth Salesky]]|
|[[Salimbajevs, Askars|AUTHOR Askars Salimbajevs]]|
|[[Sallustio, Vincenzo|AUTHOR Vincenzo Sallustio]]|
|[[Salvati, Daniele|AUTHOR Daniele Salvati]]|
|[[Samarakoon, Lahiru|AUTHOR Lahiru Samarakoon]]|
|[[Samlan, Robin|AUTHOR Robin Samlan]]|
|[[Sanabria, Ramon|AUTHOR Ramon Sanabria]]|
|[[Sanchez, Enrique|AUTHOR Enrique Sanchez]]|
|[[Sanchis, Albert|AUTHOR Albert Sanchis]]|
|[[Sancinetti, Marcelo|AUTHOR Marcelo Sancinetti]]|
|[[Sandler, Mark|AUTHOR Mark Sandler]]|
|[[Sankaranarayanan, Karthik|AUTHOR Karthik Sankaranarayanan]]|
|[[Santoso, Jennifer|AUTHOR Jennifer Santoso]]|
|[[Saon, George|AUTHOR George Saon]]|
|[[Saraclar, Murat|AUTHOR Murat Saraclar]]|
|[[Saraf, Yatharth|AUTHOR Yatharth Saraf]]|
|[[Sarawagi, Sunita|AUTHOR Sunita Sarawagi]]|
|[[Sarfjoo, Seyyed Saeed|AUTHOR Seyyed Saeed Sarfjoo]]|
|[[Sarkar, Saurjya|AUTHOR Saurjya Sarkar]]|
|[[Sarma, Mousmita|AUTHOR Mousmita Sarma]]|
|[[Sarmah, Priyankoo|AUTHOR Priyankoo Sarmah]]|
|[[Sarni, Salvatore|AUTHOR Salvatore Sarni]]|
|[[Saruwatari, Hiroshi|AUTHOR Hiroshi Saruwatari]]|
|[[Sato, Hiroshi|AUTHOR Hiroshi Sato]]|
|[[Sawata, Ryosuke|AUTHOR Ryosuke Sawata]]|
|[[Saxon, Michael|AUTHOR Michael Saxon]]|
|[[Schamoni, Shigehiko|AUTHOR Shigehiko Schamoni]]|
|[[Scharenborg, Odette|AUTHOR Odette Scharenborg]]|
|[[Scheibler, Robin|AUTHOR Robin Scheibler]]|
|[[Scherer, Nancy|AUTHOR Nancy Scherer]]|
|[[Schiller, Dominik|AUTHOR Dominik Schiller]]|
|[[Schlüter, Ralf|AUTHOR Ralf Schlüter]]|
|[[Schmitt, Manuel|AUTHOR Manuel Schmitt]]|
|[[Schneider, Aaron|AUTHOR Aaron Schneider]]|
|[[Schönherr, Lea|AUTHOR Lea Schönherr]]|
|[[Schröter, Hendrik|AUTHOR Hendrik Schröter]]|
|[[Schuller, Björn W.|AUTHOR Björn W. Schuller]]|
|[[Schuller, Dagmar M.|AUTHOR Dagmar M. Schuller]]|
|[[Schultz, Tanja|AUTHOR Tanja Schultz]]|
|[[Schuster, M.|AUTHOR M. Schuster]]|
|[[Schwab, Didier|AUTHOR Didier Schwab]]|
|[[Schwab, Sarah M.|AUTHOR Sarah M. Schwab]]|
|[[Schwartz, Boaz|AUTHOR Boaz Schwartz]]|
|[[Schwartz, Jean-Luc|AUTHOR Jean-Luc Schwartz]]|
|[[Schwarz, Andreas|AUTHOR Andreas Schwarz]]|
|[[Schymura, Christopher|AUTHOR Christopher Schymura]]|
|[[Sculley, D.|AUTHOR D. Sculley]]|
|[[Seaver, Katie|AUTHOR Katie Seaver]]|
|[[Sebkhi, Nordine|AUTHOR Nordine Sebkhi]]|
|[[Sedlmeier, Andreas|AUTHOR Andreas Sedlmeier]]|
|[[Seidel, Ernst|AUTHOR Ernst Seidel]]|
|[[Sekiguchi, Kouhei|AUTHOR Kouhei Sekiguchi]]|
|[[Seltzer, Michael L.|AUTHOR Michael L. Seltzer]]|
|[[Seneviratne, Nadee|AUTHOR Nadee Seneviratne]]|
|[[Sensi, Mariachiara|AUTHOR Mariachiara Sensi]]|
|[[Seo, Hyeji|AUTHOR Hyeji Seo]]|
|[[Seo, Soonshin|AUTHOR Soonshin Seo]]|
|[[Serafinowicz, Alicja|AUTHOR Alicja Serafinowicz]]|
|[[Serrano, Mauricio|AUTHOR Mauricio Serrano]]|
|[[Seshadri, Vivek|AUTHOR Vivek Seshadri]]|
|[[Seth, Ashish|AUTHOR Ashish Seth]]|
|[[Sethu, Vidhyasaharan|AUTHOR Vidhyasaharan Sethu]]|
|[[Sevastjanova, Rita|AUTHOR Rita Sevastjanova]]|
|[[Seyfarth, Scott|AUTHOR Scott Seyfarth]]|
|[[Sfakianaki, Anna|AUTHOR Anna Sfakianaki]]|
|[[Sha, Yongtao|AUTHOR Yongtao Sha]]|
|[[Shafran, Izhak|AUTHOR Izhak Shafran]]|
|[[Shah, Muhammad A.|AUTHOR Muhammad A. Shah]]|
|[[Shah, Sanket|AUTHOR Sanket Shah]]|
|[[Shahin, Mostafa|AUTHOR Mostafa Shahin]]|
|[[Shaik, M.A. Basha|AUTHOR M.A. Basha Shaik]]|
|[[Shamsi, Meysam|AUTHOR Meysam Shamsi]]|
|[[Shandiz, Amin Honarmandi|AUTHOR Amin Honarmandi Shandiz]]|
|[[Shang, Shidong|AUTHOR Shidong Shang]]|
|[[Shang, Zengqiang|AUTHOR Zengqiang Shang]]|
|[[Shangguan, Yuan|AUTHOR Yuan Shangguan]]|
|[[Shao, Jing|AUTHOR Jing Shao]]|
|[[Shao, Qijie|AUTHOR Qijie Shao]]|
|[[Shao, Zongru|AUTHOR Zongru Shao]]|
|[[Sharma, Bidisha|AUTHOR Bidisha Sharma]]|
|[[Sharma, Dushyant|AUTHOR Dushyant Sharma]]|
|[[Sharma, Manthan|AUTHOR Manthan Sharma]]|
|[[Sharma, Mayank|AUTHOR Mayank Sharma]]|
|[[Sharma, Neeraj|AUTHOR Neeraj Sharma]]|
|[[Shechtman, Slava|AUTHOR Slava Shechtman]]|
|[[Shekhar, Shashank|AUTHOR Shashank Shekhar]]|
|[[Shen, Guang|AUTHOR Guang Shen]]|
|[[Shen, Jonathan|AUTHOR Jonathan Shen]]|
|[[Shen, Yuan|AUTHOR Yuan Shen]]|
|[[Shenoy, Ashish|AUTHOR Ashish Shenoy]]|
|[[Sherman, Alexander V.|AUTHOR Alexander V. Sherman]]|
|[[Shetty, Shruthi|AUTHOR Shruthi Shetty]]|
|[[Shi, Guixin|AUTHOR Guixin Shi]]|
|[[Shi, Huiyu|AUTHOR Huiyu Shi]]|
|[[Shi, Jiatong|AUTHOR Jiatong Shi]]|
|[[Shi, Ke|AUTHOR Ke Shi]]|
|[[Shi, Liang|AUTHOR Liang Shi]]|
|[[Shi, Ning|AUTHOR Ning Shi]]|
|[[Shi, Yangyang|AUTHOR Yangyang Shi]]|
|[[Shi, Yao|AUTHOR Yao Shi]]|
|[[Shi, Yi|AUTHOR Yi Shi]]|
|[[Shibata, Kentaro|AUTHOR Kentaro Shibata]]|
|[[Shigabeev, Ilya|AUTHOR Ilya Shigabeev]]|
|[[Shin, Jong Won|AUTHOR Jong Won Shin]]|
|[[Shin, Myeong Cheol|AUTHOR Myeong Cheol Shin]]|
|[[Shin, Seungmin|AUTHOR Seungmin Shin]]|
|[[Shinozaki, Takahiro|AUTHOR Takahiro Shinozaki]]|
|[[Shintani, Taiken|AUTHOR Taiken Shintani]]|
|[[Shirouzu, Hajime|AUTHOR Hajime Shirouzu]]|
|[[Shon, Suwon|AUTHOR Suwon Shon]]|
|[[Shor, Joel|AUTHOR Joel Shor]]|
|[[Shuai, Wang|AUTHOR Wang Shuai]]|
|[[Shulby, Christopher|AUTHOR Christopher Shulby]]|
|[[Shulman, Michael D.|AUTHOR Michael D. Shulman]]|
|[[Si, Shijing|AUTHOR Shijing Si]]|
|[[Si, Yuke|AUTHOR Yuke Si]]|
|[[Siddhartha, Nikhil|AUTHOR Nikhil Siddhartha]]|
|[[Siddi, Sara|AUTHOR Sara Siddi]]|
|[[Siegert, Ingo|AUTHOR Ingo Siegert]]|
|[[Sigtia, Siddharth|AUTHOR Siddharth Sigtia]]|
|[[Silpachai, Alif|AUTHOR Alif Silpachai]]|
|[[Silva, Daniel Peixoto Pinto da|AUTHOR Daniel Peixoto Pinto da Silva]]|
|[[Silva, Samuel|AUTHOR Samuel Silva]]|
|[[Silvestre-Cerdà, Joan-Albert|AUTHOR Joan-Albert Silvestre-Cerdà]]|
|[[Sim, Khe Chai|AUTHOR Khe Chai Sim]]|
|[[Simantiraki, Olympia|AUTHOR Olympia Simantiraki]]|
|[[Simblett, Sara|AUTHOR Sara Simblett]]|
|[[Simha, Pramod|AUTHOR Pramod Simha]]|
|[[Siminyu, Kathleen|AUTHOR Kathleen Siminyu]]|
|[[Simonchik, Konstantin|AUTHOR Konstantin Simonchik]]|
|[[Singh, Abhayjeet|AUTHOR Abhayjeet Singh]]|
|[[Singh, Ankita|AUTHOR Ankita Singh]]|
|[[Singh, Dipesh K.|AUTHOR Dipesh K. Singh]]|
|[[Singh, Maneesh|AUTHOR Maneesh Singh]]|
|[[Singh, Prachi|AUTHOR Prachi Singh]]|
|[[Singh, Rita|AUTHOR Rita Singh]]|
|[[Singh, Satwinder|AUTHOR Satwinder Singh]]|
|[[Siniscalchi, Sabato Marco|AUTHOR Sabato Marco Siniscalchi]]|
|[[Siohan, Olivier|AUTHOR Olivier Siohan]]|
|[[Sirojan, Tharmakulasingam|AUTHOR Tharmakulasingam Sirojan]]|
|[[Sisman, Berrak|AUTHOR Berrak Sisman]]|
|[[Sitaram, Sunayana|AUTHOR Sunayana Sitaram]]|
|[[Sivaprasad, Sarath|AUTHOR Sarath Sivaprasad]]|
|[[Sivaramakrishnan, V.|AUTHOR V. Sivaramakrishnan]]|
|[[Sivaraman, Aswin|AUTHOR Aswin Sivaraman]]|
|[[Sivasankaran, Sunit|AUTHOR Sunit Sivasankaran]]|
|[[Skarnitzl, Radek|AUTHOR Radek Skarnitzl]]|
|[[Skerry-Ryan, R.J.|AUTHOR R.J. Skerry-Ryan]]|
|[[Sklyar, Ilya|AUTHOR Ilya Sklyar]]|
|[[Slangen, Simon|AUTHOR Simon Slangen]]|
|[[Šmídl, Luboš|AUTHOR Luboš Šmídl]]|
|[[Smiljanic, Rajka|AUTHOR Rajka Smiljanic]]|
|[[Snowden, Julie S.|AUTHOR Julie S. Snowden]]|
|[[Soares, Anderson da Silva|AUTHOR Anderson da Silva Soares]]|
|[[Södergren, Isabella|AUTHOR Isabella Södergren]]|
|[[Solera-Ureña, Rubén|AUTHOR Rubén Solera-Ureña]]|
|[[Soler-Company, Juan|AUTHOR Juan Soler-Company]]|
|[[Solomon, Nancy Pearl|AUTHOR Nancy Pearl Solomon]]|
|[[Soltau, Hagen|AUTHOR Hagen Soltau]]|
|[[Song, Changhe|AUTHOR Changhe Song]]|
|[[Song, Dandan|AUTHOR Dandan Song]]|
|[[Song, Eunwoo|AUTHOR Eunwoo Song]]|
|[[Song, Hongwei|AUTHOR Hongwei Song]]|
|[[Song, Hui|AUTHOR Hui Song]]|
|[[Song, Hyungchan|AUTHOR Hyungchan Song]]|
|[[Song, Meishu|AUTHOR Meishu Song]]|
|[[Song, Qi|AUTHOR Qi Song]]|
|[[Song, Qiong|AUTHOR Qiong Song]]|
|[[Song, Yan|AUTHOR Yan Song]]|
|[[Song, Yaodong|AUTHOR Yaodong Song]]|
|[[Soo, Rachel|AUTHOR Rachel Soo]]|
|[[Soong, Frank K.|AUTHOR Frank K. Soong]]|
|[[Sootla, Sten|AUTHOR Sten Sootla]]|
|[[Sorensen, Karsten|AUTHOR Karsten Sorensen]]|
|[[Sorin, Alexander|AUTHOR Alexander Sorin]]|
|[[Sorokin, Daniil|AUTHOR Daniil Sorokin]]|
|[[Spanias, Andreas|AUTHOR Andreas Spanias]]|
|[[Spathis, Dimitris|AUTHOR Dimitris Spathis]]|
|[[Spong, Paul|AUTHOR Paul Spong]]|
|[[Sprenkamp, Kilian|AUTHOR Kilian Sprenkamp]]|
|[[Squartini, Stefano|AUTHOR Stefano Squartini]]|
|[[Sridhar, Prashant|AUTHOR Prashant Sridhar]]|
|[[Srinivasan, Sriram|AUTHOR Sriram Srinivasan]]|
|[[Srinivasan, Sundararajan|AUTHOR Sundararajan Srinivasan]]|
|[[Sriram, Anuroop|AUTHOR Anuroop Sriram]]|
|[[Stack, Sarah|AUTHOR Sarah Stack]]|
|[[Stafylakis, Themos|AUTHOR Themos Stafylakis]]|
|[[Stahl, Daniel|AUTHOR Daniel Stahl]]|
|[[Staib, Marlene|AUTHOR Marlene Staib]]|
|[[Stappen, Lukas|AUTHOR Lukas Stappen]]|
|[[Stefansdottir, Brynhildur|AUTHOR Brynhildur Stefansdottir]]|
|[[Steinert, Lars|AUTHOR Lars Steinert]]|
|[[Stemmer, Georg|AUTHOR Georg Stemmer]]|
|[[Stephenson, Brooke|AUTHOR Brooke Stephenson]]|
|[[Stern, Richard M.|AUTHOR Richard M. Stern]]|
|[[Stoakes, Hywel|AUTHOR Hywel Stoakes]]|
|[[Stoidis, Dimitrios|AUTHOR Dimitrios Stoidis]]|
|[[Stolcke, Andreas|AUTHOR Andreas Stolcke]]|
|[[Stone, Michael|AUTHOR Michael Stone]]|
|[[Stone, Simon|AUTHOR Simon Stone]]|
|[[Strake, Maximilian|AUTHOR Maximilian Strake]]|
|[[Strauss, Martin|AUTHOR Martin Strauss]]|
|[[Strik, Helmer|AUTHOR Helmer Strik]]|
|[[Strimel, Grant P.|AUTHOR Grant P. Strimel]]|
|[[Strohman, Trevor|AUTHOR Trevor Strohman]]|
|[[Stüker, Sebastian|AUTHOR Sebastian Stüker]]|
|[[Šturm, Pavel|AUTHOR Pavel Šturm]]|
|[[Styles, Suzy J.|AUTHOR Suzy J. Styles]]|
|[[Stylianou, Yannis|AUTHOR Yannis Stylianou]]|
|[[Stypułkowski, Michał|AUTHOR Michał Stypułkowski]]|
|[[Su, Bo-Hao|AUTHOR Bo-Hao Su]]|
|[[Su, Dan|AUTHOR Dan Su]]|
|[[Su, Hang|AUTHOR Hang Su]]|
|[[Su, Jinru|AUTHOR Jinru Su]]|
|[[Su, Ruolin|AUTHOR Ruolin Su]]|
|[[Subbalakshmi, K.P.|AUTHOR K.P. Subbalakshmi]]|
|[[Subrahmanya, Niranjan|AUTHOR Niranjan Subrahmanya]]|
|[[Sudoh, Katsuhito|AUTHOR Katsuhito Sudoh]]|
|[[Suendermann-Oeft, David|AUTHOR David Suendermann-Oeft]]|
|[[Sun, Eric|AUTHOR Eric Sun]]|
|[[Sun, Huiming|AUTHOR Huiming Sun]]|
|[[Sun, Jianwei|AUTHOR Jianwei Sun]]|
|[[Sun, Jingyu|AUTHOR Jingyu Sun]]|
|[[Sun, Lei|AUTHOR Lei Sun]]|
|[[Sun, Lifa|AUTHOR Lifa Sun]]|
|[[Sun, Ming|AUTHOR Ming Sun]]|
|[[Sun, Qinghua|AUTHOR Qinghua Sun]]|
|[[Sun, Sining|AUTHOR Sining Sun]]|
|[[Sun, Xiao|AUTHOR Xiao Sun]]|
|[[Sun, Xingwei|AUTHOR Xingwei Sun]]|
|[[Sun, Yandan|AUTHOR Yandan Sun]]|
|[[Sun, Yuhang|AUTHOR Yuhang Sun]]|
|[[Sundar, Harshavardhan|AUTHOR Harshavardhan Sundar]]|
|[[Sung, Dooyong|AUTHOR Dooyong Sung]]|
|[[Sung, Jongmo|AUTHOR Jongmo Sung]]|
|[[Sung, June Sig|AUTHOR June Sig Sung]]|
|[[Sunkara, Monica|AUTHOR Monica Sunkara]]|
|[[Suo, Hongbin|AUTHOR Hongbin Suo]]|
|[[Suresh, Ananda Theertha|AUTHOR Ananda Theertha Suresh]]|
|[[Suter, Benjamin|AUTHOR Benjamin Suter]]|
|[[Švec, Jan|AUTHOR Jan Švec]]|
|[[Svendsen, Torbjørn|AUTHOR Torbjørn Svendsen]]|
|[[Swaminathan, Rupak Vignesh|AUTHOR Rupak Vignesh Swaminathan]]|
|[[Swapna, N.|AUTHOR N. Swapna]]|
|[[Swart, Albert|AUTHOR Albert Swart]]|
|[[Swearengen, Michael|AUTHOR Michael Swearengen]]|
|[[Syed, Muhammad Shehram Shah|AUTHOR Muhammad Shehram Shah Syed]]|
|[[Syed, Zafi Sherhan|AUTHOR Zafi Sherhan Syed]]|
|[[Symonds, Helena|AUTHOR Helena Symonds]]|
|[[Synnaeve, Gabriel|AUTHOR Gabriel Synnaeve]]|
|[[Szakay, Anita|AUTHOR Anita Szakay]]|
|[[Szöke, Igor|AUTHOR Igor Szöke]]|
|[[Sztahó, Dávid|AUTHOR Dávid Sztahó]]|
|[[Szurley, Joseph|AUTHOR Joseph Szurley]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[T., Kiran Praveen|AUTHOR Kiran Praveen T.]]|
|[[T., Mani Kumar|AUTHOR Mani Kumar T.]]|
|[[Tachibana, Kentaro|AUTHOR Kentaro Tachibana]]|
|[[Tada, Kohei|AUTHOR Kohei Tada]]|
|[[Tahon, Marie|AUTHOR Marie Tahon]]|
|[[Tai, Wenxin|AUTHOR Wenxin Tai]]|
|[[Tairum Cruz, Miguel|AUTHOR Miguel Tairum Cruz]]|
|[[Tak, Hemlata|AUTHOR Hemlata Tak]]|
|[[Takahashi, Shun|AUTHOR Shun Takahashi]]|
|[[Takahashi, Shusuke|AUTHOR Shusuke Takahashi]]|
|[[Takamichi, Shinnosuke|AUTHOR Shinnosuke Takamichi]]|
|[[Takashima, Akihiko|AUTHOR Akihiko Takashima]]|
|[[Takashima, Yuki|AUTHOR Yuki Takashima]]|
|[[Takeda, Ryu|AUTHOR Ryu Takeda]]|
|[[Talkar, Tanya|AUTHOR Tanya Talkar]]|
|[[Tambwekar, Anuj|AUTHOR Anuj Tambwekar]]|
|[[Tamim, Nour|AUTHOR Nour Tamim]]|
|[[Tammewar, Aniruddha|AUTHOR Aniruddha Tammewar]]|
|[[Tamminen, Samu|AUTHOR Samu Tamminen]]|
|[[Tan, Daxin|AUTHOR Daxin Tan]]|
|[[Tan, Kye Min|AUTHOR Kye Min Tan]]|
|[[Tan, Xu|AUTHOR Xu Tan]]|
|[[Tanaka, Keitaro|AUTHOR Keitaro Tanaka]]|
|[[Tanaka, Tomohiro|AUTHOR Tomohiro Tanaka]]|
|[[Tang, Chuanxin|AUTHOR Chuanxin Tang]]|
|[[Tang, Fengyi|AUTHOR Fengyi Tang]]|
|[[Tang, H. Lilian|AUTHOR H. Lilian Tang]]|
|[[Tang, Li|AUTHOR Li Tang]]|
|[[Tang, Min|AUTHOR Min Tang]]|
|[[Tang, Ping|AUTHOR Ping Tang]]|
|[[Tang, Xiao|AUTHOR Xiao Tang]]|
|[[Tang, Yufeng|AUTHOR Yufeng Tang]]|
|[[Tang, Zhenyu|AUTHOR Zhenyu Tang]]|
|[[Tang, Zhiyuan|AUTHOR Zhiyuan Tang]]|
|[[Tang, Ziyue|AUTHOR Ziyue Tang]]|
|[[Taniguchi, Akira|AUTHOR Akira Taniguchi]]|
|[[Taniguchi, Tadahiro|AUTHOR Tadahiro Taniguchi]]|
|[[Tanji, Ryo|AUTHOR Ryo Tanji]]|
|[[Tao, Jianhua|AUTHOR Jianhua Tao]]|
|[[Tawara, Naohiro|AUTHOR Naohiro Tawara]]|
|[[Taylor, Jason|AUTHOR Jason Taylor]]|
|[[Taylor, Sarah|AUTHOR Sarah Taylor]]|
|[[Teh, Kah Kuan|AUTHOR Kah Kuan Teh]]|
|[[Teh, Tian Huey|AUTHOR Tian Huey Teh]]|
|[[Teixeira, António|AUTHOR António Teixeira]]|
|[[Teixeira, Francisco|AUTHOR Francisco Teixeira]]|
|[[ten Bosch, Louis|AUTHOR Louis ten Bosch]]|
|[[Terblanche, Camryn|AUTHOR Camryn Terblanche]]|
|[[Teytaut, Yann|AUTHOR Yann Teytaut]]|
|[[Thelapurath, Shrinath|AUTHOR Shrinath Thelapurath]]|
|[[The RADAR-CNS Consortium|AUTHOR The RADAR-CNS Consortium]]|
|[[Thiagarajan, Jayaraman J.|AUTHOR Jayaraman J. Thiagarajan]]|
|[[Thieberger, Nick|AUTHOR Nick Thieberger]]|
|[[Thiele, Lothar|AUTHOR Lothar Thiele]]|
|[[Thienpondt, Jenthe|AUTHOR Jenthe Thienpondt]]|
|[[Thomas, Samuel|AUTHOR Samuel Thomas]]|
|[[Thompson, Jennifer C.|AUTHOR Jennifer C. Thompson]]|
|[[Thulke, David|AUTHOR David Thulke]]|
|[[Thyagachandran, Anand|AUTHOR Anand Thyagachandran]]|
|[[Tian, Biao|AUTHOR Biao Tian]]|
|[[Tian, Xiaohai|AUTHOR Xiaohai Tian]]|
|[[Tian, Zhengkun|AUTHOR Zhengkun Tian]]|
|[[Tiede, Mark|AUTHOR Mark Tiede]]|
|[[Tihelka, Daniel|AUTHOR Daniel Tihelka]]|
|[[Tilsen, Sam|AUTHOR Sam Tilsen]]|
|[[Tiscareno, Ingrid|AUTHOR Ingrid Tiscareno]]|
|[[Tiwari, Upasana|AUTHOR Upasana Tiwari]]|
|[[Tjandra, Andros|AUTHOR Andros Tjandra]]|
|[[Tobin, Jimmy|AUTHOR Jimmy Tobin]]|
|[[Toda, Tomoki|AUTHOR Tomoki Toda]]|
|[[Todisco, Massimiliano|AUTHOR Massimiliano Todisco]]|
|[[Togami, Masahito|AUTHOR Masahito Togami]]|
|[[Tokuyama, Hirotaka|AUTHOR Hirotaka Tokuyama]]|
|[[Tolias, Andreas Savas|AUTHOR Andreas Savas Tolias]]|
|[[Toma, Andrea|AUTHOR Andrea Toma]]|
|[[Tomanek, Katrin|AUTHOR Katrin Tomanek]]|
|[[Tomasello, Paden|AUTHOR Paden Tomasello]]|
|[[Tomashenko, Natalia|AUTHOR Natalia Tomashenko]]|
|[[Tong, Audrey|AUTHOR Audrey Tong]]|
|[[Tong, Fuchuan|AUTHOR Fuchuan Tong]]|
|[[Tong, Ziyi|AUTHOR Ziyi Tong]]|
|[[Tooley, Lauren|AUTHOR Lauren Tooley]]|
|[[Topar, Tobias|AUTHOR Tobias Topar]]|
|[[Torcoli, Matteo|AUTHOR Matteo Torcoli]]|
|[[Torgashov, Nikita|AUTHOR Nikita Torgashov]]|
|[[Torralba, Antonio|AUTHOR Antonio Torralba]]|
|[[Torresquintero, Alexandra|AUTHOR Alexandra Torresquintero]]|
|[[Tóth, László|AUTHOR László Tóth]]|
|[[Tran, Dung N.|AUTHOR Dung N. Tran]]|
|[[Tran, Huy Dat|AUTHOR Huy Dat Tran]]|
|[[Tran, Khoa Viet|AUTHOR Khoa Viet Tran]]|
|[[Tran, Trang|AUTHOR Trang Tran]]|
|[[Tran, Viet Anh|AUTHOR Viet Anh Tran]]|
|[[Trancoso, Isabel|AUTHOR Isabel Trancoso]]|
|[[Trang, Nguyen Thi Thu|AUTHOR Nguyen Thi Thu Trang]]|
|[[Treep, Jelle|AUTHOR Jelle Treep]]|
|[[Trella, Paweł|AUTHOR Paweł Trella]]|
|[[Tripathi, Anshuman|AUTHOR Anshuman Tripathi]]|
|[[Tripiana, Antonio|AUTHOR Antonio Tripiana]]|
|[[Trmal, Jan|AUTHOR Jan Trmal]]|
|[[Trouvain, Jürgen|AUTHOR Jürgen Trouvain]]|
|[[Truong, Thinh Hung|AUTHOR Thinh Hung Truong]]|
|[[Tsao, Yu|AUTHOR Yu Tsao]]|
|[[Tschoepe, Constanze|AUTHOR Constanze Tschoepe]]|
|[[Tseng, Shu-Chuan|AUTHOR Shu-Chuan Tseng]]|
|[[Tseng, Wei-Cheng|AUTHOR Wei-Cheng Tseng]]|
|[[Tsiakoulis, Pirros|AUTHOR Pirros Tsiakoulis]]|
|[[Tsukada, Kimiko|AUTHOR Kimiko Tsukada]]|
|[[Tsunoo, Emiru|AUTHOR Emiru Tsunoo]]|
|[[Tsuzaki, Minoru|AUTHOR Minoru Tsuzaki]]|
|[[Tu, Wei-Wei|AUTHOR Wei-Wei Tu]]|
|[[Tu, Youzhi|AUTHOR Youzhi Tu]]|
|[[Tu, Zehai|AUTHOR Zehai Tu]]|
|[[Tulics, Miklós Gábriel|AUTHOR Miklós Gábriel Tulics]]|
|[[Tur, Gokhan|AUTHOR Gokhan Tur]]|
|[[Turchi, Marco|AUTHOR Marco Turchi]]|
|[[Turrisi, Rosanna|AUTHOR Rosanna Turrisi]]|
|[[Tüske, Zoltán|AUTHOR Zoltán Tüske]]|
|[[Tzanetakis, George|AUTHOR George Tzanetakis]]|
|[[Tzimiropoulos, Georgios|AUTHOR Georgios Tzimiropoulos]]|
|[[Tzirakis, Panagiotis|AUTHOR Panagiotis Tzirakis]]|
|[[Tzudir, Moakala|AUTHOR Moakala Tzudir]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Udupa, Sathvik|AUTHOR Sathvik Udupa]]|
|[[Ukkonen, Antti|AUTHOR Antti Ukkonen]]|
|[[ul Haq, Ehsan|AUTHOR Ehsan ul Haq]]|
|[[Um, Seyun|AUTHOR Seyun Um]]|
|[[Umesh, Tejas|AUTHOR Tejas Umesh]]|
|[[Unni, Vinit|AUTHOR Vinit Unni]]|
|[[Unoki, Masashi|AUTHOR Masashi Unoki]]|
|[[Urooj, Saba|AUTHOR Saba Urooj]]|
|[[Utsuro, Takehito|AUTHOR Takehito Utsuro]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Vaaras, Einari|AUTHOR Einari Vaaras]]|
|[[Vaideeswaran, Rakesh|AUTHOR Rakesh Vaideeswaran]]|
|[[Valentini-Botinhao, Cassia|AUTHOR Cassia Valentini-Botinhao]]|
|[[Vali, Mohammad Hassan|AUTHOR Mohammad Hassan Vali]]|
|[[Valin, Jean-Marc|AUTHOR Jean-Marc Valin]]|
|[[Vallée, Nathalie|AUTHOR Nathalie Vallée]]|
|[[Vallés-Pérez, Iván|AUTHOR Iván Vallés-Pérez]]|
|[[Valstar, Michel|AUTHOR Michel Valstar]]|
|[[Vamvoukakis, Georgios|AUTHOR Georgios Vamvoukakis]]|
|[[Van de Velde, Hans|AUTHOR Hans Van de Velde]]|
|[[Van Gysel, Christophe|AUTHOR Christophe Van Gysel]]|
|[[Van hamme, Hugo|AUTHOR Hugo Van hamme]]|
|[[van Hout, Roeland|AUTHOR Roeland van Hout]]|
|[[Van keirsbilck, Matthijs|AUTHOR Matthijs Van keirsbilck]]|
|[[van Niekerk, Benjamin|AUTHOR Benjamin van Niekerk]]|
|[[van Niekerk, Daniel|AUTHOR Daniel van Niekerk]]|
|[[van Os, Marjolein|AUTHOR Marjolein van Os]]|
|[[van Rijn, Pol|AUTHOR Pol van Rijn]]|
|[[van Son, Rob J.J.H.|AUTHOR Rob J.J.H. van Son]]|
|[[Variani, Ehsan|AUTHOR Ehsan Variani]]|
|[[Varma, Rajat|AUTHOR Rajat Varma]]|
|[[Varol, Huseyin Atakan|AUTHOR Huseyin Atakan Varol]]|
|[[Vasilescu, Ioana|AUTHOR Ioana Vasilescu]]|
|[[Vásquez-Correa, J.C.|AUTHOR J.C. Vásquez-Correa]]|
|[[Vaysse, Robin|AUTHOR Robin Vaysse]]|
|[[Vega Rodriguez, Jenifer|AUTHOR Jenifer Vega Rodriguez]]|
|[[Velichko, Alena|AUTHOR Alena Velichko]]|
|[[Venkataramani, Shrikant|AUTHOR Shrikant Venkataramani]]|
|[[Venkataramani, Swagath|AUTHOR Swagath Venkataramani]]|
|[[Venkatesh, Ganesh|AUTHOR Ganesh Venkatesh]]|
|[[Vente, Ralph|AUTHOR Ralph Vente]]|
|[[Venugopalan, Subhashini|AUTHOR Subhashini Venugopalan]]|
|[[Vepa, Jithendra|AUTHOR Jithendra Vepa]]|
|[[Verkholyak, Oxana|AUTHOR Oxana Verkholyak]]|
|[[Veselý, Karel|AUTHOR Karel Veselý]]|
|[[Vetráb, Mercedes|AUTHOR Mercedes Vetráb]]|
|[[Vidal, Jazmín|AUTHOR Jazmín Vidal]]|
|[[Vijayasenan, Deepu|AUTHOR Deepu Vijayasenan]]|
|[[Villalba, Jesús|AUTHOR Jesús Villalba]]|
|[[Villatoro-Tello, Esaú|AUTHOR Esaú Villatoro-Tello]]|
|[[Vincent, Emmanuel|AUTHOR Emmanuel Vincent]]|
|[[Vinciarelli, Alessandro|AUTHOR Alessandro Vinciarelli]]|
|[[Vinogradova, Alisa|AUTHOR Alisa Vinogradova]]|
|[[Virkar, Yogesh|AUTHOR Yogesh Virkar]]|
|[[Viswanathan, Vijay|AUTHOR Vijay Viswanathan]]|
|[[Vít, Jakub|AUTHOR Jakub Vít]]|
|[[Viveros Muñoz, Rhoddy|AUTHOR Rhoddy Viveros Muñoz]]|
|[[Volná, Veronika|AUTHOR Veronika Volná]]|
|[[von Neumann, Thilo|AUTHOR Thilo von Neumann]]|
|[[Vozila, Paul|AUTHOR Paul Vozila]]|
|[[Vuissoz, Pierre-André|AUTHOR Pierre-André Vuissoz]]|
|[[Vuong, Tyler|AUTHOR Tyler Vuong]]|
|[[Vyas, Apoorv|AUTHOR Apoorv Vyas]]|
|[[Vyas, Piyush|AUTHOR Piyush Vyas]]|
|[[Vyas, Saurabh|AUTHOR Saurabh Vyas]]|
|[[Vydana, Hari Krishna|AUTHOR Hari Krishna Vydana]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Wagner, Michael|AUTHOR Michael Wagner]]|
|[[Wagner, Petra|AUTHOR Petra Wagner]]|
|[[Waibel, Alex|AUTHOR Alex Waibel]]|
|[[Walker, Daniel D.|AUTHOR Daniel D. Walker]]|
|[[Wallbridge, Sarenne|AUTHOR Sarenne Wallbridge]]|
|[[Wallington, Electra|AUTHOR Electra Wallington]]|
|[[Wallis, Christopher G.R.|AUTHOR Christopher G.R. Wallis]]|
|[[Wan, Moquan|AUTHOR Moquan Wan]]|
|[[Wan, Xucheng|AUTHOR Xucheng Wan]]|
|[[Wang, Bin|AUTHOR Bin Wang]]|
|[[Wang, Binling|AUTHOR Binling Wang]]|
|[[Wang, Boxin|AUTHOR Boxin Wang]]|
|[[Wang, Bruce Xiao|AUTHOR Bruce Xiao Wang]]|
|[[Wang, Changhan|AUTHOR Changhan Wang]]|
|[[Wang, Chao|AUTHOR Chao Wang]]|
|[[Wang, Chao|AUTHOR Chao Wang]]|
|[[Wang, Chenglong|AUTHOR Chenglong Wang]]|
|[[Wang, Chengyu|AUTHOR Chengyu Wang]]|
|[[Wang, Chunfeng|AUTHOR Chunfeng Wang]]|
|[[Wang, Congyi|AUTHOR Congyi Wang]]|
|[[Wang, Dan|AUTHOR Dan Wang]]|
|[[Wang, DeLiang|AUTHOR DeLiang Wang]]|
|[[Wang, Ding|AUTHOR Ding Wang]]|
|[[Wang, Disong|AUTHOR Disong Wang]]|
|[[Wang, Dong|AUTHOR Dong Wang]]|
|[[Wang, Fan-Lin|AUTHOR Fan-Lin Wang]]|
|[[Wang, Fei|AUTHOR Fei Wang]]|
|[[Wang, Feng|AUTHOR Feng Wang]]|
|[[Wang, Gary|AUTHOR Gary Wang]]|
|[[Wang, Guan-Bo|AUTHOR Guan-Bo Wang]]|
|[[Wang, Guangsen|AUTHOR Guangsen Wang]]|
|[[Wang, Haoxiang|AUTHOR Haoxiang Wang]]|
|[[Wang, Haoyu|AUTHOR Haoyu Wang]]|
|[[Wang, Helin|AUTHOR Helin Wang]]|
|[[Wang, Hsin-Min|AUTHOR Hsin-Min Wang]]|
|[[Wang, Huaming|AUTHOR Huaming Wang]]|
|[[Wang, Hui|AUTHOR Hui Wang]]|
|[[Wang, Jianping|AUTHOR Jianping Wang]]|
|[[Wang, Jianrong|AUTHOR Jianrong Wang]]|
|[[Wang, Jianzong|AUTHOR Jianzong Wang]]|
|[[Wang, Jie|AUTHOR Jie Wang]]|
|[[Wang, Jie|AUTHOR Jie Wang]]|
|[[Wang, Jin|AUTHOR Jin Wang]]|
|[[Wang, Jinchao|AUTHOR Jinchao Wang]]|
|[[Wang, Jindong|AUTHOR Jindong Wang]]|
|[[Wang, Jing|AUTHOR Jing Wang]]|
|[[Wang, Jingsong|AUTHOR Jingsong Wang]]|
|[[Wang, Jinhan|AUTHOR Jinhan Wang]]|
|[[Wang, Junjie|AUTHOR Junjie Wang]]|
|[[Wang, Jun|AUTHOR Jun Wang]]|
|[[Wang, Jun|AUTHOR Jun Wang]]|
|[[Wang, Kai|AUTHOR Kai Wang]]|
|[[Wang, Kang|AUTHOR Kang Wang]]|
|[[Wang, L.|AUTHOR L. Wang]]|
|[[Wang, Lan|AUTHOR Lan Wang]]|
|[[Wang, Li|AUTHOR Li Wang]]|
|[[Wang, Liyuan|AUTHOR Liyuan Wang]]|
|[[Wang, Lizhong|AUTHOR Lizhong Wang]]|
|[[Wang, Longbiao|AUTHOR Longbiao Wang]]|
|[[Wang, Lu|AUTHOR Lu Wang]]|
|[[Wang, Mingqiu|AUTHOR Mingqiu Wang]]|
|[[Wang, Mingxuan|AUTHOR Mingxuan Wang]]|
|[[Wang, Naigang|AUTHOR Naigang Wang]]|
|[[Wang, Nick J.C.|AUTHOR Nick J.C. Wang]]|
|[[Wang, Ning|AUTHOR Ning Wang]]|
|[[Wang, Peidong|AUTHOR Peidong Wang]]|
|[[Wang, Pinyi|AUTHOR Pinyi Wang]]|
|[[Wang, Pu|AUTHOR Pu Wang]]|
|[[Wang, Qing|AUTHOR Qing Wang]]|
|[[Wang, Qing|AUTHOR Qing Wang]]|
|[[Wang, Quan|AUTHOR Quan Wang]]|
|[[Wang, Rui|AUTHOR Rui Wang]]|
|[[Wang, Ruili|AUTHOR Ruili Wang]]|
|[[Wang, Shaojun|AUTHOR Shaojun Wang]]|
|[[Wang, Shengbei|AUTHOR Shengbei Wang]]|
|[[Wang, Sifan|AUTHOR Sifan Wang]]|
|[[Wang, Taihui|AUTHOR Taihui Wang]]|
|[[Wang, Tao|AUTHOR Tao Wang]]|
|[[Wang, Tianzi|AUTHOR Tianzi Wang]]|
|[[Wang, Wei|AUTHOR Wei Wang]]|
|[[Wang, Weiqing|AUTHOR Weiqing Wang]]|
|[[Wang, Wei|AUTHOR Wei Wang]]|
|[[Wang, Wen|AUTHOR Wen Wang]]|
|[[Wang, Wenchao|AUTHOR Wenchao Wang]]|
|[[Wang, Wenwu|AUTHOR Wenwu Wang]]|
|[[Wang, Wupeng|AUTHOR Wupeng Wang]]|
|[[Wang, Xiaobo|AUTHOR Xiaobo Wang]]|
|[[Wang, Xiaofei|AUTHOR Xiaofei Wang]]|
|[[Wang, Xiaoqiang|AUTHOR Xiaoqiang Wang]]|
|[[Wang, Xiaorui|AUTHOR Xiaorui Wang]]|
|[[Wang, Xingming|AUTHOR Xingming Wang]]|
|[[Wang, Xin|AUTHOR Xin Wang]]|
|[[Wang, Xin|AUTHOR Xin Wang]]|
|[[Wang, Xiong|AUTHOR Xiong Wang]]|
|[[Wang, Xuyang|AUTHOR Xuyang Wang]]|
|[[Wang, Yang|AUTHOR Yang Wang]]|
|[[Wang, Yannan|AUTHOR Yannan Wang]]|
|[[Wang, Yih-Wen|AUTHOR Yih-Wen Wang]]|
|[[Wang, Yongqing|AUTHOR Yongqing Wang]]|
|[[Wang, Yuguang|AUTHOR Yuguang Wang]]|
|[[Wang, Yuhua|AUTHOR Yuhua Wang]]|
|[[Wang, Yujun|AUTHOR Yujun Wang]]|
|[[Wang, Yun|AUTHOR Yun Wang]]|
|[[Wang, Yu-Xuan|AUTHOR Yu-Xuan Wang]]|
|[[Wang, Yuxuan|AUTHOR Yuxuan Wang]]|
|[[Wang, Zhichao|AUTHOR Zhichao Wang]]|
|[[Wang, Zhiming|AUTHOR Zhiming Wang]]|
|[[Wang, Ziteng|AUTHOR Ziteng Wang]]|
|[[Wani, Yash R.|AUTHOR Yash R. Wani]]|
|[[Wanner, Leo|AUTHOR Leo Wanner]]|
|[[Warden, Pete|AUTHOR Pete Warden]]|
|[[Watanabe, Shinji|AUTHOR Shinji Watanabe]]|
|[[Waters, Austin|AUTHOR Austin Waters]]|
|[[Watson, Catherine I.|AUTHOR Catherine I. Watson]]|
|[[Wei, Bo|AUTHOR Bo Wei]]|
|[[Wei, Wenqi|AUTHOR Wenqi Wei]]|
|[[Weingartová, Lenka|AUTHOR Lenka Weingartová]]|
|[[Weiss, Ron J.|AUTHOR Ron J. Weiss]]|
|[[Weld, Henry|AUTHOR Henry Weld]]|
|[[Wen, Liang|AUTHOR Liang Wen]]|
|[[Wen, Xue|AUTHOR Xue Wen]]|
|[[Wen, Zhengqi|AUTHOR Zhengqi Wen]]|
|[[Wen, Zujie|AUTHOR Zujie Wen]]|
|[[Weng, Chao|AUTHOR Chao Weng]]|
|[[Werner, Raphael|AUTHOR Raphael Werner]]|
|[[Westerman, Ryan|AUTHOR Ryan Westerman]]|
|[[Weston, Heather|AUTHOR Heather Weston]]|
|[[Whang, Taesun|AUTHOR Taesun Whang]]|
|[[White, Hannah|AUTHOR Hannah White]]|
|[[White, Katie M.|AUTHOR Katie M. White]]|
|[[Wiechmann, Daniel|AUTHOR Daniel Wiechmann]]|
|[[Wiesler, Simon|AUTHOR Simon Wiesler]]|
|[[Wiesner, Matthew|AUTHOR Matthew Wiesner]]|
|[[Wiest, Thomas|AUTHOR Thomas Wiest]]|
|[[Wilbrandt, Alexander|AUTHOR Alexander Wilbrandt]]|
|[[Willett, Daniel|AUTHOR Daniel Willett]]|
|[[Williams, Jason|AUTHOR Jason Williams]]|
|[[Williamson, Donald S.|AUTHOR Donald S. Williamson]]|
|[[Winata, Genta Indra|AUTHOR Genta Indra Winata]]|
|[[Wintrode, Jonathan|AUTHOR Jonathan Wintrode]]|
|[[Witbrock, Michael|AUTHOR Michael Witbrock]]|
|[[Witkowski, Marcin|AUTHOR Marcin Witkowski]]|
|[[Witzlack-Makarevich, Alena|AUTHOR Alena Witzlack-Makarevich]]|
|[[Włodarczak, Marcin|AUTHOR Marcin Włodarczak]]|
|[[Wochner, Daniela|AUTHOR Daniela Wochner]]|
|[[Woisard, Virginie|AUTHOR Virginie Woisard]]|
|[[Wolf, Lior|AUTHOR Lior Wolf]]|
|[[Wolff, Matthias|AUTHOR Matthias Wolff]]|
|[[Woltron, Thomas|AUTHOR Thomas Woltron]]|
|[[Wood, Chris|AUTHOR Chris Wood]]|
|[[Woodland, Philip C.|AUTHOR Philip C. Woodland]]|
|[[Wren, Yvonne|AUTHOR Yvonne Wren]]|
|[[Wright, Jonathan|AUTHOR Jonathan Wright]]|
|[[Wright, Richard A.|AUTHOR Richard A. Wright]]|
|[[Wu, Anne|AUTHOR Anne Wu]]|
|[[Wu, Bin|AUTHOR Bin Wu]]|
|[[Wu, Bo|AUTHOR Bo Wu]]|
|[[Wu, Bo|AUTHOR Bo Wu]]|
|[[Wu, Chunyang|AUTHOR Chunyang Wu]]|
|[[Wu, Di|AUTHOR Di Wu]]|
|[[Wu, Felix|AUTHOR Felix Wu]]|
|[[Wu, Haibin|AUTHOR Haibin Wu]]|
|[[Wu, Jianhan|AUTHOR Jianhan Wu]]|
|[[Wu, Jian|AUTHOR Jian Wu]]|
|[[Wu, Jian|AUTHOR Jian Wu]]|
|[[Wu, Mengyue|AUTHOR Mengyue Wu]]|
|[[Wu, Ming-hui|AUTHOR Ming-hui Wu]]|
|[[Wu, Minglin|AUTHOR Minglin Wu]]|
|[[Wu, Minhua|AUTHOR Minhua Wu]]|
|[[Wu, Sarah|AUTHOR Sarah Wu]]|
|[[Wu, Ting-Wei|AUTHOR Ting-Wei Wu]]|
|[[Wu, Xixin|AUTHOR Xixin Wu]]|
|[[Wu, Yanfeng|AUTHOR Yanfeng Wu]]|
|[[Wu, Yaru|AUTHOR Yaru Wu]]|
|[[Wu, Yibo|AUTHOR Yibo Wu]]|
|[[Wu, Yi-Chiao|AUTHOR Yi-Chiao Wu]]|
|[[Wu, Yifei|AUTHOR Yifei Wu]]|
|[[Wu, Yonghui|AUTHOR Yonghui Wu]]|
|[[Wu, Yu|AUTHOR Yu Wu]]|
|[[Wu, Zelin|AUTHOR Zelin Wu]]|
|[[Wu, Zhiyong|AUTHOR Zhiyong Wu]]|
|[[Wu, Zhizheng|AUTHOR Zhizheng Wu]]|
|[[Wu, Zhongqin|AUTHOR Zhongqin Wu]]|
|[[Wykes, Til|AUTHOR Til Wykes]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Xia, Rui|AUTHOR Rui Xia]]|
|[[Xia, Tong|AUTHOR Tong Xia]]|
|[[Xia, Yangyang|AUTHOR Yangyang Xia]]|
|[[Xiang, Lu|AUTHOR Lu Xiang]]|
|[[Xiao, Alex|AUTHOR Alex Xiao]]|
|[[Xiao, Jaclyn|AUTHOR Jaclyn Xiao]]|
|[[Xiao, Jing|AUTHOR Jing Xiao]]|
|[[Xiao, Runqiu|AUTHOR Runqiu Xiao]]|
|[[Xiao, Wei|AUTHOR Wei Xiao]]|
|[[Xiao, Xiao|AUTHOR Xiao Xiao]]|
|[[Xie, Fenglong|AUTHOR Fenglong Xie]]|
|[[Xie, Lei|AUTHOR Lei Xie]]|
|[[Xie, Xurong|AUTHOR Xurong Xie]]|
|[[Xie, Yang|AUTHOR Yang Xie]]|
|[[Xie, Yanlu|AUTHOR Yanlu Xie]]|
|[[Xie, Zongxia|AUTHOR Zongxia Xie]]|
|[[Xin, Detai|AUTHOR Detai Xin]]|
|[[Xing, Chao|AUTHOR Chao Xing]]|
|[[Xing, Xiaofen|AUTHOR Xiaofen Xing]]|
|[[Xiong, Caiming|AUTHOR Caiming Xiong]]|
|[[Xiong, Deyi|AUTHOR Deyi Xiong]]|
|[[Xiong, Shengwu|AUTHOR Shengwu Xiong]]|
|[[Xiong, Shifu|AUTHOR Shifu Xiong]]|
|[[Xiong, Tao|AUTHOR Tao Xiong]]|
|[[Xiong, Yixuan|AUTHOR Yixuan Xiong]]|
|[[Xiu, Zhiping|AUTHOR Zhiping Xiu]]|
|[[Xu, Anqi|AUTHOR Anqi Xu]]|
|[[Xu, Bo|AUTHOR Bo Xu]]|
|[[Xu, Changliang|AUTHOR Changliang Xu]]|
|[[Xu, Chenglin|AUTHOR Chenglin Xu]]|
|[[Xu, Dongxiang|AUTHOR Dongxiang Xu]]|
|[[Xu, Furong|AUTHOR Furong Xu]]|
|[[Xu, Gaopeng|AUTHOR Gaopeng Xu]]|
|[[Xu, Haihua|AUTHOR Haihua Xu]]|
|[[Xu, Hainan|AUTHOR Hainan Xu]]|
|[[Xu, Jiaming|AUTHOR Jiaming Xu]]|
|[[Xu, Jing|AUTHOR Jing Xu]]|
|[[Xu, Liang|AUTHOR Liang Xu]]|
|[[Xu, Menglong|AUTHOR Menglong Xu]]|
|[[Xu, Min|AUTHOR Min Xu]]|
|[[Xu, Mingxing|AUTHOR Mingxing Xu]]|
|[[Xu, Qiantong|AUTHOR Qiantong Xu]]|
|[[Xu, Shugong|AUTHOR Shugong Xu]]|
|[[Xu, Weiyuan|AUTHOR Weiyuan Xu]]|
|[[Xu, Xiaoshuo|AUTHOR Xiaoshuo Xu]]|
|[[Xu, Xin|AUTHOR Xin Xu]]|
|[[Xu, Xinkang|AUTHOR Xinkang Xu]]|
|[[Xu, Xinmeng|AUTHOR Xinmeng Xu]]|
|[[Xu, Xuenan|AUTHOR Xuenan Xu]]|
|[[Xu, Xuexin|AUTHOR Xuexin Xu]]|
|[[Xu, Yi|AUTHOR Yi Xu]]|
|[[Xu, Yong|AUTHOR Yong Xu]]|
|[[Xu, Yuezhu|AUTHOR Yuezhu Xu]]|
|[[Xu, Ziyi|AUTHOR Ziyi Xu]]|
|[[Xue, Cheng|AUTHOR Cheng Xue]]|
|[[Xue, Jiabin|AUTHOR Jiabin Xue]]|
|[[Xue, Jian|AUTHOR Jian Xue]]|
|[[Xue, Wei|AUTHOR Wei Xue]]|
|[[Xue, Yawen|AUTHOR Yawen Xue]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Yadama, Tarun Reddy|AUTHOR Tarun Reddy Yadama]]|
|[[Yadav, Ravi|AUTHOR Ravi Yadav]]|
|[[Yamada, Takeshi|AUTHOR Takeshi Yamada]]|
|[[Yamagishi, Junichi|AUTHOR Junichi Yamagishi]]|
|[[Yamamoto, Ayako|AUTHOR Ayako Yamamoto]]|
|[[Yamamoto, Ryuichi|AUTHOR Ryuichi Yamamoto]]|
|[[Yamazaki, Yoshihiro|AUTHOR Yoshihiro Yamazaki]]|
|[[Yan, Brian|AUTHOR Brian Yan]]|
|[[Yan, Chao|AUTHOR Chao Yan]]|
|[[Yan, Jie|AUTHOR Jie Yan]]|
|[[Yan, Jinghao|AUTHOR Jinghao Yan]]|
|[[Yan, Tianhao|AUTHOR Tianhao Yan]]|
|[[Yan, Yonghong|AUTHOR Yonghong Yan]]|
|[[Yan, Yuzi|AUTHOR Yuzi Yan]]|
|[[Yan, Zhijie|AUTHOR Zhijie Yan]]|
|[[Yan, Zhiyong|AUTHOR Zhiyong Yan]]|
|[[Yang, Chao|AUTHOR Chao Yang]]|
|[[Yang, Chao-Han Huck|AUTHOR Chao-Han Huck Yang]]|
|[[Yang, Dongchao|AUTHOR Dongchao Yang]]|
|[[Yang, Eunho|AUTHOR Eunho Yang]]|
|[[Yang, Feiran|AUTHOR Feiran Yang]]|
|[[Yang, Feng|AUTHOR Feng Yang]]|
|[[Yang, Fengyu|AUTHOR Fengyu Yang]]|
|[[Yang, Gene-Ping|AUTHOR Gene-Ping Yang]]|
|[[Yang, Hemin|AUTHOR Hemin Yang]]|
|[[Yang, Jinhyeok|AUTHOR Jinhyeok Yang]]|
|[[Yang, Jun|AUTHOR Jun Yang]]|
|[[Yang, Jun|AUTHOR Jun Yang]]|
|[[Yang, Lidong|AUTHOR Lidong Yang]]|
|[[Yang, Linju|AUTHOR Linju Yang]]|
|[[Yang, Meirong|AUTHOR Meirong Yang]]|
|[[Yang, Minghui|AUTHOR Minghui Yang]]|
|[[Yang, Shan|AUTHOR Shan Yang]]|
|[[Yang, Shu-wen|AUTHOR Shu-wen Yang]]|
|[[Yang, Song|AUTHOR Song Yang]]|
|[[Yang, Wei|AUTHOR Wei Yang]]|
|[[Yang, Xuerui|AUTHOR Xuerui Yang]]|
|[[Yang, Yingchun|AUTHOR Yingchun Yang]]|
|[[Yang, Yuguang|AUTHOR Yuguang Yang]]|
|[[Yang, Zhenchuan|AUTHOR Zhenchuan Yang]]|
|[[Yao, Kaisheng|AUTHOR Kaisheng Yao]]|
|[[Yao, Shengyu|AUTHOR Shengyu Yao]]|
|[[Yao, Xuchen|AUTHOR Xuchen Yao]]|
|[[Yao, Yiwu|AUTHOR Yiwu Yao]]|
|[[Yao, Zhuoyuan|AUTHOR Zhuoyuan Yao]]|
|[[Yarra, Chiranjeevi|AUTHOR Chiranjeevi Yarra]]|
|[[Yatabe, Kohei|AUTHOR Kohei Yatabe]]|
|[[Ye, Guoli|AUTHOR Guoli Ye]]|
|[[Ye, Jieping|AUTHOR Jieping Ye]]|
|[[Ye, Rong|AUTHOR Rong Ye]]|
|[[Ye, Shuaishuai|AUTHOR Shuaishuai Ye]]|
|[[Ye, Zi|AUTHOR Zi Ye]]|
|[[Yeh, Ching-Feng|AUTHOR Ching-Feng Yeh]]|
|[[Yemini, Yochai|AUTHOR Yochai Yemini]]|
|[[Yeo, Eun Jung|AUTHOR Eun Jung Yeo]]|
|[[Yeung, Yu Ting|AUTHOR Yu Ting Yeung]]|
|[[Yi, Jiangyan|AUTHOR Jiangyan Yi]]|
|[[Yin, Bao-Cai|AUTHOR Bao-Cai Yin]]|
|[[Yin, Dacheng|AUTHOR Dacheng Yin]]|
|[[Yin, Hengxin|AUTHOR Hengxin Yin]]|
|[[Yin, Shouyi|AUTHOR Shouyi Yin]]|
|[[Yin, Xiang|AUTHOR Xiang Yin]]|
|[[Yin, Xuefeng|AUTHOR Xuefeng Yin]]|
|[[Yokoyama, Hiroshi|AUTHOR Hiroshi Yokoyama]]|
|[[Yoneyama, Reo|AUTHOR Reo Yoneyama]]|
|[[Yoon, Jaesam|AUTHOR Jaesam Yoon]]|
|[[Yoshii, Kazuyoshi|AUTHOR Kazuyoshi Yoshii]]|
|[[Yoshinaga, Tsukasa|AUTHOR Tsukasa Yoshinaga]]|
|[[Yoshioka, Takuya|AUTHOR Takuya Yoshioka]]|
|[[You, Chenyu|AUTHOR Chenyu You]]|
|[[You, Jaeseong|AUTHOR Jaeseong You]]|
|[[You, Zhao|AUTHOR Zhao You]]|
|[[Yousefi, Midia|AUTHOR Midia Yousefi]]|
|[[Yu, Bing|AUTHOR Bing Yu]]|
|[[Yu, Cheng|AUTHOR Cheng Yu]]|
|[[Yu, Chengzhu|AUTHOR Chengzhu Yu]]|
|[[Yu, Dong|AUTHOR Dong Yu]]|
|[[Yu, Fan|AUTHOR Fan Yu]]|
|[[Yu, Guochen|AUTHOR Guochen Yu]]|
|[[Yu, Guoxing|AUTHOR Guoxing Yu]]|
|[[Yu, Haibin|AUTHOR Haibin Yu]]|
|[[Yu, Jiahui|AUTHOR Jiahui Yu]]|
|[[Yu, Jianwei|AUTHOR Jianwei Yu]]|
|[[Yu, Kai|AUTHOR Kai Yu]]|
|[[Yu, Mei|AUTHOR Mei Yu]]|
|[[Yu, Meng|AUTHOR Meng Yu]]|
|[[Yu, Ruiguo|AUTHOR Ruiguo Yu]]|
|[[Yu, Shi|AUTHOR Shi Yu]]|
|[[Yu, Xiangzhan|AUTHOR Xiangzhan Yu]]|
|[[Yu, Yan|AUTHOR Yan Yu]]|
|[[Yu, Zhesong|AUTHOR Zhesong Yu]]|
|[[Yuan, Jiahong|AUTHOR Jiahong Yuan]]|
|[[Yuan, Weitao|AUTHOR Weitao Yuan]]|
|[[Yue, Xianghu|AUTHOR Xianghu Yue]]|
|[[Yue, Zhengjun|AUTHOR Zhengjun Yue]]|
|[[Yun, Jaewoong|AUTHOR Jaewoong Yun]]|
|[[Yurong|AUTHOR Yurong]]|
|[[Yurt, Metehan|AUTHOR Metehan Yurt]]|
|[[Yusuf, Bolaji|AUTHOR Bolaji Yusuf]]|
|[[Yvon, François|AUTHOR François Yvon]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpauthorindexlisttable|k
|[[Zahner-Ritter, Katharina|AUTHOR Katharina Zahner-Ritter]]|
|[[Zaiem, Salah|AUTHOR Salah Zaiem]]|
|[[Zainkó, Csaba|AUTHOR Csaba Zainkó]]|
|[[Zaitova, Iuliia|AUTHOR Iuliia Zaitova]]|
|[[Zandie, Rohola|AUTHOR Rohola Zandie]]|
|[[Zanon Boito, Marcely|AUTHOR Marcely Zanon Boito]]|
|[[Zaporowski, Szymon|AUTHOR Szymon Zaporowski]]|
|[[Zare, Ali|AUTHOR Ali Zare]]|
|[[Zarrieß, Sina|AUTHOR Sina Zarrieß]]|
|[[Zayats, Vicky|AUTHOR Vicky Zayats]]|
|[[Zdansky, Jindrich|AUTHOR Jindrich Zdansky]]|
|[[Zeineldeen, Mohammad|AUTHOR Mohammad Zeineldeen]]|
|[[Żelasko, Piotr|AUTHOR Piotr Żelasko]]|
|[[Zellers, Margaret|AUTHOR Margaret Zellers]]|
|[[Zellou, Georgia|AUTHOR Georgia Zellou]]|
|[[Zen, Heiga|AUTHOR Heiga Zen]]|
|[[Zeng, Biao|AUTHOR Biao Zeng]]|
|[[Zeng, Michael|AUTHOR Michael Zeng]]|
|[[Zeng, Wenjun|AUTHOR Wenjun Zeng]]|
|[[Zerr, Benoît|AUTHOR Benoît Zerr]]|
|[[Zeyer, Albert|AUTHOR Albert Zeyer]]|
|[[Zhan, Haoyue|AUTHOR Haoyue Zhan]]|
|[[Zhan, Jinsong|AUTHOR Jinsong Zhan]]|
|[[Zhan, Puming|AUTHOR Puming Zhan]]|
|[[Zhang, Baiyan|AUTHOR Baiyan Zhang]]|
|[[Zhang, Binbin|AUTHOR Binbin Zhang]]|
|[[Zhang, Chao|AUTHOR Chao Zhang]]|
|[[Zhang, Chen|AUTHOR Chen Zhang]]|
|[[Zhang, Chuanyao|AUTHOR Chuanyao Zhang]]|
|[[Zhang, Chunlei|AUTHOR Chunlei Zhang]]|
|[[Zhang, Cong|AUTHOR Cong Zhang]]|
|[[Zhang, Cong|AUTHOR Cong Zhang]]|
|[[Zhang, Dawei|AUTHOR Dawei Zhang]]|
|[[Zhang, Dejun|AUTHOR Dejun Zhang]]|
|[[Zhang, Didi|AUTHOR Didi Zhang]]|
|[[Zhang, Dong|AUTHOR Dong Zhang]]|
|[[Zhang, Guangyan|AUTHOR Guangyan Zhang]]|
|[[Zhang, Haiteng|AUTHOR Haiteng Zhang]]|
|[[Zhang, Haitong|AUTHOR Haitong Zhang]]|
|[[Zhang, Hao|AUTHOR Hao Zhang]]|
|[[Zhang, Hao|AUTHOR Hao Zhang]]|
|[[Zhang, Haoyu|AUTHOR Haoyu Zhang]]|
|[[Zhang, Haozhe|AUTHOR Haozhe Zhang]]|
|[[Zhang, Huayun|AUTHOR Huayun Zhang]]|
|[[Zhang, Hui|AUTHOR Hui Zhang]]|
|[[Zhang, Jiajun|AUTHOR Jiajun Zhang]]|
|[[Zhang, Jianwei|AUTHOR Jianwei Zhang]]|
|[[Zhang, Jicheng|AUTHOR Jicheng Zhang]]|
|[[Zhang, Jie|AUTHOR Jie Zhang]]|
|[[Zhang, Jinsong|AUTHOR Jinsong Zhang]]|
|[[Zhang, Jisi|AUTHOR Jisi Zhang]]|
|[[Zhang, Ju|AUTHOR Ju Zhang]]|
|[[Zhang, Jun|AUTHOR Jun Zhang]]|
|[[Zhang, Junbo|AUTHOR Junbo Zhang]]|
|[[Zhang, Kaili|AUTHOR Kaili Zhang]]|
|[[Zhang, Kanghao|AUTHOR Kanghao Zhang]]|
|[[Zhang, Kexun|AUTHOR Kexun Zhang]]|
|[[Zhang, Leying|AUTHOR Leying Zhang]]|
|[[Zhang, Li|AUTHOR Li Zhang]]|
|[[Zhang, Lin|AUTHOR Lin Zhang]]|
|[[Zhang, Pei|AUTHOR Pei Zhang]]|
|[[Zhang, Peng|AUTHOR Peng Zhang]]|
|[[Zhang, Pengyuan|AUTHOR Pengyuan Zhang]]|
|[[Zhang, Qian|AUTHOR Qian Zhang]]|
|[[Zhang, Qianyutong|AUTHOR Qianyutong Zhang]]|
|[[Zhang, Qinglin|AUTHOR Qinglin Zhang]]|
|[[Zhang, Qiquan|AUTHOR Qiquan Zhang]]|
|[[Zhang, Shaoji|AUTHOR Shaoji Zhang]]|
|[[Zhang, Shilei|AUTHOR Shilei Zhang]]|
|[[Zhang, Shiliang|AUTHOR Shiliang Zhang]]|
|[[Zhang, Shimin|AUTHOR Shimin Zhang]]|
|[[Zhang, Shiqi|AUTHOR Shiqi Zhang]]|
|[[Zhang, Shi-Xiong|AUTHOR Shi-Xiong Zhang]]|
|[[Zhang, Shuai|AUTHOR Shuai Zhang]]|
|[[Zhang, Shucong|AUTHOR Shucong Zhang]]|
|[[Zhang, Sijia|AUTHOR Sijia Zhang]]|
|[[Zhang, Siyuan|AUTHOR Siyuan Zhang]]|
|[[Zhang, Tao|AUTHOR Tao Zhang]]|
|[[Zhang, Wei|AUTHOR Wei Zhang]]|
|[[Zhang, Weibin|AUTHOR Weibin Zhang]]|
|[[Zhang, Weicheng|AUTHOR Weicheng Zhang]]|
|[[Zhang, Wei-Qiang|AUTHOR Wei-Qiang Zhang]]|
|[[Zhang, Xia|AUTHOR Xia Zhang]]|
|[[Zhang, Xianwei|AUTHOR Xianwei Zhang]]|
|[[Zhang, Xiao-Lei|AUTHOR Xiao-Lei Zhang]]|
|[[Zhang, Xiao-Qi|AUTHOR Xiao-Qi Zhang]]|
|[[Zhang, Xinyi|AUTHOR Xinyi Zhang]]|
|[[Zhang, Xu|AUTHOR Xu Zhang]]|
|[[Zhang, Xudong|AUTHOR Xudong Zhang]]|
|[[Zhang, Xueliang|AUTHOR Xueliang Zhang]]|
|[[Zhang, Yang|AUTHOR Yang Zhang]]|
|[[Zhang, Yang|AUTHOR Yang Zhang]]|
|[[Zhang, Yang|AUTHOR Yang Zhang]]|
|[[Zhang, Yike|AUTHOR Yike Zhang]]|
|[[Zhang, Yinping|AUTHOR Yinping Zhang]]|
|[[Zhang, Yi|AUTHOR Yi Zhang]]|
|[[Zhang, Yi|AUTHOR Yi Zhang]]|
|[[Zhang, Yi|AUTHOR Yi Zhang]]|
|[[Zhang, You|AUTHOR You Zhang]]|
|[[Zhang, Yu|AUTHOR Yu Zhang]]|
|[[Zhang, Yuekai|AUTHOR Yuekai Zhang]]|
|[[Zhang, Yu-Jia|AUTHOR Yu-Jia Zhang]]|
|[[Zhang, Yuqing|AUTHOR Yuqing Zhang]]|
|[[Zhang, Yuxiang|AUTHOR Yuxiang Zhang]]|
|[[Zhang, Zhenchuan|AUTHOR Zhenchuan Zhang]]|
|[[Zhang, Zhihong|AUTHOR Zhihong Zhang]]|
|[[Zhang, Zhihui|AUTHOR Zhihui Zhang]]|
|[[Zhang, Zhiwen|AUTHOR Zhiwen Zhang]]|
|[[Zhang, Zhiyu|AUTHOR Zhiyu Zhang]]|
|[[Zhang, Zhuohuang|AUTHOR Zhuohuang Zhang]]|
|[[Zhao, Chendong|AUTHOR Chendong Zhao]]|
|[[Zhao, Chunyu|AUTHOR Chunyu Zhao]]|
|[[Zhao, Ding|AUTHOR Ding Zhao]]|
|[[Zhao, Guanlong|AUTHOR Guanlong Zhao]]|
|[[Zhao, Hang|AUTHOR Hang Zhao]]|
|[[Zhao, Huan|AUTHOR Huan Zhao]]|
|[[Zhao, Jing|AUTHOR Jing Zhao]]|
|[[Zhao, Jinming|AUTHOR Jinming Zhao]]|
|[[Zhao, Junan|AUTHOR Junan Zhao]]|
|[[Zhao, Liang|AUTHOR Liang Zhao]]|
|[[Zhao, Rui|AUTHOR Rui Zhao]]|
|[[Zhao, Sheng|AUTHOR Sheng Zhao]]|
|[[Zhao, Shuaijiang|AUTHOR Shuaijiang Zhao]]|
|[[Zhao, Xi|AUTHOR Xi Zhao]]|
|[[Zhao, Xintao|AUTHOR Xintao Zhao]]|
|[[Zhao, Yucheng|AUTHOR Yucheng Zhao]]|
|[[Zhao, Yun|AUTHOR Yun Zhao]]|
|[[Zhao, Yunxin|AUTHOR Yunxin Zhao]]|
|[[Zhao, Zhengdao|AUTHOR Zhengdao Zhao]]|
|[[Zhao, Zhiyuan|AUTHOR Zhiyuan Zhao]]|
|[[Zhao, Zhou|AUTHOR Zhou Zhao]]|
|[[Zheng, Chengshi|AUTHOR Chengshi Zheng]]|
|[[Zheng, Nengheng|AUTHOR Nengheng Zheng]]|
|[[Zheng, Renjie|AUTHOR Renjie Zheng]]|
|[[Zheng, Siqi|AUTHOR Siqi Zheng]]|
|[[Zheng, Thomas Fang|AUTHOR Thomas Fang Zheng]]|
|[[Zheng, Tieran|AUTHOR Tieran Zheng]]|
|[[Zheng, Xiguang|AUTHOR Xiguang Zheng]]|
|[[Zheng, Xu|AUTHOR Xu Zheng]]|
|[[Zheng, Yuxing|AUTHOR Yuxing Zheng]]|
|[[Zheng, Zuoyun|AUTHOR Zuoyun Zheng]]|
|[[Zhi, Yiming|AUTHOR Yiming Zhi]]|
|[[Zhong, Xionghu|AUTHOR Xionghu Zhong]]|
|[[Zhou, Hang|AUTHOR Hang Zhou]]|
|[[Zhou, Hengshun|AUTHOR Hengshun Zhou]]|
|[[Zhou, Huan|AUTHOR Huan Zhou]]|
|[[Zhou, Jiatong|AUTHOR Jiatong Zhou]]|
|[[Zhou, Jiayu|AUTHOR Jiayu Zhou]]|
|[[Zhou, Kun|AUTHOR Kun Zhou]]|
|[[Zhou, Peilin|AUTHOR Peilin Zhou]]|
|[[Zhou, Shiyu|AUTHOR Shiyu Zhou]]|
|[[Zhou, Tianyan|AUTHOR Tianyan Zhou]]|
|[[Zhou, Wei|AUTHOR Wei Zhou]]|
|[[Zhou, Wei|AUTHOR Wei Zhou]]|
|[[Zhou, Xiao|AUTHOR Xiao Zhou]]|
|[[Zhou, Xinyong|AUTHOR Xinyong Zhou]]|
|[[Zhou, Yi|AUTHOR Yi Zhou]]|
|[[Zhou, Yu|AUTHOR Yu Zhou]]|
|[[Zhou, Yuanfu|AUTHOR Yuanfu Zhou]]|
|[[Zhou, Zhikai|AUTHOR Zhikai Zhou]]|
|[[Zhou, Zimu|AUTHOR Zimu Zhou]]|
|[[Zhu, Bilei|AUTHOR Bilei Zhu]]|
|[[Zhu, Chenguang|AUTHOR Chenguang Zhu]]|
|[[Zhu, Dawei|AUTHOR Dawei Zhu]]|
|[[Zhu, Ge|AUTHOR Ge Zhu]]|
|[[Zhu, Hongning|AUTHOR Hongning Zhu]]|
|[[Zhu, Huifeng|AUTHOR Huifeng Zhu]]|
|[[Zhu, Huijia|AUTHOR Huijia Zhu]]|
|[[Zhu, Jian|AUTHOR Jian Zhu]]|
|[[Zhu, Pai|AUTHOR Pai Zhu]]|
|[[Zhu, Qingying|AUTHOR Qingying Zhu]]|
|[[Zhu, Qiu-shi|AUTHOR Qiu-shi Zhu]]|
|[[Zhu, Rui|AUTHOR Rui Zhu]]|
|[[Zhu, Tinglong|AUTHOR Tinglong Zhu]]|
|[[Zhu, Xinghua|AUTHOR Xinghua Zhu]]|
|[[Zhu, Youxiang|AUTHOR Youxiang Zhu]]|
|[[Zhu, Yun|AUTHOR Yun Zhu]]|
|[[Zhu, Yunzheng|AUTHOR Yunzheng Zhu]]|
|[[Zhu, Zirun|AUTHOR Zirun Zhu]]|
|[[Zi, Yunfei|AUTHOR Yunfei Zi]]|
|[[Žilinec, Matúš|AUTHOR Matúš Žilinec]]|
|[[Zilka, Miri|AUTHOR Miri Zilka]]|
|[[Zmolikova, Katerina|AUTHOR Katerina Zmolikova]]|
|[[Zoehrer, Matthias|AUTHOR Matthias Zoehrer]]|
|[[Zong, Chengqing|AUTHOR Chengqing Zong]]|
|[[Zorilă, Cătălin|AUTHOR Cătălin Zorilă]]|
|[[Zou, Wei|AUTHOR Wei Zou]]|
|[[Zou, Yuexian|AUTHOR Yuexian Zou]]|
|[[Zou, Yuxiang|AUTHOR Yuxiang Zou]]|
|[[Zuluaga, Maria A.|AUTHOR Maria A. Zuluaga]]|
|[[Zuluaga-Gomez, Juan|AUTHOR Juan Zuluaga-Gomez]]|
|[[Zwerts, Joeri A.|AUTHOR Joeri A. Zwerts]]|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-7|PAPER Fri-A-V-4-7 — Adaptive Margin Circle Loss for Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adaptive Margin Circle Loss for Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210676.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-11|PAPER Fri-A-SS-2-11 — Conformer Parrotron: A Faster and Stronger End-to-End Speech Conversion and Recognition Model for Atypical Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Conformer Parrotron: A Faster and Stronger End-to-End Speech Conversion and Recognition Model for Atypical Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211005.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-5-6|PAPER Tue-A-V-5-6 — A Noise Robust Method for Word-Level Pronunciation Assessment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Noise Robust Method for Word-Level Pronunciation Assessment</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210774.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-10|PAPER Wed-A-V-3-10 — End to End Transformer-Based Contextual Speech Recognition Based on Pointer Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End to End Transformer-Based Contextual Speech Recognition Based on Pointer Network</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210581.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-6|PAPER Fri-M-V-1-6 — F₀ Patterns of L2 English Speech by Mandarin Chinese Learners]]</div>|^<div class="cpauthorindexpersoncardpapertitle">F₀ Patterns of L2 English Speech by Mandarin Chinese Learners</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210843.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-7|PAPER Fri-M-V-1-7 — A Neural Network-Based Noise Compensation Method for Pronunciation Assessment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Neural Network-Based Noise Compensation Method for Pronunciation Assessment</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210931.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-7|PAPER Fri-A-V-1-7 — Deep Feature Transfer Learning for Automatic Pronunciation Assessment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Feature Transfer Learning for Automatic Pronunciation Assessment</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210812.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-3|PAPER Wed-M-V-4-3 — X-net: A Joint Scale Down and Scale Up Method for Voice Call]]</div>|^<div class="cpauthorindexpersoncardpapertitle">X-net: A Joint Scale Down and Scale Up Method for Voice Call</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211190.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-1-1|PAPER Fri-M-O-1-1 — Self-Attention Channel Combinator Frontend for End-to-End Multichannel Far-Field Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Self-Attention Channel Combinator Frontend for End-to-End Multichannel Far-Field Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210775.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-11|PAPER Wed-A-V-3-11 — A Comparative Study on Neural Architectures and Training Methods for Japanese Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparative Study on Neural Architectures and Training Methods for Japanese Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210978.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-6|PAPER Wed-E-V-1-6 — Dual Script E2E Framework for Multilingual and Code-Switching ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dual Script E2E Framework for Multilingual and Code-Switching ASR</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211209.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-1|PAPER Fri-A-V-4-1 — Graph-Based Label Propagation for Semi-Supervised Speaker Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Graph-Based Label Propagation for Semi-Supervised Speaker Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211454.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-1|PAPER Tue-A-V-4-1 — Improving Streaming Transformer Based ASR Under a Framework of Self-Supervised Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Streaming Transformer Based ASR Under a Framework of Self-Supervised Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211186.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-1-4|PAPER Wed-M-V-1-4 — Improving Accent Identification and Accented Speech Recognition Under a Framework of Self-Supervised Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Accent Identification and Accented Speech Recognition Under a Framework of Self-Supervised Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210777.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-5|PAPER Fri-A-V-1-5 — Explore wav2vec 2.0 for Mispronunciation Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Explore wav2vec 2.0 for Mispronunciation Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210415.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-11|PAPER Fri-A-V-3-11 — Efficient Conformer with Prob-Sparse Attention Mechanism for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Efficient Conformer with Prob-Sparse Attention Mechanism for End-to-End Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210538.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-3|PAPER Tue-E-V-2-3 — Residual Echo and Noise Cancellation with Feature Attention Module and Multi-Domain Loss Function]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Residual Echo and Noise Cancellation with Feature Attention Module and Multi-Domain Loss Function</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210852.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-1|PAPER Tue-M-V-2-1 — TacoLPCNet: Fast and Stable TTS by Conditioning LPCNet on Mel Spectrogram Predictions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">TacoLPCNet: Fast and Stable TTS by Conditioning LPCNet on Mel Spectrogram Predictions</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211011.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-5|PAPER Tue-M-V-2-5 — Information Sieve: Content Leakage Reduction in End-to-End Prosody Transfer for Expressive Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Information Sieve: Content Leakage Reduction in End-to-End Prosody Transfer for Expressive Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210887.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-4-7|PAPER Tue-M-V-4-7 — Domain-Specific Multi-Agent Dialog Policy Learning in Multi-Domain Task-Oriented Scenarios]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Domain-Specific Multi-Agent Dialog Policy Learning in Multi-Domain Task-Oriented Scenarios</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211978.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-11|PAPER Tue-E-V-1-11 — Joint Feature Enhancement and Speaker Recognition with Multi-Objective Task-Oriented Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Joint Feature Enhancement and Speaker Recognition with Multi-Objective Task-Oriented Network</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211133.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-2-6|PAPER Fri-A-V-2-6 — Metric Learning Based Feature Representation with Gated Fusion Model for Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Metric Learning Based Feature Representation with Gated Fusion Model for Speech Emotion Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212067.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-2-10|PAPER Fri-A-V-2-10 — Time-Frequency Representation Learning with Graph Convolutional Network for Dialogue-Level Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Time-Frequency Representation Learning with Graph Convolutional Network for Dialogue-Level Speech Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211320.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-9|PAPER Thu-M-V-1-9 — Uncertainty-Aware COVID-19 Detection from Imbalanced Sound Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Uncertainty-Aware COVID-19 Detection from Imbalanced Sound Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211062.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-2|PAPER Tue-A-SS-2-2 — PANACEA Cough Sound-Based Diagnosis of COVID-19 for the DiCOVA 2021 Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PANACEA Cough Sound-Based Diagnosis of COVID-19 for the DiCOVA 2021 Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211583.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-2-3|PAPER Fri-M-O-2-3 — Ctrl-P: Temporal Control of Prosodic Variation for Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ctrl-P: Temporal Control of Prosodic Variation for Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211610.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-2-4|PAPER Fri-M-O-2-4 — ADEPT: A Dataset for Evaluating Prosody Transfer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ADEPT: A Dataset for Evaluating Prosody Transfer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210939.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-2-2|PAPER Tue-E-O-2-2 — Synchronic Fortition in Five Romance Languages? A Large Corpus-Based Study of Word-Initial Devoicing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Synchronic Fortition in Five Romance Languages? A Large Corpus-Based Study of Word-Initial Devoicing</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211650.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-2-6|PAPER Thu-M-SS-2-6 — Modeling the Effect of Military Oxygen Masks on Speech Characteristics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modeling the Effect of Military Oxygen Masks on Speech Characteristics</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210229.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-7-2|PAPER Thu-M-V-7-2 — Injecting Descriptive Meta-Information into Pre-Trained Language Models with Hypernetworks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Injecting Descriptive Meta-Information into Pre-Trained Language Models with Hypernetworks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211394.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-2-4|PAPER Tue-A-O-2-4 — Word Competition: An Entropy-Based Approach in the DIANA Model of Human Word Comprehension]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Word Competition: An Entropy-Based Approach in the DIANA Model of Human Word Comprehension</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211408.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-2-5|PAPER Tue-A-O-2-5 — Time-to-Event Models for Analyzing Reaction Time Sequences]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Time-to-Event Models for Analyzing Reaction Time Sequences</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211700.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-2-6|PAPER Tue-A-O-2-6 — Models of Reaction Times in Auditory Lexical Decision: RTonset versus RToffset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Models of Reaction Times in Auditory Lexical Decision: RTonset versus RToffset</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211581.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-2-1|PAPER Tue-M-O-2-1 — Acoustic Indicators of Speech Motor Coordination in Adults With and Without Traumatic Brain Injury]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Indicators of Speech Motor Coordination in Adults With and Without Traumatic Brain Injury</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211394.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-2-4|PAPER Tue-A-O-2-4 — Word Competition: An Entropy-Based Approach in the DIANA Model of Human Word Comprehension]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Word Competition: An Entropy-Based Approach in the DIANA Model of Human Word Comprehension</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211408.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-2-5|PAPER Tue-A-O-2-5 — Time-to-Event Models for Analyzing Reaction Time Sequences]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Time-to-Event Models for Analyzing Reaction Time Sequences</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211700.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-2-6|PAPER Tue-A-O-2-6 — Models of Reaction Times in Auditory Lexical Decision: RTonset versus RToffset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Models of Reaction Times in Auditory Lexical Decision: RTonset versus RToffset</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210586.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-8|PAPER Wed-M-V-6-8 — HMM-Free Encoder Pre-Training for Streaming RNN Transducer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">HMM-Free Encoder Pre-Training for Streaming RNN Transducer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211104.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-6|PAPER Tue-E-V-5-6 — The TAL System for the INTERSPEECH2021 Shared Task on Automatic Speech Recognition for Non-Native Childrens Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The TAL System for the INTERSPEECH2021 Shared Task on Automatic Speech Recognition for Non-Native Childrens Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210501.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-5|PAPER Fri-A-V-6-5 — Three-Module Modeling For End-to-End Spoken Language Understanding Using Pre-Trained DNN-HMM-Based Acoustic-Phonetic Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Three-Module Modeling For End-to-End Spoken Language Understanding Using Pre-Trained DNN-HMM-Based Acoustic-Phonetic Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210055.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-2|PAPER Fri-A-V-6-2 — Augmenting Slot Values and Contexts for Spoken Language Understanding with Pretrained Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Augmenting Slot Values and Contexts for Spoken Language Understanding with Pretrained Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211878.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-O-2-3|PAPER Thu-A-O-2-3 — N-MTTL SI Model: Non-Intrusive Multi-Task Transfer Learning-Based Speech Intelligibility Prediction Model with Scenery Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">N-MTTL SI Model: Non-Intrusive Multi-Task Transfer Learning-Based Speech Intelligibility Prediction Model with Scenery Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211704.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-O-2-4|PAPER Fri-A-O-2-4 — Spoken Term Detection and Relevance Score Estimation Using Dot-Product of Pronunciation Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spoken Term Detection and Relevance Score Estimation Using Dot-Product of Pronunciation Embeddings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211798.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-4|PAPER Tue-M-SS-1-4 — Transfer Learning and Data Augmentation Techniques to the COVID-19 Identification Tasks in ComParE 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transfer Learning and Data Augmentation Techniques to the COVID-19 Identification Tasks in ComParE 2021</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210541.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-5|PAPER Tue-E-V-1-5 — Out of a Hundred Trials, How Many Errors Does Your Speaker Verifier Make?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Out of a Hundred Trials, How Many Errors Does Your Speaker Verifier Make?</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210703.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-1-7|PAPER Thu-A-V-1-7 — Emotion Recognition from Speech Using wav2vec 2.0 Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Emotion Recognition from Speech Using wav2vec 2.0 Embeddings</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210753.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-4|PAPER Thu-A-SS-2-4 — Alzheimer Disease Recognition Using Speech-Based Embeddings From Pre-Trained Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Alzheimer Disease Recognition Using Speech-Based Embeddings From Pre-Trained Models</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210745.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-4|PAPER Fri-A-V-1-4 — Phone-Level Pronunciation Scoring for Spanish Speakers Learning English Using a GOP-DNN System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phone-Level Pronunciation Scoring for Spanish Speakers Learning English Using a GOP-DNN System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210549.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-2-5|PAPER Tue-M-O-2-5 — EasyCall Corpus: A Dysarthric Speech Dataset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">EasyCall Corpus: A Dysarthric Speech Dataset</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212202.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-1-5|PAPER Fri-M-O-1-5 — Simulating Reading Mistakes for Child Speech Transformer-Based Phone Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Simulating Reading Mistakes for Child Speech Transformer-Based Phone Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218025.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-S&T-1-4|PAPER Fri-A-S&T-1-4 — Lalilo: A Reading Assistant for Children Featuring Speech Recognition-Based Reading Mistake Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Lalilo: A Reading Assistant for Children Featuring Speech Recognition-Based Reading Mistake Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211442.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-1-3|PAPER Tue-A-O-1-3 — Speaker Embeddings by Modeling Channel-Wise Correlations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Embeddings by Modeling Channel-Wise Correlations</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212016.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-1-5|PAPER Tue-A-O-1-5 — ICSpk: Interpretable Complex Speaker Embedding Extractor from Raw Waveform]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ICSpk: Interpretable Complex Speaker Embedding Extractor from Raw Waveform</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210627.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-1-3|PAPER Wed-A-O-1-3 — Text Augmentation for Language Models in High Error Recognition Scenario]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Text Augmentation for Language Models in High Error Recognition Scenario</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212025.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-1-4|PAPER Wed-E-O-1-4 — Effective Phase Encoding for End-To-End Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effective Phase Encoding for End-To-End Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211756.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-3-5|PAPER Thu-M-O-3-5 — Out-of-Vocabulary Words Detection with Attention and CTC Alignments in an End-to-End ASR System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Out-of-Vocabulary Words Detection with Attention and CTC Alignments in an End-to-End ASR System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211214.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-8|PAPER Wed-M-V-4-8 — Multi-Channel Opus Compression for Far-Field Automatic Speech Recognition with a Fixed Bitrate Budget]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Channel Opus Compression for Far-Field Automatic Speech Recognition with a Fixed Bitrate Budget</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211025.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-8|PAPER Wed-E-V-5-8 — DEMUCS-Mobile : On-Device Lightweight Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DEMUCS-Mobile : On-Device Lightweight Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210192.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-3-3|PAPER Wed-M-O-3-3 — Using X-Vectors for Speech Activity Detection in Broadcast Streams]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Using X-Vectors for Speech Activity Detection in Broadcast Streams</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218010.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-S&T-1-3|PAPER Wed-A-S&T-1-3 — Autonomous Robot for Measuring Room Impulse Responses]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Autonomous Robot for Measuring Room Impulse Responses</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210085.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-1-2|PAPER Fri-A-SS-1-2 — Acoustic Echo Cancellation with Cross-Domain Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Echo Cancellation with Cross-Domain Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210019.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-1|PAPER Tue-M-SS-1-1 — The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211123.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-5-8|PAPER Thu-M-V-5-8 — A Prototypical Network Approach for Evaluating Generated Emotional Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Prototypical Network Approach for Evaluating Generated Emotional Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211397.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-4|PAPER Thu-A-V-6-4 — AISHELL-4: An Open Source Dataset for Speech Enhancement, Separation, Recognition and Speaker Diarization in Conference Scenario]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AISHELL-4: An Open Source Dataset for Speech Enhancement, Separation, Recognition and Speaker Diarization in Conference Scenario</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211649.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-2-11|PAPER Fri-M-V-2-11 — Developmental Changes of Vowel Acoustics in Adolescents]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Developmental Changes of Vowel Acoustics in Adolescents</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218007.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-S&T-1-7|PAPER Tue-A-S&T-1-7 — Advanced Semi-Blind Speaker Extraction and Tracking Implemented in Experimental Device with Revolving Dense Microphone Array]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Advanced Semi-Blind Speaker Extraction and Tracking Implemented in Experimental Device with Revolving Dense Microphone Array</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211589.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-2|PAPER Thu-A-SS-2-2 — Influence of the Interviewer on the Automatic Assessment of Alzheimer’s Disease in the Context of the ADReSSo Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Influence of the Interviewer on the Automatic Assessment of Alzheimer’s Disease in the Context of the ADReSSo Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211111.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-SS-1-6|PAPER Wed-M-SS-1-6 — SRIB-LEAP Submission to Far-Field Multi-Channel Speech Enhancement Challenge for Video Conferencing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SRIB-LEAP Submission to Far-Field Multi-Channel Speech Enhancement Challenge for Video Conferencing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211111.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-SS-1-6|PAPER Wed-M-SS-1-6 — SRIB-LEAP Submission to Far-Field Multi-Channel Speech Enhancement Challenge for Video Conferencing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SRIB-LEAP Submission to Far-Field Multi-Channel Speech Enhancement Challenge for Video Conferencing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211130.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-9|PAPER Wed-E-V-5-9 — Speech Denoising Without Clean Training Data: A Noise2Noise Approach]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Denoising Without Clean Training Data: A Noise2Noise Approach</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210983.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-1|PAPER Tue-A-SS-1-1 — Privacy-Preserving Voice Anti-Spoofing Using Secure Multi-Party Computation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Privacy-Preserving Voice Anti-Spoofing Using Secure Multi-Party Computation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211062.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-2|PAPER Tue-A-SS-2-2 — PANACEA Cough Sound-Based Diagnosis of COVID-19 for the DiCOVA 2021 Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PANACEA Cough Sound-Based Diagnosis of COVID-19 for the DiCOVA 2021 Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210047.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-3|PAPER Tue-E-V-3-3 — Speech Decomposition Based on a Hybrid Speech Model and Optimal Segmentation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Decomposition Based on a Hybrid Speech Model and Optimal Segmentation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218004.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-S&T-1-4|PAPER Tue-A-S&T-1-4 — ROXANNE Research Platform: Automate Criminal Investigations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ROXANNE Research Platform: Automate Criminal Investigations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211163.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-1-2|PAPER Tue-A-O-1-2 — Spine2Net: SpineNet with Res2Net and Time-Squeeze-and-Excitation Blocks for Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spine2Net: SpineNet with Res2Net and Time-Squeeze-and-Excitation Blocks for Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211566.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-3|PAPER Wed-M-V-6-3 — Contextualized Streaming End-to-End Speech Recognition with Trie-Based Deep Biasing and Shallow Fusion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Contextualized Streaming End-to-End Speech Recognition with Trie-Based Deep Biasing and Shallow Fusion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210591.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-13|PAPER Wed-A-V-2-13 — Correcting Automated and Manual Speech Transcription Errors Using Warped Language Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Correcting Automated and Manual Speech Transcription Errors Using Warped Language Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210534.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-7-3|PAPER Thu-M-V-7-3 — Causal Confusion Reduction for Robust Multi-Domain Dialogue Policy]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Causal Confusion Reduction for Robust Multi-Domain Dialogue Policy</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211705.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-11|PAPER Fri-M-V-5-11 — Generalized Spoofing Detection Inspired from Audio Generation Artifacts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Generalized Spoofing Detection Inspired from Audio Generation Artifacts</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210618.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-1|PAPER Fri-A-V-6-1 — Intent Detection and Slot Filling for Vietnamese]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Intent Detection and Slot Filling for Vietnamese</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211360.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-9|PAPER Thu-M-V-2-9 — LiRA: Learning Visual Speech Representations from Audio Through Self-Supervision]]</div>|^<div class="cpauthorindexpersoncardpapertitle">LiRA: Learning Visual Speech Representations from Audio Through Self-Supervision</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211370.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-4-8|PAPER Tue-M-V-4-8 — Leveraging ASR N-Best in Deep Entity Retrieval]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Leveraging ASR N-Best in Deep Entity Retrieval</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212091.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-12|PAPER Wed-M-V-5-12 — Investigating the Impact of Spectral and Temporal Degradation on End-to-End Automatic Speech Recognition Performance]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Impact of Spectral and Temporal Degradation on End-to-End Automatic Speech Recognition Performance</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211436.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-1-6|PAPER Tue-M-V-1-6 — Mutual Information Enhanced Training for Speaker Embedding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Mutual Information Enhanced Training for Speaker Embedding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210731.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-6|PAPER Tue-M-V-6-6 — Enrollment-Less Training for Personalized Voice Activity Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Enrollment-Less Training for Personalized Voice Activity Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211607.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-5-5|PAPER Tue-A-V-5-5 — Zero-Shot Joint Modeling of Multiple Spoken-Text-Style Conversion Tasks Using Switching Tokens]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Zero-Shot Joint Modeling of Multiple Spoken-Text-Style Conversion Tasks Using Switching Tokens</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-13|PAPER Wed-E-V-3-13 — Unified Autoregressive Modeling for Joint End-to-End Multi-Talker Overlapped Speech Recognition and Speaker Attribute Estimation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unified Autoregressive Modeling for Joint End-to-End Multi-Talker Overlapped Speech Recognition and Speaker Attribute Estimation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211992.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-6|PAPER Fri-M-V-3-6 — Cross-Modal Transformer-Based Neural Correction Models for Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Modal Transformer-Based Neural Correction Models for Automatic Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211981.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-11|PAPER Fri-A-V-1-11 — End-to-End Rich Transcription-Style Automatic Speech Recognition with Semi-Supervised Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Rich Transcription-Style Automatic Speech Recognition with Semi-Supervised Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210614.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-6|PAPER Wed-E-V-3-6 — Self-Adaptive Distillation for Multilingual Speech Recognition: Leveraging Student Independence]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Self-Adaptive Distillation for Multilingual Speech Recognition: Leveraging Student Independence</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211767.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-12|PAPER Wed-A-V-2-12 — A Discriminative Entity-Aware Language Model for Virtual Assistants]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Discriminative Entity-Aware Language Model for Virtual Assistants</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212197.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-11|PAPER Tue-A-SS-2-11 — Investigating Feature Selection and Explainability for COVID-19 Diagnostics from Cough Sounds]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Feature Selection and Explainability for COVID-19 Diagnostics from Cough Sounds</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218026.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-S&T-1-5|PAPER Fri-A-S&T-1-5 — Automatic Radiology Report Editing Through Voice]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Radiology Report Editing Through Voice</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212086.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-6-10|PAPER Tue-A-V-6-10 — Many-to-Many Voice Conversion Based Feature Disentanglement Using Variational Autoencoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Many-to-Many Voice Conversion Based Feature Disentanglement Using Variational Autoencoder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210610.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-1-5|PAPER Thu-A-V-1-5 — Stochastic Process Regression for Cross-Cultural Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Stochastic Process Regression for Cross-Cultural Speech Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210656.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-5|PAPER Tue-A-V-1-5 — Acoustic Scene Classification Using Kervolution-Based SubSpectralNet]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Scene Classification Using Kervolution-Based SubSpectralNet</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211913.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-14|PAPER Fri-A-SS-2-14 — Comparing Supervised Models and Learned Speech Representations for Classifying Intelligibility of Disordered Speech on Selected Phrases]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparing Supervised Models and Learned Speech Representations for Classifying Intelligibility of Disordered Speech on Selected Phrases</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-3|PAPER Tue-A-V-2-3 — A Comparative Study of Different EMG Features for Acoustics-to-EMG Mapping]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparative Study of Different EMG Features for Acoustics-to-EMG Mapping</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210198.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-1|PAPER Wed-E-V-1-1 — Bootstrap an End-to-End ASR System by Multilingual Training, Transfer Learning, Text-to-Text Mapping and Synthetic Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bootstrap an End-to-End ASR System by Multilingual Training, Transfer Learning, Text-to-Text Mapping and Synthetic Audio</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211123.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-5-8|PAPER Thu-M-V-5-8 — A Prototypical Network Approach for Evaluating Generated Emotional Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Prototypical Network Approach for Evaluating Generated Emotional Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210023.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-8|PAPER Tue-A-V-2-8 — Silent versus Modal Multi-Speaker Speech Recognition from Ultrasound and Video]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Silent versus Modal Multi-Speaker Speech Recognition from Ultrasound and Video</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210616.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-3-2|PAPER Wed-E-O-3-2 — ORCA-SLANG: An Automatic Multi-Stage Semi-Supervised Deep Learning Framework for Large-Scale Killer Whale Call Type Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ORCA-SLANG: An Automatic Multi-Stage Semi-Supervised Deep Learning Framework for Large-Scale Killer Whale Call Type Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210516.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-6|PAPER Thu-M-V-4-6 — Scenario-Dependent Speaker Diarization for DIHARD-III Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Scenario-Dependent Speaker Diarization for DIHARD-III Challenge</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210750.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-3|PAPER Thu-A-V-4-3 — Target-Speaker Voice Activity Detection with Improved i-Vector Estimation for Unknown Number of Speaker]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Target-Speaker Voice Activity Detection with Improved i-Vector Estimation for Unknown Number of Speaker</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211540.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-SS-1-7|PAPER Tue-E-SS-1-7 — Modeling Dysphonia Severity as a Function of Roughness and Breathiness Ratings in the GRBAS Scale]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modeling Dysphonia Severity as a Function of Roughness and Breathiness Ratings in the GRBAS Scale</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210975.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-O-1-2|PAPER Thu-A-O-1-2 — Comparison of the Finite Element Method, the Multimodal Method and the Transmission-Line Model for the Computation of Vocal Tract Transfer Functions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparison of the Finite Element Method, the Multimodal Method and the Transmission-Line Model for the Computation of Vocal Tract Transfer Functions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212253.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-10|PAPER Tue-E-V-2-10 — Should We Always Separate?: Switching Between Enhanced and Observed Signals for Overlapping Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Should We Always Separate?: Switching Between Enhanced and Observed Signals for Overlapping Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210986.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-3-1|PAPER Wed-M-O-3-1 — Auxiliary Loss Function for Target Speech Extraction and Recognition with Weak Supervision Based on Speaker Characteristics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Auxiliary Loss Function for Target Speech Extraction and Recognition with Weak Supervision Based on Speaker Characteristics</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210437.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-6|PAPER Wed-M-V-6-6 — Streaming End-to-End Speech Recognition for Hybrid RNN-T/Attention Architecture]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Streaming End-to-End Speech Recognition for Hybrid RNN-T/Attention Architecture</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210124.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-2|PAPER Wed-A-V-4-2 — PILOT: Introducing Transformers for Probabilistic Sound Event Localization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PILOT: Introducing Transformers for Probabilistic Sound Event Localization</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210338.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-3|PAPER Thu-M-V-3-3 — Continuous Speech Separation Using Speaker Inventory for Long Recording]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Continuous Speech Separation Using Speaker Inventory for Long Recording</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211177.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-3|PAPER Thu-A-V-3-3 — Graph-PIT: Generalized Permutation Invariant Training for Continuous Separation of Arbitrary Numbers of Speakers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Graph-PIT: Generalized Permutation Invariant Training for Continuous Separation of Arbitrary Numbers of Speakers</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211369.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-5|PAPER Thu-A-V-3-5 — Few-Shot Learning of New Sound Classes for Target Sound Extraction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Few-Shot Learning of New Sound Classes for Target Sound Extraction</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211004.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-5|PAPER Thu-A-V-4-5 — Advances in Integration of End-to-End Neural and Clustering-Based Diarization for Real Conversational Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Advances in Integration of End-to-End Neural and Clustering-Based Diarization for Real Conversational Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211540.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-SS-1-7|PAPER Tue-E-SS-1-7 — Modeling Dysphonia Severity as a Function of Roughness and Breathiness Ratings in the GRBAS Scale]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modeling Dysphonia Severity as a Function of Roughness and Breathiness Ratings in the GRBAS Scale</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211604.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-O-1-5|PAPER Thu-A-O-1-5 — Learning Robust Speech Representation with an Articulatory-Regularized Variational Autoencoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Robust Speech Representation with an Articulatory-Regularized Variational Autoencoder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211012.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-5-6|PAPER Thu-M-V-5-6 — Intra-Sentential Speaking Rate Control in Neural Text-To-Speech for Automatic Dubbing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Intra-Sentential Speaking Rate Control in Neural Text-To-Speech for Automatic Dubbing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211798.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-4|PAPER Tue-M-SS-1-4 — Transfer Learning and Data Augmentation Techniques to the COVID-19 Identification Tasks in ComParE 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transfer Learning and Data Augmentation Techniques to the COVID-19 Identification Tasks in ComParE 2021</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210745.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-4|PAPER Fri-A-V-1-4 — Phone-Level Pronunciation Scoring for Spanish Speakers Learning English Using a GOP-DNN System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phone-Level Pronunciation Scoring for Spanish Speakers Learning English Using a GOP-DNN System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210556.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-5|PAPER Wed-M-O-1-5 — //LeBenchmark//: A Reproducible Framework for Assessing Self-Supervised Representation Learning from Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">//LeBenchmark//: A Reproducible Framework for Assessing Self-Supervised Representation Learning from Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211230.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-3-2|PAPER Fri-M-O-3-2 — Combating Reverberation in NTF-Based Speech Separation Using a Sub-Source Weighted Multichannel Wiener Filter and Linear Prediction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Combating Reverberation in NTF-Based Speech Separation Using a Sub-Source Weighted Multichannel Wiener Filter and Linear Prediction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210199.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-5-10|PAPER Tue-A-V-5-10 — Speaker Transition Patterns in Three-Party Conversation: Evidence from English, Estonian and Swedish]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Transition Patterns in Three-Party Conversation: Evidence from English, Estonian and Swedish</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211357.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-2-4|PAPER Tue-E-O-2-4 — Cue Interaction in the Perception of Prosodic Prominence: The Role of Voice Quality]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cue Interaction in the Perception of Prosodic Prominence: The Role of Voice Quality</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210556.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-5|PAPER Wed-M-O-1-5 — //LeBenchmark//: A Reproducible Framework for Assessing Self-Supervised Representation Learning from Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">//LeBenchmark//: A Reproducible Framework for Assessing Self-Supervised Representation Learning from Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210549.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-2-5|PAPER Tue-M-O-2-5 — EasyCall Corpus: A Dysarthric Speech Dataset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">EasyCall Corpus: A Dysarthric Speech Dataset</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211237.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-1-2|PAPER Fri-M-O-1-2 — ETLT 2021: Shared Task on Automatic Speech Recognition for Non-Native Children’s Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ETLT 2021: Shared Task on Automatic Speech Recognition for Non-Native Children’s Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211315.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-1-4|PAPER Fri-M-O-1-4 — Learning to Rank Microphones for Distant Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning to Rank Microphones for Distant Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210265.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-11|PAPER Wed-A-V-6-11 — Lexical Modeling of ASR Errors for Robust Speech Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Lexical Modeling of ASR Errors for Robust Speech Translation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210011.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-2|PAPER Thu-A-V-6-2 — The Multilingual TEDx Corpus for Speech Recognition and Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Multilingual TEDx Corpus for Speech Recognition and Translation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211572.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-8|PAPER Thu-A-SS-2-8 — Tackling the ADRESSO Challenge 2021: The MUET-RMIT System for Alzheimer’s Dementia Recognition from Spontaneous Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Tackling the ADRESSO Challenge 2021: The MUET-RMIT System for Alzheimer’s Dementia Recognition from Spontaneous Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211328.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-2-3|PAPER Wed-M-O-2-3 — An Exploration of the Acoustic Space of Rhotics and Laterals in Ruruuli]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Exploration of the Acoustic Space of Rhotics and Laterals in Ruruuli</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211056.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-2-4|PAPER Fri-M-V-2-4 — Revisiting Recall Effects of Filler Particles in German and English]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Revisiting Recall Effects of Filler Particles in German and English</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210978.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-6|PAPER Wed-E-V-1-6 — Dual Script E2E Framework for Multilingual and Code-Switching ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dual Script E2E Framework for Multilingual and Code-Switching ASR</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211908.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-5|PAPER Wed-A-V-2-5 — Revisiting Parity of Human vs. Machine Conversational Speech Transcription]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Revisiting Parity of Human vs. Machine Conversational Speech Transcription</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210373.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-4-5|PAPER Wed-E-V-4-5 — Assessing the Use of Prosody in Constituency Parsing of Imperfect Transcripts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Assessing the Use of Prosody in Constituency Parsing of Imperfect Transcripts</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211062.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-2|PAPER Tue-A-SS-2-2 — PANACEA Cough Sound-Based Diagnosis of COVID-19 for the DiCOVA 2021 Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PANACEA Cough Sound-Based Diagnosis of COVID-19 for the DiCOVA 2021 Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211540.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-SS-1-7|PAPER Tue-E-SS-1-7 — Modeling Dysphonia Severity as a Function of Roughness and Breathiness Ratings in the GRBAS Scale]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modeling Dysphonia Severity as a Function of Roughness and Breathiness Ratings in the GRBAS Scale</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210594.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-4-7|PAPER Wed-E-V-4-7 — The Interaction of Word Complexity and Word Duration in an Agglutinative Language]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Interaction of Word Complexity and Word Duration in an Agglutinative Language</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211101.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-2-3|PAPER Tue-E-O-2-3 — Glottal Stops in Upper Sorbian: A Data-Driven Approach]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Glottal Stops in Upper Sorbian: A Data-Driven Approach</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-6|PAPER Tue-A-V-2-6 — Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210549.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-2-5|PAPER Tue-M-O-2-5 — EasyCall Corpus: A Dysarthric Speech Dataset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">EasyCall Corpus: A Dysarthric Speech Dataset</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211379.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-2-7|PAPER Fri-M-V-2-7 — Dissecting the Aero-Acoustic Parameters of Open Articulatory Transitions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dissecting the Aero-Acoustic Parameters of Open Articulatory Transitions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218005.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-S&T-1-5|PAPER Tue-A-S&T-1-5 — The LIUM Human Active Correction Platform for Speaker Diarization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The LIUM Human Active Correction Platform for Speaker Diarization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211524.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-9|PAPER Wed-M-V-5-9 — Reliable Estimates of Interpretable Cue Effects with Active Learning in Psycholinguistic Research]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reliable Estimates of Interpretable Cue Effects with Active Learning in Psycholinguistic Research</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210315.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-3|PAPER Fri-M-V-1-3 — Testing Acoustic Voice Quality Classification Across Languages and Speech Styles]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Testing Acoustic Voice Quality Classification Across Languages and Speech Styles</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211329.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-3-5|PAPER Tue-A-V-3-5 — Cancellation of Local Competing Speaker with Near-Field Localization for Distributed ad-hoc Sensor Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cancellation of Local Competing Speaker with Near-Field Localization for Distributed ad-hoc Sensor Network</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211384.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-1|PAPER Fri-A-SS-2-1 — Automatic Speech Recognition of Disordered Speech: Personalized Models Outperforming Human Listeners on Short Phrases]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Speech Recognition of Disordered Speech: Personalized Models Outperforming Human Listeners on Short Phrases</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210697.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-12|PAPER Fri-A-SS-2-12 — Disordered Speech Data Collection: Lessons Learned at 1 Million Utterances from Project Euphonia]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Disordered Speech Data Collection: Lessons Learned at 1 Million Utterances from Project Euphonia</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211540.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-SS-1-7|PAPER Tue-E-SS-1-7 — Modeling Dysphonia Severity as a Function of Roughness and Breathiness Ratings in the GRBAS Scale]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modeling Dysphonia Severity as a Function of Roughness and Breathiness Ratings in the GRBAS Scale</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211189.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-1|PAPER Thu-M-V-1-1 — Speech Intelligibility of Dysarthric Speech: Human Scores and Acoustic-Phonetic Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Intelligibility of Dysarthric Speech: Human Scores and Acoustic-Phonetic Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210678.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-1|PAPER Fri-M-V-6-1 — Do Acoustic Word Embeddings Capture Phonological Similarity? An Empirical Study]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Do Acoustic Word Embeddings Capture Phonological Similarity? An Empirical Study</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211716.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-2|PAPER Fri-M-V-3-2 — LT-LM: A Novel Non-Autoregressive Language Model for Single-Shot Lattice Rescoring]]</div>|^<div class="cpauthorindexpersoncardpapertitle">LT-LM: A Novel Non-Autoregressive Language Model for Single-Shot Lattice Rescoring</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211056.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-2-4|PAPER Fri-M-V-2-4 — Revisiting Recall Effects of Filler Particles in German and English]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Revisiting Recall Effects of Filler Particles in German and English</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211870.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-1-1|PAPER Fri-A-SS-1-1 — INTERSPEECH 2021 Acoustic Echo Cancellation Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">INTERSPEECH 2021 Acoustic Echo Cancellation Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211379.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-2-7|PAPER Fri-M-V-2-7 — Dissecting the Aero-Acoustic Parameters of Open Articulatory Transitions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dissecting the Aero-Acoustic Parameters of Open Articulatory Transitions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210799.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-6|PAPER Tue-A-SS-2-6 — Classification of COVID-19 from Cough Using Autoregressive Predictive Coding Pretraining and Spectral Data Augmentation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Classification of COVID-19 from Cough Using Autoregressive Predictive Coding Pretraining and Spectral Data Augmentation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211843.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-8|PAPER Tue-E-V-5-8 — Zero-Shot Cross-Lingual Phonetic Recognition with External Language Embedding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Zero-Shot Cross-Lingual Phonetic Recognition with External Language Embedding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211507.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-SS-1-4|PAPER Tue-E-SS-1-4 — Automated Detection of Voice Disorder in the Saarbrücken Voice Database: Effects of Pathology Subset and Audio Materials]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automated Detection of Voice Disorder in the Saarbrücken Voice Database: Effects of Pathology Subset and Audio Materials</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211405.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-6|PAPER Tue-M-V-2-6 — Deliberation-Based Multi-Pass Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deliberation-Based Multi-Pass Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211208.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-6|PAPER Thu-A-V-4-6 — The Third DIHARD Diarization Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Third DIHARD Diarization Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211966.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-5|PAPER Fri-M-V-6-5 — Few-Shot Keyword Spotting in Any Language]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Few-Shot Keyword Spotting in Any Language</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211286.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-12|PAPER Fri-M-V-6-12 — Keyword Transformer: A Self-Attention Model for Keyword Spotting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Keyword Transformer: A Self-Attention Model for Keyword Spotting</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211973.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-2|PAPER Wed-E-V-5-2 — Speech Denoising with Auditory Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Denoising with Auditory Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211531.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-8|PAPER Thu-A-V-3-8 — Vocal Harmony Separation Using Time-Domain Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Vocal Harmony Separation Using Time-Domain Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211732.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-O-1-4|PAPER Thu-A-O-1-4 — Importance of Parasagittal Sensor Information in Tongue Motion Capture Through a Diphonic Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Importance of Parasagittal Sensor Information in Tongue Motion Capture Through a Diphonic Analysis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210546.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-1-2|PAPER Tue-M-O-1-2 — T5G2P: Using Text-to-Text Transfer Transformer for Grapheme-to-Phoneme Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">T5G2P: Using Text-to-Text Transfer Transformer for Grapheme-to-Phoneme Conversion</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218023.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-S&T-1-2|PAPER Fri-A-S&T-1-2 — Save Your Voice: Voice Banking and TTS for Anyone]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Save Your Voice: Voice Banking and TTS for Anyone</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211870.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-1-1|PAPER Fri-A-SS-1-1 — INTERSPEECH 2021 Acoustic Echo Cancellation Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">INTERSPEECH 2021 Acoustic Echo Cancellation Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211188.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-8|PAPER Tue-A-SS-1-8 — Evaluating the Vulnerability of End-to-End Automatic Speech Recognition Models to Membership Inference Attacks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Evaluating the Vulnerability of End-to-End Automatic Speech Recognition Models to Membership Inference Attacks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211787.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-4|PAPER Wed-A-V-2-4 — Phonetically Induced Subwords for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonetically Induced Subwords for End-to-End Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211583.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-2-3|PAPER Fri-M-O-2-3 — Ctrl-P: Temporal Control of Prosodic Variation for Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ctrl-P: Temporal Control of Prosodic Variation for Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211610.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-2-4|PAPER Fri-M-O-2-4 — ADEPT: A Dataset for Evaluating Prosody Transfer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ADEPT: A Dataset for Evaluating Prosody Transfer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210324.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-4|PAPER Wed-M-V-5-4 — SPEECHADJUSTER: A Tool for Investigating Listener Preferences and Speech Intelligibility]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SPEECHADJUSTER: A Tool for Investigating Listener Preferences and Speech Intelligibility</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218023.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-S&T-1-2|PAPER Fri-A-S&T-1-2 — Save Your Voice: Voice Banking and TTS for Anyone]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Save Your Voice: Voice Banking and TTS for Anyone</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211033.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-2-2|PAPER Thu-M-SS-2-2 — Detecting English Speech in the Air Traffic Control Voice Communication]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detecting English Speech in the Air Traffic Control Voice Communication</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211373.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-2-4|PAPER Thu-M-SS-2-4 — Contextual Semi-Supervised Learning: An Approach to Leverage Air-Surveillance and Untranscribed ATC Data in ASR Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Contextual Semi-Supervised Learning: An Approach to Leverage Air-Surveillance and Untranscribed ATC Data in ASR Systems</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211619.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-2-5|PAPER Thu-M-SS-2-5 — Boosting of Contextual Information in ASR for Air-Traffic Call-Sign Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Boosting of Contextual Information in ASR for Air-Traffic Call-Sign Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210655.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-7|PAPER Tue-M-V-5-7 — Multi-Channel Transformer Transducer for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Channel Transformer Transducer for Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210793.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-2|PAPER Tue-E-V-4-2 — FANS: Fusing ASR and NLU for On-Device SLU]]</div>|^<div class="cpauthorindexpersoncardpapertitle">FANS: Fusing ASR and NLU for On-Device SLU</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211418.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-3-3|PAPER Fri-M-O-3-3 — A Hands-On Comparison of DNNs for Dialog Separation Using Transfer Learning from Music Source Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Hands-On Comparison of DNNs for Dialog Separation Using Transfer Learning from Music Source Separation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210939.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-2-2|PAPER Tue-E-O-2-2 — Synchronic Fortition in Five Romance Languages? A Large Corpus-Based Study of Word-Initial Devoicing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Synchronic Fortition in Five Romance Languages? A Large Corpus-Based Study of Word-Initial Devoicing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211939.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-3-2|PAPER Wed-M-O-3-2 — Universal Speaker Extraction in the Presence and Absence of Target Speakers for Speech of One and Two Talkers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Universal Speaker Extraction in the Presence and Absence of Target Speakers for Speech of One and Two Talkers</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211552.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-3-4|PAPER Fri-M-O-3-4 — GlobalPhone Mix-To-Separate Out of 2: A Multilingual 2000 Speakers Mixtures Database for Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">GlobalPhone Mix-To-Separate Out of 2: A Multilingual 2000 Speakers Mixtures Database for Speech Separation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212191.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-4|PAPER Tue-A-SS-2-4 — Detecting COVID-19 from Audio Recording of Coughs Using Random Forests and Support Vector Machines]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detecting COVID-19 from Audio Recording of Coughs Using Random Forests and Support Vector Machines</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210372.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-5|PAPER Fri-M-V-1-5 — Adaptive Listening Difficulty Detection for L2 Learners Through Moderating ASR Resources]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adaptive Listening Difficulty Detection for L2 Learners Through Moderating ASR Resources</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210126.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-3|PAPER Wed-A-V-4-3 — Sound Source Localization with Majorization Minimization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sound Source Localization with Majorization Minimization</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210523.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-6|PAPER Thu-M-V-3-6 — Efficient and Stable Adversarial Learning Using Unpaired Data for Unsupervised Multichannel Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Efficient and Stable Adversarial Learning Using Unpaired Data for Unsupervised Multichannel Speech Separation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210225.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-4|PAPER Tue-M-V-5-4 — Noise Robust Acoustic Modeling for Single-Channel Speech Recognition Based on a Stream-Wise Transformer Architecture]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Noise Robust Acoustic Modeling for Single-Channel Speech Recognition Based on a Stream-Wise Transformer Architecture</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210914.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-4|PAPER Tue-M-V-2-4 — Phonetic and Prosodic Information Estimation from Texts for Genuine Japanese End-to-End Text-to-Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonetic and Prosodic Information Estimation from Texts for Genuine Japanese End-to-End Text-to-Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212073.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-6-9|PAPER Thu-M-V-6-9 — Mixture of Orthogonal Sequences Made from Extended Time-Stretched Pulses Enables Measurement of Involuntary Voice Fundamental Frequency Response to Pitch Perturbation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Mixture of Orthogonal Sequences Made from Extended Time-Stretched Pulses Enables Measurement of Involuntary Voice Fundamental Frequency Response to Pitch Perturbation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218021.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-S&T-1-1|PAPER Fri-A-S&T-1-1 — Interactive and Real-Time Acoustic Measurement Tools for Speech Data Acquisition and Presentation: Application of an Extended Member of Time Stretched Pulses]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Interactive and Real-Time Acoustic Measurement Tools for Speech Data Acquisition and Presentation: Application of an Extended Member of Time Stretched Pulses</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212100.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-10|PAPER Tue-M-V-2-10 — Speed up Training with Variable Length Inputs by Efficient Batching Strategies]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speed up Training with Variable Length Inputs by Efficient Batching Strategies</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210433.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-4|PAPER Thu-M-V-3-4 — Crossfire Conditional Generative Adversarial Networks for Singing Voice Extraction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Crossfire Conditional Generative Adversarial Networks for Singing Voice Extraction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210318.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-4|PAPER Tue-E-V-5-4 — Robust Continuous On-Device Personalization for Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Continuous On-Device Personalization for Automatic Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211776.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-4-12|PAPER Wed-E-V-4-12 — Prosody of Case Markers in Urdu]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prosody of Case Markers in Urdu</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210983.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-1|PAPER Tue-A-SS-1-1 — Privacy-Preserving Voice Anti-Spoofing Using Secure Multi-Party Computation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Privacy-Preserving Voice Anti-Spoofing Using Secure Multi-Party Computation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211062.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-2|PAPER Tue-A-SS-2-2 — PANACEA Cough Sound-Based Diagnosis of COVID-19 for the DiCOVA 2021 Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PANACEA Cough Sound-Based Diagnosis of COVID-19 for the DiCOVA 2021 Challenge</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211070.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-13|PAPER Tue-E-V-1-13 — Speaker Anonymisation Using the McAdams Coefficient]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Anonymisation Using the McAdams Coefficient</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210993.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-1-2|PAPER Wed-E-O-1-2 — Graph Attention Networks for Anti-Spoofing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Graph Attention Networks for Anti-Spoofing</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211522.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-9|PAPER Fri-M-V-7-9 — Visualizing Classifier Adjacency Relations: A Case Study in Speaker Verification and Voice Anti-Spoofing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Visualizing Classifier Adjacency Relations: A Case Study in Speaker Verification and Voice Anti-Spoofing</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211187.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-13|PAPER Fri-M-V-7-13 — Partially-Connected Differentiable Architecture Search for Deepfake and Spoofing Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Partially-Connected Differentiable Architecture Search for Deepfake and Spoofing Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211465.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-1-1|PAPER Tue-E-O-1-1 — Information Retrieval for ZeroSpeech 2021: The Submission by University of Wroclaw]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Information Retrieval for ZeroSpeech 2021: The Submission by University of Wroclaw</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211544.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-1-2|PAPER Tue-E-O-1-2 — Aligned Contrastive Predictive Coding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Aligned Contrastive Predictive Coding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211084.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-2-2|PAPER Tue-M-O-2-2 — On Modeling Glottal Source Information for Phonation Assessment in Parkinson’s Disease]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On Modeling Glottal Source Information for Phonation Assessment in Parkinson’s Disease</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211598.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-3-3|PAPER Tue-M-O-3-3 — Identification of F1 and F2 in Speech Using Modified Zero Frequency Filtering]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Identification of F1 and F2 in Speech Using Modified Zero Frequency Filtering</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211288.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-3-5|PAPER Wed-A-O-3-5 — Late Fusion of the Available Lexicon and Raw Waveform-Based Acoustic Modeling for Depression and Dementia Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Late Fusion of the Available Lexicon and Raw Waveform-Based Acoustic Modeling for Depression and Dementia Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212212.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-3|PAPER Fri-A-SS-2-3 — Handling Acoustic Variation in Dysarthric Speech Recognition Systems Through Model Combination]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Handling Acoustic Variation in Dysarthric Speech Recognition Systems Through Model Combination</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211755.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-10|PAPER Wed-M-V-2-10 — The Zero Resource Speech Challenge 2021: Spoken Language Modelling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Zero Resource Speech Challenge 2021: Spoken Language Modelling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210742.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-3-2|PAPER Tue-A-V-3-2 — Alpha-Stable Autoregressive Fast Multichannel Nonnegative Matrix Factorization for Joint Speech Enhancement and Dereverberation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Alpha-Stable Autoregressive Fast Multichannel Nonnegative Matrix Factorization for Joint Speech Enhancement and Dereverberation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210939.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-2-2|PAPER Tue-E-O-2-2 — Synchronic Fortition in Five Romance Languages? A Large Corpus-Based Study of Word-Initial Devoicing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Synchronic Fortition in Five Romance Languages? A Large Corpus-Based Study of Word-Initial Devoicing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211565.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-1-4|PAPER Tue-M-O-1-4 — A Systematic Review and Analysis of Multilingual Data Strategies in Text-to-Speech for Low-Resource Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Systematic Review and Analysis of Multilingual Data Strategies in Text-to-Speech for Low-Resource Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210011.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-2|PAPER Thu-A-V-6-2 — The Multilingual TEDx Corpus for Speech Recognition and Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Multilingual TEDx Corpus for Speech Recognition and Translation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210265.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-11|PAPER Wed-A-V-6-11 — Lexical Modeling of ASR Errors for Robust Speech Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Lexical Modeling of ASR Errors for Robust Speech Translation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210011.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-2|PAPER Thu-A-V-6-2 — The Multilingual TEDx Corpus for Speech Recognition and Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Multilingual TEDx Corpus for Speech Recognition and Translation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211418.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-3-3|PAPER Fri-M-O-3-3 — A Hands-On Comparison of DNNs for Dialog Separation Using Transfer Learning from Music Source Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Hands-On Comparison of DNNs for Dialog Separation Using Transfer Learning from Music Source Separation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211182.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-6|PAPER Wed-M-V-2-6 — Analyzing Speaker Information in Self-Supervised Models to Improve Zero-Resource Speech Processing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analyzing Speaker Information in Self-Supervised Models to Improve Zero-Resource Speech Processing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210006.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-4-1|PAPER Wed-E-V-4-1 — How f0 and Phrase Position Affect Papuan Malay Word Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">How f0 and Phrase Position Affect Papuan Malay Word Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-6|PAPER Tue-A-V-2-6 — Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211519.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-7|PAPER Thu-A-SS-2-7 — Using the Outputs of Different Automatic Speech Recognition Paradigms for Acoustic- and BERT-Based Alzheimer’s Dementia Detection Through Spontaneous Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Using the Outputs of Different Automatic Speech Recognition Paradigms for Acoustic- and BERT-Based Alzheimer’s Dementia Detection Through Spontaneous Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211924.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-9|PAPER Thu-A-V-3-9 — Speaker Verification-Based Evaluation of Single-Channel Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Verification-Based Evaluation of Single-Channel Speech Separation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210688.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-SS-1-6|PAPER Tue-E-SS-1-6 — Articulatory Coordination for Speech Motor Tracking in Huntington Disease]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Articulatory Coordination for Speech Motor Tracking in Huntington Disease</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211694.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-3-1|PAPER Wed-A-O-3-1 — Automatically Detecting Errors and Disfluencies in Read Speech to Predict Cognitive Impairment in People with Parkinson’s Disease]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatically Detecting Errors and Disfluencies in Read Speech to Predict Cognitive Impairment in People with Parkinson’s Disease</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211526.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-6|PAPER Wed-A-V-1-6 — Detecting Alzheimer’s Disease Using Interactional and Acoustic Features from Spontaneous Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detecting Alzheimer’s Disease Using Interactional and Acoustic Features from Spontaneous Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211633.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-9|PAPER Thu-A-SS-2-9 — Alzheimer’s Dementia Recognition Using Acoustic, Lexical, Disfluency and Speech Pause Features Robust to Noisy Inputs]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Alzheimer’s Dementia Recognition Using Acoustic, Lexical, Disfluency and Speech Pause Features Robust to Noisy Inputs</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212127.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-3-6|PAPER Thu-M-O-3-6 — Training Hybrid Models on Noisy Transliterated Transcripts for Code-Switched Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Training Hybrid Models on Noisy Transliterated Transcripts for Code-Switched Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210011.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-2|PAPER Thu-A-V-6-2 — The Multilingual TEDx Corpus for Speech Recognition and Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Multilingual TEDx Corpus for Speech Recognition and Translation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210935.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-2-3|PAPER Thu-M-SS-2-3 — Robust Command Recognition for Lithuanian Air Traffic Control Tower Utterances]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Command Recognition for Lithuanian Air Traffic Control Tower Utterances</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211101.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-2-3|PAPER Tue-E-O-2-3 — Glottal Stops in Upper Sorbian: A Data-Driven Approach]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Glottal Stops in Upper Sorbian: A Data-Driven Approach</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210085.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-1-2|PAPER Fri-A-SS-1-2 — Acoustic Echo Cancellation with Cross-Domain Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Echo Cancellation with Cross-Domain Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210141.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-1|PAPER Fri-A-V-3-1 — Compressing 1D Time-Channel Separable Convolutions Using Sparse Random Ternary Matrices]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Compressing 1D Time-Channel Separable Convolutions Using Sparse Random Ternary Matrices</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212232.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-2-2|PAPER Wed-E-O-2-2 — Lost in Interpreting: Speech Translation from Source or Interpreter?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Lost in Interpreting: Speech Translation from Source or Interpreter?</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210497.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-5|PAPER Tue-A-SS-2-5 — Diagnosis of COVID-19 Using Auditory Acoustic Cues]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Diagnosis of COVID-19 Using Auditory Acoustic Cues</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210402.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-4|PAPER Fri-A-V-6-4 — Knowledge Distillation from BERT Transformer to Speech Transformer for Intent Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Knowledge Distillation from BERT Transformer to Speech Transformer for Intent Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210549.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-2-5|PAPER Tue-M-O-2-5 — EasyCall Corpus: A Dysarthric Speech Dataset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">EasyCall Corpus: A Dysarthric Speech Dataset</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211755.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-10|PAPER Wed-M-V-2-10 — The Zero Resource Speech Challenge 2021: Spoken Language Modelling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Zero Resource Speech Challenge 2021: Spoken Language Modelling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210019.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-1|PAPER Tue-M-SS-1-1 — The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211962.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-12|PAPER Wed-E-V-3-12 — 4-Bit Quantization of LSTM-Based Speech Recognition Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">4-Bit Quantization of LSTM-Based Speech Recognition Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210655.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-7|PAPER Tue-M-V-5-7 — Multi-Channel Transformer Transducer for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Channel Transformer Transducer for Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211787.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-4|PAPER Wed-A-V-2-4 — Phonetically Induced Subwords for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonetically Induced Subwords for End-to-End Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210265.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-11|PAPER Wed-A-V-6-11 — Lexical Modeling of ASR Errors for Robust Speech Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Lexical Modeling of ASR Errors for Robust Speech Translation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212084.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-11|PAPER Tue-M-V-5-11 — Raw Waveform Encoder with Multi-Scale Globally Attentive Locally Recurrent Networks for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Raw Waveform Encoder with Multi-Scale Globally Attentive Locally Recurrent Networks for End-to-End Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211716.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-2|PAPER Fri-M-V-3-2 — LT-LM: A Novel Non-Autoregressive Language Model for Single-Shot Lattice Rescoring]]</div>|^<div class="cpauthorindexpersoncardpapertitle">LT-LM: A Novel Non-Autoregressive Language Model for Single-Shot Lattice Rescoring</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210744.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-2|PAPER Wed-A-V-6-2 — Subtitle Translation as Markup Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Subtitle Translation as Markup Translation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218010.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-S&T-1-3|PAPER Wed-A-S&T-1-3 — Autonomous Robot for Measuring Room Impulse Responses]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Autonomous Robot for Measuring Room Impulse Responses</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210936.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-3|PAPER Wed-E-SS-1-3 — Deep Noise Suppression with Non-Intrusive PESQNet Supervision Enabling the Use of Real Training Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Noise Suppression with Non-Intrusive PESQNet Supervision Enabling the Use of Real Training Data</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211590.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-1-4|PAPER Fri-A-SS-1-4 — Y²-Net FCRN for Acoustic Echo and Noise Suppression]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Y²-Net FCRN for Acoustic Echo and Noise Suppression</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211012.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-5-6|PAPER Thu-M-V-5-6 — Intra-Sentential Speaking Rate Control in Neural Text-To-Speech for Automatic Dubbing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Intra-Sentential Speaking Rate Control in Neural Text-To-Speech for Automatic Dubbing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211679.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-7|PAPER Tue-E-V-5-7 — On-the-Fly Aligned Data Augmentation for Sequence-to-Sequence ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On-the-Fly Aligned Data Augmentation for Sequence-to-Sequence ASR</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211743.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-13|PAPER Wed-A-V-3-13 — Transformer-Based ASR Incorporating Time-Reduction Layer and Fine-Tuning with Self-Knowledge Distillation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transformer-Based ASR Incorporating Time-Reduction Layer and Fine-Tuning with Self-Knowledge Distillation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211730.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-6-1|PAPER Tue-A-V-6-1 — Investigating Deep Neural Structures and their Interpretability in the Domain of Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Deep Neural Structures and their Interpretability in the Domain of Voice Conversion</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211516.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-10|PAPER Fri-M-V-3-10 — Insights on Neural Representations for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Insights on Neural Representations for End-to-End Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212111.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-2|PAPER Thu-M-V-1-2 — Analyzing Short Term Dynamic Speech Features for Understanding Behavioral Traits of Children with Autism Spectrum Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analyzing Short Term Dynamic Speech Features for Understanding Behavioral Traits of Children with Autism Spectrum Disorder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211378.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-7|PAPER Thu-A-V-3-7 — AvaTr: One-Shot Speaker Extraction with Transformers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AvaTr: One-Shot Speaker Extraction with Transformers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210277.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-1-2|PAPER Wed-M-V-1-2 — Modeling and Training Strategies for Language Recognition Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modeling and Training Strategies for Language Recognition Systems</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211180.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-6|PAPER Wed-M-V-4-6 — Data Quality as Predictor of Voice Anti-Spoofing Generalization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Data Quality as Predictor of Voice Anti-Spoofing Generalization</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210276.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-1-2|PAPER Thu-M-SS-1-2 — Language Recognition on Unknown Conditions: The LORIA-Inria-MULTISPEECH System for AP20-OLR Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Language Recognition on Unknown Conditions: The LORIA-Inria-MULTISPEECH System for AP20-OLR Challenge</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211522.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-9|PAPER Fri-M-V-7-9 — Visualizing Classifier Adjacency Relations: A Case Study in Speaker Verification and Voice Anti-Spoofing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Visualizing Classifier Adjacency Relations: A Case Study in Speaker Verification and Voice Anti-Spoofing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212011.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-1-4|PAPER Tue-E-O-1-4 — Fearless Steps Challenge Phase-3 (FSC P3): Advancing SLT for Unseen Channel and Mission Data Across NASA Apollo Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fearless Steps Challenge Phase-3 (FSC P3): Advancing SLT for Unseen Channel and Mission Data Across NASA Apollo Audio</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211877.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-4|PAPER Tue-E-V-4-4 — DEXTER: Deep Encoding of External Knowledge for Named Entity Recognition in Virtual Assistants]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DEXTER: Deep Encoding of External Knowledge for Named Entity Recognition in Virtual Assistants</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211581.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-2-1|PAPER Tue-M-O-2-1 — Acoustic Indicators of Speech Motor Coordination in Adults With and Without Traumatic Brain Injury]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Indicators of Speech Motor Coordination in Adults With and Without Traumatic Brain Injury</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211743.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-13|PAPER Wed-A-V-3-13 — Transformer-Based ASR Incorporating Time-Reduction Layer and Fine-Tuning with Self-Knowledge Distillation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transformer-Based ASR Incorporating Time-Reduction Layer and Fine-Tuning with Self-Knowledge Distillation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210153.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-4|PAPER Tue-A-SS-1-4 — Communication-Efficient Agnostic Federated Averaging]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Communication-Efficient Agnostic Federated Averaging</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211859.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-12|PAPER Wed-E-V-5-12 — Learning Speech Structure to Improve Time-Frequency Masks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Speech Structure to Improve Time-Frequency Masks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210440.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-5|PAPER Tue-A-V-2-5 — An Attention Self-Supervised Contrastive Learning Based Three-Stage Model for Hand Shape Feature Representation in Cued Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Attention Self-Supervised Contrastive Learning Based Three-Stage Model for Hand Shape Feature Representation in Cued Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210432.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-4|PAPER Thu-M-V-2-4 — Cross-Modal Knowledge Distillation Method for Automatic Cued Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Modal Knowledge Distillation Method for Automatic Cued Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211335.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-9|PAPER Tue-M-V-6-9 — End-to-End Transformer-Based Open-Vocabulary Keyword Spotting with Location-Guided Local Attention]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Transformer-Based Open-Vocabulary Keyword Spotting with Location-Guided Local Attention</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210630.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-5|PAPER Fri-M-V-5-5 — Coughing-Based Recognition of Covid-19 with Spatial Attentive ConvLSTM Recurrent Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Coughing-Based Recognition of Covid-19 with Spatial Attentive ConvLSTM Recurrent Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210983.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-1|PAPER Tue-A-SS-1-1 — Privacy-Preserving Voice Anti-Spoofing Using Secure Multi-Party Computation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Privacy-Preserving Voice Anti-Spoofing Using Secure Multi-Party Computation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212260.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-12|PAPER Thu-A-V-3-12 — Neural Speaker Extraction with Speaker-Speech Cross-Attention Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Neural Speaker Extraction with Speaker-Speech Cross-Attention Network</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211280.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-1-5|PAPER Wed-M-V-1-5 — Exploring wav2vec 2.0 on Speaker Verification and Language Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploring wav2vec 2.0 on Speaker Verification and Language Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211978.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-11|PAPER Tue-E-V-1-11 — Joint Feature Enhancement and Speaker Recognition with Multi-Objective Task-Oriented Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Joint Feature Enhancement and Speaker Recognition with Multi-Objective Task-Oriented Network</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210481.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-2|PAPER Tue-E-V-2-2 — TeCANet: Temporal-Contextual Attention Network for Environment-Aware Speech Dereverberation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">TeCANet: Temporal-Contextual Attention Network for Environment-Aware Speech Dereverberation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210570.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-4|PAPER Tue-E-V-2-4 — MIMO Self-Attentive RNN Beamformer for Multi-Speaker Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MIMO Self-Attentive RNN Beamformer for Multi-Speaker Speech Separation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210659.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-7|PAPER Wed-A-V-4-7 — MetricNet: Towards Improved Modeling For Non-Intrusive Speech Quality Assessment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MetricNet: Towards Improved Modeling For Non-Intrusive Speech Quality Assessment</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210430.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-11|PAPER Thu-M-V-3-11 — Generalized Spatio-Temporal RNN Beamformer for Target Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Generalized Spatio-Temporal RNN Beamformer for Target Speech Separation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210007.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-2|PAPER Fri-A-V-3-2 — Weakly Supervised Construction of ASR Systems from Massive Video Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Weakly Supervised Construction of ASR Systems from Massive Video Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210427.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-7|PAPER Wed-A-V-3-7 — Transformer-Based End-to-End Speech Recognition with Residual Gaussian-Based Self-Attention]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transformer-Based End-to-End Speech Recognition with Residual Gaussian-Based Self-Attention</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211977.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-11|PAPER Tue-M-V-6-11 — A Lightweight Framework for Online Voice Activity Detection in the Wild]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Lightweight Framework for Online Voice Activity Detection in the Wild</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210246.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-5-4|PAPER Tue-A-V-5-4 — Discriminative Self-Training for Punctuation Prediction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Discriminative Self-Training for Punctuation Prediction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210060.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-4|PAPER Fri-A-SS-2-4 — Spectro-Temporal Deep Features for Disordered Speech Assessment and Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spectro-Temporal Deep Features for Disordered Speech Assessment and Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210168.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-6|PAPER Fri-A-SS-2-6 — Adversarial Data Augmentation for Disordered Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adversarial Data Augmentation for Disordered Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210289.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-9|PAPER Fri-A-SS-2-9 — Bayesian Parametric and Architectural Domain Adaptation of LF-MMI Trained TDNNs for Elderly and Dysarthric Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bayesian Parametric and Architectural Domain Adaptation of LF-MMI Trained TDNNs for Elderly and Dysarthric Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211173.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-10|PAPER Tue-M-SS-1-10 — Identifying Conflict Escalation and Primates by Using Ensemble X-Vectors and Fisher Vector Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Identifying Conflict Escalation and Primates by Using Ensemble X-Vectors and Fisher Vector Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210645.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-3-2|PAPER Tue-M-O-3-2 — Fricative Phoneme Detection Using Deep Neural Networks and its Comparison to Traditional Methods]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fricative Phoneme Detection Using Deep Neural Networks and its Comparison to Traditional Methods</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218005.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-S&T-1-5|PAPER Tue-A-S&T-1-5 — The LIUM Human Active Correction Platform for Speaker Diarization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The LIUM Human Active Correction Platform for Speaker Diarization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210235.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-2|PAPER Tue-E-V-1-2 — The DKU-Duke-Lenovo System Description for the Fearless Steps Challenge Phase III]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The DKU-Duke-Lenovo System Description for the Fearless Steps Challenge Phase III</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210391.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-4|PAPER Wed-M-V-2-4 — Speech SimCLR: Combining Contrastive and Reconstruction Objective for Self-Supervised Speech Representation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech SimCLR: Combining Contrastive and Reconstruction Objective for Self-Supervised Speech Representation Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211749.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-7|PAPER Tue-A-V-2-7 — An Automatic, Simple Ultrasound Biofeedback Parameter for Distinguishing Accurate and Misarticulated Rhotic Syllables]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Automatic, Simple Ultrasound Biofeedback Parameter for Distinguishing Accurate and Misarticulated Rhotic Syllables</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211574.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-3-7|PAPER Tue-A-V-3-7 — Clarity-2021 Challenges: Machine Learning Challenges for Advancing Hearing Aid Processing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Clarity-2021 Challenges: Machine Learning Challenges for Advancing Hearing Aid Processing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210236.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-4|PAPER Tue-A-V-4-4 — Robust wav2vec 2.0: Analyzing Domain Shift in Self-Supervised Pre-Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust wav2vec 2.0: Analyzing Domain Shift in Self-Supervised Pre-Training</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211912.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-3|PAPER Wed-A-V-6-3 — Large-Scale Self- and Semi-Supervised Learning for Speech Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Large-Scale Self- and Semi-Supervised Learning for Speech Translation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210329.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-3|PAPER Wed-E-V-1-3 — Unsupervised Cross-Lingual Representation Learning for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Cross-Lingual Representation Learning for Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211860.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-4|PAPER Wed-M-O-1-4 — SPGISpeech: 5,000 Hours of Transcribed Financial Audio for Fully Formatted End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SPGISpeech: 5,000 Hours of Transcribed Financial Audio for Fully Formatted End-to-End Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211697.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-7-8|PAPER Tue-M-V-7-8 — MAP Adaptation Characteristics in Forensic Long-Term Formant Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MAP Adaptation Characteristics in Forensic Long-Term Formant Analysis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211566.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-3|PAPER Wed-M-V-6-3 — Contextualized Streaming End-to-End Speech Recognition with Trie-Based Deep Biasing and Shallow Fusion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Contextualized Streaming End-to-End Speech Recognition with Trie-Based Deep Biasing and Shallow Fusion</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211929.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-1|PAPER Wed-A-V-2-1 — Semantic Distance: A New Metric for ASR Performance Analysis Towards Spoken Language Understanding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semantic Distance: A New Metric for ASR Performance Analysis Towards Spoken Language Understanding</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211272.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-1|PAPER Wed-A-V-3-1 — Dynamic Encoder Transducer: A Flexible Solution for Trading Off Accuracy for Latency]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dynamic Encoder Transducer: A Flexible Solution for Trading Off Accuracy for Latency</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211921.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-14|PAPER Wed-A-V-3-14 — Flexi-Transducer: Optimizing Latency, Accuracy and Compute for Multi-Domain On-Device Scenarios]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Flexi-Transducer: Optimizing Latency, Accuracy and Compute for Multi-Domain On-Device Scenarios</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211887.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-6|PAPER Fri-A-V-3-6 — Dissecting User-Perceived Latency of On-Device E2E Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dissecting User-Perceived Latency of On-Device E2E Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210354.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-10|PAPER Fri-A-V-3-10 — Collaborative Training of Acoustic Encoders for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Collaborative Training of Acoustic Encoders for Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211796.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-7|PAPER Wed-A-V-1-7 — Investigating the Interplay Between Affective, Phonatory and Motoric Subsystems in Autism Spectrum Disorder Using a Multimodal Dialogue Agent]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Interplay Between Affective, Phonatory and Motoric Subsystems in Autism Spectrum Disorder Using a Multimodal Dialogue Agent</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211801.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-2|PAPER Fri-A-SS-2-2 — Investigating the Utility of Multimodal Conversational Technology and Audiovisual Analytic Measures for the Assessment and Monitoring of Amyotrophic Lateral Sclerosis at Scale]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Utility of Multimodal Conversational Technology and Audiovisual Analytic Measures for the Assessment and Monitoring of Amyotrophic Lateral Sclerosis at Scale</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211384.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-1|PAPER Fri-A-SS-2-1 — Automatic Speech Recognition of Disordered Speech: Personalized Models Outperforming Human Listeners on Short Phrases]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Speech Recognition of Disordered Speech: Personalized Models Outperforming Human Listeners on Short Phrases</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210330.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-10|PAPER Fri-A-SS-2-10 — A Voice-Activated Switch for Persons with Motor and Speech Impairments: Isolated-Vowel Spotting Using Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Voice-Activated Switch for Persons with Motor and Speech Impairments: Isolated-Vowel Spotting Using Neural Networks</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210697.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-12|PAPER Fri-A-SS-2-12 — Disordered Speech Data Collection: Lessons Learned at 1 Million Utterances from Project Euphonia]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Disordered Speech Data Collection: Lessons Learned at 1 Million Utterances from Project Euphonia</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211913.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-14|PAPER Fri-A-SS-2-14 — Comparing Supervised Models and Learned Speech Representations for Classifying Intelligibility of Disordered Speech on Selected Phrases]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparing Supervised Models and Learned Speech Representations for Classifying Intelligibility of Disordered Speech on Selected Phrases</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211312.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-12|PAPER Wed-M-V-2-12 — AVLnet: Learning Audio-Visual Language Representations from Instructional Videos]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AVLnet: Learning Audio-Visual Language Representations from Instructional Videos</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211352.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-8|PAPER Thu-M-V-2-8 — Cascaded Multilingual Audio-Visual Learning from Videos]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cascaded Multilingual Audio-Visual Learning from Videos</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210788.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-6|PAPER Fri-A-V-6-6 — Speak or Chat with Me: End-to-End Spoken Language Understanding System with Flexible Inputs]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speak or Chat with Me: End-to-End Spoken Language Understanding System with Flexible Inputs</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210729.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-SS-1-2|PAPER Tue-E-SS-1-2 — A Comparison of Acoustic Correlates of Voice Quality Across Different Recording Devices: A Cautionary Tale]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparison of Acoustic Correlates of Voice Quality Across Different Recording Devices: A Cautionary Tale</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210473.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-1-1|PAPER Tue-M-O-1-1 — Conversion of Airborne to Bone-Conducted Speech with Deep Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Conversion of Airborne to Bone-Conducted Speech with Deep Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211434.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-2|PAPER Tue-M-V-5-2 — Phoneme Recognition Through Fine Tuning of Phonetic Representations: A Case Study on Luhya Language Varieties]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phoneme Recognition Through Fine Tuning of Phonetic Representations: A Case Study on Luhya Language Varieties</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210658.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-3|PAPER Fri-M-V-3-3 — A Hybrid Seq-2-Seq ASR Design for On-Device and Server Applications]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Hybrid Seq-2-Seq ASR Design for On-Device and Server Applications</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211826.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-9|PAPER Fri-A-V-6-9 — End-to-End Spoken Language Understanding for Generalized Voice Assistants]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Spoken Language Understanding for Generalized Voice Assistants</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211878.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-O-2-3|PAPER Thu-A-O-2-3 — N-MTTL SI Model: Non-Intrusive Multi-Task Transfer Learning-Based Speech Intelligibility Prediction Model with Scenery Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">N-MTTL SI Model: Non-Intrusive Multi-Task Transfer Learning-Based Speech Intelligibility Prediction Model with Scenery Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211749.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-7|PAPER Tue-A-V-2-7 — An Automatic, Simple Ultrasound Biofeedback Parameter for Distinguishing Accurate and Misarticulated Rhotic Syllables]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Automatic, Simple Ultrasound Biofeedback Parameter for Distinguishing Accurate and Misarticulated Rhotic Syllables</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210304.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-4-4|PAPER Wed-E-V-4-4 — ProsoBeast Prosody Annotation Tool]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ProsoBeast Prosody Annotation Tool</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211684.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-4-11|PAPER Wed-E-V-4-11 — Parsing Speech for Grouping and Prominence, and the Typology of Rhythm]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Parsing Speech for Grouping and Prominence, and the Typology of Rhythm</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212217.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-1-10|PAPER Thu-A-V-1-10 — Acoustic Features and Neural Representations for Categorical Emotion Recognition from Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Features and Neural Representations for Categorical Emotion Recognition from Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210117.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-1|PAPER Tue-E-V-4-1 — Data Augmentation for Spoken Language Understanding via Pretrained Language Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Data Augmentation for Spoken Language Understanding via Pretrained Language Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211465.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-1-1|PAPER Tue-E-O-1-1 — Information Retrieval for ZeroSpeech 2021: The Submission by University of Wroclaw]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Information Retrieval for ZeroSpeech 2021: The Submission by University of Wroclaw</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211544.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-1-2|PAPER Tue-E-O-1-2 — Aligned Contrastive Predictive Coding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Aligned Contrastive Predictive Coding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210610.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-1-5|PAPER Thu-A-V-1-5 — Stochastic Process Regression for Cross-Cultural Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Stochastic Process Regression for Cross-Cultural Speech Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211187.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-13|PAPER Fri-M-V-7-13 — Partially-Connected Differentiable Architecture Search for Deepfake and Spoofing Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Partially-Connected Differentiable Architecture Search for Deepfake and Spoofing Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210228.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-2-2|PAPER Tue-A-O-2-2 — Variation in Perceptual Sensitivity and Compensation for Coarticulation Across Adult and Child Naturally-Produced and TTS Voices]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Variation in Perceptual Sensitivity and Compensation for Coarticulation Across Adult and Child Naturally-Produced and TTS Voices</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211915.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-9|PAPER Thu-A-V-2-9 — Earnings-21: A Practical Benchmark for ASR in the Wild]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Earnings-21: A Practical Benchmark for ASR in the Wild</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210775.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-11|PAPER Wed-A-V-3-11 — A Comparative Study on Neural Architectures and Training Methods for Japanese Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparative Study on Neural Architectures and Training Methods for Japanese Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210331.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-3-5|PAPER Wed-M-O-3-5 — Real-Time Speaker Counting in a Cocktail Party Scenario Using Attention-Guided Convolutional Neural Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Real-Time Speaker Counting in a Cocktail Party Scenario Using Attention-Guided Convolutional Neural Network</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211230.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-3-2|PAPER Fri-M-O-3-2 — Combating Reverberation in NTF-Based Speech Separation Using a Sub-Source Weighted Multichannel Wiener Filter and Linear Prediction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Combating Reverberation in NTF-Based Speech Separation Using a Sub-Source Weighted Multichannel Wiener Filter and Linear Prediction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211915.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-9|PAPER Thu-A-V-2-9 — Earnings-21: A Practical Benchmark for ASR in the Wild]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Earnings-21: A Practical Benchmark for ASR in the Wild</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211915.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-9|PAPER Thu-A-V-2-9 — Earnings-21: A Practical Benchmark for ASR in the Wild]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Earnings-21: A Practical Benchmark for ASR in the Wild</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211286.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-12|PAPER Fri-M-V-6-12 — Keyword Transformer: A Self-Attention Model for Keyword Spotting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Keyword Transformer: A Self-Attention Model for Keyword Spotting</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211496.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-6-7|PAPER Thu-M-V-6-7 — Take a Breath: Respiratory Sounds Improve Recollection in Synthetic Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Take a Breath: Respiratory Sounds Improve Recollection in Synthetic Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211056.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-2-4|PAPER Fri-M-V-2-4 — Revisiting Recall Effects of Filler Particles in German and English]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Revisiting Recall Effects of Filler Particles in German and English</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211710.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-3|PAPER Wed-M-O-1-3 — Self-Supervised End-to-End ASR for Low Resource L2 Swedish]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Self-Supervised End-to-End ASR for Low Resource L2 Swedish</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218001.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-S&T-1-1|PAPER Tue-A-S&T-1-1 — Application for Detecting Depression, Parkinson’s Disease and Dysphonic Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Application for Detecting Depression, Parkinson’s Disease and Dysphonic Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210836.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-7|PAPER Thu-A-V-2-7 — Listen with Intent: Improving Speech Recognition with Audio-to-Intent Front-End]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Listen with Intent: Improving Speech Recognition with Audio-to-Intent Front-End</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211685.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-3-4|PAPER Wed-E-O-3-4 — Non-Intrusive Speech Quality Assessment with Transfer Learning and Subject-Specific Scaling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Non-Intrusive Speech Quality Assessment with Transfer Learning and Subject-Specific Scaling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210398.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-SS-1-5|PAPER Wed-A-SS-1-5 — Our Learned Lessons from Cross-Lingual Speaker Verification: The CRMI-DKU System Description for the Short-Duration Speaker Verification Challenge 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Our Learned Lessons from Cross-Lingual Speaker Verification: The CRMI-DKU System Description for the Short-Duration Speaker Verification Challenge 2021</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210220.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-3|PAPER Wed-E-V-5-3 — Human Listening and Live Captioning: Multi-Task Training for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Human Listening and Live Captioning: Multi-Task Training for Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210682.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-6|PAPER Wed-M-V-5-6 — Effects of Aging and Age-Related Hearing Loss on Talker Discrimination]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effects of Aging and Age-Related Hearing Loss on Talker Discrimination</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210976.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-11|PAPER Wed-A-V-5-11 — High-Fidelity Parallel WaveGAN with Multi-Band Harmonic-Plus-Noise Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">High-Fidelity Parallel WaveGAN with Multi-Band Harmonic-Plus-Noise Model</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210188.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-2|PAPER Thu-A-V-5-2 — LiteTTS: A Lightweight Mel-Spectrogram-Free Text-to-Wave Synthesizer Based on Generative Adversarial Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">LiteTTS: A Lightweight Mel-Spectrogram-Free Text-to-Wave Synthesizer Based on Generative Adversarial Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210239.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-1|PAPER Wed-M-V-3-1 — N-Singer: A Non-Autoregressive Korean Singing Voice Synthesis System for Pronunciation Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">N-Singer: A Non-Autoregressive Korean Singing Voice Synthesis System for Pronunciation Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211169.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-1|PAPER Wed-E-V-3-1 — Multi-Domain Knowledge Distillation via Uncertainty-Matching for End-to-End ASR Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Domain Knowledge Distillation via Uncertainty-Matching for End-to-End ASR Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211025.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-8|PAPER Wed-E-V-5-8 — DEMUCS-Mobile : On-Device Lightweight Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DEMUCS-Mobile : On-Device Lightweight Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210465.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-5|PAPER Fri-A-V-5-5 — Expressive Text-to-Speech Using Style Tag]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Expressive Text-to-Speech Using Style Tag</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210771.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-5|PAPER Wed-M-V-3-5 — EfficientSing: A Chinese Singing Voice Synthesis System Using Duration-Free Acoustic Model and HiFi-GAN Vocoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">EfficientSing: A Chinese Singing Voice Synthesis System Using Duration-Free Acoustic Model and HiFi-GAN Vocoder</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211232.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-4-4|PAPER Fri-M-V-4-4 — Improving Polyphone Disambiguation for Mandarin Chinese by Combining Mix-Pooling Strategy and Window-Based Attention]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Polyphone Disambiguation for Mandarin Chinese by Combining Mix-Pooling Strategy and Window-Based Attention</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211148.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-6|PAPER Wed-E-V-6-6 — EMOVIE: A Mandarin Emotion Speech Dataset with a Simple Emotional Text-to-Speech Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">EMOVIE: A Mandarin Emotion Speech Dataset with a Simple Emotional Text-to-Speech Model</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210747.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-2|PAPER Thu-A-V-4-2 — Investigation of Spatial-Acoustic Features for Overlapping Speech Detection in Multiparty Meetings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigation of Spatial-Acoustic Features for Overlapping Speech Detection in Multiparty Meetings</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210819.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-5|PAPER Fri-A-V-3-5 — Extremely Low Footprint End-to-End ASR System for Smart Device]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Extremely Low Footprint End-to-End ASR System for Smart Device</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210600.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-1-5|PAPER Tue-M-V-1-5 — Binary Neural Network for Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Binary Neural Network for Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210235.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-2|PAPER Tue-E-V-1-2 — The DKU-Duke-Lenovo System Description for the Fearless Steps Challenge Phase III]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The DKU-Duke-Lenovo System Description for the Fearless Steps Challenge Phase III</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210398.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-SS-1-5|PAPER Wed-A-SS-1-5 — Our Learned Lessons from Cross-Lingual Speaker Verification: The CRMI-DKU System Description for the Short-Duration Speaker Verification Challenge 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Our Learned Lessons from Cross-Lingual Speaker Verification: The CRMI-DKU System Description for the Short-Duration Speaker Verification Challenge 2021</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210755.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-4|PAPER Wed-E-V-6-4 — AISHELL-3: A Multi-Speaker Mandarin TTS Corpus]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AISHELL-3: A Multi-Speaker Mandarin TTS Corpus</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210602.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-10|PAPER Fri-M-V-6-10 — The 2020 Personalized Voice Trigger Challenge: Open Datasets, Evaluation Metrics, Baseline System and Results]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The 2020 Personalized Voice Trigger Challenge: Open Datasets, Evaluation Metrics, Baseline System and Results</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210684.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-6|PAPER Tue-A-V-1-6 — Event Specific Attention for Polyphonic Sound Event Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Event Specific Attention for Polyphonic Sound Event Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210067.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-SS-1-3|PAPER Fri-M-SS-1-3 — An Improved Wav2Vec 2.0 Pre-Training Approach Using Enhanced Local Dependency Modeling for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Improved Wav2Vec 2.0 Pre-Training Approach Using Enhanced Local Dependency Modeling for Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210733.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-1|PAPER Wed-A-V-6-1 — SpecRec: An Alternative Solution for Improving End-to-End Speech-to-Text Translation via Spectrogram Reconstruction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SpecRec: An Alternative Solution for Improving End-to-End Speech-to-Text Translation via Spectrogram Reconstruction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218032.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-S&T-1-7|PAPER Fri-A-S&T-1-7 — Duplex Conversation in Outbound Agent System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Duplex Conversation in Outbound Agent System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211965.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-5|PAPER Thu-A-V-6-5 — GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211467.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-10|PAPER Fri-M-V-1-10 — Transformer Based End-to-End Mispronunciation Detection and Diagnosis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transformer Based End-to-End Mispronunciation Detection and Diagnosis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210153.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-4|PAPER Tue-A-SS-1-4 — Communication-Efficient Agnostic Federated Averaging]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Communication-Efficient Agnostic Federated Averaging</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210691.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-3|PAPER Fri-A-V-1-3 — Understanding Medical Conversations: Rich Transcription, Confidence Scores & Information Extraction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Understanding Medical Conversations: Rich Transcription, Confidence Scores & Information Extraction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210960.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-4|PAPER Fri-M-V-7-4 — Cross-Database Replay Detection in Terminal-Dependent Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Database Replay Detection in Terminal-Dependent Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211065.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-8|PAPER Wed-A-V-6-8 — End-to-End Speech Translation via Cross-Modal Progressive Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Speech Translation via Cross-Modal Progressive Training</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211176.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-8|PAPER Wed-E-V-2-8 — TDCA-Net: Time-Domain Channel Attention Network for Depression Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">TDCA-Net: Time-Domain Channel Attention Network for Depression Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210717.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-2|PAPER Tue-A-V-4-2 — wav2vec-C: A Self-Supervised Model for Speech Representation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">wav2vec-C: A Self-Supervised Model for Speech Representation Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210836.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-7|PAPER Thu-A-V-2-7 — Listen with Intent: Improving Speech Recognition with Audio-to-Intent Front-End]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Listen with Intent: Improving Speech Recognition with Audio-to-Intent Front-End</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211353.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-13|PAPER Fri-A-SS-2-13 — Automatic Severity Classification of Korean Dysarthric Speech Using Phoneme-Level Pronunciation Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Severity Classification of Korean Dysarthric Speech Using Phoneme-Level Pronunciation Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211025.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-8|PAPER Wed-E-V-5-8 — DEMUCS-Mobile : On-Device Lightweight Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DEMUCS-Mobile : On-Device Lightweight Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211868.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-1|PAPER Wed-E-V-5-1 — Personalized Speech Enhancement Through Self-Supervised Data Augmentation and Purification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Personalized Speech Enhancement Through Self-Supervised Data Augmentation and Purification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212073.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-6-9|PAPER Thu-M-V-6-9 — Mixture of Orthogonal Sequences Made from Extended Time-Stretched Pulses Enables Measurement of Involuntary Voice Fundamental Frequency Response to Pitch Perturbation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Mixture of Orthogonal Sequences Made from Extended Time-Stretched Pulses Enables Measurement of Involuntary Voice Fundamental Frequency Response to Pitch Perturbation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211823.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-2-1|PAPER Fri-M-V-2-1 — Leveraging Real-Time MRI for Illuminating Linguistic Velum Action]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Leveraging Real-Time MRI for Illuminating Linguistic Velum Action</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210599.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-9|PAPER Tue-M-V-3-9 — MetricGAN+: An Improved Version of MetricGAN for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MetricGAN+: An Improved Version of MetricGAN for Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210941.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-4|PAPER Thu-A-V-4-4 — ECAPA-TDNN Embeddings for Speaker Diarization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ECAPA-TDNN Embeddings for Speaker Diarization</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210456.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-12|PAPER Fri-A-V-3-12 — The Energy and Carbon Footprint of Training End-to-End Speech Recognizers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Energy and Carbon Footprint of Training End-to-End Speech Recognizers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218017.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-S&T-1-3|PAPER Thu-M-S&T-1-3 — The INGENIOUS Multilingual Operations App]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INGENIOUS Multilingual Operations App</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212249.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-2|PAPER Wed-A-V-1-2 — A Psychology-Driven Computational Analysis of Political Interviews]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Psychology-Driven Computational Analysis of Political Interviews</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211776.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-4-12|PAPER Wed-E-V-4-12 — Prosody of Case Markers in Urdu]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prosody of Case Markers in Urdu</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211464.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-O-2-3|PAPER Fri-A-O-2-3 — Semantic Sentence Similarity: Size does not Always Matter]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semantic Sentence Similarity: Size does not Always Matter</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211857.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-2-3|PAPER Wed-A-O-2-3 — Fair Voice Biometrics: Impact of Demographic Imbalance on Group Fairness in Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fair Voice Biometrics: Impact of Demographic Imbalance on Group Fairness in Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210190.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-4-2|PAPER Wed-E-V-4-2 — On the Feasibility of the Danish Model of Intonational Transcription: Phonetic Evidence from Jutlandic Danish]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On the Feasibility of the Danish Model of Intonational Transcription: Phonetic Evidence from Jutlandic Danish</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210685.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-2-3|PAPER Fri-M-V-2-3 — Exploration of Welsh English Pre-Aspiration: How Wide-Spread is it?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploration of Welsh English Pre-Aspiration: How Wide-Spread is it?</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218021.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-S&T-1-1|PAPER Fri-A-S&T-1-1 — Interactive and Real-Time Acoustic Measurement Tools for Speech Data Acquisition and Presentation: Application of an Extended Member of Time Stretched Pulses]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Interactive and Real-Time Acoustic Measurement Tools for Speech Data Acquisition and Presentation: Application of an Extended Member of Time Stretched Pulses</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210827.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-3|PAPER Wed-E-V-2-3 — Impact of Emotional State on Estimation of Willingness to Buy from Advertising Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Impact of Emotional State on Estimation of Willingness to Buy from Advertising Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211798.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-4|PAPER Tue-M-SS-1-4 — Transfer Learning and Data Augmentation Techniques to the COVID-19 Identification Tasks in ComParE 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transfer Learning and Data Augmentation Techniques to the COVID-19 Identification Tasks in ComParE 2021</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211774.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-12|PAPER Thu-A-V-5-12 — SC-GlowTTS: An Efficient Zero-Shot Multi-Speaker Text-To-Speech Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SC-GlowTTS: An Efficient Zero-Shot Multi-Speaker Text-To-Speech Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211672.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-1-8|PAPER Wed-M-V-1-8 — Excitation Source Feature Based Dialect Identification in Ao — A Low Resource Language]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Excitation Source Feature Based Dialect Identification in Ao — A Low Resource Language</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210097.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-2|PAPER Fri-M-V-6-2 — Paraphrase Label Alignment for Voice Application Retrieval in Spoken Language Understanding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Paraphrase Label Alignment for Voice Application Retrieval in Spoken Language Understanding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210341.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-3|PAPER Wed-E-V-6-3 — RyanSpeech: A Corpus for Conversational Text-to-Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">RyanSpeech: A Corpus for Conversational Text-to-Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210867.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-O-2-1|PAPER Thu-A-O-2-1 — End-to-End Optimized Multi-Stage Vector Quantization of Spectral Envelopes for Speech and Audio Coding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Optimized Multi-Stage Vector Quantization of Spectral Envelopes for Speech and Audio Coding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210336.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-2-3|PAPER Tue-A-O-2-3 — Extracting Different Levels of Speech Information from EEG Using an LSTM-Based Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Extracting Different Levels of Speech Information from EEG Using an LSTM-Based Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211712.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-2-6|PAPER Wed-A-O-2-6 — Adversarial Disentanglement of Speaker Representation for Attribute-Driven Privacy Preservation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adversarial Disentanglement of Speaker Representation for Attribute-Driven Privacy Preservation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211897.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-11|PAPER Thu-A-SS-1-11 — WaveGrad 2: Iterative Refinement for Text-to-Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">WaveGrad 2: Iterative Refinement for Text-to-Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211255.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-1-5|PAPER Thu-M-O-1-5 — Investigating Methods to Improve Language Model Integration for Attention-Based Encoder-Decoder ASR Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Methods to Improve Language Model Integration for Attention-Based Encoder-Decoder ASR Models</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211623.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-3-2|PAPER Thu-M-O-3-2 — Acoustic Data-Driven Subword Modeling for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Data-Driven Subword Modeling for End-to-End Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210677.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-7|PAPER Tue-A-V-4-7 — Semi-Supervision in ASR: Sequential MixMatch and Factorized TTS-Based Augmentation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semi-Supervision in ASR: Sequential MixMatch and Factorized TTS-Based Augmentation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211600.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-8|PAPER Wed-A-V-5-8 — Continuous Wavelet Vocoder-Based Decomposition of Parametric Speech Waveform Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Continuous Wavelet Vocoder-Based Decomposition of Parametric Speech Waveform Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211754.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-7-10|PAPER Tue-M-V-7-10 — Sound Change in Spontaneous Bilingual Speech: A Corpus Study on the Cantonese n-l Merger in Cantonese-English Bilinguals]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sound Change in Spontaneous Bilingual Speech: A Corpus Study on the Cantonese n-l Merger in Cantonese-English Bilinguals</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218012.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-S&T-1-5|PAPER Wed-A-S&T-1-5 — ThemePro 2.0: Showcasing the Role of Thematic Progression in Engaging Human-Computer Interaction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ThemePro 2.0: Showcasing the Role of Thematic Progression in Engaging Human-Computer Interaction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211888.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-10|PAPER Tue-E-V-5-10 — Best of Both Worlds: Robust Accented Speech Recognition with Adversarial Transfer Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Best of Both Worlds: Robust Accented Speech Recognition with Adversarial Transfer Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211849.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-7-8|PAPER Thu-M-V-7-8 — Adapting Long Context NLM for ASR Rescoring in Conversational Agents]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adapting Long Context NLM for ASR Rescoring in Conversational Agents</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211405.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-6|PAPER Tue-M-V-2-6 — Deliberation-Based Multi-Pass Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deliberation-Based Multi-Pass Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212202.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-1-5|PAPER Fri-M-O-1-5 — Simulating Reading Mistakes for Child Speech Transformer-Based Phone Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Simulating Reading Mistakes for Child Speech Transformer-Based Phone Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218025.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-S&T-1-4|PAPER Fri-A-S&T-1-4 — Lalilo: A Reading Assistant for Children Featuring Speech Recognition-Based Reading Mistake Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Lalilo: A Reading Assistant for Children Featuring Speech Recognition-Based Reading Mistake Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212230.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-2-4|PAPER Wed-M-O-2-4 — Domain-Initial Strengthening in Turkish: Acoustic Cues to Prosodic Hierarchy in Stop Consonants]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Domain-Initial Strengthening in Turkish: Acoustic Cues to Prosodic Hierarchy in Stop Consonants</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211755.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-10|PAPER Wed-M-V-2-10 — The Zero Resource Speech Challenge 2021: Spoken Language Modelling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Zero Resource Speech Challenge 2021: Spoken Language Modelling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212127.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-3-6|PAPER Thu-M-O-3-6 — Training Hybrid Models on Noisy Transliterated Transcripts for Code-Switched Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Training Hybrid Models on Noisy Transliterated Transcripts for Code-Switched Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211172.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-4-9|PAPER Wed-E-V-4-9 — In-Group Advantage in the Perception of Emotions: Evidence from Three Varieties of German]]</div>|^<div class="cpauthorindexpersoncardpapertitle">In-Group Advantage in the Perception of Emotions: Evidence from Three Varieties of German</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211633.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-9|PAPER Thu-A-SS-2-9 — Alzheimer’s Dementia Recognition Using Acoustic, Lexical, Disfluency and Speech Pause Features Robust to Noisy Inputs]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Alzheimer’s Dementia Recognition Using Acoustic, Lexical, Disfluency and Speech Pause Features Robust to Noisy Inputs</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212000.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-7|PAPER Thu-A-V-6-7 — AusKidTalk: An Auditory-Visual Corpus of 3- to 12-Year-Old Australian Children’s Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AusKidTalk: An Auditory-Visual Corpus of 3- to 12-Year-Old Australian Children’s Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211131.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-6|PAPER Thu-A-SS-1-6 — Toward Streaming ASR with Non-Autoregressive Insertion-Based Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Toward Streaming ASR with Non-Autoregressive Insertion-Based Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^{{$:/causal/NO-PDF Marker}}|^<div class="cpauthorindexpersoncardpapercode">[[Thu-Keynote|PAPER Thu-Keynote — Adaptive Listening to Everyday Soundscapes]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adaptive Listening to Everyday Soundscapes</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212127.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-3-6|PAPER Thu-M-O-3-6 — Training Hybrid Models on Noisy Transliterated Transcripts for Code-Switched Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Training Hybrid Models on Noisy Transliterated Transcripts for Code-Switched Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211592.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-10|PAPER Fri-M-V-5-10 — Automatic Detection of Shouted Speech Segments in Indian News Debates]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Detection of Shouted Speech Segments in Indian News Debates</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211188.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-8|PAPER Tue-A-SS-1-8 — Evaluating the Vulnerability of End-to-End Automatic Speech Recognition Models to Membership Inference Attacks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Evaluating the Vulnerability of End-to-End Automatic Speech Recognition Models to Membership Inference Attacks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211572.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-8|PAPER Thu-A-SS-2-8 — Tackling the ADRESSO Challenge 2021: The MUET-RMIT System for Alzheimer’s Dementia Recognition from Spontaneous Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Tackling the ADRESSO Challenge 2021: The MUET-RMIT System for Alzheimer’s Dementia Recognition from Spontaneous Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211582.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-7-6|PAPER Thu-M-V-7-6 — PhonemeBERT: Joint Language Modelling of Phoneme Sequence and ASR Transcript]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PhonemeBERT: Joint Language Modelling of Phoneme Sequence and ASR Transcript</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210176.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-7|PAPER Fri-M-V-3-7 — Deep Neural Network Calibration for E2E Speech Recognition System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Neural Network Calibration for E2E Speech Recognition System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211536.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-4-1|PAPER Tue-M-V-4-1 — User-Initiated Repetition-Based Recovery in Multi-Utterance Dialogue Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">User-Initiated Repetition-Based Recovery in Multi-Utterance Dialogue Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211399.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-O-2-2|PAPER Fri-A-O-2-2 — End-to-End Open Vocabulary Keyword Search]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Open Vocabulary Keyword Search</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210164.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-2|PAPER Thu-M-V-3-2 — Group Delay Based Re-Weighted Sparse Recovery Algorithms for Robust and High-Resolution Source Separation in DOA Framework]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Group Delay Based Re-Weighted Sparse Recovery Algorithms for Robust and High-Resolution Source Separation in DOA Framework</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210788.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-6|PAPER Fri-A-V-6-6 — Speak or Chat with Me: End-to-End Spoken Language Understanding System with Flexible Inputs]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speak or Chat with Me: End-to-End Spoken Language Understanding System with Flexible Inputs</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210400.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-8|PAPER Fri-M-V-6-8 — Auxiliary Sequence Labeling Tasks for Disfluency Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Auxiliary Sequence Labeling Tasks for Disfluency Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210469.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-4|PAPER Thu-A-V-5-4 — Diff-TTS: A Denoising Diffusion Model for Text-to-Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Diff-TTS: A Denoising Diffusion Model for Text-to-Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211155.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-4|PAPER Tue-A-V-2-4 — Image-Based Assessment of Jaw Parameters and Jaw Kinematics for Articulatory Simulation: Preliminary Results]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Image-Based Assessment of Jaw Parameters and Jaw Kinematics for Articulatory Simulation: Preliminary Results</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211155.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-4|PAPER Tue-A-V-2-4 — Image-Based Assessment of Jaw Parameters and Jaw Kinematics for Articulatory Simulation: Preliminary Results]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Image-Based Assessment of Jaw Parameters and Jaw Kinematics for Articulatory Simulation: Preliminary Results</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210412.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-4|PAPER Fri-A-V-5-4 — Controllable Context-Aware Conversational Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Controllable Context-Aware Conversational Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211967.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-11|PAPER Wed-E-V-2-11 — Speech Based Depression Severity Level Classification Using a Multi-Stage Dilated CNN-LSTM Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Based Depression Severity Level Classification Using a Multi-Stage Dilated CNN-LSTM Model</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211960.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-2-8|PAPER Fri-A-V-2-8 — Generalized Dilated CNN Models for Depression Detection Using Inverted Vocal Tract Variables]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Generalized Dilated CNN Models for Depression Detection Using Inverted Vocal Tract Variables</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212127.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-3-6|PAPER Thu-M-O-3-6 — Training Hybrid Models on Noisy Transliterated Transcripts for Code-Switched Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Training Hybrid Models on Noisy Transliterated Transcripts for Code-Switched Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211905.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-10|PAPER Thu-A-V-6-10 — Europarl-ASR: A Large Corpus of Parliamentary Debates for Streaming ASR Benchmarking and Speech Data Filtering/Verbatimization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Europarl-ASR: A Large Corpus of Parliamentary Debates for Streaming ASR Benchmarking and Speech Data Filtering/Verbatimization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211962.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-12|PAPER Wed-E-V-3-12 — 4-Bit Quantization of LSTM-Based Speech Recognition Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">4-Bit Quantization of LSTM-Based Speech Recognition Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211874.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-10|PAPER Tue-M-V-6-10 — Segmental Contrastive Predictive Coding for Unsupervised Word Segmentation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Segmental Contrastive Predictive Coding for Unsupervised Word Segmentation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211163.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-1-2|PAPER Tue-A-O-1-2 — Spine2Net: SpineNet with Res2Net and Time-Squeeze-and-Excitation Blocks for Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spine2Net: SpineNet with Res2Net and Time-Squeeze-and-Excitation Blocks for Speaker Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211502.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-9|PAPER Tue-E-V-1-9 — Deep Feature CycleGANs: Speaker Identity Preserving Non-Parallel Microphone-Telephone Domain Adaptation for Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Feature CycleGANs: Speaker Identity Preserving Non-Parallel Microphone-Telephone Domain Adaptation for Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211897.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-11|PAPER Thu-A-SS-1-11 — WaveGrad 2: Iterative Refinement for Text-to-Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">WaveGrad 2: Iterative Refinement for Text-to-Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211906.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-12|PAPER Thu-A-SS-1-12 — Align-Denoise: Single-Pass Non-Autoregressive Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Align-Denoise: Single-Pass Non-Autoregressive Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211850.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-10|PAPER Thu-A-SS-2-10 — Automatic Detection and Assessment of Alzheimer Disease Using Speech and Language Technologies in Low-Resource Scenarios]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Detection and Assessment of Alzheimer Disease Using Speech and Language Technologies in Low-Resource Scenarios</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211759.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-10|PAPER Fri-M-V-7-10 — Representation Learning to Classify and Detect Adversarial Attacks Against Speaker and Speech Recognition Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Representation Learning to Classify and Detect Adversarial Attacks Against Speaker and Speech Recognition Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212008.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-11|PAPER Thu-M-V-1-11 — Source and Vocal Tract Cues for Speech-Based Classification of Patients with Parkinson’s Disease and Healthy Subjects]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Source and Vocal Tract Cues for Speech-Based Classification of Patients with Parkinson’s Disease and Healthy Subjects</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210249.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-SS-1-4|PAPER Wed-A-SS-1-4 — Team02 Text-Independent Speaker Verification System for SdSV Challenge 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Team02 Text-Independent Speaker Verification System for SdSV Challenge 2021</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210469.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-4|PAPER Thu-A-V-5-4 — Diff-TTS: A Denoising Diffusion Model for Text-to-Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Diff-TTS: A Denoising Diffusion Model for Text-to-Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211040.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-12|PAPER Thu-A-V-6-12 — kosp2e: Korean Speech to English Translation Corpus]]</div>|^<div class="cpauthorindexpersoncardpapertitle">kosp2e: Korean Speech to English Translation Corpus</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210465.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-5|PAPER Fri-A-V-5-5 — Expressive Text-to-Speech Using Style Tag]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Expressive Text-to-Speech Using Style Tag</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210440.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-5|PAPER Tue-A-V-2-5 — An Attention Self-Supervised Contrastive Learning Based Three-Stage Model for Hand Shape Feature Representation in Cued Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Attention Self-Supervised Contrastive Learning Based Three-Stage Model for Hand Shape Feature Representation in Cued Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211258.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-8|PAPER Fri-A-V-1-8 — Multilingual Speech Evaluation: Case Studies on English, Malay and Tamil]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multilingual Speech Evaluation: Case Studies on English, Malay and Tamil</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218028.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-S&T-1-6|PAPER Fri-A-S&T-1-6 — WittyKiddy: Multilingual Spoken Language Learning for Kids]]</div>|^<div class="cpauthorindexpersoncardpapertitle">WittyKiddy: Multilingual Spoken Language Learning for Kids</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211581.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-2-1|PAPER Tue-M-O-2-1 — Acoustic Indicators of Speech Motor Coordination in Adults With and Without Traumatic Brain Injury]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Indicators of Speech Motor Coordination in Adults With and Without Traumatic Brain Injury</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211403.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-3-4|PAPER Wed-A-O-3-4 — The Impact of Forced-Alignment Errors on Automatic Pronunciation Evaluation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Impact of Forced-Alignment Errors on Automatic Pronunciation Evaluation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211897.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-11|PAPER Thu-A-SS-1-11 — WaveGrad 2: Iterative Refinement for Text-to-Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">WaveGrad 2: Iterative Refinement for Text-to-Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211906.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-12|PAPER Thu-A-SS-1-12 — Align-Denoise: Single-Pass Non-Autoregressive Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Align-Denoise: Single-Pass Non-Autoregressive Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211004.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-5|PAPER Thu-A-V-4-5 — Advances in Integration of End-to-End Neural and Clustering-Based Diarization for Real Conversational Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Advances in Integration of End-to-End Neural and Clustering-Based Diarization for Real Conversational Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210731.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-6|PAPER Tue-M-V-6-6 — Enrollment-Less Training for Personalized Voice Activity Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Enrollment-Less Training for Personalized Voice Activity Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211607.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-5-5|PAPER Tue-A-V-5-5 — Zero-Shot Joint Modeling of Multiple Spoken-Text-Style Conversion Tasks Using Switching Tokens]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Zero-Shot Joint Modeling of Multiple Spoken-Text-Style Conversion Tasks Using Switching Tokens</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-13|PAPER Wed-E-V-3-13 — Unified Autoregressive Modeling for Joint End-to-End Multi-Talker Overlapped Speech Recognition and Speaker Attribute Estimation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unified Autoregressive Modeling for Joint End-to-End Multi-Talker Overlapped Speech Recognition and Speaker Attribute Estimation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211992.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-6|PAPER Fri-M-V-3-6 — Cross-Modal Transformer-Based Neural Correction Models for Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Modal Transformer-Based Neural Correction Models for Automatic Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211981.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-11|PAPER Fri-A-V-1-11 — End-to-End Rich Transcription-Style Automatic Speech Recognition with Semi-Supervised Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Rich Transcription-Style Automatic Speech Recognition with Semi-Supervised Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210914.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-4|PAPER Tue-M-V-2-4 — Phonetic and Prosodic Information Estimation from Texts for Genuine Japanese End-to-End Text-to-Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonetic and Prosodic Information Estimation from Texts for Genuine Japanese End-to-End Text-to-Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212253.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-10|PAPER Tue-E-V-2-10 — Should We Always Separate?: Switching Between Enhanced and Observed Signals for Overlapping Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Should We Always Separate?: Switching Between Enhanced and Observed Signals for Overlapping Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210207.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-5|PAPER Wed-M-V-6-5 — Streaming Multi-Talker Speech Recognition with Joint Speaker Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Streaming Multi-Talker Speech Recognition with Joint Speaker Identification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212075.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-14|PAPER Wed-E-V-3-14 — Minimum Word Error Rate Training with Language Model Fusion for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Minimum Word Error Rate Training with Language Model Fusion for End-to-End Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210921.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-9|PAPER Thu-M-V-3-9 — Investigation of Practical Aspects of Single Channel Speech Separation for ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigation of Practical Aspects of Single Channel Speech Separation for ASR</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210102.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-2|PAPER Thu-A-V-2-2 — Large-Scale Pre-Training of End-to-End Multi-Talker ASR for Meeting Transcription with Single Distant Microphone]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Large-Scale Pre-Training of End-to-End Multi-Talker ASR for Meeting Transcription with Single Distant Microphone</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210161.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-3|PAPER Thu-A-V-2-3 — On Minimum Word Error Rate Training of the Hybrid Autoregressive Transducer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On Minimum Word Error Rate Training of the Hybrid Autoregressive Transducer</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210101.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-2|PAPER Fri-A-V-1-2 — End-to-End Speaker-Attributed ASR with Transformer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Speaker-Attributed ASR with Transformer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210799.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-6|PAPER Tue-A-SS-2-6 — Classification of COVID-19 from Cough Using Autoregressive Predictive Coding Pretraining and Spectral Data Augmentation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Classification of COVID-19 from Cough Using Autoregressive Predictive Coding Pretraining and Spectral Data Augmentation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211685.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-3-4|PAPER Wed-E-O-3-4 — Non-Intrusive Speech Quality Assessment with Transfer Learning and Subject-Specific Scaling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Non-Intrusive Speech Quality Assessment with Transfer Learning and Subject-Specific Scaling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211070.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-13|PAPER Tue-E-V-1-13 — Speaker Anonymisation Using the McAdams Coefficient]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Anonymisation Using the McAdams Coefficient</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210556.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-5|PAPER Wed-M-O-1-5 — //LeBenchmark//: A Reproducible Framework for Assessing Self-Supervised Representation Learning from Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">//LeBenchmark//: A Reproducible Framework for Assessing Self-Supervised Representation Learning from Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211588.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-8|PAPER Thu-A-V-4-8 — Anonymous Speaker Clusters: Making Distinctions Between Anonymised Speech Recordings with Clustering Interface]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Anonymous Speaker Clusters: Making Distinctions Between Anonymised Speech Recordings with Clustering Interface</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211915.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-9|PAPER Thu-A-V-2-9 — Earnings-21: A Practical Benchmark for ASR in the Wild]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Earnings-21: A Practical Benchmark for ASR in the Wild</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210333.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-2-1|PAPER Thu-M-SS-2-1 — Towards an Accent-Robust Approach for ATC Communications Transcription]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards an Accent-Robust Approach for ATC Communications Transcription</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211417.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-2-5|PAPER Tue-E-O-2-5 — Glottal Sounds in Korebaju]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Glottal Sounds in Korebaju</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210941.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-4|PAPER Thu-A-V-4-4 — ECAPA-TDNN Embeddings for Speaker Diarization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ECAPA-TDNN Embeddings for Speaker Diarization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-3|PAPER Tue-A-V-2-3 — A Comparative Study of Different EMG Features for Acoustics-to-EMG Mapping]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparative Study of Different EMG Features for Acoustics-to-EMG Mapping</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210744.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-2|PAPER Wed-A-V-6-2 — Subtitle Translation as Markup Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Subtitle Translation as Markup Translation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210614.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-6|PAPER Wed-E-V-3-6 — Self-Adaptive Distillation for Multilingual Speech Recognition: Leveraging Student Independence]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Self-Adaptive Distillation for Multilingual Speech Recognition: Leveraging Student Independence</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210441.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-7|PAPER Tue-E-V-6-7 — Normalization Driven Zero-Shot Multi-Speaker Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Normalization Driven Zero-Shot Multi-Speaker Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210074.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-1|PAPER Tue-A-SS-2-1 — DiCOVA Challenge: Dataset, Task, and Baseline System for COVID-19 Diagnosis Using Acoustics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DiCOVA Challenge: Dataset, Task, and Baseline System for COVID-19 Diagnosis Using Acoustics</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211051.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-10|PAPER Wed-A-V-4-10 — Feature Fusion by Attention Networks for Robust DOA Estimation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Feature Fusion by Attention Networks for Robust DOA Estimation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211932.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-10|PAPER Wed-E-V-2-10 — Analysis of Contextual Voice Changes in Remote Meetings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis of Contextual Voice Changes in Remote Meetings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211208.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-6|PAPER Thu-A-V-4-6 — The Third DIHARD Diarization Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Third DIHARD Diarization Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210216.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-2|PAPER Wed-E-V-1-2 — Efficient Weight Factorization for Multilingual Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Efficient Weight Factorization for Multilingual Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210125.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-2-5|PAPER Fri-M-O-2-5 — Prosodic Boundary Prediction Model for Vietnamese Text-To-Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prosodic Boundary Prediction Model for Vietnamese Text-To-Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210125.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-2-5|PAPER Fri-M-O-2-5 — Prosodic Boundary Prediction Model for Vietnamese Text-To-Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prosodic Boundary Prediction Model for Vietnamese Text-To-Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-6|PAPER Tue-A-V-2-6 — Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210800.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-5|PAPER Wed-E-V-6-5 — Comparing Speech Enhancement Techniques for Voice Adaptation-Based Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparing Speech Enhancement Techniques for Voice Adaptation-Based Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210983.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-1|PAPER Tue-A-SS-1-1 — Privacy-Preserving Voice Anti-Spoofing Using Secure Multi-Party Computation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Privacy-Preserving Voice Anti-Spoofing Using Secure Multi-Party Computation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211062.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-2|PAPER Tue-A-SS-2-2 — PANACEA Cough Sound-Based Diagnosis of COVID-19 for the DiCOVA 2021 Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PANACEA Cough Sound-Based Diagnosis of COVID-19 for the DiCOVA 2021 Challenge</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211070.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-13|PAPER Tue-E-V-1-13 — Speaker Anonymisation Using the McAdams Coefficient]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Anonymisation Using the McAdams Coefficient</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210993.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-1-2|PAPER Wed-E-O-1-2 — Graph Attention Networks for Anti-Spoofing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Graph Attention Networks for Anti-Spoofing</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210738.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-2|PAPER Fri-M-V-7-2 — An Initial Investigation for Detecting Partially Spoofed Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Initial Investigation for Detecting Partially Spoofed Audio</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211522.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-9|PAPER Fri-M-V-7-9 — Visualizing Classifier Adjacency Relations: A Case Study in Speaker Verification and Voice Anti-Spoofing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Visualizing Classifier Adjacency Relations: A Case Study in Speaker Verification and Voice Anti-Spoofing</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211187.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-13|PAPER Fri-M-V-7-13 — Partially-Connected Differentiable Architecture Search for Deepfake and Spoofing Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Partially-Connected Differentiable Architecture Search for Deepfake and Spoofing Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210501.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-5|PAPER Fri-A-V-6-5 — Three-Module Modeling For End-to-End Spoken Language Understanding Using Pre-Trained DNN-HMM-Based Acoustic-Phonetic Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Three-Module Modeling For End-to-End Spoken Language Understanding Using Pre-Trained DNN-HMM-Based Acoustic-Phonetic Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212167.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-2-14|PAPER Fri-M-V-2-14 — The Pacific Expansion: Optimizing Phonetic Transcription of Archival Corpora]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Pacific Expansion: Optimizing Phonetic Transcription of Archival Corpora</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210155.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-1-1|PAPER Thu-M-O-1-1 — Self-Paced Ensemble Learning for Speech and Audio Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Self-Paced Ensemble Learning for Speech and Audio Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210182.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-2-1|PAPER Tue-A-O-2-1 — Prosodic Disambiguation Using Chironomic Stylization of Intonation with Native and Non-Native Speakers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prosodic Disambiguation Using Chironomic Stylization of Intonation with Native and Non-Native Speakers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210129.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-7-1|PAPER Tue-M-V-7-1 — “See what I mean, huh?” Evaluating Visual Inspection of F₀ Tracking in Nasal Grunts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">“See what I mean, huh?” Evaluating Visual Inspection of F₀ Tracking in Nasal Grunts</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210294.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-4-3|PAPER Wed-E-V-4-3 — An Experiment in Paratone Detection in a Prosodically Annotated EAP Spoken Corpus]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Experiment in Paratone Detection in a Prosodically Annotated EAP Spoken Corpus</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211755.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-10|PAPER Wed-M-V-2-10 — The Zero Resource Speech Challenge 2021: Spoken Language Modelling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Zero Resource Speech Challenge 2021: Spoken Language Modelling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211774.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-12|PAPER Thu-A-V-5-12 — SC-GlowTTS: An Efficient Zero-Shot Multi-Speaker Text-To-Speech Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SC-GlowTTS: An Efficient Zero-Shot Multi-Speaker Text-To-Speech Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210573.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-2-1|PAPER Thu-M-O-2-1 — Speaker Attentive Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Attentive Speech Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218005.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-S&T-1-5|PAPER Tue-A-S&T-1-5 — The LIUM Human Active Correction Platform for Speaker Diarization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The LIUM Human Active Correction Platform for Speaker Diarization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210315.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-3|PAPER Fri-M-V-1-3 — Testing Acoustic Voice Quality Classification Across Languages and Speech Styles]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Testing Acoustic Voice Quality Classification Across Languages and Speech Styles</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211569.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-3|PAPER Tue-E-V-4-3 — Sequential End-to-End Intent and Slot Label Classification and Localization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sequential End-to-End Intent and Slot Label Classification and Localization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210654.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-6|PAPER Tue-A-V-4-6 — A Comparison of Supervised and Unsupervised Pre-Training of End-to-End Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparison of Supervised and Unsupervised Pre-Training of End-to-End Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211553.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-SS-1-1|PAPER Wed-A-SS-1-1 — The ID R&D System Description for Short-Duration Speaker Verification Challenge 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The ID R&D System Description for Short-Duration Speaker Verification Challenge 2021</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210541.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-5|PAPER Tue-E-V-1-5 — Out of a Hundred Trials, How Many Errors Does Your Speaker Verifier Make?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Out of a Hundred Trials, How Many Errors Does Your Speaker Verifier Make?</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210571.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-5|PAPER Tue-A-V-4-5 — Momentum Pseudo-Labeling for Semi-Supervised Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Momentum Pseudo-Labeling for Semi-Supervised Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211693.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-13|PAPER Wed-M-V-6-13 — Dual Causal/Non-Causal Self-Attention for Streaming End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dual Causal/Non-Causal Self-Attention for Streaming End-to-End Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211643.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-12|PAPER Wed-A-V-3-12 — Advanced Long-Context End-to-End Speech Recognition Using Context-Expanded Transformers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Advanced Long-Context End-to-End Speech Recognition Using Context-Expanded Transformers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210327.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-2|PAPER Wed-M-V-3-2 — Cross-Lingual Low Resource Speaker Adaptation Using Phonological Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Lingual Low Resource Speaker Adaptation Using Phonological Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210462.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-1|PAPER Wed-M-O-1-1 — Golos: Russian Dataset for Speech Research]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Golos: Russian Dataset for Speech Research</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211888.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-10|PAPER Tue-E-V-5-10 — Best of Both Worlds: Robust Accented Speech Recognition with Adversarial Transfer Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Best of Both Worlds: Robust Accented Speech Recognition with Adversarial Transfer Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218011.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-S&T-1-4|PAPER Wed-A-S&T-1-4 — Expressive Robot Performance Based on Facial Motion Capture]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Expressive Robot Performance Based on Facial Motion Capture</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210319.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-6|PAPER Tue-E-V-6-6 — StarGANv2-VC: A Diverse, Unsupervised, Non-Parallel Framework for Natural-Sounding Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">StarGANv2-VC: A Diverse, Unsupervised, Non-Parallel Framework for Natural-Sounding Voice Conversion</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210338.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-3|PAPER Thu-M-V-3-3 — Continuous Speech Separation Using Speaker Inventory for Long Recording]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Continuous Speech Separation Using Speaker Inventory for Long Recording</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211158.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-10|PAPER Thu-M-V-3-10 — Implicit Filter-and-Sum Network for End-to-End Multi-Channel Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Implicit Filter-and-Sum Network for End-to-End Multi-Channel Speech Separation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211161.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-2|PAPER Thu-A-V-3-2 — Empirical Analysis of Generalized Iterative Speech Separation Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Empirical Analysis of Generalized Iterative Speech Separation Networks</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211372.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-6|PAPER Thu-A-V-3-6 — Binaural Speech Separation of Moving Speakers With Preserved Spatial Cues]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Binaural Speech Separation of Moving Speakers With Preserved Spatial Cues</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212028.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-11|PAPER Tue-A-V-1-11 — Variational Information Bottleneck for Effective Low-Resource Audio Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Variational Information Bottleneck for Effective Low-Resource Audio Classification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211066.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-4|PAPER Tue-E-V-3-4 — Dropout Regularization for Self-Supervised Learning of Transformer Encoder Speech Representation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dropout Regularization for Self-Supervised Learning of Transformer Encoder Speech Representation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211996.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-9|PAPER Wed-M-V-3-9 — Speech2Video: Cross-Modal Distillation for Speech to Video Generation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech2Video: Cross-Modal Distillation for Speech to Video Generation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211613.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-3-8|PAPER Tue-A-V-3-8 — Optimising Hearing Aid Fittings for Speech in Noise with a Differentiable Hearing Loss Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Optimising Hearing Aid Fittings for Speech in Noise with a Differentiable Hearing Loss Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211708.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-3|PAPER Wed-A-V-2-3 — Incorporating External POS Tagger for Punctuation Restoration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Incorporating External POS Tagger for Punctuation Restoration</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212024.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-12|PAPER Thu-A-SS-2-12 — Modular Multi-Modal Attention Network for Alzheimer’s Disease Detection Using Patient Audio and Language Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modular Multi-Modal Attention Network for Alzheimer’s Disease Detection Using Patient Audio and Language Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210072.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-3|PAPER Tue-M-V-6-3 — Noisy Student-Teacher Training for Robust Keyword Spotting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Noisy Student-Teacher Training for Robust Keyword Spotting</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210074.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-1|PAPER Tue-A-SS-2-1 — DiCOVA Challenge: Dataset, Task, and Baseline System for COVID-19 Diagnosis Using Acoustics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DiCOVA Challenge: Dataset, Task, and Baseline System for COVID-19 Diagnosis Using Acoustics</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211915.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-9|PAPER Thu-A-V-2-9 — Earnings-21: A Practical Benchmark for ASR in the Wild]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Earnings-21: A Practical Benchmark for ASR in the Wild</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210083.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-1|PAPER Fri-M-V-5-1 — An Agent for Competing with Humans in a Deceptive Game Based on Vocal Cues]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Agent for Competing with Humans in a Deceptive Game Based on Vocal Cues</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210351.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-5-3|PAPER Tue-A-V-5-3 — Disfluency Detection with Unlabeled Data and Small BERT Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Disfluency Detection with Unlabeled Data and Small BERT Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210853.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-6|PAPER Fri-A-V-1-6 — Lexical Density Analysis of Word Productions in Japanese English Using Acoustic Word Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Lexical Density Analysis of Word Productions in Japanese English Using Acoustic Word Embeddings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210688.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-SS-1-6|PAPER Tue-E-SS-1-6 — Articulatory Coordination for Speech Motor Tracking in Huntington Disease]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Articulatory Coordination for Speech Motor Tracking in Huntington Disease</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210248.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-9|PAPER Fri-A-V-3-9 — PQK: Model Compression via Pruning, Quantization, and Knowledge Distillation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PQK: Model Compression via Pruning, Quantization, and Knowledge Distillation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211842.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-10|PAPER Tue-A-V-2-10 — Investigating Speech Reconstruction for Laryngectomees for Silent Speech Interfaces]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Speech Reconstruction for Laryngectomees for Silent Speech Interfaces</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211538.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-2-2|PAPER Fri-M-O-2-2 — Exploring Emotional Prototypes in a High Dimensional TTS Latent Space]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploring Emotional Prototypes in a High Dimensional TTS Latent Space</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211079.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-7-6|PAPER Tue-M-V-7-6 — Voicing Contrasts in the Singleton Stops of Palestinian Arabic: Production and Perception]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voicing Contrasts in the Singleton Stops of Palestinian Arabic: Production and Perception</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210928.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-5|PAPER Wed-E-V-2-5 — Language or Paralanguage, This is the Problem: Comparing Depressed and Non-Depressed Speakers Through the Analysis of Gated Multimodal Units]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Language or Paralanguage, This is the Problem: Comparing Depressed and Non-Depressed Speakers Through the Analysis of Gated Multimodal Units</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210120.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-4-2|PAPER Tue-M-V-4-2 — Self-Supervised Dialogue Learning for Spoken Conversational Question Answering]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Self-Supervised Dialogue Learning for Spoken Conversational Question Answering</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210110.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-7-1|PAPER Thu-M-V-7-1 — Contextualized Attention-Based Knowledge Transfer for Spoken Conversational Question Answering]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Contextualized Attention-Based Knowledge Transfer for Spoken Conversational Question Answering</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210136.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-6|PAPER Fri-M-V-6-6 — Text Anchor Based Metric Learning for Small-Footprint Keyword Spotting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Text Anchor Based Metric Learning for Small-Footprint Keyword Spotting</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211664.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-2|PAPER Wed-M-V-2-2 — Unsupervised Acoustic Unit Discovery by Leveraging a Language-Independent Subword Discriminative Feature Representation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Acoustic Unit Discovery by Leveraging a Language-Independent Subword Discriminative Feature Representation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211935.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-10|PAPER Tue-E-V-1-10 — Scaling Effect of Self-Supervised Speech Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Scaling Effect of Self-Supervised Speech Models</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211644.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-10|PAPER Wed-E-V-3-10 — Scaling Laws for Acoustic Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Scaling Laws for Acoustic Models</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210003.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-2|PAPER Fri-A-V-4-2 — Fusion of Embeddings Networks for Robust Combination of Text Dependent and Independent Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fusion of Embeddings Networks for Robust Combination of Text Dependent and Independent Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211379.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-2-7|PAPER Fri-M-V-2-7 — Dissecting the Aero-Acoustic Parameters of Open Articulatory Transitions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dissecting the Aero-Acoustic Parameters of Open Articulatory Transitions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210496.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-6|PAPER Thu-M-V-2-6 — Evaluation of Audio-Visual Alignments in Visually Grounded Speech Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Evaluation of Audio-Visual Alignments in Visually Grounded Speech Models</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210303.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-1-3|PAPER Thu-A-V-1-3 — Automatic Analysis of the Emotional Content of Speech in Daylong Child-Centered Recordings from a Neonatal Intensive Care Unit]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Analysis of the Emotional Content of Speech in Daylong Child-Centered Recordings from a Neonatal Intensive Care Unit</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211860.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-4|PAPER Wed-M-O-1-4 — SPGISpeech: 5,000 Hours of Transcribed Financial Audio for Fully Formatted End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SPGISpeech: 5,000 Hours of Transcribed Financial Audio for Fully Formatted End-to-End Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210935.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-2-3|PAPER Thu-M-SS-2-3 — Robust Command Recognition for Lithuanian Air Traffic Control Tower Utterances]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Command Recognition for Lithuanian Air Traffic Control Tower Utterances</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211796.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-7|PAPER Wed-A-V-1-7 — Investigating the Interplay Between Affective, Phonatory and Motoric Subsystems in Autism Spectrum Disorder Using a Multimodal Dialogue Agent]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Interplay Between Affective, Phonatory and Motoric Subsystems in Autism Spectrum Disorder Using a Multimodal Dialogue Agent</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211801.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-2|PAPER Fri-A-SS-2-2 — Investigating the Utility of Multimodal Conversational Technology and Audiovisual Analytic Measures for the Assessment and Monitoring of Amyotrophic Lateral Sclerosis at Scale]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Utility of Multimodal Conversational Technology and Audiovisual Analytic Measures for the Assessment and Monitoring of Amyotrophic Lateral Sclerosis at Scale</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211547.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-1-3|PAPER Tue-M-O-1-3 — Evaluating the Extrapolation Capabilities of Neural Vocoders to Extreme Pitch Values]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Evaluating the Extrapolation Capabilities of Neural Vocoders to Extreme Pitch Values</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210223.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-2-3|PAPER Tue-M-O-2-3 — Distortion of Voiced Obstruents for Differential Diagnosis Between Parkinson’s Disease and Multiple System Atrophy]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Distortion of Voiced Obstruents for Differential Diagnosis Between Parkinson’s Disease and Multiple System Atrophy</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210637.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-10|PAPER Wed-M-V-6-10 — Bridging the Gap Between Streaming and Non-Streaming ASR Systems by Distilling Ensembles of CTC and RNN-T Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bridging the Gap Between Streaming and Non-Streaming ASR Systems by Distilling Ensembles of CTC and RNN-T Models</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211621.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-10|PAPER Thu-M-V-2-10 — End-to-End Audio-Visual Speech Recognition for Overlapping Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Audio-Visual Speech Recognition for Overlapping Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212140.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-12|PAPER Fri-A-V-1-12 — “You don’t understand me!”: Comparing ASR Results for L1 and L2 Speakers of Swedish]]</div>|^<div class="cpauthorindexpersoncardpapertitle">“You don’t understand me!”: Comparing ASR Results for L1 and L2 Speakers of Swedish</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210324.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-4|PAPER Wed-M-V-5-4 — SPEECHADJUSTER: A Tool for Investigating Listener Preferences and Speech Intelligibility]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SPEECHADJUSTER: A Tool for Investigating Listener Preferences and Speech Intelligibility</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211842.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-10|PAPER Tue-A-V-2-10 — Investigating Speech Reconstruction for Laryngectomees for Silent Speech Interfaces]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Speech Reconstruction for Laryngectomees for Silent Speech Interfaces</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211456.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-O-1-5|PAPER Fri-A-O-1-5 — EML Online Speech Activity Detection for the Fearless Steps Challenge Phase-III]]</div>|^<div class="cpauthorindexpersoncardpapertitle">EML Online Speech Activity Detection for the Fearless Steps Challenge Phase-III</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212232.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-2-2|PAPER Wed-E-O-2-2 — Lost in Interpreting: Speech Translation from Source or Interpreter?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Lost in Interpreting: Speech Translation from Source or Interpreter?</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211777.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-3|PAPER Tue-A-V-4-3 — On the Learning Dynamics of Semi-Supervised Training for ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On the Learning Dynamics of Semi-Supervised Training for ASR</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211035.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-3-1|PAPER Thu-M-O-3-1 — The CSTR System for Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The CSTR System for Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211033.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-2-2|PAPER Thu-M-SS-2-2 — Detecting English Speech in the Air Traffic Control Voice Communication]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detecting English Speech in the Air Traffic Control Voice Communication</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210087.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-9|PAPER Thu-M-V-4-9 — A Thousand Words are Worth More Than One Recording: //Word-Embedding// Based Speaker Change Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Thousand Words are Worth More Than One Recording: //Word-Embedding// Based Speaker Change Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210983.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-1|PAPER Tue-A-SS-1-1 — Privacy-Preserving Voice Anti-Spoofing Using Secure Multi-Party Computation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Privacy-Preserving Voice Anti-Spoofing Using Secure Multi-Party Computation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210200.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-4|PAPER Tue-M-V-6-4 — Multi-Channel VAD for Transcription of Group Discussion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Channel VAD for Transcription of Group Discussion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210859.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-5|PAPER Wed-E-V-5-5 — Single-Channel Speech Enhancement Using Learnable Loss Mixup]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Single-Channel Speech Enhancement Using Learnable Loss Mixup</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211621.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-10|PAPER Thu-M-V-2-10 — End-to-End Audio-Visual Speech Recognition for Overlapping Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Audio-Visual Speech Recognition for Overlapping Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210983.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-1|PAPER Tue-A-SS-1-1 — Privacy-Preserving Voice Anti-Spoofing Using Secure Multi-Party Computation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Privacy-Preserving Voice Anti-Spoofing Using Secure Multi-Party Computation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211821.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-11|PAPER Tue-M-SS-1-11 — Ensemble-Within-Ensemble Classification for Escalation Prediction from Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ensemble-Within-Ensemble Classification for Escalation Prediction from Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211636.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-9|PAPER Thu-A-V-6-9 — Annotation Confidence vs. Training Sample Size: Trade-Off Solution for Partially-Continuous Categorical Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Annotation Confidence vs. Training Sample Size: Trade-Off Solution for Partially-Continuous Categorical Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211655.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-8|PAPER Tue-M-V-2-8 — Transformer-Based Acoustic Modeling for Streaming Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transformer-Based Acoustic Modeling for Streaming Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211566.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-3|PAPER Wed-M-V-6-3 — Contextualized Streaming End-to-End Speech Recognition with Trie-Based Deep Biasing and Shallow Fusion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Contextualized Streaming End-to-End Speech Recognition with Trie-Based Deep Biasing and Shallow Fusion</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211929.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-1|PAPER Wed-A-V-2-1 — Semantic Distance: A New Metric for ASR Performance Analysis Towards Spoken Language Understanding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semantic Distance: A New Metric for ASR Performance Analysis Towards Spoken Language Understanding</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211272.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-1|PAPER Wed-A-V-3-1 — Dynamic Encoder Transducer: A Flexible Solution for Trading Off Accuracy for Latency]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dynamic Encoder Transducer: A Flexible Solution for Trading Off Accuracy for Latency</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211921.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-14|PAPER Wed-A-V-3-14 — Flexi-Transducer: Optimizing Latency, Accuracy and Compute for Multi-Domain On-Device Scenarios]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Flexi-Transducer: Optimizing Latency, Accuracy and Compute for Multi-Domain On-Device Scenarios</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211887.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-6|PAPER Fri-A-V-3-6 — Dissecting User-Perceived Latency of On-Device E2E Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dissecting User-Perceived Latency of On-Device E2E Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210354.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-10|PAPER Fri-A-V-3-10 — Collaborative Training of Acoustic Encoders for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Collaborative Training of Acoustic Encoders for Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211488.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-3|PAPER Tue-M-SS-1-3 — The Phonetic Footprint of Covid-19?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Phonetic Footprint of Covid-19?</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211589.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-2|PAPER Thu-A-SS-2-2 — Influence of the Interviewer on the Automatic Assessment of Alzheimer’s Disease in the Context of the ADReSSo Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Influence of the Interviewer on the Automatic Assessment of Alzheimer’s Disease in the Context of the ADReSSo Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211488.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-3|PAPER Tue-M-SS-1-3 — The Phonetic Footprint of Covid-19?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Phonetic Footprint of Covid-19?</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211589.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-2|PAPER Thu-A-SS-2-2 — Influence of the Interviewer on the Automatic Assessment of Alzheimer’s Disease in the Context of the ADReSSo Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Influence of the Interviewer on the Automatic Assessment of Alzheimer’s Disease in the Context of the ADReSSo Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211723.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-1-11|PAPER Thu-A-V-1-11 — Leveraging Pre-Trained Language Model for Speech Sentiment Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Leveraging Pre-Trained Language Model for Speech Sentiment Analysis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210309.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-O-1-1|PAPER Fri-A-O-1-1 — Unsupervised Representation Learning for Speech Activity Detection in the Fearless Steps Challenge 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Representation Learning for Speech Activity Detection in the Fearless Steps Challenge 2021</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211685.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-3-4|PAPER Wed-E-O-3-4 — Non-Intrusive Speech Quality Assessment with Transfer Learning and Subject-Specific Scaling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Non-Intrusive Speech Quality Assessment with Transfer Learning and Subject-Specific Scaling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211329.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-3-5|PAPER Tue-A-V-3-5 — Cancellation of Local Competing Speaker with Near-Field Localization for Distributed ad-hoc Sensor Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cancellation of Local Competing Speaker with Near-Field Localization for Distributed ad-hoc Sensor Network</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210703.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-1-7|PAPER Thu-A-V-1-7 — Emotion Recognition from Speech Using wav2vec 2.0 Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Emotion Recognition from Speech Using wav2vec 2.0 Embeddings</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210753.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-4|PAPER Thu-A-SS-2-4 — Alzheimer Disease Recognition Using Speech-Based Embeddings From Pre-Trained Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Alzheimer Disease Recognition Using Speech-Based Embeddings From Pre-Trained Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211758.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-10|PAPER Tue-M-V-5-10 — Rethinking Evaluation in ASR: Are Our Models Robust Enough?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Rethinking Evaluation in ASR: Are Our Models Robust Enough?</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210072.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-3|PAPER Tue-M-V-6-3 — Noisy Student-Teacher Training for Robust Keyword Spotting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Noisy Student-Teacher Training for Robust Keyword Spotting</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211384.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-1|PAPER Fri-A-SS-2-1 — Automatic Speech Recognition of Disordered Speech: Personalized Models Outperforming Human Listeners on Short Phrases]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Speech Recognition of Disordered Speech: Personalized Models Outperforming Human Listeners on Short Phrases</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210697.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-12|PAPER Fri-A-SS-2-12 — Disordered Speech Data Collection: Lessons Learned at 1 Million Utterances from Project Euphonia]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Disordered Speech Data Collection: Lessons Learned at 1 Million Utterances from Project Euphonia</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210019.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-1|PAPER Tue-M-SS-1-1 — The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212006.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-15|PAPER Fri-A-SS-2-15 — Analysis and Tuning of a Voice Assistant System for Dysfluent Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis and Tuning of a Voice Assistant System for Dysfluent Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211685.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-3-4|PAPER Wed-E-O-3-4 — Non-Intrusive Speech Quality Assessment with Transfer Learning and Subject-Specific Scaling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Non-Intrusive Speech Quality Assessment with Transfer Learning and Subject-Specific Scaling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210614.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-6|PAPER Wed-E-V-3-6 — Self-Adaptive Distillation for Multilingual Speech Recognition: Leveraging Student Independence]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Self-Adaptive Distillation for Multilingual Speech Recognition: Leveraging Student Independence</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210636.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-6|PAPER Fri-M-V-5-6 — Knowledge Distillation for Singing Voice Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Knowledge Distillation for Singing Voice Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211094.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-11|PAPER Thu-A-V-6-11 — Towards Automatic Speech to Sign Language Generation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Automatic Speech to Sign Language Generation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211771.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-3-3|PAPER Tue-E-O-3-3 — Speaking Corona? Human and Machine Recognition of COVID-19 from Voice]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaking Corona? Human and Machine Recognition of COVID-19 from Voice</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^{{$:/causal/NO-PDF Marker}}|^<div class="cpauthorindexpersoncardpapercode">[[Wed-Keynote|PAPER Wed-Keynote — Ethical and Technological Challenges of Conversational AI]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ethical and Technological Challenges of Conversational AI</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210206.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-4|PAPER Wed-M-V-6-4 — An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211755.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-10|PAPER Wed-M-V-2-10 — The Zero Resource Speech Challenge 2021: Spoken Language Modelling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Zero Resource Speech Challenge 2021: Spoken Language Modelling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211878.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-O-2-3|PAPER Thu-A-O-2-3 — N-MTTL SI Model: Non-Intrusive Multi-Task Transfer Learning-Based Speech Intelligibility Prediction Model with Scenery Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">N-MTTL SI Model: Non-Intrusive Multi-Task Transfer Learning-Based Speech Intelligibility Prediction Model with Scenery Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211860.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-4|PAPER Wed-M-O-1-4 — SPGISpeech: 5,000 Hours of Transcribed Financial Audio for Fully Formatted End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SPGISpeech: 5,000 Hours of Transcribed Financial Audio for Fully Formatted End-to-End Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210580.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-7|PAPER Tue-E-V-4-7 — Predicting Temporal Performance Drop of Deployed Production Spoken Language Understanding Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Predicting Temporal Performance Drop of Deployed Production Spoken Language Understanding Models</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210335.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-3|PAPER Fri-A-V-6-3 — The Impact of Intent Distribution Mismatch on Semi-Supervised Spoken Language Understanding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Impact of Intent Distribution Mismatch on Semi-Supervised Spoken Language Understanding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211984.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-9|PAPER Wed-A-V-5-9 — High-Fidelity and Low-Latency Universal Neural Vocoder Based on Multiband WaveRNN with Data-Driven Linear Prediction for Discrete Waveform Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">High-Fidelity and Low-Latency Universal Neural Vocoder Based on Multiband WaveRNN with Data-Driven Linear Prediction for Discrete Waveform Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211905.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-10|PAPER Thu-A-V-6-10 — Europarl-ASR: A Large Corpus of Parliamentary Debates for Streaming ASR Benchmarking and Speech Data Filtering/Verbatimization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Europarl-ASR: A Large Corpus of Parliamentary Debates for Streaming ASR Benchmarking and Speech Data Filtering/Verbatimization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211422.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-6-6|PAPER Thu-M-V-6-6 — Model-Based Exploration of Linking Between Vowel Articulatory Space and Acoustic Space]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Model-Based Exploration of Linking Between Vowel Articulatory Space and Acoustic Space</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210616.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-3-2|PAPER Wed-E-O-3-2 — ORCA-SLANG: An Automatic Multi-Stage Semi-Supervised Deep Learning Framework for Large-Scale Killer Whale Call Type Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ORCA-SLANG: An Automatic Multi-Stage Semi-Supervised Deep Learning Framework for Large-Scale Killer Whale Call Type Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210443.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-7|PAPER Wed-A-V-2-7 — Contextual Density Ratio for Language Model Biasing of Sequence to Sequence ASR Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Contextual Density Ratio for Language Model Biasing of Sequence to Sequence ASR Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211712.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-2-6|PAPER Wed-A-O-2-6 — Adversarial Disentanglement of Speaker Representation for Attribute-Driven Privacy Preservation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adversarial Disentanglement of Speaker Representation for Attribute-Driven Privacy Preservation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-6|PAPER Tue-A-V-2-6 — Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211538.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-2-2|PAPER Fri-M-O-2-2 — Exploring Emotional Prototypes in a High Dimensional TTS Latent Space]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploring Emotional Prototypes in a High Dimensional TTS Latent Space</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210645.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-3-2|PAPER Tue-M-O-3-2 — Fricative Phoneme Detection Using Deep Neural Networks and its Comparison to Traditional Methods]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fricative Phoneme Detection Using Deep Neural Networks and its Comparison to Traditional Methods</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210130.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-2-1|PAPER Wed-M-O-2-1 — Prosodic Accommodation in Face-to-Face and Telephone Dialogues]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prosodic Accommodation in Face-to-Face and Telephone Dialogues</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210190.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-4-2|PAPER Wed-E-V-4-2 — On the Feasibility of the Danish Model of Intonational Transcription: Phonetic Evidence from Jutlandic Danish]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On the Feasibility of the Danish Model of Intonational Transcription: Phonetic Evidence from Jutlandic Danish</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211465.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-1-1|PAPER Tue-E-O-1-1 — Information Retrieval for ZeroSpeech 2021: The Submission by University of Wroclaw]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Information Retrieval for ZeroSpeech 2021: The Submission by University of Wroclaw</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211544.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-1-2|PAPER Tue-E-O-1-2 — Aligned Contrastive Predictive Coding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Aligned Contrastive Predictive Coding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211316.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-O-2-1|PAPER Fri-A-O-2-1 — Device Playback Augmentation with Echo Cancellation for Keyword Spotting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Device Playback Augmentation with Echo Cancellation for Keyword Spotting</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210677.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-7|PAPER Tue-A-V-4-7 — Semi-Supervision in ASR: Sequential MixMatch and Factorized TTS-Based Augmentation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semi-Supervision in ASR: Sequential MixMatch and Factorized TTS-Based Augmentation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210720.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-11|PAPER Wed-M-V-6-11 — Mixture Model Attention: Flexible Streaming and Non-Streaming Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Mixture Model Attention: Flexible Streaming and Non-Streaming Automatic Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210614.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-6|PAPER Wed-E-V-3-6 — Self-Adaptive Distillation for Multilingual Speech Recognition: Leveraging Student Independence]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Self-Adaptive Distillation for Multilingual Speech Recognition: Leveraging Student Independence</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210676.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-11|PAPER Fri-A-SS-2-11 — Conformer Parrotron: A Faster and Stronger End-to-End Speech Conversion and Recognition Model for Atypical Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Conformer Parrotron: A Faster and Stronger End-to-End Speech Conversion and Recognition Model for Atypical Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210836.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-7|PAPER Thu-A-V-2-7 — Listen with Intent: Improving Speech Recognition with Audio-to-Intent Front-End]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Listen with Intent: Improving Speech Recognition with Audio-to-Intent Front-End</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211122.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-2-5|PAPER Fri-M-V-2-5 — How Reliable Are Phonetic Data Collected Remotely? Comparison of Recording Devices and Environments on Acoustic Measurements]]</div>|^<div class="cpauthorindexpersoncardpapertitle">How Reliable Are Phonetic Data Collected Remotely? Comparison of Recording Devices and Environments on Acoustic Measurements</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211477.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-2|PAPER Wed-A-V-3-2 — Domain-Aware Self-Attention for Multi-Domain Neural Machine Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Domain-Aware Self-Attention for Multi-Domain Neural Machine Translation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210683.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-8|PAPER Wed-E-V-3-8 — Multitask Training with Text Data for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multitask Training with Text Data for End-to-End Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210548.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-4-6|PAPER Tue-M-V-4-6 — Semantic Transportation Prototypical Network for Few-Shot Intent Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semantic Transportation Prototypical Network for Few-Shot Intent Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212053.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-11|PAPER Tue-E-V-5-11 — Extending Pronunciation Dictionary with Automatically Detected Word Mispronunciations to Improve PAII’s System for Interspeech 2021 Non-Native Child English Close Track ASR Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Extending Pronunciation Dictionary with Automatically Detected Word Mispronunciations to Improve PAII’s System for Interspeech 2021 Non-Native Child English Close Track ASR Challenge</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211955.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-1|PAPER Thu-A-SS-1-1 — An Improved Single Step Non-Autoregressive Transformer for Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Improved Single Step Non-Autoregressive Transformer for Automatic Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210743.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-SS-1-6|PAPER Wed-A-SS-1-6 — Investigation of IMU&Elevoc Submission for the Short-Duration Speaker Verification Challenge 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigation of IMU&Elevoc Submission for the Short-Duration Speaker Verification Challenge 2021</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210743.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-SS-1-6|PAPER Wed-A-SS-1-6 — Investigation of IMU&Elevoc Submission for the Short-Duration Speaker Verification Challenge 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigation of IMU&Elevoc Submission for the Short-Duration Speaker Verification Challenge 2021</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212155.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-2|PAPER Thu-A-SS-1-2 — Multi-Speaker ASR Combining Non-Autoregressive Conformer CTC and Conditional Speaker Chain]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Speaker ASR Combining Non-Autoregressive Conformer CTC and Conditional Speaker Chain</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210964.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-9|PAPER Tue-M-V-5-9 — Leveraging Phone Mask Training for Phonetic-Reduction-Robust E2E Uyghur Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Leveraging Phone Mask Training for Phonetic-Reduction-Robust E2E Uyghur Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211063.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-SS-1-5|PAPER Fri-M-SS-1-5 — The TNT Team System Descriptions of Cantonese and Mongolian for IARPA OpenASR20]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The TNT Team System Descriptions of Cantonese and Mongolian for IARPA OpenASR20</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211301.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-6-5|PAPER Tue-A-V-6-5 — TVQVC: Transformer Based Vector Quantized Variational Autoencoder with CTC Loss for Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">TVQVC: Transformer Based Vector Quantized Variational Autoencoder with CTC Loss for Voice Conversion</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211265.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-7|PAPER Wed-M-V-3-7 — Incorporating Cross-Speaker Style Transfer for Multi-Language Text-to-Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Incorporating Cross-Speaker Style Transfer for Multi-Language Text-to-Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211134.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-10|PAPER Wed-E-V-5-10 — Improved Speech Enhancement Using a Complex-Domain GAN with Fused Time-Domain and Time-Frequency Domain Constraints]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improved Speech Enhancement Using a Complex-Domain GAN with Fused Time-Domain and Time-Frequency Domain Constraints</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211192.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-4-9|PAPER Fri-M-V-4-9 — LinearSpeech: Parallel Text-to-Speech with Linear Complexity]]</div>|^<div class="cpauthorindexpersoncardpapertitle">LinearSpeech: Parallel Text-to-Speech with Linear Complexity</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211281.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-5|PAPER Fri-M-V-7-5 — The Effect of Silence and Dual-Band Fusion in Anti-Spoofing System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Effect of Silence and Dual-Band Fusion in Anti-Spoofing System</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-7|PAPER Fri-A-V-4-7 — Adaptive Margin Circle Loss for Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adaptive Margin Circle Loss for Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210528.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-7|PAPER Thu-A-V-5-7 — A Learned Conditional Prior for the VAE Acoustic Space of a TTS System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Learned Conditional Prior for the VAE Acoustic Space of a TTS System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210045.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-8|PAPER Thu-A-V-6-8 — Human-in-the-Loop Efficiency Analysis for Binary Classification in Edyson]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Human-in-the-Loop Efficiency Analysis for Binary Classification in Edyson</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211966.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-5|PAPER Fri-M-V-6-5 — Few-Shot Keyword Spotting in Any Language]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Few-Shot Keyword Spotting in Any Language</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210053.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-3|PAPER Tue-M-V-5-3 — Speech Acoustic Modelling Using Raw Source and Filter Components]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Acoustic Modelling Using Raw Source and Filter Components</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210622.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-1-1|PAPER Tue-A-O-1-1 — Leveraging Speaker Attribute Information Using Multi Task Learning for Speaker Verification and Diarization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Leveraging Speaker Attribute Information Using Multi Task Learning for Speaker Verification and Diarization</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211777.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-3|PAPER Tue-A-V-4-3 — On the Learning Dynamics of Semi-Supervised Training for ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On the Learning Dynamics of Semi-Supervised Training for ASR</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211658.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-2-4|PAPER Wed-E-O-2-4 — It’s Not What You Said, it’s How You Said it: Discriminative Perception of Speech as a Multichannel Communication System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">It’s Not What You Said, it’s How You Said it: Discriminative Perception of Speech as a Multichannel Communication System</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210280.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-3|PAPER Wed-E-V-3-3 — Stochastic Attention Head Removal: A Simple and Effective Method for Improving Transformer Based ASR Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Stochastic Attention Head Removal: A Simple and Effective Method for Improving Transformer Based ASR Models</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211035.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-3-1|PAPER Thu-M-O-3-1 — The CSTR System for Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The CSTR System for Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211422.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-6-6|PAPER Thu-M-V-6-6 — Model-Based Exploration of Linking Between Vowel Articulatory Space and Acoustic Space]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Model-Based Exploration of Linking Between Vowel Articulatory Space and Acoustic Space</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218016.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-S&T-1-2|PAPER Thu-M-S&T-1-2 — Articulatory Data Recorder: A Framework for Real-Time Articulatory Data Recording]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Articulatory Data Recorder: A Framework for Real-Time Articulatory Data Recording</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210975.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-O-1-2|PAPER Thu-A-O-1-2 — Comparison of the Finite Element Method, the Multimodal Method and the Transmission-Line Model for the Computation of Vocal Tract Transfer Functions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparison of the Finite Element Method, the Multimodal Method and the Transmission-Line Model for the Computation of Vocal Tract Transfer Functions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211538.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-2-2|PAPER Fri-M-O-2-2 — Exploring Emotional Prototypes in a High Dimensional TTS Latent Space]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploring Emotional Prototypes in a High Dimensional TTS Latent Space</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210599.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-9|PAPER Tue-M-V-3-9 — MetricGAN+: An Improved Version of MetricGAN for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MetricGAN+: An Improved Version of MetricGAN for Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210192.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-3-3|PAPER Wed-M-O-3-3 — Using X-Vectors for Speech Activity Detection in Broadcast Streams]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Using X-Vectors for Speech Activity Detection in Broadcast Streams</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211769.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-1-4|PAPER Tue-A-O-1-4 — Multi-Task Neural Network for Robust Multiple Speaker Embedding Extraction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Task Neural Network for Robust Multiple Speaker Embedding Extraction</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218004.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-S&T-1-4|PAPER Tue-A-S&T-1-4 — ROXANNE Research Platform: Automate Criminal Investigations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ROXANNE Research Platform: Automate Criminal Investigations</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211288.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-3-5|PAPER Wed-A-O-3-5 — Late Fusion of the Available Lexicon and Raw Waveform-Based Acoustic Modeling for Depression and Dementia Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Late Fusion of the Available Lexicon and Raw Waveform-Based Acoustic Modeling for Depression and Dementia Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210935.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-2-3|PAPER Thu-M-SS-2-3 — Robust Command Recognition for Lithuanian Air Traffic Control Tower Utterances]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Command Recognition for Lithuanian Air Traffic Control Tower Utterances</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211373.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-2-4|PAPER Thu-M-SS-2-4 — Contextual Semi-Supervised Learning: An Approach to Leverage Air-Surveillance and Untranscribed ATC Data in ASR Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Contextual Semi-Supervised Learning: An Approach to Leverage Air-Surveillance and Untranscribed ATC Data in ASR Systems</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211619.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-2-5|PAPER Thu-M-SS-2-5 — Boosting of Contextual Information in ASR for Air-Traffic Call-Sign Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Boosting of Contextual Information in ASR for Air-Traffic Call-Sign Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211778.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-SS-1-2|PAPER Fri-M-SS-1-2 — Multitask Adaptation with Lattice-Free MMI for Multi-Genre Speech Recognition of Low Resource Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multitask Adaptation with Lattice-Free MMI for Multi-Genre Speech Recognition of Low Resource Languages</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211058.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-O-1-3|PAPER Fri-A-O-1-3 — Speech Activity Detection Based on Multilingual Speech Recognition System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Activity Detection Based on Multilingual Speech Recognition System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211357.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-2-4|PAPER Tue-E-O-2-4 — Cue Interaction in the Perception of Prosodic Prominence: The Role of Voice Quality]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cue Interaction in the Perception of Prosodic Prominence: The Role of Voice Quality</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211539.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-O-1-3|PAPER Thu-A-O-1-3 — Effects of Time Pressure and Spontaneity on Phonotactic Innovations in German Dialogues]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effects of Time Pressure and Spontaneity on Phonotactic Innovations in German Dialogues</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211565.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-1-4|PAPER Tue-M-O-1-4 — A Systematic Review and Analysis of Multilingual Data Strategies in Text-to-Speech for Low-Resource Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Systematic Review and Analysis of Multilingual Data Strategies in Text-to-Speech for Low-Resource Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211384.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-1|PAPER Fri-A-SS-2-1 — Automatic Speech Recognition of Disordered Speech: Personalized Models Outperforming Human Listeners on Short Phrases]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Speech Recognition of Disordered Speech: Personalized Models Outperforming Human Listeners on Short Phrases</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210330.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-10|PAPER Fri-A-SS-2-10 — A Voice-Activated Switch for Persons with Motor and Speech Impairments: Isolated-Vowel Spotting Using Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Voice-Activated Switch for Persons with Motor and Speech Impairments: Isolated-Vowel Spotting Using Neural Networks</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210697.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-12|PAPER Fri-A-SS-2-12 — Disordered Speech Data Collection: Lessons Learned at 1 Million Utterances from Project Euphonia]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Disordered Speech Data Collection: Lessons Learned at 1 Million Utterances from Project Euphonia</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212198.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-15|PAPER Wed-E-V-3-15 — Variable Frame Rate Acoustic Models Using Minimum Error Reinforcement Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Variable Frame Rate Acoustic Models Using Minimum Error Reinforcement Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210690.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-8|PAPER Fri-M-V-3-8 — Residual Energy-Based Models for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Residual Energy-Based Models for End-to-End Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211003.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-8|PAPER Fri-M-V-1-8 — Phonetic Distance and Surprisal in Multilingual Priming: Evidence from Slavic]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonetic Distance and Surprisal in Multilingual Priming: Evidence from Slavic</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211487.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-7-7|PAPER Tue-M-V-7-7 — A Comparison of the Accuracy of Dissen and Keshet’s (2016) DeepFormants and Traditional LPC Methods for Semi-Automatic Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparison of the Accuracy of Dissen and Keshet’s (2016) DeepFormants and Traditional LPC Methods for Semi-Automatic Speaker Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211225.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-8|PAPER Wed-M-V-5-8 — Human Spoofing Detection Performance on Degraded Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Human Spoofing Detection Performance on Degraded Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211735.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-3-4|PAPER Thu-M-O-3-4 — Modeling Dialectal Variation for Swiss German Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modeling Dialectal Variation for Swiss German Automatic Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210291.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-1|PAPER Wed-E-V-2-1 — Automatic Speech Recognition Systems Errors for Objective Sleepiness Detection Through Voice]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Speech Recognition Systems Errors for Objective Sleepiness Detection Through Voice</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210184.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-O-1-1|PAPER Thu-A-O-1-1 — Towards the Prediction of the Vocal Tract Shape from the Sequence of Phonemes to be Articulated]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards the Prediction of the Vocal Tract Shape from the Sequence of Phonemes to be Articulated</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210019.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-1|PAPER Tue-M-SS-1-1 — The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211404.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-7|PAPER Fri-M-V-7-7 — Attention-Based Convolutional Neural Network for ASV Spoofing Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Attention-Based Convolutional Neural Network for ASV Spoofing Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210316.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-4|PAPER Fri-M-V-1-4 — Acquisition of Prosodic Focus Marking by Three- to Six-Year-Old Children Learning Mandarin Chinese]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acquisition of Prosodic Focus Marking by Three- to Six-Year-Old Children Learning Mandarin Chinese</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211360.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-9|PAPER Thu-M-V-2-9 — LiRA: Learning Visual Speech Representations from Audio Through Self-Supervision]]</div>|^<div class="cpauthorindexpersoncardpapertitle">LiRA: Learning Visual Speech Representations from Audio Through Self-Supervision</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210506.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-9|PAPER Tue-E-V-6-9 — Two-Pathway Style Embedding for Arbitrary Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Two-Pathway Style Embedding for Arbitrary Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210631.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-5|PAPER Wed-E-V-1-5 — Using Large Self-Supervised Models for Low-Resource Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Using Large Self-Supervised Models for Low-Resource Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211316.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-O-2-1|PAPER Fri-A-O-2-1 — Device Playback Augmentation with Echo Cancellation for Keyword Spotting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Device Playback Augmentation with Echo Cancellation for Keyword Spotting</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211465.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-1-1|PAPER Tue-E-O-1-1 — Information Retrieval for ZeroSpeech 2021: The Submission by University of Wroclaw]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Information Retrieval for ZeroSpeech 2021: The Submission by University of Wroclaw</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211544.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-1-2|PAPER Tue-E-O-1-2 — Aligned Contrastive Predictive Coding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Aligned Contrastive Predictive Coding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211874.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-10|PAPER Tue-M-V-6-10 — Segmental Contrastive Predictive Coding for Unsupervised Word Segmentation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Segmental Contrastive Predictive Coding for Unsupervised Word Segmentation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211163.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-1-2|PAPER Tue-A-O-1-2 — Spine2Net: SpineNet with Res2Net and Time-Squeeze-and-Excitation Blocks for Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spine2Net: SpineNet with Res2Net and Time-Squeeze-and-Excitation Blocks for Speaker Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211502.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-9|PAPER Tue-E-V-1-9 — Deep Feature CycleGANs: Speaker Identity Preserving Non-Parallel Microphone-Telephone Domain Adaptation for Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Feature CycleGANs: Speaker Identity Preserving Non-Parallel Microphone-Telephone Domain Adaptation for Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211664.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-2|PAPER Wed-M-V-2-2 — Unsupervised Acoustic Unit Discovery by Leveraging a Language-Independent Subword Discriminative Feature Representation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Acoustic Unit Discovery by Leveraging a Language-Independent Subword Discriminative Feature Representation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211915.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-9|PAPER Thu-A-V-2-9 — Earnings-21: A Practical Benchmark for ASR in the Wild]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Earnings-21: A Practical Benchmark for ASR in the Wild</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211906.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-12|PAPER Thu-A-SS-1-12 — Align-Denoise: Single-Pass Non-Autoregressive Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Align-Denoise: Single-Pass Non-Autoregressive Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211850.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-10|PAPER Thu-A-SS-2-10 — Automatic Detection and Assessment of Alzheimer Disease Using Speech and Language Technologies in Low-Resource Scenarios]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Detection and Assessment of Alzheimer Disease Using Speech and Language Technologies in Low-Resource Scenarios</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211759.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-10|PAPER Fri-M-V-7-10 — Representation Learning to Classify and Detect Adversarial Attacks Against Speaker and Speech Recognition Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Representation Learning to Classify and Detect Adversarial Attacks Against Speaker and Speech Recognition Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210327.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-2|PAPER Wed-M-V-3-2 — Cross-Lingual Low Resource Speaker Adaptation Using Phonological Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Lingual Low Resource Speaker Adaptation Using Phonological Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212007.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-12|PAPER Wed-A-V-6-12 — Optimally Encoding Inductive Biases into the Transformer Improves End-to-End Speech Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Optimally Encoding Inductive Biases into the Transformer Improves End-to-End Speech Translation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211775.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-9|PAPER Tue-E-V-3-9 — SUPERB: Speech Processing Universal PERformance Benchmark]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SUPERB: Speech Processing Universal PERformance Benchmark</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211587.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-12|PAPER Tue-M-SS-1-12 — Analysis by Synthesis: Using an Expressive TTS Model as Feature Extractor for Paralinguistic Speech Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis by Synthesis: Using an Expressive TTS Model as Feature Extractor for Paralinguistic Speech Classification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211538.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-2-2|PAPER Fri-M-O-2-2 — Exploring Emotional Prototypes in a High Dimensional TTS Latent Space]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploring Emotional Prototypes in a High Dimensional TTS Latent Space</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212168.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-1-9|PAPER Thu-A-V-1-9 — Applying TDNN Architectures for Analyzing Duration Dependencies on Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Applying TDNN Architectures for Analyzing Duration Dependencies on Speech Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210728.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-1|PAPER Thu-A-V-4-1 — LEAP Submission for the Third DIHARD Diarization Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">LEAP Submission for the Third DIHARD Diarization Challenge</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211208.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-6|PAPER Thu-A-V-4-6 — The Third DIHARD Diarization Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Third DIHARD Diarization Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212008.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-11|PAPER Thu-M-V-1-11 — Source and Vocal Tract Cues for Speech-Based Classification of Patients with Parkinson’s Disease and Healthy Subjects]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Source and Vocal Tract Cues for Speech-Based Classification of Patients with Parkinson’s Disease and Healthy Subjects</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212191.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-4|PAPER Tue-A-SS-2-4 — Detecting COVID-19 from Audio Recording of Coughs Using Random Forests and Support Vector Machines]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detecting COVID-19 from Audio Recording of Coughs Using Random Forests and Support Vector Machines</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218022.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-S&T-1-7|PAPER Thu-M-S&T-1-7 — ViSTAFAE: A Visual Speech-Training Aid with Feedback of Articulatory Efforts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ViSTAFAE: A Visual Speech-Training Aid with Feedback of Articulatory Efforts</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211428.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-4|PAPER Fri-M-V-6-4 — Streaming Transformer for Hardware Efficient Voice Trigger Detection and False Trigger Mitigation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Streaming Transformer for Hardware Efficient Voice Trigger Detection and False Trigger Mitigation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211428.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-4|PAPER Fri-M-V-6-4 — Streaming Transformer for Hardware Efficient Voice Trigger Detection and False Trigger Mitigation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Streaming Transformer for Hardware Efficient Voice Trigger Detection and False Trigger Mitigation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-3|PAPER Tue-A-V-2-3 — A Comparative Study of Different EMG Features for Acoustics-to-EMG Mapping]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparative Study of Different EMG Features for Acoustics-to-EMG Mapping</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210074.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-1|PAPER Tue-A-SS-2-1 — DiCOVA Challenge: Dataset, Task, and Baseline System for COVID-19 Diagnosis Using Acoustics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DiCOVA Challenge: Dataset, Task, and Baseline System for COVID-19 Diagnosis Using Acoustics</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211375.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-1|PAPER Tue-E-V-3-1 — Estimating Articulatory Movements in Speech Production with Transformer Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Estimating Articulatory Movements in Speech Production with Transformer Networks</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211307.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-5|PAPER Tue-E-V-3-5 — Noise Robust Pitch Stylization Using Minimum Mean Absolute Error Criterion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Noise Robust Pitch Stylization Using Minimum Mean Absolute Error Criterion</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211339.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-7|PAPER Wed-E-V-1-7 — MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212008.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-11|PAPER Thu-M-V-1-11 — Source and Vocal Tract Cues for Speech-Based Classification of Patients with Parkinson’s Disease and Healthy Subjects]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Source and Vocal Tract Cues for Speech-Based Classification of Patients with Parkinson’s Disease and Healthy Subjects</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218033.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-S&T-1-8|PAPER Fri-A-S&T-1-8 — Web Interface for Estimating Articulatory Movements in Speech Production from Acoustics and Text]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Web Interface for Estimating Articulatory Movements in Speech Production from Acoustics and Text</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210074.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-1|PAPER Tue-A-SS-2-1 — DiCOVA Challenge: Dataset, Task, and Baseline System for COVID-19 Diagnosis Using Acoustics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DiCOVA Challenge: Dataset, Task, and Baseline System for COVID-19 Diagnosis Using Acoustics</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211953.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-14|PAPER Wed-M-V-6-14 — Multi-Mode Transformer Transducer with Stochastic Future Context]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Mode Transformer Transducer with Stochastic Future Context</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210688.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-SS-1-6|PAPER Tue-E-SS-1-6 — Articulatory Coordination for Speech Motor Tracking in Huntington Disease]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Articulatory Coordination for Speech Motor Tracking in Huntington Disease</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211573.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-11|PAPER Wed-M-V-4-11 — Voice Privacy Through x-Vector and CycleGAN-Based Anonymization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voice Privacy Through x-Vector and CycleGAN-Based Anonymization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212062.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-1|PAPER Wed-M-V-2-1 — Low Resource ASR: The Surprising Effectiveness of High Resource Transliteration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Low Resource ASR: The Surprising Effectiveness of High Resource Transliteration</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212135.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-1|PAPER Wed-A-V-1-1 — Cross-Modal Learning for Audio-Visual Video Parsing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Modal Learning for Audio-Visual Video Parsing</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211339.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-7|PAPER Wed-E-V-1-7 — MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210644.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-5|PAPER Thu-A-V-2-5 — Reduce and Reconstruct: ASR for Low-Resource Phonetic Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reduce and Reconstruct: ASR for Low-Resource Phonetic Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210635.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-7-4|PAPER Tue-M-V-7-4 — The Four-Way Classification of Stops with Voicing and Aspiration for Non-Native Speech Evaluation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Four-Way Classification of Stops with Voicing and Aspiration for Non-Native Speech Evaluation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218022.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-S&T-1-7|PAPER Thu-M-S&T-1-7 — ViSTAFAE: A Visual Speech-Training Aid with Feedback of Articulatory Efforts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ViSTAFAE: A Visual Speech-Training Aid with Feedback of Articulatory Efforts</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211592.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-10|PAPER Fri-M-V-5-10 — Automatic Detection of Shouted Speech Segments in Indian News Debates]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Detection of Shouted Speech Segments in Indian News Debates</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212104.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-7-11|PAPER Tue-M-V-7-11 — Characterizing Voiced and Voiceless Nasals in Mizo]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Characterizing Voiced and Voiceless Nasals in Mizo</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211672.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-1-8|PAPER Wed-M-V-1-8 — Excitation Source Feature Based Dialect Identification in Ao — A Low Resource Language]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Excitation Source Feature Based Dialect Identification in Ao — A Low Resource Language</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210016.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-1|PAPER Wed-A-V-4-1 — Difference in Perceived Speech Signal Quality Assessment Among Monolingual and Bilingual Teenage Students]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Difference in Perceived Speech Signal Quality Assessment Among Monolingual and Bilingual Teenage Students</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211720.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-2-4|PAPER Tue-M-O-2-4 — A Study into Pre-Training Strategies for Spoken Language Understanding on Dysarthric Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Study into Pre-Training Strategies for Spoken Language Understanding on Dysarthric Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210443.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-7|PAPER Wed-A-V-2-7 — Contextual Density Ratio for Language Model Biasing of Sequence to Sequence ASR Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Contextual Density Ratio for Language Model Biasing of Sequence to Sequence ASR Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211718.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-10|PAPER Wed-M-V-5-10 — Towards the Explainability of Multimodal Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards the Explainability of Multimodal Speech Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211080.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-10|PAPER Wed-A-V-2-10 — Class-Based Neural Network Language Model for Second-Pass Rescoring in ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Class-Based Neural Network Language Model for Second-Pass Rescoring in ASR</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210046.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-2|PAPER Tue-M-V-3-2 — Temporal Convolutional Network with Frequency Dimension Adaptive Attention for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Temporal Convolutional Network with Frequency Dimension Adaptive Attention for Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210246.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-5-4|PAPER Tue-A-V-5-4 — Discriminative Self-Training for Punctuation Prediction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Discriminative Self-Training for Punctuation Prediction</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210234.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-6|PAPER Tue-E-V-4-6 — Pre-Training for Spoken Language Understanding with Joint Textual and Phonetic Representation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Pre-Training for Spoken Language Understanding with Joint Textual and Phonetic Representation Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210097.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-2|PAPER Fri-M-V-6-2 — Paraphrase Label Alignment for Voice Application Retrieval in Spoken Language Understanding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Paraphrase Label Alignment for Voice Application Retrieval in Spoken Language Understanding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210322.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-4|PAPER Thu-A-V-2-4 — Reducing Streaming ASR Model Delay with Self Alignment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reducing Streaming ASR Model Delay with Self Alignment</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210440.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-5|PAPER Tue-A-V-2-5 — An Attention Self-Supervised Contrastive Learning Based Three-Stage Model for Hand Shape Feature Representation in Cued Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Attention Self-Supervised Contrastive Learning Based Three-Stage Model for Hand Shape Feature Representation in Cued Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210432.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-4|PAPER Thu-M-V-2-4 — Cross-Modal Knowledge Distillation Method for Automatic Cued Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Modal Knowledge Distillation Method for Automatic Cued Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211950.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-9|PAPER Tue-E-V-2-9 — Joint Online Multichannel Acoustic Echo Cancellation, Speech Dereverberation and Source Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Joint Online Multichannel Acoustic Echo Cancellation, Speech Dereverberation and Source Separation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211758.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-10|PAPER Tue-M-V-5-10 — Rethinking Evaluation in ASR: Are Our Models Robust Enough?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Rethinking Evaluation in ASR: Are Our Models Robust Enough?</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210236.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-4|PAPER Tue-A-V-4-4 — Robust wav2vec 2.0: Analyzing Domain Shift in Self-Supervised Pre-Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust wav2vec 2.0: Analyzing Domain Shift in Self-Supervised Pre-Training</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210740.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-8|PAPER Tue-A-V-4-8 — slimIPL: Language-Model-Free Iterative Pseudo-Labeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">slimIPL: Language-Model-Free Iterative Pseudo-Labeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210316.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-4|PAPER Fri-M-V-1-4 — Acquisition of Prosodic Focus Marking by Three- to Six-Year-Old Children Learning Mandarin Chinese]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acquisition of Prosodic Focus Marking by Three- to Six-Year-Old Children Learning Mandarin Chinese</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210206.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-4|PAPER Wed-M-V-6-4 — An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210204.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-3|PAPER Fri-M-V-6-3 — Personalized Keyphrase Detection Using Speaker and Environment Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Personalized Keyphrase Detection Using Speaker and Environment Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212246.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-10|PAPER Thu-A-V-3-10 — Improved Speech Separation with Time-and-Frequency Cross-Domain Feature Selection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improved Speech Separation with Time-and-Frequency Cross-Domain Feature Selection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210817.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-11|PAPER Fri-M-V-6-11 — Auto-KWS 2021 Challenge: Task, Datasets, and Baselines]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Auto-KWS 2021 Challenge: Task, Datasets, and Baselines</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210785.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-2-3|PAPER Fri-A-V-2-3 — Speech Emotion Recognition via Multi-Level Cross-Modal Distillation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Emotion Recognition via Multi-Level Cross-Modal Distillation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211655.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-8|PAPER Tue-M-V-2-8 — Transformer-Based Acoustic Modeling for Streaming Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transformer-Based Acoustic Modeling for Streaming Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210668.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-8|PAPER Wed-E-SS-1-8 — Lightweight Causal Transformer with Local Self-Attention for Real-Time Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Lightweight Causal Transformer with Local Self-Attention for Real-Time Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211980.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-12|PAPER Tue-E-V-1-12 — Multi-Level Transfer Learning from Near-Field to Far-Field Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Level Transfer Learning from Near-Field to Far-Field Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210609.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-4-6|PAPER Fri-M-V-4-6 — A Neural-Network-Based Approach to Identifying Speakers in Novels]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Neural-Network-Based Approach to Identifying Speakers in Novels</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210809.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-2-4|PAPER Fri-A-V-2-4 — Audio-Visual Speech Emotion Recognition by Disentangling Emotion and Identity Attributes]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio-Visual Speech Emotion Recognition by Disentangling Emotion and Identity Attributes</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210235.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-2|PAPER Tue-E-V-1-2 — The DKU-Duke-Lenovo System Description for the Fearless Steps Challenge Phase III]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The DKU-Duke-Lenovo System Description for the Fearless Steps Challenge Phase III</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210246.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-5-4|PAPER Tue-A-V-5-4 — Discriminative Self-Training for Punctuation Prediction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Discriminative Self-Training for Punctuation Prediction</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210234.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-6|PAPER Tue-E-V-4-6 — Pre-Training for Spoken Language Understanding with Joint Textual and Phonetic Representation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Pre-Training for Spoken Language Understanding with Joint Textual and Phonetic Representation Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212137.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-1-8|PAPER Tue-M-V-1-8 — Phoneme-Aware and Channel-Wise Attentive Learning for Text Dependent Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phoneme-Aware and Channel-Wise Attentive Learning for Text Dependent Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212171.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-1-1|PAPER Thu-M-SS-1-1 — Oriental Language Recognition (OLR) 2020: Summary and Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Oriental Language Recognition (OLR) 2020: Summary and Analysis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211167.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-1-6|PAPER Thu-M-SS-1-6 — Additive Phoneme-Aware Margin Softmax Loss for Language Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Additive Phoneme-Aware Margin Softmax Loss for Language Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211449.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-8|PAPER Thu-A-SS-1-8 — Real-Time End-to-End Monaural Multi-Speaker Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Real-Time End-to-End Monaural Multi-Speaker Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212021.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-9|PAPER Fri-A-V-4-9 — Automatic Error Correction for Speaker Embedding Learning with Noisy Labels]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Error Correction for Speaker Embedding Learning with Noisy Labels</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212161.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-10|PAPER Fri-A-V-4-10 — An Integrated Framework for Two-Pass Personalized Voice Trigger]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Integrated Framework for Two-Pass Personalized Voice Trigger</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210771.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-5|PAPER Wed-M-V-3-5 — EfficientSing: A Chinese Singing Voice Synthesis System Using Duration-Free Acoustic Model and HiFi-GAN Vocoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">EfficientSing: A Chinese Singing Voice Synthesis System Using Duration-Free Acoustic Model and HiFi-GAN Vocoder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211405.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-6|PAPER Tue-M-V-2-6 — Deliberation-Based Multi-Pass Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deliberation-Based Multi-Pass Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211259.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-13|PAPER Thu-A-V-6-13 — speechocean762: An Open-Source Non-Native English Speech Corpus for Pronunciation Assessment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">speechocean762: An Open-Source Non-Native English Speech Corpus for Pronunciation Assessment</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210046.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-2|PAPER Tue-M-V-3-2 — Temporal Convolutional Network with Frequency Dimension Adaptive Attention for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Temporal Convolutional Network with Frequency Dimension Adaptive Attention for Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210067.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-SS-1-3|PAPER Fri-M-SS-1-3 — An Improved Wav2Vec 2.0 Pre-Training Approach Using Enhanced Local Dependency Modeling for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Improved Wav2Vec 2.0 Pre-Training Approach Using Enhanced Local Dependency Modeling for Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210690.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-8|PAPER Fri-M-V-3-8 — Residual Energy-Based Models for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Residual Energy-Based Models for End-to-End Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211207.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-9|PAPER Fri-M-V-3-9 — Multi-Task Learning for End-to-End ASR Word and Utterance Confidence with Deletion Prediction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Task Learning for End-to-End ASR Word and Utterance Confidence with Deletion Prediction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210259.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-7|PAPER Tue-M-V-3-7 — Speech Enhancement with Weakly Labelled Data from AudioSet]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Enhancement with Weakly Labelled Data from AudioSet</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210661.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-8|PAPER Wed-A-V-2-8 — Token-Level Supervised Contrastive Learning for Punctuation Restoration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Token-Level Supervised Contrastive Learning for Punctuation Restoration</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210646.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-6|PAPER Tue-E-V-1-6 — SpeakerStew: Scaling to Many Languages with a Triaged Multilingual Text-Dependent and Text-Independent Speaker Verification System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SpeakerStew: Scaling to Many Languages with a Triaged Multilingual Text-Dependent and Text-Independent Speaker Verification System</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210204.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-3|PAPER Fri-M-V-6-3 — Personalized Keyphrase Detection Using Speaker and Environment Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Personalized Keyphrase Detection Using Speaker and Environment Information</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210641.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-4|PAPER Fri-A-V-4-4 — Dr-Vectors: Decision Residual Networks and an Improved Loss for Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dr-Vectors: Decision Residual Networks and an Improved Loss for Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211915.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-9|PAPER Thu-A-V-2-9 — Earnings-21: A Practical Benchmark for ASR in the Wild]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Earnings-21: A Practical Benchmark for ASR in the Wild</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210206.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-4|PAPER Wed-M-V-6-4 — An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210580.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-7|PAPER Tue-E-V-4-7 — Predicting Temporal Performance Drop of Deployed Production Spoken Language Understanding Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Predicting Temporal Performance Drop of Deployed Production Spoken Language Understanding Models</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210335.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-3|PAPER Fri-A-V-6-3 — The Impact of Intent Distribution Mismatch on Semi-Supervised Spoken Language Understanding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Impact of Intent Distribution Mismatch on Semi-Supervised Spoken Language Understanding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211237.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-1-2|PAPER Fri-M-O-1-2 — ETLT 2021: Shared Task on Automatic Speech Recognition for Non-Native Children’s Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ETLT 2021: Shared Task on Automatic Speech Recognition for Non-Native Children’s Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211111.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-SS-1-6|PAPER Wed-M-SS-1-6 — SRIB-LEAP Submission to Far-Field Multi-Channel Speech Enhancement Challenge for Video Conferencing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SRIB-LEAP Submission to Far-Field Multi-Channel Speech Enhancement Challenge for Video Conferencing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211461.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-7|PAPER Tue-M-V-2-7 — Parallel Tacotron 2: A Non-Autoregressive Neural TTS Model with Differentiable Duration Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Parallel Tacotron 2: A Non-Autoregressive Neural TTS Model with Differentiable Duration Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211810.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-12|PAPER Thu-M-V-1-12 — CLAC: A Speech Corpus of Healthy English Speakers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">CLAC: A Speech Corpus of Healthy English Speakers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211234.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-O-1-4|PAPER Fri-A-O-1-4 — Voice Activity Detection with Teacher-Student Domain Emulation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voice Activity Detection with Teacher-Student Domain Emulation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211754.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-7-10|PAPER Tue-M-V-7-10 — Sound Change in Spontaneous Bilingual Speech: A Corpus Study on the Cantonese n-l Merger in Cantonese-English Bilinguals]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sound Change in Spontaneous Bilingual Speech: A Corpus Study on the Cantonese n-l Merger in Cantonese-English Bilinguals</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210130.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-2-1|PAPER Wed-M-O-2-1 — Prosodic Accommodation in Face-to-Face and Telephone Dialogues]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prosodic Accommodation in Face-to-Face and Telephone Dialogues</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210097.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-2|PAPER Fri-M-V-6-2 — Paraphrase Label Alignment for Voice Application Retrieval in Spoken Language Understanding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Paraphrase Label Alignment for Voice Application Retrieval in Spoken Language Understanding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210155.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-1-1|PAPER Thu-M-O-1-1 — Self-Paced Ensemble Learning for Speech and Audio Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Self-Paced Ensemble Learning for Speech and Audio Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211581.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-2-1|PAPER Tue-M-O-2-1 — Acoustic Indicators of Speech Motor Coordination in Adults With and Without Traumatic Brain Injury]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Indicators of Speech Motor Coordination in Adults With and Without Traumatic Brain Injury</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210929.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-6-4|PAPER Thu-M-V-6-4 — Comparison Between Lumped-Mass Modeling and Flow Simulation of the Reed-Type Artificial Vocal Fold]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparison Between Lumped-Mass Modeling and Flow Simulation of the Reed-Type Artificial Vocal Fold</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210836.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-7|PAPER Thu-A-V-2-7 — Listen with Intent: Improving Speech Recognition with Audio-to-Intent Front-End]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Listen with Intent: Improving Speech Recognition with Audio-to-Intent Front-End</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211850.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-10|PAPER Thu-A-SS-2-10 — Automatic Detection and Assessment of Alzheimer Disease Using Speech and Language Technologies in Low-Resource Scenarios]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Detection and Assessment of Alzheimer Disease Using Speech and Language Technologies in Low-Resource Scenarios</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211710.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-3|PAPER Wed-M-O-1-3 — Self-Supervised End-to-End ASR for Low Resource L2 Swedish]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Self-Supervised End-to-End ASR for Low Resource L2 Swedish</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211710.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-3|PAPER Wed-M-O-1-3 — Self-Supervised End-to-End ASR for Low Resource L2 Swedish]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Self-Supervised End-to-End ASR for Low Resource L2 Swedish</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210262.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-5|PAPER Tue-A-SS-1-5 — Privacy-Preserving Feature Extraction for Cloud-Based Wake Word Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Privacy-Preserving Feature Extraction for Cloud-Based Wake Word Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210728.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-1|PAPER Thu-A-V-4-1 — LEAP Submission for the Third DIHARD Diarization Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">LEAP Submission for the Third DIHARD Diarization Challenge</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211208.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-6|PAPER Thu-A-V-4-6 — The Third DIHARD Diarization Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Third DIHARD Diarization Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210204.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-3|PAPER Fri-M-V-6-3 — Personalized Keyphrase Detection Using Speaker and Environment Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Personalized Keyphrase Detection Using Speaker and Environment Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210153.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-4|PAPER Tue-A-SS-1-4 — Communication-Efficient Agnostic Federated Averaging]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Communication-Efficient Agnostic Federated Averaging</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211640.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-2-10|PAPER Fri-M-V-2-10 — Speakers Coarticulate Less When Facing Real and Imagined Communicative Difficulties: An Analysis of Read and Spontaneous Speech from the LUCID Corpus]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speakers Coarticulate Less When Facing Real and Imagined Communicative Difficulties: An Analysis of Read and Spontaneous Speech from the LUCID Corpus</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211339.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-7|PAPER Wed-E-V-1-7 — MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211067.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-1-4|PAPER Wed-A-O-1-4 — On Sampling-Based Training Criteria for Neural Language Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On Sampling-Based Training Criteria for Neural Language Modeling</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211510.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-3|PAPER Wed-A-V-3-3 — Librispeech Transducer Model with Internal Language Model Prior Correction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Librispeech Transducer Model with Internal Language Model Prior Correction</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211255.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-1-5|PAPER Thu-M-O-1-5 — Investigating Methods to Improve Language Model Integration for Attention-Based Encoder-Decoder ASR Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Methods to Improve Language Model Integration for Attention-Based Encoder-Decoder ASR Models</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211623.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-3-2|PAPER Thu-M-O-3-2 — Acoustic Data-Driven Subword Modeling for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Data-Driven Subword Modeling for End-to-End Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211671.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-3-3|PAPER Thu-M-O-3-3 — Equivalence of Segmental and Neural Transducer Modeling: A Proof of Concept]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Equivalence of Segmental and Neural Transducer Modeling: A Proof of Concept</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211402.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-10|PAPER Fri-A-V-1-10 — The Impact of ASR on the Automatic Analysis of Linguistic Complexity and Sophistication in Spontaneous L2 Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Impact of ASR on the Automatic Analysis of Linguistic Complexity and Sophistication in Spontaneous L2 Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211891.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-3-4|PAPER Tue-E-O-3-4 — Acoustic-Prosodic, Lexical and Demographic Cues to Persuasiveness in Competitive Debate Speeches]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic-Prosodic, Lexical and Demographic Cues to Persuasiveness in Competitive Debate Speeches</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211243.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-4|PAPER Thu-A-V-3-4 — Teacher-Student MixIT for Unsupervised and Semi-Supervised Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Teacher-Student MixIT for Unsupervised and Semi-Supervised Speech Separation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211184.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-O-2-2|PAPER Thu-A-O-2-2 — Fusion-Net: Time-Frequency Information Fusion Y-Network for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fusion-Net: Time-Frequency Information Fusion Y-Network for Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211312.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-12|PAPER Wed-M-V-2-12 — AVLnet: Learning Audio-Visual Language Representations from Instructional Videos]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AVLnet: Learning Audio-Visual Language Representations from Instructional Videos</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211352.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-8|PAPER Thu-M-V-2-8 — Cascaded Multilingual Audio-Visual Learning from Videos]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cascaded Multilingual Audio-Visual Learning from Videos</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210206.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-4|PAPER Wed-M-V-6-4 — An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210212.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-8|PAPER Fri-A-V-3-8 — Tied & Reduced RNN-T Decoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Tied & Reduced RNN-T Decoder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210096.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-2|PAPER Thu-M-V-2-2 — Talk, Don’t Write: A Study of Direct Speech-Based Image Retrieval]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Talk, Don’t Write: A Study of Direct Speech-Based Image Retrieval</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211783.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-2|PAPER Tue-A-SS-1-2 — Configurable Privacy-Preserving Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Configurable Privacy-Preserving Automatic Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211339.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-7|PAPER Wed-E-V-1-7 — MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210277.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-1-2|PAPER Wed-M-V-1-2 — Modeling and Training Strategies for Language Recognition Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modeling and Training Strategies for Language Recognition Systems</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210276.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-1-2|PAPER Thu-M-SS-1-2 — Language Recognition on Unknown Conditions: The LORIA-Inria-MULTISPEECH System for AP20-OLR Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Language Recognition on Unknown Conditions: The LORIA-Inria-MULTISPEECH System for AP20-OLR Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211262.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-6-5|PAPER Thu-M-V-6-5 — Inhalations in Speech: Acoustic and Physiological Characteristics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Inhalations in Speech: Acoustic and Physiological Characteristics</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211496.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-6-7|PAPER Thu-M-V-6-7 — Take a Breath: Respiratory Sounds Improve Recollection in Synthetic Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Take a Breath: Respiratory Sounds Improve Recollection in Synthetic Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211056.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-2-4|PAPER Fri-M-V-2-4 — Revisiting Recall Effects of Filler Particles in German and English]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Revisiting Recall Effects of Filler Particles in German and English</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211801.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-2|PAPER Fri-A-SS-2-2 — Investigating the Utility of Multimodal Conversational Technology and Audiovisual Analytic Measures for the Assessment and Monitoring of Amyotrophic Lateral Sclerosis at Scale]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Utility of Multimodal Conversational Technology and Audiovisual Analytic Measures for the Assessment and Monitoring of Amyotrophic Lateral Sclerosis at Scale</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211446.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-11|PAPER Fri-A-V-5-11 — Synthesis of Expressive Speaking Styles with Limited Training Data in a Multi-Speaker, Prosody-Controllable Sequence-to-Sequence Architecture]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Synthesis of Expressive Speaking Styles with Limited Training Data in a Multi-Speaker, Prosody-Controllable Sequence-to-Sequence Architecture</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212008.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-11|PAPER Thu-M-V-1-11 — Source and Vocal Tract Cues for Speech-Based Classification of Patients with Parkinson’s Disease and Healthy Subjects]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Source and Vocal Tract Cues for Speech-Based Classification of Patients with Parkinson’s Disease and Healthy Subjects</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211598.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-3-3|PAPER Tue-M-O-3-3 — Identification of F1 and F2 in Speech Using Modified Zero Frequency Filtering]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Identification of F1 and F2 in Speech Using Modified Zero Frequency Filtering</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210039.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-1|PAPER Wed-M-V-5-1 — Perception of Standard Arabic Synthetic Speech Rate]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Perception of Standard Arabic Synthetic Speech Rate</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211297.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-8|PAPER Thu-M-V-1-8 — Parental Spoken Scaffolding and Narrative Skills in Crowd-Sourced Storytelling Samples of Young Children]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Parental Spoken Scaffolding and Narrative Skills in Crowd-Sourced Storytelling Samples of Young Children</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211878.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-O-2-3|PAPER Thu-A-O-2-3 — N-MTTL SI Model: Non-Intrusive Multi-Task Transfer Learning-Based Speech Intelligibility Prediction Model with Scenery Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">N-MTTL SI Model: Non-Intrusive Multi-Task Transfer Learning-Based Speech Intelligibility Prediction Model with Scenery Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212246.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-10|PAPER Thu-A-V-3-10 — Improved Speech Separation with Time-and-Frequency Cross-Domain Feature Selection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improved Speech Separation with Time-and-Frequency Cross-Domain Feature Selection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211736.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-3-2|PAPER Wed-A-O-3-2 — Automatic Extraction of Speech Rhythm Descriptors for Speech Intelligibility Assessment in the Context of Head and Neck Cancers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Extraction of Speech Rhythm Descriptors for Speech Intelligibility Assessment in the Context of Head and Neck Cancers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211177.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-3|PAPER Thu-A-V-3-3 — Graph-PIT: Generalized Permutation Invariant Training for Continuous Separation of Arbitrary Numbers of Speakers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Graph-PIT: Generalized Permutation Invariant Training for Continuous Separation of Arbitrary Numbers of Speakers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210975.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-O-1-2|PAPER Thu-A-O-1-2 — Comparison of the Finite Element Method, the Multimodal Method and the Transmission-Line Model for the Computation of Vocal Tract Transfer Functions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparison of the Finite Element Method, the Multimodal Method and the Transmission-Line Model for the Computation of Vocal Tract Transfer Functions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211560.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-13|PAPER Thu-A-V-3-13 — Deep Audio-Visual Speech Separation Based on Facial Motion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Audio-Visual Speech Separation Based on Facial Motion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212022.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-1-5|PAPER Fri-A-SS-1-5 — Acoustic Echo Cancellation Using Deep Complex Neural Network with Nonlinear Magnitude Compression and Phase Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Echo Cancellation Using Deep Complex Neural Network with Nonlinear Magnitude Compression and Phase Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210733.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-1|PAPER Wed-A-V-6-1 — SpecRec: An Alternative Solution for Improving End-to-End Speech-to-Text Translation via Spectrogram Reconstruction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SpecRec: An Alternative Solution for Improving End-to-End Speech-to-Text Translation via Spectrogram Reconstruction</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211852.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-2-7|PAPER Fri-A-V-2-7 — Speech Emotion Recognition with Multi-Task Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Emotion Recognition with Multi-Task Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210517.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-3|PAPER Wed-A-V-5-3 — Unified Source-Filter GAN: Unified Source-Filter Network Based On Factorization of Quasi-Periodic Parallel WaveGAN]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unified Source-Filter GAN: Unified Source-Filter Network Based On Factorization of Quasi-Periodic Parallel WaveGAN</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211574.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-3-7|PAPER Tue-A-V-3-7 — Clarity-2021 Challenges: Machine Learning Challenges for Advancing Hearing Aid Processing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Clarity-2021 Challenges: Machine Learning Challenges for Advancing Hearing Aid Processing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211465.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-1-1|PAPER Tue-E-O-1-1 — Information Retrieval for ZeroSpeech 2021: The Submission by University of Wroclaw]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Information Retrieval for ZeroSpeech 2021: The Submission by University of Wroclaw</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211544.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-1-2|PAPER Tue-E-O-1-2 — Aligned Contrastive Predictive Coding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Aligned Contrastive Predictive Coding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211798.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-4|PAPER Tue-M-SS-1-4 — Transfer Learning and Data Augmentation Techniques to the COVID-19 Identification Tasks in ComParE 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transfer Learning and Data Augmentation Techniques to the COVID-19 Identification Tasks in ComParE 2021</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211740.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-6-8|PAPER Tue-A-V-6-8 — An Exemplar Selection Algorithm for Native-Nonnative Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Exemplar Selection Algorithm for Native-Nonnative Voice Conversion</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210701.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-4|PAPER Wed-A-V-1-4 — Effects of Voice Type and Task on L2 Learners’ Awareness of Pronunciation Errors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effects of Voice Type and Task on L2 Learners’ Awareness of Pronunciation Errors</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210069.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-6|PAPER Thu-M-V-1-6 — Assessing Posterior-Based Mispronunciation Detection on Field-Collected Recordings from Child Speech Therapy Sessions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Assessing Posterior-Based Mispronunciation Detection on Field-Collected Recordings from Child Speech Therapy Sessions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211908.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-5|PAPER Wed-A-V-2-5 — Revisiting Parity of Human vs. Machine Conversational Speech Transcription]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Revisiting Parity of Human vs. Machine Conversational Speech Transcription</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211384.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-1|PAPER Fri-A-SS-2-1 — Automatic Speech Recognition of Disordered Speech: Personalized Models Outperforming Human Listeners on Short Phrases]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Speech Recognition of Disordered Speech: Personalized Models Outperforming Human Listeners on Short Phrases</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210697.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-12|PAPER Fri-A-SS-2-12 — Disordered Speech Data Collection: Lessons Learned at 1 Million Utterances from Project Euphonia]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Disordered Speech Data Collection: Lessons Learned at 1 Million Utterances from Project Euphonia</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-6|PAPER Tue-A-V-2-6 — Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211840.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-1-1|PAPER Thu-A-V-1-1 — Temporal Context in Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Temporal Context in Speech Emotion Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210651.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-O-1-2|PAPER Fri-A-O-1-2 — The Application of Learnable STRF Kernels to the 2021 Fearless Steps Phase-03 SAD Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Application of Learnable STRF Kernels to the 2021 Fearless Steps Phase-03 SAD Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211694.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-3-1|PAPER Wed-A-O-3-1 — Automatically Detecting Errors and Disfluencies in Read Speech to Predict Cognitive Impairment in People with Parkinson’s Disease]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatically Detecting Errors and Disfluencies in Read Speech to Predict Cognitive Impairment in People with Parkinson’s Disease</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211621.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-10|PAPER Thu-M-V-2-10 — End-to-End Audio-Visual Speech Recognition for Overlapping Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Audio-Visual Speech Recognition for Overlapping Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212111.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-2|PAPER Thu-M-V-1-2 — Analyzing Short Term Dynamic Speech Features for Understanding Behavioral Traits of Children with Autism Spectrum Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analyzing Short Term Dynamic Speech Features for Understanding Behavioral Traits of Children with Autism Spectrum Disorder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212135.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-1|PAPER Wed-A-V-1-1 — Cross-Modal Learning for Audio-Visual Video Parsing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Modal Learning for Audio-Visual Video Parsing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211918.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-SS-1-5|PAPER Tue-E-SS-1-5 — Accelerometer-Based Measurements of Voice Quality in Children During Semi-Occluded Vocal Tract Exercise with a Narrow Straw in Air]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Accelerometer-Based Measurements of Voice Quality in Children During Semi-Occluded Vocal Tract Exercise with a Narrow Straw in Air</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211524.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-9|PAPER Wed-M-V-5-9 — Reliable Estimates of Interpretable Cue Effects with Active Learning in Psycholinguistic Research]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reliable Estimates of Interpretable Cue Effects with Active Learning in Psycholinguistic Research</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212079.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-12|PAPER Tue-A-V-1-12 — Improving Weakly Supervised Sound Event Detection with Self-Supervised Auxiliary Tasks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Weakly Supervised Sound Event Detection with Self-Supervised Auxiliary Tasks</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211705.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-11|PAPER Fri-M-V-5-11 — Generalized Spoofing Detection Inspired from Audio Generation Artifacts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Generalized Spoofing Detection Inspired from Audio Generation Artifacts</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212190.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-11|PAPER Fri-A-V-4-11 — Masked Proxy Loss for Text-Independent Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Masked Proxy Loss for Text-Independent Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210656.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-5|PAPER Tue-A-V-1-5 — Acoustic Scene Classification Using Kervolution-Based SubSpectralNet]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Scene Classification Using Kervolution-Based SubSpectralNet</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210150.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-4|PAPER Tue-M-V-3-4 — Transfer Learning for Speech Intelligibility Improvement in Noisy Environments]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transfer Learning for Speech Intelligibility Improvement in Noisy Environments</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210694.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-5|PAPER Tue-E-V-2-5 — Personalized PercepNet: Real-Time, Low-Complexity Target Voice Separation and Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Personalized PercepNet: Real-Time, Low-Complexity Target Voice Separation and Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210026.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-3-1|PAPER Tue-E-O-3-1 — Measuring Voice Quality Parameters After Speaker Pseudonymization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Measuring Voice Quality Parameters After Speaker Pseudonymization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211970.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-SS-1-7|PAPER Fri-M-SS-1-7 — One Size Does Not Fit All in Resource-Constrained ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">One Size Does Not Fit All in Resource-Constrained ASR</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211609.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-1|PAPER Wed-E-SS-1-1 — INTERSPEECH 2021 Deep Noise Suppression Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">INTERSPEECH 2021 Deep Noise Suppression Challenge</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211870.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-1-1|PAPER Fri-A-SS-1-1 — INTERSPEECH 2021 Acoustic Echo Cancellation Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">INTERSPEECH 2021 Acoustic Echo Cancellation Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210212.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-8|PAPER Fri-A-V-3-8 — Tied & Reduced RNN-T Decoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Tied & Reduced RNN-T Decoder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211012.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-5-6|PAPER Thu-M-V-5-6 — Intra-Sentential Speaking Rate Control in Neural Text-To-Speech for Automatic Dubbing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Intra-Sentential Speaking Rate Control in Neural Text-To-Speech for Automatic Dubbing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218019.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-S&T-1-5|PAPER Thu-M-S&T-1-5 — Live Subtitling for BigBlueButton with Open-Source Software]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Live Subtitling for BigBlueButton with Open-Source Software</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210221.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-3|PAPER Tue-E-V-6-3 — One-Shot Voice Conversion with Speaker-Agnostic StarGAN]]</div>|^<div class="cpauthorindexpersoncardpapertitle">One-Shot Voice Conversion with Speaker-Agnostic StarGAN</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211384.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-1|PAPER Fri-A-SS-2-1 — Automatic Speech Recognition of Disordered Speech: Personalized Models Outperforming Human Listeners on Short Phrases]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Speech Recognition of Disordered Speech: Personalized Models Outperforming Human Listeners on Short Phrases</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210697.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-12|PAPER Fri-A-SS-2-12 — Disordered Speech Data Collection: Lessons Learned at 1 Million Utterances from Project Euphonia]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Disordered Speech Data Collection: Lessons Learned at 1 Million Utterances from Project Euphonia</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210332.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-3|PAPER Thu-A-SS-2-3 — WavBERT: Exploiting Semantic and Non-Semantic Speech Using Wav2vec and BERT for Dementia Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">WavBERT: Exploiting Semantic and Non-Semantic Speech Using Wav2vec and BERT for Dementia Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210273.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-5|PAPER Tue-M-SS-1-5 — Visual Transformers for Primates Classification and Covid Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Visual Transformers for Primates Classification and Covid Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211274.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-7|PAPER Tue-M-SS-1-7 — A Deep and Recurrent Architecture for Primate Vocalization Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Deep and Recurrent Architecture for Primate Vocalization Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211882.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-9|PAPER Tue-A-SS-1-9 — SynthASR: Unlocking Synthetic Data for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SynthASR: Unlocking Synthetic Data for Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211555.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-10|PAPER Wed-M-V-4-10 — Improving the Expressiveness of Neural Vocoding with Non-Affine Normalizing Flows]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving the Expressiveness of Neural Vocoding with Non-Affine Normalizing Flows</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210562.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-5-2|PAPER Thu-M-V-5-2 — Improving Multi-Speaker TTS Prosody Variance with a Residual Encoder and Normalizing Flows]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Multi-Speaker TTS Prosody Variance with a Residual Encoder and Normalizing Flows</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211012.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-5-6|PAPER Thu-M-V-5-6 — Intra-Sentential Speaking Rate Control in Neural Text-To-Speech for Automatic Dubbing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Intra-Sentential Speaking Rate Control in Neural Text-To-Speech for Automatic Dubbing</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210086.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-2|PAPER Fri-M-V-1-2 — Detection of Lexical Stress Errors in Non-Native (L2) English with Data Augmentation and Attention]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detection of Lexical Stress Errors in Non-Native (L2) English with Data Augmentation and Attention</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218017.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-S&T-1-3|PAPER Thu-M-S&T-1-3 — The INGENIOUS Multilingual Operations App]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INGENIOUS Multilingual Operations App</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211062.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-2|PAPER Tue-A-SS-2-2 — PANACEA Cough Sound-Based Diagnosis of COVID-19 for the DiCOVA 2021 Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PANACEA Cough Sound-Based Diagnosis of COVID-19 for the DiCOVA 2021 Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211842.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-10|PAPER Tue-A-V-2-10 — Investigating Speech Reconstruction for Laryngectomees for Silent Speech Interfaces]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Speech Reconstruction for Laryngectomees for Silent Speech Interfaces</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210126.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-3|PAPER Wed-A-V-4-3 — Sound Source Localization with Majorization Minimization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sound Source Localization with Majorization Minimization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211736.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-3-2|PAPER Wed-A-O-3-2 — Automatic Extraction of Speech Rhythm Descriptors for Speech Intelligibility Assessment in the Context of Head and Neck Cancers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Extraction of Speech Rhythm Descriptors for Speech Intelligibility Assessment in the Context of Head and Neck Cancers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211360.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-9|PAPER Thu-M-V-2-9 — LiRA: Learning Visual Speech Representations from Audio Through Self-Supervision]]</div>|^<div class="cpauthorindexpersoncardpapertitle">LiRA: Learning Visual Speech Representations from Audio Through Self-Supervision</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211189.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-1|PAPER Thu-M-V-1-1 — Speech Intelligibility of Dysarthric Speech: Human Scores and Acoustic-Phonetic Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Intelligibility of Dysarthric Speech: Human Scores and Acoustic-Phonetic Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210434.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-4-6|PAPER Wed-E-V-4-6 — Targeted and Targetless Neutral Tones in Taiwanese Southern Min]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Targeted and Targetless Neutral Tones in Taiwanese Southern Min</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211730.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-6-1|PAPER Tue-A-V-6-1 — Investigating Deep Neural Structures and their Interpretability in the Domain of Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Deep Neural Structures and their Interpretability in the Domain of Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211312.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-12|PAPER Wed-M-V-2-12 — AVLnet: Learning Audio-Visual Language Representations from Instructional Videos]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AVLnet: Learning Audio-Visual Language Representations from Instructional Videos</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211352.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-8|PAPER Thu-M-V-2-8 — Cascaded Multilingual Audio-Visual Learning from Videos]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cascaded Multilingual Audio-Visual Learning from Videos</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210676.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-11|PAPER Fri-A-SS-2-11 — Conformer Parrotron: A Faster and Stronger End-to-End Speech Conversion and Recognition Model for Atypical Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Conformer Parrotron: A Faster and Stronger End-to-End Speech Conversion and Recognition Model for Atypical Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210497.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-5|PAPER Tue-A-SS-2-5 — Diagnosis of COVID-19 Using Auditory Acoustic Cues]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Diagnosis of COVID-19 Using Auditory Acoustic Cues</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210074.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-1|PAPER Tue-A-SS-2-1 — DiCOVA Challenge: Dataset, Task, and Baseline System for COVID-19 Diagnosis Using Acoustics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DiCOVA Challenge: Dataset, Task, and Baseline System for COVID-19 Diagnosis Using Acoustics</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212197.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-11|PAPER Tue-A-SS-2-11 — Investigating Feature Selection and Explainability for COVID-19 Diagnostics from Cough Sounds]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Feature Selection and Explainability for COVID-19 Diagnostics from Cough Sounds</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211111.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-SS-1-6|PAPER Wed-M-SS-1-6 — SRIB-LEAP Submission to Far-Field Multi-Channel Speech Enhancement Challenge for Video Conferencing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SRIB-LEAP Submission to Far-Field Multi-Channel Speech Enhancement Challenge for Video Conferencing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211272.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-1|PAPER Wed-A-V-3-1 — Dynamic Encoder Transducer: A Flexible Solution for Trading Off Accuracy for Latency]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dynamic Encoder Transducer: A Flexible Solution for Trading Off Accuracy for Latency</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211887.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-6|PAPER Fri-A-V-3-6 — Dissecting User-Perceived Latency of On-Device E2E Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dissecting User-Perceived Latency of On-Device E2E Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210341.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-3|PAPER Wed-E-V-6-3 — RyanSpeech: A Corpus for Conversational Text-to-Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">RyanSpeech: A Corpus for Conversational Text-to-Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210717.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-2|PAPER Tue-A-V-4-2 — wav2vec-C: A Self-Supervised Model for Speech Representation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">wav2vec-C: A Self-Supervised Model for Speech Representation Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211882.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-9|PAPER Tue-A-SS-1-9 — SynthASR: Unlocking Synthetic Data for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SynthASR: Unlocking Synthetic Data for Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210011.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-2|PAPER Thu-A-V-6-2 — The Multilingual TEDx Corpus for Speech Recognition and Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Multilingual TEDx Corpus for Speech Recognition and Translation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211540.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-SS-1-7|PAPER Tue-E-SS-1-7 — Modeling Dysphonia Severity as a Function of Roughness and Breathiness Ratings in the GRBAS Scale]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modeling Dysphonia Severity as a Function of Roughness and Breathiness Ratings in the GRBAS Scale</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210683.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-8|PAPER Wed-E-V-3-8 — Multitask Training with Text Data for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multitask Training with Text Data for End-to-End Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211897.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-11|PAPER Thu-A-SS-1-11 — WaveGrad 2: Iterative Refinement for Text-to-Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">WaveGrad 2: Iterative Refinement for Text-to-Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212140.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-12|PAPER Fri-A-V-1-12 — “You don’t understand me!”: Comparing ASR Results for L1 and L2 Speakers of Swedish]]</div>|^<div class="cpauthorindexpersoncardpapertitle">“You don’t understand me!”: Comparing ASR Results for L1 and L2 Speakers of Swedish</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211758.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-10|PAPER Tue-M-V-5-10 — Rethinking Evaluation in ASR: Are Our Models Robust Enough?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Rethinking Evaluation in ASR: Are Our Models Robust Enough?</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210236.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-4|PAPER Tue-A-V-4-4 — Robust wav2vec 2.0: Analyzing Domain Shift in Self-Supervised Pre-Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust wav2vec 2.0: Analyzing Domain Shift in Self-Supervised Pre-Training</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210740.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-8|PAPER Tue-A-V-4-8 — slimIPL: Language-Model-Free Iterative Pseudo-Labeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">slimIPL: Language-Model-Free Iterative Pseudo-Labeling</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210329.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-3|PAPER Wed-E-V-1-3 — Unsupervised Cross-Lingual Representation Learning for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Cross-Lingual Representation Learning for Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210915.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-7|PAPER Thu-M-V-1-7 — Identifying Cognitive Impairment Using Sentence Representation Vectors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Identifying Cognitive Impairment Using Sentence Representation Vectors</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211190.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-1-1|PAPER Fri-M-O-1-1 — Self-Attention Channel Combinator Frontend for End-to-End Multichannel Far-Field Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Self-Attention Channel Combinator Frontend for End-to-End Multichannel Far-Field Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211065.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-8|PAPER Wed-A-V-6-8 — End-to-End Speech Translation via Cross-Modal Progressive Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Speech Translation via Cross-Modal Progressive Training</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211148.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-6|PAPER Wed-E-V-6-6 — EMOVIE: A Mandarin Emotion Speech Dataset with a Simple Emotional Text-to-Speech Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">EMOVIE: A Mandarin Emotion Speech Dataset with a Simple Emotional Text-to-Speech Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211051.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-10|PAPER Wed-A-V-4-10 — Feature Fusion by Attention Networks for Robust DOA Estimation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Feature Fusion by Attention Networks for Robust DOA Estimation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212016.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-1-5|PAPER Tue-A-O-1-5 — ICSpk: Interpretable Complex Speaker Embedding Extractor from Raw Waveform]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ICSpk: Interpretable Complex Speaker Embedding Extractor from Raw Waveform</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212025.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-1-4|PAPER Wed-E-O-1-4 — Effective Phase Encoding for End-To-End Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effective Phase Encoding for End-To-End Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210136.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-6|PAPER Fri-M-V-6-6 — Text Anchor Based Metric Learning for Small-Footprint Keyword Spotting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Text Anchor Based Metric Learning for Small-Footprint Keyword Spotting</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211180.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-6|PAPER Wed-M-V-4-6 — Data Quality as Predictor of Voice Anti-Spoofing Generalization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Data Quality as Predictor of Voice Anti-Spoofing Generalization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210549.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-2-5|PAPER Tue-M-O-2-5 — EasyCall Corpus: A Dysarthric Speech Dataset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">EasyCall Corpus: A Dysarthric Speech Dataset</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212167.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-2-14|PAPER Fri-M-V-2-14 — The Pacific Expansion: Optimizing Phonetic Transcription of Archival Corpora]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Pacific Expansion: Optimizing Phonetic Transcription of Archival Corpora</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210343.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-5|PAPER Wed-A-V-4-5 — Subjective Evaluation of Noise Suppression Algorithms in Crowdsourcing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Subjective Evaluation of Noise Suppression Algorithms in Crowdsourcing</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211609.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-1|PAPER Wed-E-SS-1-1 — INTERSPEECH 2021 Deep Noise Suppression Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">INTERSPEECH 2021 Deep Noise Suppression Challenge</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211870.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-1-1|PAPER Fri-A-SS-1-1 — INTERSPEECH 2021 Acoustic Echo Cancellation Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">INTERSPEECH 2021 Acoustic Echo Cancellation Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210098.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-5-2|PAPER Tue-A-V-5-2 — SmallER: Scaling Neural Entity Resolution for Edge Devices]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SmallER: Scaling Neural Entity Resolution for Edge Devices</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211553.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-SS-1-1|PAPER Wed-A-SS-1-1 — The ID R&D System Description for Short-Duration Speaker Verification Challenge 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The ID R&D System Description for Short-Duration Speaker Verification Challenge 2021</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210646.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-6|PAPER Tue-E-V-1-6 — SpeakerStew: Scaling to Many Languages with a Triaged Multilingual Text-Dependent and Text-Independent Speaker Verification System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SpeakerStew: Scaling to Many Languages with a Triaged Multilingual Text-Dependent and Text-Independent Speaker Verification System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211702.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-2|PAPER Tue-M-SS-1-2 — Transfer Learning-Based Cough Representations for Automatic Detection of COVID-19]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transfer Learning-Based Cough Representations for Automatic Detection of COVID-19</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211974.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-3|PAPER Tue-E-V-5-3 — Low Resource German ASR with Untranscribed Data Spoken by Non-Native Children — INTERSPEECH 2021 Shared Task SPAPL System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Low Resource German ASR with Untranscribed Data Spoken by Non-Native Children — INTERSPEECH 2021 Shared Task SPAPL System</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211955.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-1|PAPER Thu-A-SS-1-1 — An Improved Single Step Non-Autoregressive Transformer for Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Improved Single Step Non-Autoregressive Transformer for Automatic Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211094.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-11|PAPER Thu-A-V-6-11 — Towards Automatic Speech to Sign Language Generation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Automatic Speech to Sign Language Generation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211236.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-2|PAPER Fri-A-V-5-2 — Reinforcement Learning for Emotional Text-to-Speech Synthesis with Improved Emotion Discriminability]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reinforcement Learning for Emotional Text-to-Speech Synthesis with Improved Emotion Discriminability</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211741.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-11|PAPER Wed-M-V-5-11 — Primacy of Mouth over Eyes: Eye Movement Evidence from Audiovisual Mandarin Lexical Tones and Vowels]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Primacy of Mouth over Eyes: Eye Movement Evidence from Audiovisual Mandarin Lexical Tones and Vowels</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210259.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-7|PAPER Tue-M-V-3-7 — Speech Enhancement with Weakly Labelled Data from AudioSet]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Enhancement with Weakly Labelled Data from AudioSet</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211017.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-10|PAPER Tue-A-V-4-10 — Improving RNN-T for Domain Scaling Using Semi-Supervised Training with Neural TTS]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving RNN-T for Domain Scaling Using Semi-Supervised Training with Neural TTS</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210135.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-SS-1-2|PAPER Wed-M-SS-1-2 — A Partitioned-Block Frequency-Domain Adaptive Kalman Filter for Stereophonic Acoustic Echo Cancellation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Partitioned-Block Frequency-Domain Adaptive Kalman Filter for Stereophonic Acoustic Echo Cancellation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210146.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-SS-1-3|PAPER Wed-M-SS-1-3 — Real-Time Independent Vector Analysis Using Semi-Supervised Nonnegative Matrix Factorization as a Source Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Real-Time Independent Vector Analysis Using Semi-Supervised Nonnegative Matrix Factorization as a Source Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210930.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-5|PAPER Wed-M-V-4-5 — Half-Truth: A Partially Fake Audio Detection Dataset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Half-Truth: A Partially Fake Audio Detection Dataset</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210785.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-2-3|PAPER Fri-A-V-2-3 — Speech Emotion Recognition via Multi-Level Cross-Modal Distillation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Emotion Recognition via Multi-Level Cross-Modal Distillation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212067.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-2-10|PAPER Fri-A-V-2-10 — Time-Frequency Representation Learning with Graph Convolutional Network for Dialogue-Level Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Time-Frequency Representation Learning with Graph Convolutional Network for Dialogue-Level Speech Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210734.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-11|PAPER Tue-M-V-3-11 — Self-Supervised Learning Based Phone-Fortified Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Self-Supervised Learning Based Phone-Fortified Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211935.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-10|PAPER Tue-E-V-1-10 — Scaling Effect of Self-Supervised Speech Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Scaling Effect of Self-Supervised Speech Models</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210003.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-2|PAPER Fri-A-V-4-2 — Fusion of Embeddings Networks for Robust Combination of Text Dependent and Independent Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fusion of Embeddings Networks for Robust Combination of Text Dependent and Independent Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212127.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-3-6|PAPER Thu-M-O-3-6 — Training Hybrid Models on Noisy Transliterated Transcripts for Code-Switched Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Training Hybrid Models on Noisy Transliterated Transcripts for Code-Switched Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210173.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-7|PAPER Fri-A-SS-2-7 — Variational Auto-Encoder Based Variability Encoding for Dysarthric Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Variational Auto-Encoder Based Variability Encoding for Dysarthric Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-7|PAPER Fri-A-V-4-7 — Adaptive Margin Circle Loss for Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adaptive Margin Circle Loss for Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210138.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-4-3|PAPER Tue-M-V-4-3 — Act-Aware Slot-Value Predicting in Multi-Domain Dialogue State Tracking]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Act-Aware Slot-Value Predicting in Multi-Domain Dialogue State Tracking</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210095.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-5|PAPER Tue-E-V-4-5 — A Context-Aware Hierarchical BERT Fusion Network for Multi-Turn Dialog Act Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Context-Aware Hierarchical BERT Fusion Network for Multi-Turn Dialog Act Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210206.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-4|PAPER Wed-M-V-6-4 — An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210637.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-10|PAPER Wed-M-V-6-10 — Bridging the Gap Between Streaming and Non-Streaming ASR Systems by Distilling Ensembles of CTC and RNN-T Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bridging the Gap Between Streaming and Non-Streaming ASR Systems by Distilling Ensembles of CTC and RNN-T Models</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211936.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-4-1|PAPER Fri-M-V-4-1 — Unsupervised Learning of Disentangled Speech Content and Style Representation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Learning of Disentangled Speech Content and Style Representation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210797.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-4|PAPER Fri-A-V-3-4 — CoDERT: Distilling Encoder Representations with Co-Learning for Transducer-Based Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">CoDERT: Distilling Encoder Representations with Co-Learning for Transducer-Based Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211249.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-10|PAPER Tue-A-SS-2-10 — Contrastive Learning of Cough Descriptors for Automatic COVID-19 Preliminary Diagnosis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Contrastive Learning of Cough Descriptors for Automatic COVID-19 Preliminary Diagnosis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211298.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-2|PAPER Wed-M-V-6-2 — Multiple Softmax Architecture for Streaming Multilingual End-to-End ASR Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multiple Softmax Architecture for Streaming Multilingual End-to-End ASR Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211384.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-1|PAPER Fri-A-SS-2-1 — Automatic Speech Recognition of Disordered Speech: Personalized Models Outperforming Human Listeners on Short Phrases]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Speech Recognition of Disordered Speech: Personalized Models Outperforming Human Listeners on Short Phrases</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210697.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-12|PAPER Fri-A-SS-2-12 — Disordered Speech Data Collection: Lessons Learned at 1 Million Utterances from Project Euphonia]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Disordered Speech Data Collection: Lessons Learned at 1 Million Utterances from Project Euphonia</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211923.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-5-8|PAPER Tue-A-V-5-8 — Multimodal Speech Summarization Through Semantic Concept Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multimodal Speech Summarization Through Semantic Concept Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211915.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-9|PAPER Thu-A-V-2-9 — Earnings-21: A Practical Benchmark for ASR in the Wild]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Earnings-21: A Practical Benchmark for ASR in the Wild</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210731.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-6|PAPER Tue-M-V-6-6 — Enrollment-Less Training for Personalized Voice Activity Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Enrollment-Less Training for Personalized Voice Activity Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211607.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-5-5|PAPER Tue-A-V-5-5 — Zero-Shot Joint Modeling of Multiple Spoken-Text-Style Conversion Tasks Using Switching Tokens]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Zero-Shot Joint Modeling of Multiple Spoken-Text-Style Conversion Tasks Using Switching Tokens</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210437.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-6|PAPER Wed-M-V-6-6 — Streaming End-to-End Speech Recognition for Hybrid RNN-T/Attention Architecture]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Streaming End-to-End Speech Recognition for Hybrid RNN-T/Attention Architecture</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-13|PAPER Wed-E-V-3-13 — Unified Autoregressive Modeling for Joint End-to-End Multi-Talker Overlapped Speech Recognition and Speaker Attribute Estimation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unified Autoregressive Modeling for Joint End-to-End Multi-Talker Overlapped Speech Recognition and Speaker Attribute Estimation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211992.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-6|PAPER Fri-M-V-3-6 — Cross-Modal Transformer-Based Neural Correction Models for Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Modal Transformer-Based Neural Correction Models for Automatic Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211981.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-11|PAPER Fri-A-V-1-11 — End-to-End Rich Transcription-Style Automatic Speech Recognition with Semi-Supervised Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Rich Transcription-Style Automatic Speech Recognition with Semi-Supervised Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210906.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-6-3|PAPER Thu-M-V-6-3 — Using Transposed Convolution for Articulatory-to-Acoustic Conversion from Real-Time MRI Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Using Transposed Convolution for Articulatory-to-Acoustic Conversion from Real-Time MRI Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211050.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-9|PAPER Wed-A-V-4-9 — Assessment of von Mises-Bernoulli Deep Neural Network in Sound Source Localization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Assessment of von Mises-Bernoulli Deep Neural Network in Sound Source Localization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211029.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-7|PAPER Tue-E-V-2-7 — Manifold-Aware Deep Clustering: Maximizing Angles Between Embedding Vectors Based on Regular Simplex]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Manifold-Aware Deep Clustering: Maximizing Angles Between Embedding Vectors Based on Regular Simplex</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210861.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-7|PAPER Fri-M-V-5-7 — Age Estimation with Speech-Age Model for Heterogeneous Speech Datasets]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Age Estimation with Speech-Age Model for Heterogeneous Speech Datasets</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210976.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-11|PAPER Wed-A-V-5-11 — High-Fidelity Parallel WaveGAN with Multi-Band Harmonic-Plus-Noise Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">High-Fidelity Parallel WaveGAN with Multi-Band Harmonic-Plus-Noise Model</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210252.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-5-1|PAPER Thu-M-V-5-1 — Phrase Break Prediction with Bidirectional Encoder Representations in Japanese Text-to-Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phrase Break Prediction with Bidirectional Encoder Representations in Japanese Text-to-Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210171.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-4-4|PAPER Tue-M-V-4-4 — Dialogue Situation Recognition for Everyday Conversation Using Multimodal Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dialogue Situation Recognition for Everyday Conversation Using Multimodal Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211130.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-9|PAPER Wed-E-V-5-9 — Speech Denoising Without Clean Training Data: A Noise2Noise Approach]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Denoising Without Clean Training Data: A Noise2Noise Approach</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211288.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-3-5|PAPER Wed-A-O-3-5 — Late Fusion of the Available Lexicon and Raw Waveform-Based Acoustic Modeling for Depression and Dementia Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Late Fusion of the Available Lexicon and Raw Waveform-Based Acoustic Modeling for Depression and Dementia Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211488.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-3|PAPER Tue-M-SS-1-3 — The Phonetic Footprint of Covid-19?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Phonetic Footprint of Covid-19?</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211589.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-2|PAPER Thu-A-SS-2-2 — Influence of the Interviewer on the Automatic Assessment of Alzheimer’s Disease in the Context of the ADReSSo Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Influence of the Interviewer on the Automatic Assessment of Alzheimer’s Disease in the Context of the ADReSSo Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211672.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-1-8|PAPER Wed-M-V-1-8 — Excitation Source Feature Based Dialect Identification in Ao — A Low Resource Language]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Excitation Source Feature Based Dialect Identification in Ao — A Low Resource Language</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211592.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-10|PAPER Fri-M-V-5-10 — Automatic Detection of Shouted Speech Segments in Indian News Debates]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Detection of Shouted Speech Segments in Indian News Debates</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210910.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-7-5|PAPER Tue-M-V-7-5 — Acoustic and Prosodic Correlates of Emotions in Urdu Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic and Prosodic Correlates of Emotions in Urdu Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210640.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-6|PAPER Tue-A-SS-1-6 — PATE-AAE: Incorporating Adversarial Autoencoder into Private Aggregation of Teacher Ensembles for Spoken Command Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PATE-AAE: Incorporating Adversarial Autoencoder into Private Aggregation of Teacher Ensembles for Spoken Command Classification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211429.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-7|PAPER Tue-E-V-3-7 — Raw Speech-to-Articulatory Inversion by Temporal Filtering and Decimation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Raw Speech-to-Articulatory Inversion by Temporal Filtering and Decimation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212070.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-11|PAPER Tue-E-V-3-11 — FRILL: A Non-Semantic Speech Embedding for Mobile Devices]]</div>|^<div class="cpauthorindexpersoncardpapertitle">FRILL: A Non-Semantic Speech Embedding for Mobile Devices</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212006.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-15|PAPER Fri-A-SS-2-15 — Analysis and Tuning of a Voice Assistant System for Dysfluent Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis and Tuning of a Voice Assistant System for Dysfluent Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211460.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-8|PAPER Tue-E-V-4-8 — Integrating Dialog History into End-to-End Spoken Language Understanding Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Integrating Dialog History into End-to-End Spoken Language Understanding Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210827.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-3|PAPER Wed-E-V-2-3 — Impact of Emotional State on Estimation of Willingness to Buy from Advertising Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Impact of Emotional State on Estimation of Willingness to Buy from Advertising Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218013.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-S&T-1-6|PAPER Wed-A-S&T-1-6 — Addressing Compliance in Call Centers with Entity Extraction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Addressing Compliance in Call Centers with Entity Extraction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211229.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-7|PAPER Wed-E-V-6-7 — Perception of Social Speaker Characteristics in Synthetic Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Perception of Social Speaker Characteristics in Synthetic Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212124.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-10|PAPER Wed-E-V-6-10 — KazakhTTS: An Open-Source Kazakh Text-to-Speech Synthesis Dataset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">KazakhTTS: An Open-Source Kazakh Text-to-Speech Synthesis Dataset</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211796.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-7|PAPER Wed-A-V-1-7 — Investigating the Interplay Between Affective, Phonatory and Motoric Subsystems in Autism Spectrum Disorder Using a Multimodal Dialogue Agent]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Interplay Between Affective, Phonatory and Motoric Subsystems in Autism Spectrum Disorder Using a Multimodal Dialogue Agent</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210978.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-6|PAPER Wed-E-V-1-6 — Dual Script E2E Framework for Multilingual and Code-Switching ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dual Script E2E Framework for Multilingual and Code-Switching ASR</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210307.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-3|PAPER Fri-A-V-5-3 — Emotional Prosody Control for Speech Generation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Emotional Prosody Control for Speech Generation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212100.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-10|PAPER Tue-M-V-2-10 — Speed up Training with Variable Length Inputs by Efficient Batching Strategies]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speed up Training with Variable Length Inputs by Efficient Batching Strategies</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211340.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-7|PAPER Wed-M-V-2-7 — Unsupervised Neural-Based Graph Clustering for Variable-Length Speech Representation Discovery of Zero-Resource Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Neural-Based Graph Clustering for Variable-Length Speech Representation Discovery of Zero-Resource Languages</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210970.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-6|PAPER Wed-A-V-6-6 — Weakly-Supervised Speech-to-Text Mapping with Visually Connected Non-Parallel Speech-Text Data Using Cyclic Partially-Aligned Transformer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Weakly-Supervised Speech-to-Text Mapping with Visually Connected Non-Parallel Speech-Text Data Using Cyclic Partially-Aligned Transformer</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211020.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-7|PAPER Wed-A-V-6-7 — Transcribing Paralinguistic Acoustic Cues to Target Language Text in Transformer-Based Speech-to-Text Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transcribing Paralinguistic Acoustic Cues to Target Language Text in Transformer-Based Speech-to-Text Translation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211105.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-9|PAPER Wed-A-V-6-9 — ASR Posterior-Based Loss for Multi-Task End-to-End Speech Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ASR Posterior-Based Loss for Multi-Task End-to-End Speech Translation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210946.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-4-8|PAPER Fri-M-V-4-8 — Dynamically Adaptive Machine Speech Chain Inference for TTS in Noisy Environment: Listen and Speak Louder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dynamically Adaptive Machine Speech Chain Inference for TTS in Noisy Environment: Listen and Speak Louder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211027.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-1-4|PAPER Thu-M-O-1-4 — Conditional Independence for Pretext Task Selection in Self-Supervised Speech Representation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Conditional Independence for Pretext Task Selection in Self-Supervised Speech Representation Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210556.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-5|PAPER Wed-M-O-1-5 — //LeBenchmark//: A Reproducible Framework for Assessing Self-Supervised Representation Learning from Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">//LeBenchmark//: A Reproducible Framework for Assessing Self-Supervised Representation Learning from Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211732.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-O-1-4|PAPER Thu-A-O-1-4 — Importance of Parasagittal Sensor Information in Tongue Motion Capture Through a Diphonic Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Importance of Parasagittal Sensor Information in Tongue Motion Capture Through a Diphonic Analysis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210114.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-3|PAPER Fri-A-V-4-3 — A Generative Model for Duration-Dependent Score Calibration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Generative Model for Duration-Dependent Score Calibration</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211903.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-4-13|PAPER Wed-E-V-4-13 — Articulatory Characteristics of Icelandic Voiced Fricative Lenition: Gradience, Categoricity, and Speaker/Gesture-Specific Effects]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Articulatory Characteristics of Icelandic Voiced Fricative Lenition: Gradience, Categoricity, and Speaker/Gesture-Specific Effects</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212062.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-1|PAPER Wed-M-V-2-1 — Low Resource ASR: The Surprising Effectiveness of High Resource Transliteration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Low Resource ASR: The Surprising Effectiveness of High Resource Transliteration</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211339.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-7|PAPER Wed-E-V-1-7 — MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210717.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-2|PAPER Tue-A-V-4-2 — wav2vec-C: A Self-Supervised Model for Speech Representation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">wav2vec-C: A Self-Supervised Model for Speech Representation Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210643.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-2|PAPER Wed-M-O-1-2 — Radically Old Way of Computing Spectra: Applications in End-to-End ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Radically Old Way of Computing Spectra: Applications in End-to-End ASR</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211816.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-8|PAPER Fri-A-V-6-8 — Factorization-Aware Training of Transformers for Natural Language Understanding on the Edge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Factorization-Aware Training of Transformers for Natural Language Understanding on the Edge</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211826.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-9|PAPER Fri-A-V-6-9 — End-to-End Spoken Language Understanding for Generalized Voice Assistants]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Spoken Language Understanding for Generalized Voice Assistants</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211191.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-1-5|PAPER Wed-A-O-1-5 — Fast Text-Only Domain Adaptation of RNN-Transducer Prediction Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fast Text-Only Domain Adaptation of RNN-Transducer Prediction Network</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211234.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-O-1-4|PAPER Fri-A-O-1-4 — Voice Activity Detection with Teacher-Student Domain Emulation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voice Activity Detection with Teacher-Student Domain Emulation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212227.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-3-5|PAPER Wed-E-O-3-5 — Audio Retrieval with Natural Language Queries]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio Retrieval with Natural Language Queries</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211730.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-6-1|PAPER Tue-A-V-6-1 — Investigating Deep Neural Structures and their Interpretability in the Domain of Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Deep Neural Structures and their Interpretability in the Domain of Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211413.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-9|PAPER Tue-A-V-2-9 — RaSSpeR: Radar-Based Silent Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">RaSSpeR: Radar-Based Silent Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211460.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-8|PAPER Tue-E-V-4-8 — Integrating Dialog History into End-to-End Spoken Language Understanding Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Integrating Dialog History into End-to-End Spoken Language Understanding Systems</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211312.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-12|PAPER Wed-M-V-2-12 — AVLnet: Learning Audio-Visual Language Representations from Instructional Videos]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AVLnet: Learning Audio-Visual Language Representations from Instructional Videos</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211352.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-8|PAPER Thu-M-V-2-8 — Cascaded Multilingual Audio-Visual Learning from Videos]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cascaded Multilingual Audio-Visual Learning from Videos</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210796.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-6|PAPER Thu-A-V-2-6 — Knowledge Distillation Based Training of Universal ASR Source Models for Cross-Lingual Transfer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Knowledge Distillation Based Training of Universal ASR Source Models for Cross-Lingual Transfer</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210788.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-6|PAPER Fri-A-V-6-6 — Speak or Chat with Me: End-to-End Spoken Language Understanding System with Flexible Inputs]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speak or Chat with Me: End-to-End Spoken Language Understanding System with Flexible Inputs</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211315.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-1-4|PAPER Fri-M-O-1-4 — Learning to Rank Microphones for Distant Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning to Rank Microphones for Distant Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211774.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-12|PAPER Thu-A-V-5-12 — SC-GlowTTS: An Efficient Zero-Shot Multi-Speaker Text-To-Speech Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SC-GlowTTS: An Efficient Zero-Shot Multi-Speaker Text-To-Speech Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210019.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-1|PAPER Tue-M-SS-1-1 — The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210114.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-3|PAPER Fri-A-V-4-3 — A Generative Model for Duration-Dependent Score Calibration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Generative Model for Duration-Dependent Score Calibration</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210845.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-5|PAPER Wed-A-V-5-5 — Fre-GAN: Adversarial Frequency-Consistent Audio Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fre-GAN: Adversarial Frequency-Consistent Audio Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210831.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-10|PAPER Thu-A-V-5-10 — Reinforce-Aligner: Reinforcement Alignment Search for Robust End-to-End Text-to-Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reinforce-Aligner: Reinforcement Alignment Search for Robust End-to-End Text-to-Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211204.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-7|PAPER Wed-M-V-4-7 — Coded Speech Enhancement Using Neural Network-Based Vector-Quantized Residual Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Coded Speech Enhancement Using Neural Network-Based Vector-Quantized Residual Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210082.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-1-1|PAPER Wed-M-V-1-1 — End-to-End Language Diarization for Bilingual Code-Switching Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Language Diarization for Bilingual Code-Switching Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210323.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-1-1|PAPER Wed-E-O-1-1 — Reformulating DOVER-Lap Label Mapping as a Graph Partitioning Problem]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reformulating DOVER-Lap Label Mapping as a Graph Partitioning Problem</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212127.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-3-6|PAPER Thu-M-O-3-6 — Training Hybrid Models on Noisy Transliterated Transcripts for Code-Switched Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Training Hybrid Models on Noisy Transliterated Transcripts for Code-Switched Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211924.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-9|PAPER Thu-A-V-3-9 — Speaker Verification-Based Evaluation of Single-Channel Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Verification-Based Evaluation of Single-Channel Speech Separation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211965.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-5|PAPER Thu-A-V-6-5 — GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210660.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-8|PAPER Thu-A-V-5-8 — A Universal Multi-Speaker Multi-Style Text-to-Speech via Disentangled Representation Learning Based on Rényi Divergence Minimization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Universal Multi-Speaker Multi-Style Text-to-Speech via Disentangled Representation Learning Based on Rényi Divergence Minimization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211339.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-7|PAPER Wed-E-V-1-7 — MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211184.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-O-2-2|PAPER Thu-A-O-2-2 — Fusion-Net: Time-Frequency Information Fusion Y-Network for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fusion-Net: Time-Frequency Information Fusion Y-Network for Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211422.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-6-6|PAPER Thu-M-V-6-6 — Model-Based Exploration of Linking Between Vowel Articulatory Space and Acoustic Space]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Model-Based Exploration of Linking Between Vowel Articulatory Space and Acoustic Space</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211033.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-2-2|PAPER Thu-M-SS-2-2 — Detecting English Speech in the Air Traffic Control Voice Communication]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detecting English Speech in the Air Traffic Control Voice Communication</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210142.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-1|PAPER Thu-M-V-3-1 — Ultra Fast Speech Separation Model with Teacher Student Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ultra Fast Speech Separation Model with Teacher Student Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210921.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-9|PAPER Thu-M-V-3-9 — Investigation of Practical Aspects of Single Channel Speech Separation for ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigation of Practical Aspects of Single Channel Speech Separation for ASR</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211581.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-2-1|PAPER Tue-M-O-2-1 — Acoustic Indicators of Speech Motor Coordination in Adults With and Without Traumatic Brain Injury]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Indicators of Speech Motor Coordination in Adults With and Without Traumatic Brain Injury</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211796.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-7|PAPER Wed-A-V-1-7 — Investigating the Interplay Between Affective, Phonatory and Motoric Subsystems in Autism Spectrum Disorder Using a Multimodal Dialogue Agent]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Interplay Between Affective, Phonatory and Motoric Subsystems in Autism Spectrum Disorder Using a Multimodal Dialogue Agent</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211908.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-5|PAPER Wed-A-V-2-5 — Revisiting Parity of Human vs. Machine Conversational Speech Transcription]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Revisiting Parity of Human vs. Machine Conversational Speech Transcription</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-6|PAPER Tue-A-V-2-6 — Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-6|PAPER Tue-A-V-2-6 — Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211749.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-7|PAPER Tue-A-V-2-7 — An Automatic, Simple Ultrasound Biofeedback Parameter for Distinguishing Accurate and Misarticulated Rhotic Syllables]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Automatic, Simple Ultrasound Biofeedback Parameter for Distinguishing Accurate and Misarticulated Rhotic Syllables</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210099.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-5|PAPER Fri-A-SS-2-5 — Speaking with a KN95 Face Mask: ASR Performance and Speaker Compensation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaking with a KN95 Face Mask: ASR Performance and Speaker Compensation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211891.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-3-4|PAPER Tue-E-O-3-4 — Acoustic-Prosodic, Lexical and Demographic Cues to Persuasiveness in Competitive Debate Speeches]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic-Prosodic, Lexical and Demographic Cues to Persuasiveness in Competitive Debate Speeches</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211749.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-7|PAPER Tue-A-V-2-7 — An Automatic, Simple Ultrasound Biofeedback Parameter for Distinguishing Accurate and Misarticulated Rhotic Syllables]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Automatic, Simple Ultrasound Biofeedback Parameter for Distinguishing Accurate and Misarticulated Rhotic Syllables</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211749.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-7|PAPER Tue-A-V-2-7 — An Automatic, Simple Ultrasound Biofeedback Parameter for Distinguishing Accurate and Misarticulated Rhotic Syllables]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Automatic, Simple Ultrasound Biofeedback Parameter for Distinguishing Accurate and Misarticulated Rhotic Syllables</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211749.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-7|PAPER Tue-A-V-2-7 — An Automatic, Simple Ultrasound Biofeedback Parameter for Distinguishing Accurate and Misarticulated Rhotic Syllables]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Automatic, Simple Ultrasound Biofeedback Parameter for Distinguishing Accurate and Misarticulated Rhotic Syllables</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211732.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-O-1-4|PAPER Thu-A-O-1-4 — Importance of Parasagittal Sensor Information in Tongue Motion Capture Through a Diphonic Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Importance of Parasagittal Sensor Information in Tongue Motion Capture Through a Diphonic Analysis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212006.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-15|PAPER Fri-A-SS-2-15 — Analysis and Tuning of a Voice Assistant System for Dysfluent Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis and Tuning of a Voice Assistant System for Dysfluent Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210307.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-3|PAPER Fri-A-V-5-3 — Emotional Prosody Control for Speech Generation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Emotional Prosody Control for Speech Generation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211658.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-2-4|PAPER Wed-E-O-2-4 — It’s Not What You Said, it’s How You Said it: Discriminative Perception of Speech as a Multichannel Communication System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">It’s Not What You Said, it’s How You Said it: Discriminative Perception of Speech as a Multichannel Communication System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210303.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-1-3|PAPER Thu-A-V-1-3 — Automatic Analysis of the Emotional Content of Speech in Daylong Child-Centered Recordings from a Neonatal Intensive Care Unit]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Analysis of the Emotional Content of Speech in Daylong Child-Centered Recordings from a Neonatal Intensive Care Unit</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210910.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-7-5|PAPER Tue-M-V-7-5 — Acoustic and Prosodic Correlates of Emotions in Urdu Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic and Prosodic Correlates of Emotions in Urdu Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210935.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-2-3|PAPER Thu-M-SS-2-3 — Robust Command Recognition for Lithuanian Air Traffic Control Tower Utterances]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Command Recognition for Lithuanian Air Traffic Control Tower Utterances</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210645.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-3-2|PAPER Tue-M-O-3-2 — Fricative Phoneme Detection Using Deep Neural Networks and its Comparison to Traditional Methods]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fricative Phoneme Detection Using Deep Neural Networks and its Comparison to Traditional Methods</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210946.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-4-8|PAPER Fri-M-V-4-8 — Dynamically Adaptive Machine Speech Chain Inference for TTS in Noisy Environment: Listen and Speak Louder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dynamically Adaptive Machine Speech Chain Inference for TTS in Noisy Environment: Listen and Speak Louder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212264.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-11|PAPER Wed-M-V-2-11 — Zero-Shot Federated Learning with New Classes for Audio Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Zero-Shot Federated Learning with New Classes for Audio Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211375.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-1|PAPER Tue-E-V-3-1 — Estimating Articulatory Movements in Speech Production with Transformer Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Estimating Articulatory Movements in Speech Production with Transformer Networks</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218033.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-S&T-1-8|PAPER Fri-A-S&T-1-8 — Web Interface for Estimating Articulatory Movements in Speech Production from Acoustics and Text]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Web Interface for Estimating Articulatory Movements in Speech Production from Acoustics and Text</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211340.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-7|PAPER Wed-M-V-2-7 — Unsupervised Neural-Based Graph Clustering for Variable-Length Speech Representation Discovery of Zero-Resource Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Neural-Based Graph Clustering for Variable-Length Speech Representation Discovery of Zero-Resource Languages</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210970.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-6|PAPER Wed-A-V-6-6 — Weakly-Supervised Speech-to-Text Mapping with Visually Connected Non-Parallel Speech-Text Data Using Cyclic Partially-Aligned Transformer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Weakly-Supervised Speech-to-Text Mapping with Visually Connected Non-Parallel Speech-Text Data Using Cyclic Partially-Aligned Transformer</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211020.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-7|PAPER Wed-A-V-6-7 — Transcribing Paralinguistic Acoustic Cues to Target Language Text in Transformer-Based Speech-to-Text Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transcribing Paralinguistic Acoustic Cues to Target Language Text in Transformer-Based Speech-to-Text Translation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211105.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-9|PAPER Wed-A-V-6-9 — ASR Posterior-Based Loss for Multi-Task End-to-End Speech Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ASR Posterior-Based Loss for Multi-Task End-to-End Speech Translation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210946.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-4-8|PAPER Fri-M-V-4-8 — Dynamically Adaptive Machine Speech Chain Inference for TTS in Noisy Environment: Listen and Speak Louder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dynamically Adaptive Machine Speech Chain Inference for TTS in Noisy Environment: Listen and Speak Louder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211132.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-14|PAPER Fri-A-V-1-14 — Improvement of Automatic English Pronunciation Assessment with Small Number of Utterances Using Sentence Speakability]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improvement of Automatic English Pronunciation Assessment with Small Number of Utterances Using Sentence Speakability</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211220.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-1|PAPER Thu-A-SS-2-1 — Detecting Cognitive Decline Using Speech Only: The ADReSSo Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detecting Cognitive Decline Using Speech Only: The ADReSSo Challenge</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211761.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-2-1|PAPER Fri-A-V-2-1 — Affect Recognition Through Scalogram and Multi-Resolution Cochleagram Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Affect Recognition Through Scalogram and Multi-Resolution Cochleagram Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210734.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-11|PAPER Tue-M-V-3-11 — Self-Supervised Learning Based Phone-Fortified Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Self-Supervised Learning Based Phone-Fortified Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211328.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-2-3|PAPER Wed-M-O-2-3 — An Exploration of the Acoustic Space of Rhotics and Laterals in Ruruuli]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Exploration of the Acoustic Space of Rhotics and Laterals in Ruruuli</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211428.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-4|PAPER Fri-M-V-6-4 — Streaming Transformer for Hardware Efficient Voice Trigger Detection and False Trigger Mitigation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Streaming Transformer for Hardware Efficient Voice Trigger Detection and False Trigger Mitigation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211502.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-9|PAPER Tue-E-V-1-9 — Deep Feature CycleGANs: Speaker Identity Preserving Non-Parallel Microphone-Telephone Domain Adaptation for Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Feature CycleGANs: Speaker Identity Preserving Non-Parallel Microphone-Telephone Domain Adaptation for Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210681.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-5|PAPER Fri-A-V-4-5 — Multi-Channel Speaker Verification for Single and Multi-Talker Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Channel Speaker Verification for Single and Multi-Talker Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211339.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-7|PAPER Wed-E-V-1-7 — MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211874.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-10|PAPER Tue-M-V-6-10 — Segmental Contrastive Predictive Coding for Unsupervised Word Segmentation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Segmental Contrastive Predictive Coding for Unsupervised Word Segmentation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211531.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-8|PAPER Thu-A-V-3-8 — Vocal Harmony Separation Using Time-Domain Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Vocal Harmony Separation Using Time-Domain Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211159.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-7|PAPER Wed-E-V-2-7 — Non-Verbal Vocalisation and Laughter Detection Using Sequence-to-Sequence Models and Multi-Label Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Non-Verbal Vocalisation and Laughter Detection Using Sequence-to-Sequence Models and Multi-Label Training</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210027.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-3|PAPER Tue-A-SS-1-3 — Adjunct-Emeritus Distillation for Semi-Supervised Language Model Adaptation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adjunct-Emeritus Distillation for Semi-Supervised Language Model Adaptation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211864.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-5-1|PAPER Tue-A-V-5-1 — Speaker-Conversation Factorial Designs for Diarization Error Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker-Conversation Factorial Designs for Diarization Error Analysis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211609.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-1|PAPER Wed-E-SS-1-1 — INTERSPEECH 2021 Deep Noise Suppression Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">INTERSPEECH 2021 Deep Noise Suppression Challenge</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211870.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-1-1|PAPER Fri-A-SS-1-1 — INTERSPEECH 2021 Acoustic Echo Cancellation Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">INTERSPEECH 2021 Acoustic Echo Cancellation Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210299.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-4|PAPER Wed-A-V-4-4 — NISQA: A Deep CNN-Self-Attention Model for Multidimensional Speech Quality Prediction with Crowdsourced Datasets]]</div>|^<div class="cpauthorindexpersoncardpapertitle">NISQA: A Deep CNN-Self-Attention Model for Multidimensional Speech Quality Prediction with Crowdsourced Datasets</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210314.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-3-1|PAPER Wed-E-O-3-1 — Extending the Fullband E-Model Towards Background Noise, Bursty Packet Loss, and Conversational Degradations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Extending the Fullband E-Model Towards Background Noise, Bursty Packet Loss, and Conversational Degradations</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211229.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-7|PAPER Wed-E-V-6-7 — Perception of Social Speaker Characteristics in Synthetic Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Perception of Social Speaker Characteristics in Synthetic Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211114.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-1|PAPER Wed-M-V-6-1 — Super-Human Performance in Online Low-Latency Recognition of Conversational Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Super-Human Performance in Online Low-Latency Recognition of Conversational Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210216.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-2|PAPER Wed-E-V-1-2 — Efficient Weight Factorization for Multilingual Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Efficient Weight Factorization for Multilingual Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211796.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-7|PAPER Wed-A-V-1-7 — Investigating the Interplay Between Affective, Phonatory and Motoric Subsystems in Autism Spectrum Disorder Using a Multimodal Dialogue Agent]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Interplay Between Affective, Phonatory and Motoric Subsystems in Autism Spectrum Disorder Using a Multimodal Dialogue Agent</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210221.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-3|PAPER Tue-E-V-6-3 — One-Shot Voice Conversion with Speaker-Agnostic StarGAN]]</div>|^<div class="cpauthorindexpersoncardpapertitle">One-Shot Voice Conversion with Speaker-Agnostic StarGAN</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210220.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-3|PAPER Wed-E-V-5-3 — Human Listening and Live Captioning: Multi-Task Training for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Human Listening and Live Captioning: Multi-Task Training for Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211040.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-12|PAPER Thu-A-V-6-12 — kosp2e: Korean Speech to English Translation Corpus]]</div>|^<div class="cpauthorindexpersoncardpapertitle">kosp2e: Korean Speech to English Translation Corpus</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211438.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-2-2|PAPER Thu-M-O-2-2 — Separation of Emotional and Reconstruction Embeddings on Ladder Network to Improve Speech Emotion Recognition Robustness in Noisy Conditions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Separation of Emotional and Reconstruction Embeddings on Ladder Network to Improve Speech Emotion Recognition Robustness in Noisy Conditions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210065.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-1-1|PAPER Tue-M-V-1-1 — Adaptive Convolutional Neural Network for Text-Independent Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adaptive Convolutional Neural Network for Text-Independent Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210845.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-5|PAPER Wed-A-V-5-5 — Fre-GAN: Adversarial Frequency-Consistent Audio Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fre-GAN: Adversarial Frequency-Consistent Audio Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210831.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-10|PAPER Thu-A-V-5-10 — Reinforce-Aligner: Reinforcement Alignment Search for Robust End-to-End Text-to-Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reinforce-Aligner: Reinforcement Alignment Search for Robust End-to-End Text-to-Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211270.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-5-9|PAPER Tue-A-V-5-9 — Enhancing Semantic Understanding with Self-Supervised Methods for Abstractive Dialogue Summarization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Enhancing Semantic Understanding with Self-Supervised Methods for Abstractive Dialogue Summarization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210165.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-4|PAPER Wed-A-V-3-4 — A Deliberation-Based Joint Acoustic and Text Decoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Deliberation-Based Joint Acoustic and Text Decoder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211816.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-8|PAPER Fri-A-V-6-8 — Factorization-Aware Training of Transformers for Natural Language Understanding on the Edge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Factorization-Aware Training of Transformers for Natural Language Understanding on the Edge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211737.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-SS-1-3|PAPER Wed-A-SS-1-3 — SdSVC Challenge 2021: Tips and Tricks to Boost the Short-Duration Speaker Verification System Performance]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SdSVC Challenge 2021: Tips and Tricks to Boost the Short-Duration Speaker Verification System Performance</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211737.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-SS-1-3|PAPER Wed-A-SS-1-3 — SdSVC Challenge 2021: Tips and Tricks to Boost the Short-Duration Speaker Verification System Performance]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SdSVC Challenge 2021: Tips and Tricks to Boost the Short-Duration Speaker Verification System Performance</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212151.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-13|PAPER Wed-M-V-4-13 — Development of a Psychoacoustic Loss Function for the Deep Neural Network (DNN)-Based Speech Coder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Development of a Psychoacoustic Loss Function for the Deep Neural Network (DNN)-Based Speech Coder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212151.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-13|PAPER Wed-M-V-4-13 — Development of a Psychoacoustic Loss Function for the Deep Neural Network (DNN)-Based Speech Coder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Development of a Psychoacoustic Loss Function for the Deep Neural Network (DNN)-Based Speech Coder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210036.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-1|PAPER Wed-M-V-4-1 — NU-Wave: A Diffusion Probabilistic Model for Neural Audio Upsampling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">NU-Wave: A Diffusion Probabilistic Model for Neural Audio Upsampling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212011.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-1-4|PAPER Tue-E-O-1-4 — Fearless Steps Challenge Phase-3 (FSC P3): Advancing SLT for Unseen Channel and Mission Data Across NASA Apollo Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fearless Steps Challenge Phase-3 (FSC P3): Advancing SLT for Unseen Channel and Mission Data Across NASA Apollo Audio</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210188.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-2|PAPER Thu-A-V-5-2 — LiteTTS: A Lightweight Mel-Spectrogram-Free Text-to-Wave Synthesizer Based on Generative Adversarial Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">LiteTTS: A Lightweight Mel-Spectrogram-Free Text-to-Wave Synthesizer Based on Generative Adversarial Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210935.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-2-3|PAPER Thu-M-SS-2-3 — Robust Command Recognition for Lithuanian Air Traffic Control Tower Utterances]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Command Recognition for Lithuanian Air Traffic Control Tower Utterances</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211058.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-O-1-3|PAPER Fri-A-O-1-3 — Speech Activity Detection Based on Multilingual Speech Recognition System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Activity Detection Based on Multilingual Speech Recognition System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210019.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-1|PAPER Tue-M-SS-1-1 — The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210493.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-3-1|PAPER Fri-M-O-3-1 — Many-Speakers Single Channel Speech Separation with Optimal Permutation Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Many-Speakers Single Channel Speech Separation with Optimal Permutation Training</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210467.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-5|PAPER Tue-E-V-5-5 — Speaker Normalization Using Joint Variational Autoencoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Normalization Using Joint Variational Autoencoder</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210953.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-7|PAPER Wed-E-V-5-7 — Whisper Speech Enhancement Using Joint Variational Autoencoder for Improved Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Whisper Speech Enhancement Using Joint Variational Autoencoder for Improved Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211526.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-6|PAPER Wed-A-V-1-6 — Detecting Alzheimer’s Disease Using Interactional and Acoustic Features from Spontaneous Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detecting Alzheimer’s Disease Using Interactional and Acoustic Features from Spontaneous Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211809.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-11|PAPER Wed-E-V-1-11 — Towards One Model to Rule All: Multilingual Strategy for Dialectal Code-Switching Arabic ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards One Model to Rule All: Multilingual Strategy for Dialectal Code-Switching Arabic ASR</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212231.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-11|PAPER Thu-A-V-2-11 — Arabic Code-Switching Speech Recognition Using Monolingual Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Arabic Code-Switching Speech Recognition Using Monolingual Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210414.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-2|PAPER Wed-A-V-5-2 — Glow-WaveGAN: Learning Speech Representations from GAN-Based Variational Auto-Encoder for High Fidelity Flow-Based Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Glow-WaveGAN: Learning Speech Representations from GAN-Based Variational Auto-Encoder for High Fidelity Flow-Based Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210412.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-4|PAPER Fri-A-V-5-4 — Controllable Context-Aware Conversational Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Controllable Context-Aware Conversational Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211775.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-9|PAPER Tue-E-V-3-9 — SUPERB: Speech Processing Universal PERformance Benchmark]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SUPERB: Speech Processing Universal PERformance Benchmark</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211689.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-7-7|PAPER Thu-M-V-7-7 — Joint Retrieval-Extraction Training for Evidence-Aware Dialog Response Selection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Joint Retrieval-Extraction Training for Evidence-Aware Dialog Response Selection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210340.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-6|PAPER Wed-A-V-2-6 — Lookup-Table Recurrent Language Models for Long Tail Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Lookup-Table Recurrent Language Models for Long Tail Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210330.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-10|PAPER Fri-A-SS-2-10 — A Voice-Activated Switch for Persons with Motor and Speech Impairments: Isolated-Vowel Spotting Using Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Voice-Activated Switch for Persons with Motor and Speech Impairments: Isolated-Vowel Spotting Using Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210060.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-4|PAPER Fri-A-SS-2-4 — Spectro-Temporal Deep Features for Disordered Speech Assessment and Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spectro-Temporal Deep Features for Disordered Speech Assessment and Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210168.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-6|PAPER Fri-A-SS-2-6 — Adversarial Data Augmentation for Disordered Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adversarial Data Augmentation for Disordered Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210289.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-9|PAPER Fri-A-SS-2-9 — Bayesian Parametric and Architectural Domain Adaptation of LF-MMI Trained TDNNs for Elderly and Dysarthric Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bayesian Parametric and Architectural Domain Adaptation of LF-MMI Trained TDNNs for Elderly and Dysarthric Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218004.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-S&T-1-4|PAPER Tue-A-S&T-1-4 — ROXANNE Research Platform: Automate Criminal Investigations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ROXANNE Research Platform: Automate Criminal Investigations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210672.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-4-8|PAPER Wed-E-V-4-8 — Taiwan Min Nan (Taiwanese) Checked Tones Sound Change]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Taiwan Min Nan (Taiwanese) Checked Tones Sound Change</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210755.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-4|PAPER Wed-E-V-6-4 — AISHELL-3: A Multi-Speaker Mandarin TTS Corpus]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AISHELL-3: A Multi-Speaker Mandarin TTS Corpus</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210771.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-5|PAPER Wed-M-V-3-5 — EfficientSing: A Chinese Singing Voice Synthesis System Using Duration-Free Acoustic Model and HiFi-GAN Vocoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">EfficientSing: A Chinese Singing Voice Synthesis System Using Duration-Free Acoustic Model and HiFi-GAN Vocoder</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211859.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-12|PAPER Wed-E-V-5-12 — Learning Speech Structure to Improve Time-Frequency Masks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Speech Structure to Improve Time-Frequency Masks</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211232.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-4-4|PAPER Fri-M-V-4-4 — Improving Polyphone Disambiguation for Mandarin Chinese by Combining Mix-Pooling Strategy and Window-Based Attention]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Polyphone Disambiguation for Mandarin Chinese by Combining Mix-Pooling Strategy and Window-Based Attention</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210852.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-1|PAPER Tue-M-V-2-1 — TacoLPCNet: Fast and Stable TTS by Conditioning LPCNet on Mel Spectrogram Predictions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">TacoLPCNet: Fast and Stable TTS by Conditioning LPCNet on Mel Spectrogram Predictions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211031.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-8|PAPER Tue-A-SS-2-8 — COVID-19 Detection from Spectral Features on the DiCOVA Dataset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">COVID-19 Detection from Spectral Features on the DiCOVA Dataset</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210889.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-6|PAPER Tue-E-V-2-6 — Scene-Agnostic Multi-Microphone Speech Dereverberation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Scene-Agnostic Multi-Microphone Speech Dereverberation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210662.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-1|PAPER Thu-A-V-3-1 — Online Blind Audio Source Separation Using Recursive Expectation-Maximization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Online Blind Audio Source Separation Using Recursive Expectation-Maximization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211481.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-2-9|PAPER Fri-M-V-2-9 — Speech Perception and Loanword Adaptations: The Case of Copy-Vowel Epenthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Perception and Loanword Adaptations: The Case of Copy-Vowel Epenthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210656.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-5|PAPER Tue-A-V-1-5 — Acoustic Scene Classification Using Kervolution-Based SubSpectralNet]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Scene Classification Using Kervolution-Based SubSpectralNet</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210467.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-5|PAPER Tue-E-V-5-5 — Speaker Normalization Using Joint Variational Autoencoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Normalization Using Joint Variational Autoencoder</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210953.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-7|PAPER Wed-E-V-5-7 — Whisper Speech Enhancement Using Joint Variational Autoencoder for Improved Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Whisper Speech Enhancement Using Joint Variational Autoencoder for Improved Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218022.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-S&T-1-7|PAPER Thu-M-S&T-1-7 — ViSTAFAE: A Visual Speech-Training Aid with Feedback of Articulatory Efforts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ViSTAFAE: A Visual Speech-Training Aid with Feedback of Articulatory Efforts</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210654.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-6|PAPER Tue-A-V-4-6 — A Comparison of Supervised and Unsupervised Pre-Training of End-to-End Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparison of Supervised and Unsupervised Pre-Training of End-to-End Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211378.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-7|PAPER Thu-A-V-3-7 — AvaTr: One-Shot Speaker Extraction with Transformers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AvaTr: One-Shot Speaker Extraction with Transformers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210964.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-9|PAPER Tue-M-V-5-9 — Leveraging Phone Mask Training for Phonetic-Reduction-Robust E2E Uyghur Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Leveraging Phone Mask Training for Phonetic-Reduction-Robust E2E Uyghur Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211063.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-SS-1-5|PAPER Fri-M-SS-1-5 — The TNT Team System Descriptions of Cantonese and Mongolian for IARPA OpenASR20]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The TNT Team System Descriptions of Cantonese and Mongolian for IARPA OpenASR20</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210504.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-5|PAPER Thu-M-V-3-5 — End-to-End Speech Separation Using Orthogonal Representation in Complex and Real Time-Frequency Domain]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Speech Separation Using Orthogonal Representation in Complex and Real Time-Frequency Domain</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210374.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-1-4|PAPER Thu-M-SS-1-4 — An End-to-End Dialect Identification System with Transfer Learning from a Multilingual Automatic Speech Recognition Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An End-to-End Dialect Identification System with Transfer Learning from a Multilingual Automatic Speech Recognition Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210379.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-2|PAPER Wed-A-V-2-2 — A Light-Weight Contextual Spelling Correction Model for Customizing Transducer-Based Speech Recognition Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Light-Weight Contextual Spelling Correction Model for Customizing Transducer-Based Speech Recognition Systems</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210584.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-6|PAPER Fri-A-V-5-6 — Adaptive Text to Speech for Spontaneous Style]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adaptive Text to Speech for Spontaneous Style</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210433.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-4|PAPER Thu-M-V-3-4 — Crossfire Conditional Generative Adversarial Networks for Singing Voice Extraction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Crossfire Conditional Generative Adversarial Networks for Singing Voice Extraction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210482.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-4|PAPER Tue-E-V-1-4 — Variational Information Bottleneck Based Regularization for Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Variational Information Bottleneck Based Regularization for Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210965.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-SS-1-7|PAPER Wed-A-SS-1-7 — The Sogou System for Short-Duration Speaker Verification Challenge 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Sogou System for Short-Duration Speaker Verification Challenge 2021</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211481.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-2-9|PAPER Fri-M-V-2-9 — Speech Perception and Loanword Adaptations: The Case of Copy-Vowel Epenthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Perception and Loanword Adaptations: The Case of Copy-Vowel Epenthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210481.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-2|PAPER Tue-E-V-2-2 — TeCANet: Temporal-Contextual Attention Network for Environment-Aware Speech Dereverberation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">TeCANet: Temporal-Contextual Attention Network for Environment-Aware Speech Dereverberation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210570.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-4|PAPER Tue-E-V-2-4 — MIMO Self-Attentive RNN Beamformer for Multi-Speaker Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MIMO Self-Attentive RNN Beamformer for Multi-Speaker Speech Separation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210659.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-7|PAPER Wed-A-V-4-7 — MetricNet: Towards Improved Modeling For Non-Intrusive Speech Quality Assessment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MetricNet: Towards Improved Modeling For Non-Intrusive Speech Quality Assessment</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210430.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-11|PAPER Thu-M-V-3-11 — Generalized Spatio-Temporal RNN Beamformer for Target Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Generalized Spatio-Temporal RNN Beamformer for Target Speech Separation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210681.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-5|PAPER Fri-A-V-4-5 — Multi-Channel Speaker Verification for Single and Multi-Talker Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Channel Speaker Verification for Single and Multi-Talker Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210883.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-5-5|PAPER Thu-M-V-5-5 — Fine-Grained Prosody Modeling in Neural Speech Synthesis Using ToBI Representation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fine-Grained Prosody Modeling in Neural Speech Synthesis Using ToBI Representation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210135.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-SS-1-2|PAPER Wed-M-SS-1-2 — A Partitioned-Block Frequency-Domain Adaptive Kalman Filter for Stereophonic Acoustic Echo Cancellation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Partitioned-Block Frequency-Domain Adaptive Kalman Filter for Stereophonic Acoustic Echo Cancellation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210979.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-8|PAPER Fri-A-V-5-8 — Cross-Speaker Style Transfer with Prosody Bottleneck in Neural Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Speaker Style Transfer with Prosody Bottleneck in Neural Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210592.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-5|PAPER Tue-M-V-6-5 — Audio-Visual Information Fusion Using Cross-Modal Teacher-Student Learning for Voice Activity Detection in Realistic Environments]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio-Visual Information Fusion Using Cross-Modal Teacher-Student Learning for Voice Activity Detection in Realistic Environments</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211679.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-7|PAPER Tue-E-V-5-7 — On-the-Fly Aligned Data Augmentation for Sequence-to-Sequence ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On-the-Fly Aligned Data Augmentation for Sequence-to-Sequence ASR</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210775.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-11|PAPER Wed-A-V-3-11 — A Comparative Study on Neural Architectures and Training Methods for Japanese Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparative Study on Neural Architectures and Training Methods for Japanese Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211936.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-4-1|PAPER Fri-M-V-4-1 — Unsupervised Learning of Disentangled Speech Content and Style Representation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Learning of Disentangled Speech Content and Style Representation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210670.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-2|PAPER Wed-M-V-4-2 — QISTA-Net-Audio: Audio Super-Resolution via Non-Convex ℓ,,q,,-Norm Minimization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">QISTA-Net-Audio: Audio Super-Resolution via Non-Convex ℓ,,q,,-Norm Minimization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212028.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-11|PAPER Tue-A-V-1-11 — Variational Information Bottleneck for Effective Low-Resource Audio Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Variational Information Bottleneck for Effective Low-Resource Audio Classification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211996.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-9|PAPER Wed-M-V-3-9 — Speech2Video: Cross-Modal Distillation for Speech to Video Generation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech2Video: Cross-Modal Distillation for Speech to Video Generation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218028.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-S&T-1-6|PAPER Fri-A-S&T-1-6 — WittyKiddy: Multilingual Spoken Language Learning for Kids]]</div>|^<div class="cpauthorindexpersoncardpapertitle">WittyKiddy: Multilingual Spoken Language Learning for Kids</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211672.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-1-8|PAPER Wed-M-V-1-8 — Excitation Source Feature Based Dialect Identification in Ao — A Low Resource Language]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Excitation Source Feature Based Dialect Identification in Ao — A Low Resource Language</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211592.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-10|PAPER Fri-M-V-5-10 — Automatic Detection of Shouted Speech Segments in Indian News Debates]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Detection of Shouted Speech Segments in Indian News Debates</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210398.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-SS-1-5|PAPER Wed-A-SS-1-5 — Our Learned Lessons from Cross-Lingual Speaker Verification: The CRMI-DKU System Description for the Short-Duration Speaker Verification Challenge 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Our Learned Lessons from Cross-Lingual Speaker Verification: The CRMI-DKU System Description for the Short-Duration Speaker Verification Challenge 2021</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210747.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-2|PAPER Thu-A-V-4-2 — Investigation of Spatial-Acoustic Features for Overlapping Speech Detection in Multiparty Meetings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigation of Spatial-Acoustic Features for Overlapping Speech Detection in Multiparty Meetings</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210819.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-5|PAPER Fri-A-V-3-5 — Extremely Low Footprint End-to-End ASR System for Smart Device]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Extremely Low Footprint End-to-End ASR System for Smart Device</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210851.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-11|PAPER Thu-A-V-5-11 — Triple M: A Practical Text-to-Speech Synthesis System with Multi-Guidance Attention and Multi-Band Multi-Time LPCNet]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Triple M: A Practical Text-to-Speech Synthesis System with Multi-Guidance Attention and Multi-Band Multi-Time LPCNet</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211482.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-5|PAPER Wed-E-SS-1-5 — DCCRN+: Channel-Wise Subband DCCRN with SNR Estimation for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DCCRN+: Channel-Wise Subband DCCRN with SNR Estimation for Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211359.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-1-3|PAPER Fri-A-SS-1-3 — F-T-LSTM Based Complex Network for Joint Acoustic Echo Cancellation and Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">F-T-LSTM Based Complex Network for Joint Acoustic Echo Cancellation and Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210958.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-8|PAPER Tue-M-V-5-8 — Data Augmentation Methods for End-to-End Speech Recognition on Distant-Talk Scenarios]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Data Augmentation Methods for End-to-End Speech Recognition on Distant-Talk Scenarios</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212218.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-13|PAPER Tue-A-V-1-13 — Acoustic Event Detection with Classifier Chains]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Event Detection with Classifier Chains</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211775.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-9|PAPER Tue-E-V-3-9 — SUPERB: Speech Processing Universal PERformance Benchmark]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SUPERB: Speech Processing Universal PERformance Benchmark</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211537.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-10|PAPER Tue-E-V-4-10 — Rethinking End-to-End Evaluation of Decomposable Tasks: A Case Study on Spoken Language Understanding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Rethinking End-to-End Evaluation of Decomposable Tasks: A Case Study on Spoken Language Understanding</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211860.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-4|PAPER Wed-M-O-1-4 — SPGISpeech: 5,000 Hours of Transcribed Financial Audio for Fully Formatted End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SPGISpeech: 5,000 Hours of Transcribed Financial Audio for Fully Formatted End-to-End Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210986.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-3-1|PAPER Wed-M-O-3-1 — Auxiliary Loss Function for Target Speech Extraction and Recognition with Weak Supervision Based on Speaker Characteristics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Auxiliary Loss Function for Target Speech Extraction and Recognition with Weak Supervision Based on Speaker Characteristics</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211503.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-8|PAPER Wed-M-V-2-8 — Speech Representation Learning Combining Conformer CPC with Deep Cluster for the ZeroSpeech Challenge 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Representation Learning Combining Conformer CPC with Deep Cluster for the ZeroSpeech Challenge 2021</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211953.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-14|PAPER Wed-M-V-6-14 — Multi-Mode Transformer Transducer with Stochastic Future Context]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Mode Transformer Transducer with Stochastic Future Context</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211944.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-12|PAPER Wed-E-V-1-12 — Differentiable Allophone Graphs for Language-Universal Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Differentiable Allophone Graphs for Language-Universal Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210338.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-3|PAPER Thu-M-V-3-3 — Continuous Speech Separation Using Speaker Inventory for Long Recording]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Continuous Speech Separation Using Speaker Inventory for Long Recording</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210384.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-4|PAPER Thu-M-V-4-4 — Semi-Supervised Training with Pseudo-Labeling for End-To-End Neural Diarization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semi-Supervised Training with Pseudo-Labeling for End-To-End Neural Diarization</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210708.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-8|PAPER Thu-M-V-4-8 — Online Streaming End-to-End Neural Diarization Handling Overlapping Speech and Flexible Numbers of Speakers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Online Streaming End-to-End Neural Diarization Handling Overlapping Speech and Flexible Numbers of Speakers</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211723.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-1-11|PAPER Thu-A-V-1-11 — Leveraging Pre-Trained Language Model for Speech Sentiment Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Leveraging Pre-Trained Language Model for Speech Sentiment Analysis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211924.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-9|PAPER Thu-A-V-3-9 — Speaker Verification-Based Evaluation of Single-Channel Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Verification-Based Evaluation of Single-Channel Speech Separation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210750.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-3|PAPER Thu-A-V-4-3 — Target-Speaker Voice Activity Detection with Improved i-Vector Estimation for Unknown Number of Speaker]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Target-Speaker Voice Activity Detection with Improved i-Vector Estimation for Unknown Number of Speaker</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211965.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-5|PAPER Thu-A-V-6-5 — GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212155.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-2|PAPER Thu-A-SS-1-2 — Multi-Speaker ASR Combining Non-Autoregressive Conformer CTC and Conditional Speaker Chain]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Speaker ASR Combining Non-Autoregressive Conformer CTC and Conditional Speaker Chain</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211131.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-6|PAPER Thu-A-SS-1-6 — Toward Streaming ASR with Non-Autoregressive Insertion-Based Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Toward Streaming ASR with Non-Autoregressive Insertion-Based Model</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211171.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-7|PAPER Thu-A-SS-1-7 — Layer Pruning on Demand with Intermediate CTC]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Layer Pruning on Demand with Intermediate CTC</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211556.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-9|PAPER Thu-A-SS-1-9 — Streaming End-to-End ASR Based on Blockwise Non-Autoregressive Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Streaming End-to-End ASR Based on Blockwise Non-Autoregressive Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210897.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-6|PAPER Wed-M-V-3-6 — Cross-Lingual Speaker Adaptation Using Domain Adaptation and Speaker Consistency Loss for Text-To-Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Lingual Speaker Adaptation Using Domain Adaptation and Speaker Consistency Loss for Text-To-Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210853.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-6|PAPER Fri-A-V-1-6 — Lexical Density Analysis of Word Productions in Japanese English Using Acoustic Word Embeddings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Lexical Density Analysis of Word Productions in Japanese English Using Acoustic Word Embeddings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210874.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-7-4|PAPER Thu-M-V-7-4 — Timing Generating Networks: Neural Network Based Precise Turn-Taking Timing Prediction in Multiparty Conversation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Timing Generating Networks: Neural Network Based Precise Turn-Taking Timing Prediction in Multiparty Conversation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211477.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-2|PAPER Wed-A-V-3-2 — Domain-Aware Self-Attention for Multi-Domain Neural Machine Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Domain-Aware Self-Attention for Multi-Domain Neural Machine Translation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212250.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-11|PAPER Thu-A-V-3-11 — Robust Speaker Extraction Network Based on Iterative Refined Adaptation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Speaker Extraction Network Based on Iterative Refined Adaptation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210038.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-1|PAPER Fri-A-V-1-1 — Weakly-Supervised Word-Level Pronunciation Error Detection in Non-Native English Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Weakly-Supervised Word-Level Pronunciation Error Detection in Non-Native English Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211990.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-6-9|PAPER Tue-A-V-6-9 — Adversarially Learning Disentangled Speech Representations for Robust Multi-Factor Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adversarially Learning Disentangled Speech Representations for Robust Multi-Factor Voice Conversion</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212121.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-13|PAPER Thu-A-SS-1-13 — VAENAR-TTS: Variational Auto-Encoder Based Non-AutoRegressive Text-to-Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">VAENAR-TTS: Variational Auto-Encoder Based Non-AutoRegressive Text-to-Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211843.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-8|PAPER Tue-E-V-5-8 — Zero-Shot Cross-Lingual Phonetic Recognition with External Language Embedding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Zero-Shot Cross-Lingual Phonetic Recognition with External Language Embedding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211280.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-1-5|PAPER Wed-M-V-1-5 — Exploring wav2vec 2.0 on Speaker Verification and Language Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploring wav2vec 2.0 on Speaker Verification and Language Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210411.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-3|PAPER Wed-A-V-1-3 — Speech Emotion Recognition Based on Attention Weight Correction Using Word-Level Confidence Measure]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Emotion Recognition Based on Attention Weight Correction Using Word-Level Confidence Measure</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210492.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-8|PAPER Tue-E-V-6-8 — StarGAN-VC+ASR: StarGAN-Based Non-Parallel Voice Conversion Regularized by Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">StarGAN-VC+ASR: StarGAN-Based Non-Parallel Voice Conversion Regularized by Automatic Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210174.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-5|PAPER Tue-M-V-3-5 — Comparison of Remote Experiments Using Crowdsourcing and Laboratory Experiments on Speech Intelligibility]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparison of Remote Experiments Using Crowdsourcing and Laboratory Experiments on Speech Intelligibility</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210124.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-2|PAPER Wed-A-V-4-2 — PILOT: Introducing Transformers for Probabilistic Sound Event Localization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PILOT: Introducing Transformers for Probabilistic Sound Event Localization</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211369.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-5|PAPER Thu-A-V-3-5 — Few-Shot Learning of New Sound Classes for Target Sound Extraction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Few-Shot Learning of New Sound Classes for Target Sound Extraction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210384.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-4|PAPER Thu-M-V-4-4 — Semi-Supervised Training with Pseudo-Labeling for End-To-End Neural Diarization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semi-Supervised Training with Pseudo-Labeling for End-To-End Neural Diarization</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210708.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-8|PAPER Thu-M-V-4-8 — Online Streaming End-to-End Neural Diarization Handling Overlapping Speech and Flexible Numbers of Speakers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Online Streaming End-to-End Neural Diarization Handling Overlapping Speech and Flexible Numbers of Speakers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210731.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-6|PAPER Tue-M-V-6-6 — Enrollment-Less Training for Personalized Voice Activity Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Enrollment-Less Training for Personalized Voice Activity Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211607.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-5-5|PAPER Tue-A-V-5-5 — Zero-Shot Joint Modeling of Multiple Spoken-Text-Style Conversion Tasks Using Switching Tokens]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Zero-Shot Joint Modeling of Multiple Spoken-Text-Style Conversion Tasks Using Switching Tokens</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-13|PAPER Wed-E-V-3-13 — Unified Autoregressive Modeling for Joint End-to-End Multi-Talker Overlapped Speech Recognition and Speaker Attribute Estimation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unified Autoregressive Modeling for Joint End-to-End Multi-Talker Overlapped Speech Recognition and Speaker Attribute Estimation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211992.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-6|PAPER Fri-M-V-3-6 — Cross-Modal Transformer-Based Neural Correction Models for Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Modal Transformer-Based Neural Correction Models for Automatic Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211981.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-11|PAPER Fri-A-V-1-11 — End-to-End Rich Transcription-Style Automatic Speech Recognition with Semi-Supervised Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Rich Transcription-Style Automatic Speech Recognition with Semi-Supervised Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211160.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-11|PAPER Wed-A-V-4-11 — Far-Field Speaker Localization and Adaptive GLMB Tracking]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Far-Field Speaker Localization and Adaptive GLMB Tracking</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210060.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-4|PAPER Fri-A-SS-2-4 — Spectro-Temporal Deep Features for Disordered Speech Assessment and Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spectro-Temporal Deep Features for Disordered Speech Assessment and Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210289.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-9|PAPER Fri-A-SS-2-9 — Bayesian Parametric and Architectural Domain Adaptation of LF-MMI Trained TDNNs for Elderly and Dysarthric Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bayesian Parametric and Architectural Domain Adaptation of LF-MMI Trained TDNNs for Elderly and Dysarthric Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210056.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-1-3|PAPER Thu-M-SS-1-3 — Dynamic Multi-Scale Convolution for Dialect Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dynamic Multi-Scale Convolution for Dialect Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212062.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-1|PAPER Wed-M-V-2-1 — Low Resource ASR: The Surprising Effectiveness of High Resource Transliteration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Low Resource ASR: The Surprising Effectiveness of High Resource Transliteration</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211339.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-7|PAPER Wed-E-V-1-7 — MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210074.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-1|PAPER Tue-A-SS-2-1 — DiCOVA Challenge: Dataset, Task, and Baseline System for COVID-19 Diagnosis Using Acoustics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DiCOVA Challenge: Dataset, Task, and Baseline System for COVID-19 Diagnosis Using Acoustics</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210694.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-5|PAPER Tue-E-V-2-5 — Personalized PercepNet: Real-Time, Low-Complexity Target Voice Separation and Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Personalized PercepNet: Real-Time, Low-Complexity Target Voice Separation and Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212111.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-2|PAPER Thu-M-V-1-2 — Analyzing Short Term Dynamic Speech Features for Understanding Behavioral Traits of Children with Autism Spectrum Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analyzing Short Term Dynamic Speech Features for Understanding Behavioral Traits of Children with Autism Spectrum Disorder</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210666.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-1-6|PAPER Thu-A-V-1-6 — Acted vs. Improvised: Domain Adaptation for Elicitation Approaches in Audio-Visual Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acted vs. Improvised: Domain Adaptation for Elicitation Approaches in Audio-Visual Emotion Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211823.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-2-1|PAPER Fri-M-V-2-1 — Leveraging Real-Time MRI for Illuminating Linguistic Velum Action]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Leveraging Real-Time MRI for Illuminating Linguistic Velum Action</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212006.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-15|PAPER Fri-A-SS-2-15 — Analysis and Tuning of a Voice Assistant System for Dysfluent Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis and Tuning of a Voice Assistant System for Dysfluent Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210074.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-1|PAPER Tue-A-SS-2-1 — DiCOVA Challenge: Dataset, Task, and Baseline System for COVID-19 Diagnosis Using Acoustics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DiCOVA Challenge: Dataset, Task, and Baseline System for COVID-19 Diagnosis Using Acoustics</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210935.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-2-3|PAPER Thu-M-SS-2-3 — Robust Command Recognition for Lithuanian Air Traffic Control Tower Utterances]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Command Recognition for Lithuanian Air Traffic Control Tower Utterances</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211923.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-5-8|PAPER Tue-A-V-5-8 — Multimodal Speech Summarization Through Semantic Concept Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multimodal Speech Summarization Through Semantic Concept Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210757.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-1|PAPER Tue-A-V-2-1 — Segment and Tone Production in Continuous Speech of Hearing and Hearing-Impaired Children]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Segment and Tone Production in Continuous Speech of Hearing and Hearing-Impaired Children</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210516.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-6|PAPER Thu-M-V-4-6 — Scenario-Dependent Speaker Diarization for DIHARD-III Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Scenario-Dependent Speaker Diarization for DIHARD-III Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211775.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-9|PAPER Tue-E-V-3-9 — SUPERB: Speech Processing Universal PERformance Benchmark]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SUPERB: Speech Processing Universal PERformance Benchmark</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212024.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-12|PAPER Thu-A-SS-2-12 — Modular Multi-Modal Attention Network for Alzheimer’s Disease Detection Using Patient Audio and Language Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modular Multi-Modal Attention Network for Alzheimer’s Disease Detection Using Patient Audio and Language Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211242.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-1|PAPER Tue-M-V-5-1 — End-to-End Spelling Correction Conditioned on Acoustic Feature for Code-Switching Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Spelling Correction Conditioned on Acoustic Feature for Code-Switching Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211367.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-1|PAPER Fri-M-V-3-1 — FSR: Accelerating the Inference Process of Transducer-Based Models by Applying Fast-Skip Regularization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">FSR: Accelerating the Inference Process of Transducer-Based Models by Applying Fast-Skip Regularization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211162.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-1|PAPER Tue-E-V-5-1 — Semantic Data Augmentation for End-to-End Mandarin Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semantic Data Augmentation for End-to-End Mandarin Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211965.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-5|PAPER Thu-A-V-6-5 — GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210374.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-1-4|PAPER Thu-M-SS-1-4 — An End-to-End Dialect Identification System with Transfer Learning from a Multilingual Automatic Speech Recognition Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An End-to-End Dialect Identification System with Transfer Learning from a Multilingual Automatic Speech Recognition Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211482.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-5|PAPER Wed-E-SS-1-5 — DCCRN+: Channel-Wise Subband DCCRN with SNR Estimation for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DCCRN+: Channel-Wise Subband DCCRN with SNR Estimation for Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211397.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-4|PAPER Thu-A-V-6-4 — AISHELL-4: An Open Source Dataset for Speech Enhancement, Separation, Recognition and Speaker Diarization in Conference Scenario]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AISHELL-4: An Open Source Dataset for Speech Enhancement, Separation, Recognition and Speaker Diarization in Conference Scenario</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211359.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-1-3|PAPER Fri-A-SS-1-3 — F-T-LSTM Based Complex Network for Joint Acoustic Echo Cancellation and Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">F-T-LSTM Based Complex Network for Joint Acoustic Echo Cancellation and Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210280.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-3|PAPER Wed-E-V-3-3 — Stochastic Attention Head Removal: A Simple and Effective Method for Improving Transformer Based ASR Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Stochastic Attention Head Removal: A Simple and Effective Method for Improving Transformer Based ASR Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210438.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-3|PAPER Fri-M-V-5-3 — RW-Resnet: A Novel Speech Anti-Spoofing Model Using Raw Waveform]]</div>|^<div class="cpauthorindexpersoncardpapertitle">RW-Resnet: A Novel Speech Anti-Spoofing Model Using Raw Waveform</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210142.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-1|PAPER Thu-M-V-3-1 — Ultra Fast Speech Separation Model with Teacher Student Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ultra Fast Speech Separation Model with Teacher Student Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210921.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-9|PAPER Thu-M-V-3-9 — Investigation of Practical Aspects of Single Channel Speech Separation for ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigation of Practical Aspects of Single Channel Speech Separation for ASR</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211949.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-10|PAPER Thu-A-V-2-10 — Improving Multilingual Transformer Transducer Models by Reducing Language Confusions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Multilingual Transformer Transducer Models by Reducing Language Confusions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210478.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-8|PAPER Wed-A-V-3-8 — SpeechMoE: Scaling to Large Acoustic Models with Dynamic Routing Mixture of Experts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SpeechMoE: Scaling to Large Acoustic Models with Dynamic Routing Mixture of Experts</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211042.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-6|PAPER Wed-E-SS-1-6 — DBNet: A Dual-Branch Network Architecture Processing on Spectrum and Waveform for Single-Channel Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DBNet: A Dual-Branch Network Architecture Processing on Spectrum and Waveform for Single-Channel Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211340.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-7|PAPER Wed-M-V-2-7 — Unsupervised Neural-Based Graph Clustering for Variable-Length Speech Representation Discovery of Zero-Resource Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Neural-Based Graph Clustering for Variable-Length Speech Representation Discovery of Zero-Resource Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210763.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-7|PAPER Thu-M-V-3-7 — Stabilizing Label Assignment for Speech Separation by Self-Supervised Pre-Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Stabilizing Label Assignment for Speech Separation by Self-Supervised Pre-Training</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211050.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-9|PAPER Wed-A-V-4-9 — Assessment of von Mises-Bernoulli Deep Neural Network in Sound Source Localization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Assessment of von Mises-Bernoulli Deep Neural Network in Sound Source Localization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210630.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-5|PAPER Fri-M-V-5-5 — Coughing-Based Recognition of Covid-19 with Spatial Attentive ConvLSTM Recurrent Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Coughing-Based Recognition of Covid-19 with Spatial Attentive ConvLSTM Recurrent Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210206.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-4|PAPER Wed-M-V-6-4 — An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211029.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-7|PAPER Tue-E-V-2-7 — Manifold-Aware Deep Clustering: Maximizing Angles Between Embedding Vectors Based on Regular Simplex]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Manifold-Aware Deep Clustering: Maximizing Angles Between Embedding Vectors Based on Regular Simplex</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211775.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-9|PAPER Tue-E-V-3-9 — SUPERB: Speech Processing Universal PERformance Benchmark]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SUPERB: Speech Processing Universal PERformance Benchmark</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211965.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-5|PAPER Thu-A-V-6-5 — GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212070.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-11|PAPER Tue-E-V-3-11 — FRILL: A Non-Semantic Speech Embedding for Mobile Devices]]</div>|^<div class="cpauthorindexpersoncardpapertitle">FRILL: A Non-Semantic Speech Embedding for Mobile Devices</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211305.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-5|PAPER Thu-M-V-1-5 — Detection of Consonant Errors in Disordered Speech Based on Consonant-Vowel Segment Embedding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detection of Consonant Errors in Disordered Speech Based on Consonant-Vowel Segment Embedding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211537.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-10|PAPER Tue-E-V-4-10 — Rethinking End-to-End Evaluation of Decomposable Tasks: A Case Study on Spoken Language Understanding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Rethinking End-to-End Evaluation of Decomposable Tasks: A Case Study on Spoken Language Understanding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211537.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-10|PAPER Tue-E-V-4-10 — Rethinking End-to-End Evaluation of Decomposable Tasks: A Case Study on Spoken Language Understanding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Rethinking End-to-End Evaluation of Decomposable Tasks: A Case Study on Spoken Language Understanding</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211944.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-12|PAPER Wed-E-V-1-12 — Differentiable Allophone Graphs for Language-Universal Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Differentiable Allophone Graphs for Language-Universal Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211428.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-4|PAPER Fri-M-V-6-4 — Streaming Transformer for Hardware Efficient Voice Trigger Detection and False Trigger Mitigation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Streaming Transformer for Hardware Efficient Voice Trigger Detection and False Trigger Mitigation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210793.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-2|PAPER Tue-E-V-4-2 — FANS: Fusing ASR and NLU for On-Device SLU]]</div>|^<div class="cpauthorindexpersoncardpapertitle">FANS: Fusing ASR and NLU for On-Device SLU</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210375.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-6|PAPER Wed-A-V-4-6 — Reliable Intensity Vector Selection for Multi-Source Direction-of-Arrival Estimation Using a Single Acoustic Vector Sensor]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reliable Intensity Vector Selection for Multi-Source Direction-of-Arrival Estimation Using a Single Acoustic Vector Sensor</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211684.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-4-11|PAPER Wed-E-V-4-11 — Parsing Speech for Grouping and Prominence, and the Typology of Rhythm]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Parsing Speech for Grouping and Prominence, and the Typology of Rhythm</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211079.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-7-6|PAPER Tue-M-V-7-6 — Voicing Contrasts in the Singleton Stops of Palestinian Arabic: Production and Perception]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voicing Contrasts in the Singleton Stops of Palestinian Arabic: Production and Perception</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211587.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-12|PAPER Tue-M-SS-1-12 — Analysis by Synthesis: Using an Expressive TTS Model as Feature Extractor for Paralinguistic Speech Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis by Synthesis: Using an Expressive TTS Model as Feature Extractor for Paralinguistic Speech Classification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211123.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-5-8|PAPER Thu-M-V-5-8 — A Prototypical Network Approach for Evaluating Generated Emotional Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Prototypical Network Approach for Evaluating Generated Emotional Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211538.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-2-2|PAPER Fri-M-O-2-2 — Exploring Emotional Prototypes in a High Dimensional TTS Latent Space]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploring Emotional Prototypes in a High Dimensional TTS Latent Space</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211862.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-4|PAPER Thu-M-V-1-4 — Phonetic Complexity, Speech Accuracy and Intelligibility Assessment of Italian Dysarthric Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonetic Complexity, Speech Accuracy and Intelligibility Assessment of Italian Dysarthric Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210975.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-O-1-2|PAPER Thu-A-O-1-2 — Comparison of the Finite Element Method, the Multimodal Method and the Transmission-Line Model for the Computation of Vocal Tract Transfer Functions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparison of the Finite Element Method, the Multimodal Method and the Transmission-Line Model for the Computation of Vocal Tract Transfer Functions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210286.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-2|PAPER Wed-E-V-6-2 — Detection and Analysis of Attention Errors in Sequence-to-Sequence Text-to-Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detection and Analysis of Attention Errors in Sequence-to-Sequence Text-to-Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211583.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-2-3|PAPER Fri-M-O-2-3 — Ctrl-P: Temporal Control of Prosodic Variation for Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ctrl-P: Temporal Control of Prosodic Variation for Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211610.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-2-4|PAPER Fri-M-O-2-4 — ADEPT: A Dataset for Evaluating Prosody Transfer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ADEPT: A Dataset for Evaluating Prosody Transfer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210256.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-3-1|PAPER Tue-M-O-3-1 — A Benchmark of Dynamical Variational Autoencoders Applied to Speech Spectrogram Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Benchmark of Dynamical Variational Autoencoders Applied to Speech Spectrogram Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212249.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-2|PAPER Wed-A-V-1-2 — A Psychology-Driven Computational Analysis of Political Interviews]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Psychology-Driven Computational Analysis of Political Interviews</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210528.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-7|PAPER Thu-A-V-5-7 — A Learned Conditional Prior for the VAE Acoustic Space of a TTS System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Learned Conditional Prior for the VAE Acoustic Space of a TTS System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218016.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-S&T-1-2|PAPER Thu-M-S&T-1-2 — Articulatory Data Recorder: A Framework for Real-Time Articulatory Data Recording]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Articulatory Data Recorder: A Framework for Real-Time Articulatory Data Recording</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210542.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-7|PAPER Wed-M-V-6-7 — Improving RNN-T ASR Accuracy Using Context Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving RNN-T ASR Accuracy Using Context Audio</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210549.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-2-5|PAPER Tue-M-O-2-5 — EasyCall Corpus: A Dysarthric Speech Dataset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">EasyCall Corpus: A Dysarthric Speech Dataset</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211574.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-3-7|PAPER Tue-A-V-3-7 — Clarity-2021 Challenges: Machine Learning Challenges for Advancing Hearing Aid Processing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Clarity-2021 Challenges: Machine Learning Challenges for Advancing Hearing Aid Processing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211771.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-3-3|PAPER Tue-E-O-3-3 — Speaking Corona? Human and Machine Recognition of COVID-19 from Voice]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaking Corona? Human and Machine Recognition of COVID-19 from Voice</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210383.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-3|PAPER Fri-A-V-3-3 — Broadcasted Residual Learning for Efficient Keyword Spotting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Broadcasted Residual Learning for Efficient Keyword Spotting</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210248.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-9|PAPER Fri-A-V-3-9 — PQK: Model Compression via Pruning, Quantization, and Knowledge Distillation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PQK: Model Compression via Pruning, Quantization, and Knowledge Distillation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210556.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-5|PAPER Wed-M-O-1-5 — //LeBenchmark//: A Reproducible Framework for Assessing Self-Supervised Representation Learning from Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">//LeBenchmark//: A Reproducible Framework for Assessing Self-Supervised Representation Learning from Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211539.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-O-1-3|PAPER Thu-A-O-1-3 — Effects of Time Pressure and Spontaneity on Phonotactic Innovations in German Dialogues]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effects of Time Pressure and Spontaneity on Phonotactic Innovations in German Dialogues</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211094.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-11|PAPER Thu-A-V-6-11 — Towards Automatic Speech to Sign Language Generation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Automatic Speech to Sign Language Generation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211932.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-10|PAPER Wed-E-V-2-10 — Analysis of Contextual Voice Changes in Remote Meetings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis of Contextual Voice Changes in Remote Meetings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211454.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-1|PAPER Tue-A-V-4-1 — Improving Streaming Transformer Based ASR Under a Framework of Self-Supervised Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Streaming Transformer Based ASR Under a Framework of Self-Supervised Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210415.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-11|PAPER Fri-A-V-3-11 — Efficient Conformer with Prob-Sparse Attention Mechanism for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Efficient Conformer with Prob-Sparse Attention Mechanism for End-to-End Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210747.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-2|PAPER Thu-A-V-4-2 — Investigation of Spatial-Acoustic Features for Overlapping Speech Detection in Multiparty Meetings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigation of Spatial-Acoustic Features for Overlapping Speech Detection in Multiparty Meetings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212044.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-10|PAPER Fri-A-V-6-10 — Bi-Directional Joint Neural Networks for Intent Classification and Slot Filling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bi-Directional Joint Neural Networks for Intent Classification and Slot Filling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218028.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-S&T-1-6|PAPER Fri-A-S&T-1-6 — WittyKiddy: Multilingual Spoken Language Learning for Kids]]</div>|^<div class="cpauthorindexpersoncardpapertitle">WittyKiddy: Multilingual Spoken Language Learning for Kids</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211664.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-2|PAPER Wed-M-V-2-2 — Unsupervised Acoustic Unit Discovery by Leveraging a Language-Independent Subword Discriminative Feature Representation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Acoustic Unit Discovery by Leveraging a Language-Independent Subword Discriminative Feature Representation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210944.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-3-3|PAPER Tue-A-V-3-3 — Microphone Array Generalization for Multichannel Narrowband Deep Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Microphone Array Generalization for Multichannel Narrowband Deep Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211446.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-11|PAPER Fri-A-V-5-11 — Synthesis of Expressive Speaking Styles with Limited Training Data in a Multi-Speaker, Prosody-Controllable Sequence-to-Sequence Architecture]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Synthesis of Expressive Speaking Styles with Limited Training Data in a Multi-Speaker, Prosody-Controllable Sequence-to-Sequence Architecture</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211027.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-1-4|PAPER Thu-M-O-1-4 — Conditional Independence for Pretext Task Selection in Self-Supervised Speech Representation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Conditional Independence for Pretext Task Selection in Self-Supervised Speech Representation Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212111.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-2|PAPER Thu-M-V-1-2 — Analyzing Short Term Dynamic Speech Features for Understanding Behavioral Traits of Children with Autism Spectrum Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analyzing Short Term Dynamic Speech Features for Understanding Behavioral Traits of Children with Autism Spectrum Disorder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211220.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-1|PAPER Thu-A-SS-2-1 — Detecting Cognitive Decline Using Speech Only: The ADReSSo Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detecting Cognitive Decline Using Speech Only: The ADReSSo Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212079.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-12|PAPER Tue-A-V-1-12 — Improving Weakly Supervised Sound Event Detection with Self-Supervised Auxiliary Tasks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Weakly Supervised Sound Event Detection with Self-Supervised Auxiliary Tasks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210223.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-2-3|PAPER Tue-M-O-2-3 — Distortion of Voiced Obstruents for Differential Diagnosis Between Parkinson’s Disease and Multiple System Atrophy]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Distortion of Voiced Obstruents for Differential Diagnosis Between Parkinson’s Disease and Multiple System Atrophy</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210556.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-5|PAPER Wed-M-O-1-5 — //LeBenchmark//: A Reproducible Framework for Assessing Self-Supervised Representation Learning from Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">//LeBenchmark//: A Reproducible Framework for Assessing Self-Supervised Representation Learning from Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210556.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-5|PAPER Wed-M-O-1-5 — //LeBenchmark//: A Reproducible Framework for Assessing Self-Supervised Representation Learning from Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">//LeBenchmark//: A Reproducible Framework for Assessing Self-Supervised Representation Learning from Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212111.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-2|PAPER Thu-M-V-1-2 — Analyzing Short Term Dynamic Speech Features for Understanding Behavioral Traits of Children with Autism Spectrum Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analyzing Short Term Dynamic Speech Features for Understanding Behavioral Traits of Children with Autism Spectrum Disorder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211860.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-4|PAPER Wed-M-O-1-4 — SPGISpeech: 5,000 Hours of Transcribed Financial Audio for Fully Formatted End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SPGISpeech: 5,000 Hours of Transcribed Financial Audio for Fully Formatted End-to-End Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211850.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-10|PAPER Thu-A-SS-2-10 — Automatic Detection and Assessment of Alzheimer Disease Using Speech and Language Technologies in Low-Resource Scenarios]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Detection and Assessment of Alzheimer Disease Using Speech and Language Technologies in Low-Resource Scenarios</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211759.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-10|PAPER Fri-M-V-7-10 — Representation Learning to Classify and Detect Adversarial Attacks Against Speaker and Speech Recognition Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Representation Learning to Classify and Detect Adversarial Attacks Against Speaker and Speech Recognition Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211449.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-8|PAPER Thu-A-SS-1-8 — Real-Time End-to-End Monaural Multi-Speaker Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Real-Time End-to-End Monaural Multi-Speaker Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212021.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-9|PAPER Fri-A-V-4-9 — Automatic Error Correction for Speaker Embedding Learning with Noisy Labels]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Error Correction for Speaker Embedding Learning with Noisy Labels</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212161.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-10|PAPER Fri-A-V-4-10 — An Integrated Framework for Two-Pass Personalized Voice Trigger]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Integrated Framework for Two-Pass Personalized Voice Trigger</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211104.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-6|PAPER Tue-E-V-5-6 — The TAL System for the INTERSPEECH2021 Shared Task on Automatic Speech Recognition for Non-Native Childrens Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The TAL System for the INTERSPEECH2021 Shared Task on Automatic Speech Recognition for Non-Native Childrens Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212128.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-11|PAPER Thu-M-V-2-11 — Audio-Visual Multi-Talker Speech Recognition in a Cocktail Party]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio-Visual Multi-Talker Speech Recognition in a Cocktail Party</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210807.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-1-5|PAPER Thu-M-SS-1-5 — Language Recognition Based on Unsupervised Pretrained Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Language Recognition Based on Unsupervised Pretrained Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211454.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-1|PAPER Tue-A-V-4-1 — Improving Streaming Transformer Based ASR Under a Framework of Self-Supervised Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Streaming Transformer Based ASR Under a Framework of Self-Supervised Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211186.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-1-4|PAPER Wed-M-V-1-4 — Improving Accent Identification and Accented Speech Recognition Under a Framework of Self-Supervised Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Accent Identification and Accented Speech Recognition Under a Framework of Self-Supervised Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210777.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-5|PAPER Fri-A-V-1-5 — Explore wav2vec 2.0 for Mispronunciation Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Explore wav2vec 2.0 for Mispronunciation Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210285.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-8|PAPER Fri-A-SS-2-8 — Learning Explicit Prosody Models and Deep Speaker Embeddings for Atypical Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Explicit Prosody Models and Deep Speaker Embeddings for Atypical Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211724.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-2-12|PAPER Fri-M-V-2-12 — Context and Co-Text Influence on the Accuracy Production of Italian L2 Non-Native Sounds]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Context and Co-Text Influence on the Accuracy Production of Italian L2 Non-Native Sounds</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212041.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-6|PAPER Thu-A-V-6-6 — Look Who’s Talking: Active Speaker Detection in the Wild]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Look Who’s Talking: Active Speaker Detection in the Wild</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218008.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-S&T-1-1|PAPER Wed-A-S&T-1-1 — Multi-Speaker Emotional Text-to-Speech Synthesizer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Speaker Emotional Text-to-Speech Synthesizer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211204.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-7|PAPER Wed-M-V-4-7 — Coded Speech Enhancement Using Neural Network-Based Vector-Quantized Residual Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Coded Speech Enhancement Using Neural Network-Based Vector-Quantized Residual Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211308.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-8|PAPER Tue-A-V-1-8 — Shallow Convolution-Augmented Transformer with Differentiable Neural Computer for Low-Complexity Classification of Variable-Length Acoustic Scene]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Shallow Convolution-Augmented Transformer with Differentiable Neural Computer for Low-Complexity Classification of Variable-Length Acoustic Scene</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211700.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-2-6|PAPER Tue-A-O-2-6 — Models of Reaction Times in Auditory Lexical Decision: RTonset versus RToffset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Models of Reaction Times in Auditory Lexical Decision: RTonset versus RToffset</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210636.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-6|PAPER Fri-M-V-5-6 — Knowledge Distillation for Singing Voice Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Knowledge Distillation for Singing Voice Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212044.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-10|PAPER Fri-A-V-6-10 — Bi-Directional Joint Neural Networks for Intent Classification and Slot Filling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bi-Directional Joint Neural Networks for Intent Classification and Slot Filling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212041.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-6|PAPER Thu-A-V-6-6 — Look Who’s Talking: Active Speaker Detection in the Wild]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Look Who’s Talking: Active Speaker Detection in the Wild</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211888.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-10|PAPER Tue-E-V-5-10 — Best of Both Worlds: Robust Accented Speech Recognition with Adversarial Transfer Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Best of Both Worlds: Robust Accented Speech Recognition with Adversarial Transfer Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211849.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-7-8|PAPER Thu-M-V-7-8 — Adapting Long Context NLM for ASR Rescoring in Conversational Agents]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adapting Long Context NLM for ASR Rescoring in Conversational Agents</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210717.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-2|PAPER Tue-A-V-4-2 — wav2vec-C: A Self-Supervised Model for Speech Representation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">wav2vec-C: A Self-Supervised Model for Speech Representation Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210528.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-7|PAPER Thu-A-V-5-7 — A Learned Conditional Prior for the VAE Acoustic Space of a TTS System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Learned Conditional Prior for the VAE Acoustic Space of a TTS System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211683.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-1-6|PAPER Thu-M-O-1-6 — Comparing CTC and LFMMI for Out-of-Domain Adaptation of wav2vec 2.0 Acoustic Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparing CTC and LFMMI for Out-of-Domain Adaptation of wav2vec 2.0 Acoustic Model</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211778.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-SS-1-2|PAPER Fri-M-SS-1-2 — Multitask Adaptation with Lattice-Free MMI for Multi-Genre Speech Recognition of Low Resource Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multitask Adaptation with Lattice-Free MMI for Multi-Genre Speech Recognition of Low Resource Languages</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211058.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-O-1-3|PAPER Fri-A-O-1-3 — Speech Activity Detection Based on Multilingual Speech Recognition System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Activity Detection Based on Multilingual Speech Recognition System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210074.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-1|PAPER Tue-A-SS-2-1 — DiCOVA Challenge: Dataset, Task, and Baseline System for COVID-19 Diagnosis Using Acoustics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DiCOVA Challenge: Dataset, Task, and Baseline System for COVID-19 Diagnosis Using Acoustics</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212197.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-11|PAPER Tue-A-SS-2-11 — Investigating Feature Selection and Explainability for COVID-19 Diagnostics from Cough Sounds]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Feature Selection and Explainability for COVID-19 Diagnostics from Cough Sounds</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210728.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-1|PAPER Thu-A-V-4-1 — LEAP Submission for the Third DIHARD Diarization Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">LEAP Submission for the Third DIHARD Diarization Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211849.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-7-8|PAPER Thu-M-V-7-8 — Adapting Long Context NLM for ASR Rescoring in Conversational Agents]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adapting Long Context NLM for ASR Rescoring in Conversational Agents</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211339.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-7|PAPER Wed-E-V-1-7 — MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210074.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-1|PAPER Tue-A-SS-2-1 — DiCOVA Challenge: Dataset, Task, and Baseline System for COVID-19 Diagnosis Using Acoustics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DiCOVA Challenge: Dataset, Task, and Baseline System for COVID-19 Diagnosis Using Acoustics</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212197.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-11|PAPER Tue-A-SS-2-11 — Investigating Feature Selection and Explainability for COVID-19 Diagnostics from Cough Sounds]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Feature Selection and Explainability for COVID-19 Diagnostics from Cough Sounds</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211111.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-SS-1-6|PAPER Wed-M-SS-1-6 — SRIB-LEAP Submission to Far-Field Multi-Channel Speech Enhancement Challenge for Video Conferencing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SRIB-LEAP Submission to Far-Field Multi-Channel Speech Enhancement Challenge for Video Conferencing</div> |
|^{{$:/causal/NO-PDF Marker}}|^<div class="cpauthorindexpersoncardpapercode">[[Wed-Survey|PAPER Wed-Survey — Uncovering the Acoustic Cues of COVID-19 Infection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Uncovering the Acoustic Cues of COVID-19 Infection</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210728.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-1|PAPER Thu-A-V-4-1 — LEAP Submission for the Third DIHARD Diarization Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">LEAP Submission for the Third DIHARD Diarization Challenge</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211208.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-6|PAPER Thu-A-V-4-6 — The Third DIHARD Diarization Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Third DIHARD Diarization Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211609.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-1|PAPER Wed-E-SS-1-1 — INTERSPEECH 2021 Deep Noise Suppression Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">INTERSPEECH 2021 Deep Noise Suppression Challenge</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211870.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-1-1|PAPER Fri-A-SS-1-1 — INTERSPEECH 2021 Acoustic Echo Cancellation Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">INTERSPEECH 2021 Acoustic Echo Cancellation Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210441.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-7|PAPER Tue-E-V-6-7 — Normalization Driven Zero-Shot Multi-Speaker Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Normalization Driven Zero-Shot Multi-Speaker Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211536.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-4-1|PAPER Tue-M-V-4-1 — User-Initiated Repetition-Based Recovery in Multi-Utterance Dialogue Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">User-Initiated Repetition-Based Recovery in Multi-Utterance Dialogue Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211770.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-10|PAPER Thu-A-SS-1-10 — TalkNet: Non-Autoregressive Depth-Wise Separable Convolutional Model for Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">TalkNet: Non-Autoregressive Depth-Wise Separable Convolutional Model for Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211360.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-9|PAPER Thu-M-V-2-9 — LiRA: Learning Visual Speech Representations from Audio Through Self-Supervision]]</div>|^<div class="cpauthorindexpersoncardpapertitle">LiRA: Learning Visual Speech Representations from Audio Through Self-Supervision</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218010.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-S&T-1-3|PAPER Wed-A-S&T-1-3 — Autonomous Robot for Measuring Room Impulse Responses]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Autonomous Robot for Measuring Room Impulse Responses</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211464.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-O-2-3|PAPER Fri-A-O-2-3 — Semantic Sentence Similarity: Size does not Always Matter]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semantic Sentence Similarity: Size does not Always Matter</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211679.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-7|PAPER Tue-E-V-5-7 — On-the-Fly Aligned Data Augmentation for Sequence-to-Sequence ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On-the-Fly Aligned Data Augmentation for Sequence-to-Sequence ASR</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211581.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-2-1|PAPER Tue-M-O-2-1 — Acoustic Indicators of Speech Motor Coordination in Adults With and Without Traumatic Brain Injury]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Indicators of Speech Motor Coordination in Adults With and Without Traumatic Brain Injury</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211315.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-1-4|PAPER Fri-M-O-1-4 — Learning to Rank Microphones for Distant Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning to Rank Microphones for Distant Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210273.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-5|PAPER Tue-M-SS-1-5 — Visual Transformers for Primates Classification and Covid Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Visual Transformers for Primates Classification and Covid Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211274.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-7|PAPER Tue-M-SS-1-7 — A Deep and Recurrent Architecture for Primate Vocalization Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Deep and Recurrent Architecture for Primate Vocalization Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218015.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-S&T-1-1|PAPER Thu-M-S&T-1-1 — MoM: Minutes of Meeting Bot]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MoM: Minutes of Meeting Bot</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211870.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-1-1|PAPER Fri-A-SS-1-1 — INTERSPEECH 2021 Acoustic Echo Cancellation Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">INTERSPEECH 2021 Acoustic Echo Cancellation Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211694.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-3-1|PAPER Wed-A-O-3-1 — Automatically Detecting Errors and Disfluencies in Read Speech to Predict Cognitive Impairment in People with Parkinson’s Disease]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatically Detecting Errors and Disfluencies in Read Speech to Predict Cognitive Impairment in People with Parkinson’s Disease</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211877.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-4|PAPER Tue-E-V-4-4 — DEXTER: Deep Encoding of External Knowledge for Named Entity Recognition in Virtual Assistants]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DEXTER: Deep Encoding of External Knowledge for Named Entity Recognition in Virtual Assistants</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210053.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-3|PAPER Tue-M-V-5-3 — Speech Acoustic Modelling Using Raw Source and Filter Components]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Acoustic Modelling Using Raw Source and Filter Components</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210622.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-1-1|PAPER Tue-A-O-1-1 — Leveraging Speaker Attribute Information Using Multi Task Learning for Speaker Verification and Diarization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Leveraging Speaker Attribute Information Using Multi Task Learning for Speaker Verification and Diarization</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210023.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-8|PAPER Tue-A-V-2-8 — Silent versus Modal Multi-Speaker Speech Recognition from Ultrasound and Video]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Silent versus Modal Multi-Speaker Speech Recognition from Ultrasound and Video</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210280.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-3|PAPER Wed-E-V-3-3 — Stochastic Attention Head Removal: A Simple and Effective Method for Improving Transformer Based ASR Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Stochastic Attention Head Removal: A Simple and Effective Method for Improving Transformer Based ASR Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211390.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-8|PAPER Wed-E-V-1-8 — Adapt-and-Adjust: Overcoming the Long-Tail Problem of Multilingual Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adapt-and-Adjust: Overcoming the Long-Tail Problem of Multilingual Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211918.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-SS-1-5|PAPER Tue-E-SS-1-5 — Accelerometer-Based Measurements of Voice Quality in Children During Semi-Occluded Vocal Tract Exercise with a Narrow Straw in Air]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Accelerometer-Based Measurements of Voice Quality in Children During Semi-Occluded Vocal Tract Exercise with a Narrow Straw in Air</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210616.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-3-2|PAPER Wed-E-O-3-2 — ORCA-SLANG: An Automatic Multi-Stage Semi-Supervised Deep Learning Framework for Large-Scale Killer Whale Call Type Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ORCA-SLANG: An Automatic Multi-Stage Semi-Supervised Deep Learning Framework for Large-Scale Killer Whale Call Type Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-6|PAPER Tue-A-V-2-6 — Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211913.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-14|PAPER Fri-A-SS-2-14 — Comparing Supervised Models and Learned Speech Representations for Classifying Intelligibility of Disordered Speech on Selected Phrases]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparing Supervised Models and Learned Speech Representations for Classifying Intelligibility of Disordered Speech on Selected Phrases</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211184.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-O-2-2|PAPER Thu-A-O-2-2 — Fusion-Net: Time-Frequency Information Fusion Y-Network for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fusion-Net: Time-Frequency Information Fusion Y-Network for Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210788.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-6|PAPER Fri-A-V-6-6 — Speak or Chat with Me: End-to-End Spoken Language Understanding System with Flexible Inputs]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speak or Chat with Me: End-to-End Spoken Language Understanding System with Flexible Inputs</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211859.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-12|PAPER Wed-E-V-5-12 — Learning Speech Structure to Improve Time-Frequency Masks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Speech Structure to Improve Time-Frequency Masks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210914.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-4|PAPER Tue-M-V-2-4 — Phonetic and Prosodic Information Estimation from Texts for Genuine Japanese End-to-End Text-to-Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonetic and Prosodic Information Estimation from Texts for Genuine Japanese End-to-End Text-to-Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211339.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-7|PAPER Wed-E-V-1-7 — MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211864.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-5-1|PAPER Tue-A-V-5-1 — Speaker-Conversation Factorial Designs for Diarization Error Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker-Conversation Factorial Designs for Diarization Error Analysis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211888.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-10|PAPER Tue-E-V-5-10 — Best of Both Worlds: Robust Accented Speech Recognition with Adversarial Transfer Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Best of Both Worlds: Robust Accented Speech Recognition with Adversarial Transfer Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211169.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-1|PAPER Wed-E-V-3-1 — Multi-Domain Knowledge Distillation via Uncertainty-Matching for End-to-End ASR Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Domain Knowledge Distillation via Uncertainty-Matching for End-to-End ASR Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210469.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-4|PAPER Thu-A-V-5-4 — Diff-TTS: A Denoising Diffusion Model for Text-to-Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Diff-TTS: A Denoising Diffusion Model for Text-to-Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210465.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-5|PAPER Fri-A-V-5-5 — Expressive Text-to-Speech Using Style Tag]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Expressive Text-to-Speech Using Style Tag</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210763.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-7|PAPER Thu-M-V-3-7 — Stabilizing Label Assignment for Speech Separation by Self-Supervised Pre-Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Stabilizing Label Assignment for Speech Separation by Self-Supervised Pre-Training</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211335.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-9|PAPER Tue-M-V-6-9 — End-to-End Transformer-Based Open-Vocabulary Keyword Spotting with Location-Guided Local Attention]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Transformer-Based Open-Vocabulary Keyword Spotting with Location-Guided Local Attention</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218008.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-S&T-1-1|PAPER Wed-A-S&T-1-1 — Multi-Speaker Emotional Text-to-Speech Synthesizer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Speaker Emotional Text-to-Speech Synthesizer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211353.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-13|PAPER Fri-A-SS-2-13 — Automatic Severity Classification of Korean Dysarthric Speech Using Phoneme-Level Pronunciation Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Severity Classification of Korean Dysarthric Speech Using Phoneme-Level Pronunciation Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211249.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-10|PAPER Tue-A-SS-2-10 — Contrastive Learning of Cough Descriptors for Automatic COVID-19 Preliminary Diagnosis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Contrastive Learning of Cough Descriptors for Automatic COVID-19 Preliminary Diagnosis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211764.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-3-9|PAPER Tue-A-V-3-9 — Explaining Deep Learning Models for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Explaining Deep Learning Models for Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212062.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-1|PAPER Wed-M-V-2-1 — Low Resource ASR: The Surprising Effectiveness of High Resource Transliteration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Low Resource ASR: The Surprising Effectiveness of High Resource Transliteration</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211868.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-1|PAPER Wed-E-V-5-1 — Personalized Speech Enhancement Through Self-Supervised Data Augmentation and Purification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Personalized Speech Enhancement Through Self-Supervised Data Augmentation and Purification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212127.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-3-6|PAPER Thu-M-O-3-6 — Training Hybrid Models on Noisy Transliterated Transcripts for Code-Switched Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Training Hybrid Models on Noisy Transliterated Transcripts for Code-Switched Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211889.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-13|PAPER Tue-M-V-3-13 — Restoring Degraded Speech via a Modified Diffusion Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Restoring Degraded Speech via a Modified Diffusion Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211262.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-6-5|PAPER Thu-M-V-6-5 — Inhalations in Speech: Acoustic and Physiological Characteristics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Inhalations in Speech: Acoustic and Physiological Characteristics</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211881.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-O-1-6|PAPER Thu-A-O-1-6 — Changes in Glottal Source Parameter Values with Light to Moderate Physical Load]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Changes in Glottal Source Parameter Values with Light to Moderate Physical Load</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210464.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-5|PAPER Wed-M-V-5-5 — VocalTurk: Exploring Feasibility of Crowdsourced Speaker Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">VocalTurk: Exploring Feasibility of Crowdsourced Speaker Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211723.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-1-11|PAPER Thu-A-V-1-11 — Leveraging Pre-Trained Language Model for Speech Sentiment Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Leveraging Pre-Trained Language Model for Speech Sentiment Analysis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210994.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-7-5|PAPER Thu-M-V-7-5 — Human-to-Human Conversation Dataset for Learning Fine-Grained Turn-Taking Action]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Human-to-Human Conversation Dataset for Learning Fine-Grained Turn-Taking Action</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211566.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-3|PAPER Wed-M-V-6-3 — Contextualized Streaming End-to-End Speech Recognition with Trie-Based Deep Biasing and Shallow Fusion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Contextualized Streaming End-to-End Speech Recognition with Trie-Based Deep Biasing and Shallow Fusion</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211929.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-1|PAPER Wed-A-V-2-1 — Semantic Distance: A New Metric for ASR Performance Analysis Towards Spoken Language Understanding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semantic Distance: A New Metric for ASR Performance Analysis Towards Spoken Language Understanding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211749.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-7|PAPER Tue-A-V-2-7 — An Automatic, Simple Ultrasound Biofeedback Parameter for Distinguishing Accurate and Misarticulated Rhotic Syllables]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Automatic, Simple Ultrasound Biofeedback Parameter for Distinguishing Accurate and Misarticulated Rhotic Syllables</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210082.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-1-1|PAPER Wed-M-V-1-1 — End-to-End Language Diarization for Bilingual Code-Switching Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Language Diarization for Bilingual Code-Switching Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211962.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-12|PAPER Wed-E-V-3-12 — 4-Bit Quantization of LSTM-Based Speech Recognition Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">4-Bit Quantization of LSTM-Based Speech Recognition Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211249.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-10|PAPER Tue-A-SS-2-10 — Contrastive Learning of Cough Descriptors for Automatic COVID-19 Preliminary Diagnosis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Contrastive Learning of Cough Descriptors for Automatic COVID-19 Preliminary Diagnosis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211297.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-8|PAPER Thu-M-V-1-8 — Parental Spoken Scaffolding and Narrative Skills in Crowd-Sourced Storytelling Samples of Young Children]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Parental Spoken Scaffolding and Narrative Skills in Crowd-Sourced Storytelling Samples of Young Children</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210836.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-7|PAPER Thu-A-V-2-7 — Listen with Intent: Improving Speech Recognition with Audio-to-Intent Front-End]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Listen with Intent: Improving Speech Recognition with Audio-to-Intent Front-End</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212230.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-2-4|PAPER Wed-M-O-2-4 — Domain-Initial Strengthening in Turkish: Acoustic Cues to Prosodic Hierarchy in Stop Consonants]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Domain-Initial Strengthening in Turkish: Acoustic Cues to Prosodic Hierarchy in Stop Consonants</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218005.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-S&T-1-5|PAPER Tue-A-S&T-1-5 — The LIUM Human Active Correction Platform for Speaker Diarization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The LIUM Human Active Correction Platform for Speaker Diarization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210582.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-8|PAPER Tue-M-V-3-8 — Improving Perceptual Quality by Phone-Fortified Perceptual Loss Using Wasserstein Distance for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Perceptual Quality by Phone-Fortified Perceptual Loss Using Wasserstein Distance for Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210599.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-9|PAPER Tue-M-V-3-9 — MetricGAN+: An Improved Version of MetricGAN for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MetricGAN+: An Improved Version of MetricGAN for Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210086.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-2|PAPER Fri-M-V-1-2 — Detection of Lexical Stress Errors in Non-Native (L2) English with Data Augmentation and Attention]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detection of Lexical Stress Errors in Non-Native (L2) English with Data Augmentation and Attention</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211488.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-3|PAPER Tue-M-SS-1-3 — The Phonetic Footprint of Covid-19?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Phonetic Footprint of Covid-19?</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211589.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-2|PAPER Thu-A-SS-2-2 — Influence of the Interviewer on the Automatic Assessment of Alzheimer’s Disease in the Context of the ADReSSo Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Influence of the Interviewer on the Automatic Assessment of Alzheimer’s Disease in the Context of the ADReSSo Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211488.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-3|PAPER Tue-M-SS-1-3 — The Phonetic Footprint of Covid-19?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Phonetic Footprint of Covid-19?</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211749.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-7|PAPER Tue-A-V-2-7 — An Automatic, Simple Ultrasound Biofeedback Parameter for Distinguishing Accurate and Misarticulated Rhotic Syllables]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Automatic, Simple Ultrasound Biofeedback Parameter for Distinguishing Accurate and Misarticulated Rhotic Syllables</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218007.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-S&T-1-7|PAPER Tue-A-S&T-1-7 — Advanced Semi-Blind Speaker Extraction and Tracking Implemented in Experimental Device with Revolving Dense Microphone Array]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Advanced Semi-Blind Speaker Extraction and Tracking Implemented in Experimental Device with Revolving Dense Microphone Array</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210492.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-8|PAPER Tue-E-V-6-8 — StarGAN-VC+ASR: StarGAN-Based Non-Parallel Voice Conversion Regularized by Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">StarGAN-VC+ASR: StarGAN-Based Non-Parallel Voice Conversion Regularized by Automatic Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211169.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-1|PAPER Wed-E-V-3-1 — Multi-Domain Knowledge Distillation via Uncertainty-Matching for End-to-End ASR Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Domain Knowledge Distillation via Uncertainty-Matching for End-to-End ASR Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210239.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-1|PAPER Wed-M-V-3-1 — N-Singer: A Non-Autoregressive Korean Singing Voice Synthesis System for Pronunciation Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">N-Singer: A Non-Autoregressive Korean Singing Voice Synthesis System for Pronunciation Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210866.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-2|PAPER Tue-M-V-2-2 — FastPitchFormant: Source-Filter Based Decomposed Modeling for Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">FastPitchFormant: Source-Filter Based Decomposed Modeling for Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210971.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-6|PAPER Wed-A-V-5-6 — GANSpeech: Adversarial Training for High-Fidelity Multi-Speaker Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">GANSpeech: Adversarial Training for High-Fidelity Multi-Speaker Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210471.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-5|PAPER Thu-A-V-5-5 — Hierarchical Context-Aware Transformers for Non-Autoregressive Text to Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Hierarchical Context-Aware Transformers for Non-Autoregressive Text to Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210400.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-8|PAPER Fri-M-V-6-8 — Auxiliary Sequence Labeling Tasks for Disfluency Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Auxiliary Sequence Labeling Tasks for Disfluency Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210437.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-6|PAPER Wed-M-V-6-6 — Streaming End-to-End Speech Recognition for Hybrid RNN-T/Attention Architecture]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Streaming End-to-End Speech Recognition for Hybrid RNN-T/Attention Architecture</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210146.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-SS-1-3|PAPER Wed-M-SS-1-3 — Real-Time Independent Vector Analysis Using Semi-Supervised Nonnegative Matrix Factorization as a Source Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Real-Time Independent Vector Analysis Using Semi-Supervised Nonnegative Matrix Factorization as a Source Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211746.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-6-8|PAPER Thu-M-V-6-8 — Modeling Sensorimotor Adaptation in Speech Through Alterations to Forward and Inverse Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modeling Sensorimotor Adaptation in Speech Through Alterations to Forward and Inverse Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212134.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-8|PAPER Wed-A-V-1-8 — Analysis of Eye Gaze Reasons and Gaze Aversions During Three-Party Conversations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis of Eye Gaze Reasons and Gaze Aversions During Three-Party Conversations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210896.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-3|PAPER Tue-M-V-2-3 — Sequence-to-Sequence Learning for Deep Gaussian Process Based Speech Synthesis Using Self-Attention GP Layer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sequence-to-Sequence Learning for Deep Gaussian Process Based Speech Synthesis Using Self-Attention GP Layer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211975.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-10|PAPER Tue-A-V-1-10 — Optimizing Latency for Online Video Captioning Using Audio-Visual Transformers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Optimizing Latency for Online Video Captioning Using Audio-Visual Transformers</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210571.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-5|PAPER Tue-A-V-4-5 — Momentum Pseudo-Labeling for Semi-Supervised Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Momentum Pseudo-Labeling for Semi-Supervised Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211693.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-13|PAPER Wed-M-V-6-13 — Dual Causal/Non-Causal Self-Attention for Streaming End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dual Causal/Non-Causal Self-Attention for Streaming End-to-End Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211643.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-12|PAPER Wed-A-V-3-12 — Advanced Long-Context End-to-End Speech Recognition Using Context-Expanded Transformers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Advanced Long-Context End-to-End Speech Recognition Using Context-Expanded Transformers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212253.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-10|PAPER Tue-E-V-2-10 — Should We Always Separate?: Switching Between Enhanced and Observed Signals for Overlapping Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Should We Always Separate?: Switching Between Enhanced and Observed Signals for Overlapping Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212091.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-12|PAPER Wed-M-V-5-12 — Investigating the Impact of Spectral and Temporal Degradation on End-to-End Automatic Speech Recognition Performance]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Impact of Spectral and Temporal Degradation on End-to-End Automatic Speech Recognition Performance</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210437.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-6|PAPER Wed-M-V-6-6 — Streaming End-to-End Speech Recognition for Hybrid RNN-T/Attention Architecture]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Streaming End-to-End Speech Recognition for Hybrid RNN-T/Attention Architecture</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211992.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-6|PAPER Fri-M-V-3-6 — Cross-Modal Transformer-Based Neural Correction Models for Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Modal Transformer-Based Neural Correction Models for Automatic Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211245.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-9|PAPER Fri-M-V-5-9 — Deep Spectral-Cepstral Fusion for Shouted and Normal Speech Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Spectral-Cepstral Fusion for Shouted and Normal Speech Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210200.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-4|PAPER Tue-M-V-6-4 — Multi-Channel VAD for Transcription of Group Discussion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Channel VAD for Transcription of Group Discussion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210057.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-1|PAPER Thu-A-V-2-1 — Cross-Domain Speech Recognition with Unsupervised Character-Level Distribution Matching]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Domain Speech Recognition with Unsupervised Character-Level Distribution Matching</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212091.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-12|PAPER Wed-M-V-5-12 — Investigating the Impact of Spectral and Temporal Degradation on End-to-End Automatic Speech Recognition Performance]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Impact of Spectral and Temporal Degradation on End-to-End Automatic Speech Recognition Performance</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210437.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-6|PAPER Wed-M-V-6-6 — Streaming End-to-End Speech Recognition for Hybrid RNN-T/Attention Architecture]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Streaming End-to-End Speech Recognition for Hybrid RNN-T/Attention Architecture</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211992.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-6|PAPER Fri-M-V-3-6 — Cross-Modal Transformer-Based Neural Correction Models for Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Modal Transformer-Based Neural Correction Models for Automatic Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210796.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-6|PAPER Thu-A-V-2-6 — Knowledge Distillation Based Training of Universal ASR Source Models for Cross-Lingual Transfer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Knowledge Distillation Based Training of Universal ASR Source Models for Cross-Lingual Transfer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211503.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-8|PAPER Wed-M-V-2-8 — Speech Representation Learning Combining Conformer CPC with Deep Cluster for the ZeroSpeech Challenge 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Representation Learning Combining Conformer CPC with Deep Cluster for the ZeroSpeech Challenge 2021</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210381.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-4-5|PAPER Tue-M-V-4-5 — Neural Spoken-Response Generation Using Prosodic and Linguistic Context for Conversational Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Neural Spoken-Response Generation Using Prosodic and Linguistic Context for Conversational Systems</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211132.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-14|PAPER Fri-A-V-1-14 — Improvement of Automatic English Pronunciation Assessment with Small Number of Utterances Using Sentence Speakability]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improvement of Automatic English Pronunciation Assessment with Small Number of Utterances Using Sentence Speakability</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218003.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-S&T-1-3|PAPER Tue-A-S&T-1-3 — Downsizing of Vocal-Tract Models to Line up Variations and Reduce Manufacturing Costs]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Downsizing of Vocal-Tract Models to Line up Variations and Reduce Manufacturing Costs</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210449.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-6-2|PAPER Thu-M-V-6-2 — Vocal-Tract Models to Visualize the Airstream of Human Breath and Droplets While Producing Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Vocal-Tract Models to Visualize the Airstream of Human Breath and Droplets While Producing Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210929.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-6-4|PAPER Thu-M-V-6-4 — Comparison Between Lumped-Mass Modeling and Flow Simulation of the Reed-Type Artificial Vocal Fold]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparison Between Lumped-Mass Modeling and Flow Simulation of the Reed-Type Artificial Vocal Fold</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210792.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-7|PAPER Tue-M-V-6-7 — Voice Activity Detection for Live Speech of Baseball Game Based on Tandem Connection with Speech/Noise Separation Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voice Activity Detection for Live Speech of Baseball Game Based on Tandem Connection with Speech/Noise Separation Model</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210390.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-4|PAPER Wed-E-V-1-4 — Language and Speaker-Independent Feature Transformation for End-to-End Multilingual Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Language and Speaker-Independent Feature Transformation for End-to-End Multilingual Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210411.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-3|PAPER Wed-A-V-1-3 — Speech Emotion Recognition Based on Attention Weight Correction Using Word-Level Confidence Measure]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Emotion Recognition Based on Attention Weight Correction Using Word-Level Confidence Measure</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210089.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-2|PAPER Wed-M-V-5-2 — The Influence of Parallel Processing on Illusory Vowels]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Influence of Parallel Processing on Illusory Vowels</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210244.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-4|PAPER Tue-E-V-6-4 — Fine-Tuning Pre-Trained Voice Conversion Model for Adding New Target Speakers with Limited Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fine-Tuning Pre-Trained Voice Conversion Model for Adding New Target Speakers with Limited Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210411.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-3|PAPER Wed-A-V-1-3 — Speech Emotion Recognition Based on Attention Weight Correction Using Word-Level Confidence Measure]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Emotion Recognition Based on Attention Weight Correction Using Word-Level Confidence Measure</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210809.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-2-4|PAPER Fri-A-V-2-4 — Audio-Visual Speech Emotion Recognition by Disentangling Emotion and Identity Attributes]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio-Visual Speech Emotion Recognition by Disentangling Emotion and Identity Attributes</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210220.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-3|PAPER Wed-E-V-5-3 — Human Listening and Live Captioning: Multi-Task Training for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Human Listening and Live Captioning: Multi-Task Training for Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210142.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-1|PAPER Thu-M-V-3-1 — Ultra Fast Speech Separation Model with Teacher Student Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ultra Fast Speech Separation Model with Teacher Student Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210921.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-9|PAPER Thu-M-V-3-9 — Investigation of Practical Aspects of Single Channel Speech Separation for ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigation of Practical Aspects of Single Channel Speech Separation for ASR</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210102.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-2|PAPER Thu-A-V-2-2 — Large-Scale Pre-Training of End-to-End Multi-Talker ASR for Meeting Transcription with Single Distant Microphone]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Large-Scale Pre-Training of End-to-End Multi-Talker ASR for Meeting Transcription with Single Distant Microphone</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210101.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-2|PAPER Fri-A-V-1-2 — End-to-End Speaker-Attributed ASR with Transformer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Speaker-Attributed ASR with Transformer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211466.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-3-6|PAPER Wed-A-O-3-6 — Neural Speaker Embeddings for Ultrasound-Based Silent Speech Interfaces]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Neural Speaker Embeddings for Ultrasound-Based Silent Speech Interfaces</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211600.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-8|PAPER Wed-A-V-5-8 — Continuous Wavelet Vocoder-Based Decomposition of Parametric Speech Waveform Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Continuous Wavelet Vocoder-Based Decomposition of Parametric Speech Waveform Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211305.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-5|PAPER Thu-M-V-1-5 — Detection of Consonant Errors in Disordered Speech Based on Consonant-Vowel Segment Embedding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detection of Consonant Errors in Disordered Speech Based on Consonant-Vowel Segment Embedding</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211049.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-5-7|PAPER Thu-M-V-5-7 — Applying the Information Bottleneck Principle to Prosodic Representation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Applying the Information Bottleneck Principle to Prosodic Representation Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211343.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-6|PAPER Fri-M-V-7-6 — Pairing Weak with Strong: Twin Models for Defending Against Adversarial Attack on Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Pairing Weak with Strong: Twin Models for Defending Against Adversarial Attack on Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211129.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-9|PAPER Fri-A-V-5-9 — Fine-Grained Style Modeling, Transfer and Prediction in Text-to-Speech Synthesis via Phone-Level Content-Style Disentanglement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fine-Grained Style Modeling, Transfer and Prediction in Text-to-Speech Synthesis via Phone-Level Content-Style Disentanglement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211086.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-SS-1-6|PAPER Fri-M-SS-1-6 — Combining Hybrid and End-to-End Approaches for the OpenASR20 Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Combining Hybrid and End-to-End Approaches for the OpenASR20 Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211870.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-1-1|PAPER Fri-A-SS-1-1 — INTERSPEECH 2021 Acoustic Echo Cancellation Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">INTERSPEECH 2021 Acoustic Echo Cancellation Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211003.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-8|PAPER Fri-M-V-1-8 — Phonetic Distance and Surprisal in Multilingual Priming: Evidence from Slavic]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonetic Distance and Surprisal in Multilingual Priming: Evidence from Slavic</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210567.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-3-2|PAPER Tue-E-O-3-2 — Audio-Visual Recognition of Emotional Engagement of People with Dementia]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio-Visual Recognition of Emotional Engagement of People with Dementia</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211939.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-3-2|PAPER Wed-M-O-3-2 — Universal Speaker Extraction in the Presence and Absence of Target Speakers for Speech of One and Two Talkers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Universal Speaker Extraction in the Presence and Absence of Target Speakers for Speech of One and Two Talkers</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211717.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-9|PAPER Wed-E-V-2-9 — Visual Speech for Obstructive Sleep Apnea Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Visual Speech for Obstructive Sleep Apnea Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211552.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-3-4|PAPER Fri-M-O-3-4 — GlobalPhone Mix-To-Separate Out of 2: A Multilingual 2000 Speakers Mixtures Database for Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">GlobalPhone Mix-To-Separate Out of 2: A Multilingual 2000 Speakers Mixtures Database for Speech Separation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212008.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-11|PAPER Thu-M-V-1-11 — Source and Vocal Tract Cues for Speech-Based Classification of Patients with Parkinson’s Disease and Healthy Subjects]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Source and Vocal Tract Cues for Speech-Based Classification of Patients with Parkinson’s Disease and Healthy Subjects</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211581.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-2-1|PAPER Tue-M-O-2-1 — Acoustic Indicators of Speech Motor Coordination in Adults With and Without Traumatic Brain Injury]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Indicators of Speech Motor Coordination in Adults With and Without Traumatic Brain Injury</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211351.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-6-6|PAPER Tue-A-V-6-6 — Enriching Source Style Transfer in Recognition-Synthesis Based Non-Parallel Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Enriching Source Style Transfer in Recognition-Synthesis Based Non-Parallel Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210057.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-1|PAPER Thu-A-V-2-1 — Cross-Domain Speech Recognition with Unsupervised Character-Level Distribution Matching]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Domain Speech Recognition with Unsupervised Character-Level Distribution Matching</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210584.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-6|PAPER Fri-A-V-5-6 — Adaptive Text to Speech for Spontaneous Style]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adaptive Text to Speech for Spontaneous Style</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210930.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-5|PAPER Wed-M-V-4-5 — Half-Truth: A Partially Fake Audio Detection Dataset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Half-Truth: A Partially Fake Audio Detection Dataset</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210966.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-7|PAPER Tue-E-V-1-7 — AntVoice Neural Speaker Embedding System for FFSVC 2020]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AntVoice Neural Speaker Embedding System for FFSVC 2020</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211335.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-9|PAPER Tue-M-V-6-9 — End-to-End Transformer-Based Open-Vocabulary Keyword Spotting with Location-Guided Local Attention]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Transformer-Based Open-Vocabulary Keyword Spotting with Location-Guided Local Attention</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210206.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-4|PAPER Wed-M-V-6-4 — An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210340.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-6|PAPER Wed-A-V-2-6 — Lookup-Table Recurrent Language Models for Long Tail Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Lookup-Table Recurrent Language Models for Long Tail Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210165.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-4|PAPER Wed-A-V-3-4 — A Deliberation-Based Joint Acoustic and Text Decoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Deliberation-Based Joint Acoustic and Text Decoder</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210683.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-8|PAPER Wed-E-V-3-8 — Multitask Training with Text Data for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multitask Training with Text Data for End-to-End Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210212.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-8|PAPER Fri-A-V-3-8 — Tied & Reduced RNN-T Decoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Tied & Reduced RNN-T Decoder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218014.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-S&T-1-7|PAPER Wed-A-S&T-1-7 — Audio Segmentation Based Conversational Silence Detection for Contact Center Calls]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio Segmentation Based Conversational Silence Detection for Contact Center Calls</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211441.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-5|PAPER Wed-A-V-1-5 — Lexical Entrainment and Intra-Speaker Variability in Cooperative Dialogues]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Lexical Entrainment and Intra-Speaker Variability in Cooperative Dialogues</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211758.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-10|PAPER Tue-M-V-5-10 — Rethinking Evaluation in ASR: Are Our Models Robust Enough?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Rethinking Evaluation in ASR: Are Our Models Robust Enough?</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210236.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-4|PAPER Tue-A-V-4-4 — Robust wav2vec 2.0: Analyzing Domain Shift in Self-Supervised Pre-Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust wav2vec 2.0: Analyzing Domain Shift in Self-Supervised Pre-Training</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210740.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-8|PAPER Tue-A-V-4-8 — slimIPL: Language-Model-Free Iterative Pseudo-Labeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">slimIPL: Language-Model-Free Iterative Pseudo-Labeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211110.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-12|PAPER Wed-M-V-6-12 — StableEmit: Selection Probability Discount for Reducing Emission Latency of Streaming Monotonic Attention ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">StableEmit: Selection Probability Discount for Reducing Emission Latency of Streaming Monotonic Attention ASR</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211107.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-4|PAPER Fri-M-V-3-4 — VAD-Free Streaming Hybrid CTC/Attention ASR for Unsegmented Recording]]</div>|^<div class="cpauthorindexpersoncardpapertitle">VAD-Free Streaming Hybrid CTC/Attention ASR for Unsegmented Recording</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212218.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-13|PAPER Tue-A-V-1-13 — Acoustic Event Detection with Classifier Chains]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Event Detection with Classifier Chains</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210911.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-5|PAPER Thu-A-SS-1-5 — Relaxing the Conditional Independence Assumption of CTC-Based ASR by Conditioning on Intermediate Predictions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Relaxing the Conditional Independence Assumption of CTC-Based ASR by Conditioning on Intermediate Predictions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210701.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-4|PAPER Wed-A-V-1-4 — Effects of Voice Type and Task on L2 Learners’ Awareness of Pronunciation Errors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effects of Voice Type and Task on L2 Learners’ Awareness of Pronunciation Errors</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211842.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-10|PAPER Tue-A-V-2-10 — Investigating Speech Reconstruction for Laryngectomees for Silent Speech Interfaces]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Speech Reconstruction for Laryngectomees for Silent Speech Interfaces</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-3|PAPER Tue-A-V-2-3 — A Comparative Study of Different EMG Features for Acoustics-to-EMG Mapping]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparative Study of Different EMG Features for Acoustics-to-EMG Mapping</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211863.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-13|PAPER Wed-A-V-6-13 — Effects of Feature Scaling and Fusion on Sign Language Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effects of Feature Scaling and Fusion on Sign Language Translation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210464.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-5|PAPER Wed-M-V-5-5 — VocalTurk: Exploring Feasibility of Crowdsourced Speaker Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">VocalTurk: Exploring Feasibility of Crowdsourced Speaker Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211062.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-2|PAPER Tue-A-SS-2-2 — PANACEA Cough Sound-Based Diagnosis of COVID-19 for the DiCOVA 2021 Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PANACEA Cough Sound-Based Diagnosis of COVID-19 for the DiCOVA 2021 Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211525.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-9|PAPER Wed-M-V-2-9 — Identifying Indicators of Vulnerability from Short Speech Segments Using Acoustic and Textual Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Identifying Indicators of Vulnerability from Short Speech Segments Using Acoustic and Textual Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210464.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-5|PAPER Wed-M-V-5-5 — VocalTurk: Exploring Feasibility of Crowdsourced Speaker Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">VocalTurk: Exploring Feasibility of Crowdsourced Speaker Identification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210523.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-6|PAPER Thu-M-V-3-6 — Efficient and Stable Adversarial Learning Using Unpaired Data for Unsupervised Multichannel Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Efficient and Stable Adversarial Learning Using Unpaired Data for Unsupervised Multichannel Speech Separation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210523.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-6|PAPER Thu-M-V-3-6 — Efficient and Stable Adversarial Learning Using Unpaired Data for Unsupervised Multichannel Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Efficient and Stable Adversarial Learning Using Unpaired Data for Unsupervised Multichannel Speech Separation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210874.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-7-4|PAPER Thu-M-V-7-4 — Timing Generating Networks: Neural Network Based Precise Turn-Taking Timing Prediction in Multiparty Conversation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Timing Generating Networks: Neural Network Based Precise Turn-Taking Timing Prediction in Multiparty Conversation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210097.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-2|PAPER Fri-M-V-6-2 — Paraphrase Label Alignment for Voice Application Retrieval in Spoken Language Understanding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Paraphrase Label Alignment for Voice Application Retrieval in Spoken Language Understanding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211114.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-1|PAPER Wed-M-V-6-1 — Super-Human Performance in Online Low-Latency Recognition of Conversational Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Super-Human Performance in Online Low-Latency Recognition of Conversational Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212258.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-1|PAPER Wed-E-V-6-1 — Spectral and Latent Speech Representation Distortion for TTS Evaluation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spectral and Latent Speech Representation Distortion for TTS Evaluation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212000.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-7|PAPER Thu-A-V-6-7 — AusKidTalk: An Auditory-Visual Corpus of 3- to 12-Year-Old Australian Children’s Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AusKidTalk: An Auditory-Visual Corpus of 3- to 12-Year-Old Australian Children’s Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-6|PAPER Tue-A-V-2-6 — Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210098.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-5-2|PAPER Tue-A-V-5-2 — SmallER: Scaling Neural Entity Resolution for Edge Devices]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SmallER: Scaling Neural Entity Resolution for Edge Devices</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211442.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-1-3|PAPER Tue-A-O-1-3 — Speaker Embeddings by Modeling Channel-Wise Correlations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaker Embeddings by Modeling Channel-Wise Correlations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210637.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-10|PAPER Wed-M-V-6-10 — Bridging the Gap Between Streaming and Non-Streaming ASR Systems by Distilling Ensembles of CTC and RNN-T Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bridging the Gap Between Streaming and Non-Streaming ASR Systems by Distilling Ensembles of CTC and RNN-T Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211655.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-8|PAPER Tue-M-V-2-8 — Transformer-Based Acoustic Modeling for Streaming Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transformer-Based Acoustic Modeling for Streaming Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210314.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-3-1|PAPER Wed-E-O-3-1 — Extending the Fullband E-Model Towards Background Noise, Bursty Packet Loss, and Conversational Degradations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Extending the Fullband E-Model Towards Background Noise, Bursty Packet Loss, and Conversational Degradations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211177.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-3|PAPER Thu-A-V-3-3 — Graph-PIT: Generalized Permutation Invariant Training for Continuous Separation of Arbitrary Numbers of Speakers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Graph-PIT: Generalized Permutation Invariant Training for Continuous Separation of Arbitrary Numbers of Speakers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210618.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-1|PAPER Fri-A-V-6-1 — Intent Detection and Slot Filling for Vietnamese]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Intent Detection and Slot Filling for Vietnamese</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211487.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-7-7|PAPER Tue-M-V-7-7 — A Comparison of the Accuracy of Dissen and Keshet’s (2016) DeepFormants and Traditional LPC Methods for Semi-Automatic Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparison of the Accuracy of Dissen and Keshet’s (2016) DeepFormants and Traditional LPC Methods for Semi-Automatic Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210528.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-7|PAPER Thu-A-V-5-7 — A Learned Conditional Prior for the VAE Acoustic Space of a TTS System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Learned Conditional Prior for the VAE Acoustic Space of a TTS System</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210086.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-2|PAPER Fri-M-V-1-2 — Detection of Lexical Stress Errors in Non-Native (L2) English with Data Augmentation and Attention]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Detection of Lexical Stress Errors in Non-Native (L2) English with Data Augmentation and Attention</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210038.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-1|PAPER Fri-A-V-1-1 — Weakly-Supervised Word-Level Pronunciation Error Detection in Non-Native English Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Weakly-Supervised Word-Level Pronunciation Error Detection in Non-Native English Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211581.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-2-1|PAPER Tue-M-O-2-1 — Acoustic Indicators of Speech Motor Coordination in Adults With and Without Traumatic Brain Injury]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Indicators of Speech Motor Coordination in Adults With and Without Traumatic Brain Injury</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210960.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-4|PAPER Fri-M-V-7-4 — Cross-Database Replay Detection in Terminal-Dependent Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Database Replay Detection in Terminal-Dependent Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211516.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-10|PAPER Fri-M-V-3-10 — Insights on Neural Representations for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Insights on Neural Representations for End-to-End Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211547.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-1-3|PAPER Tue-M-O-1-3 — Evaluating the Extrapolation Capabilities of Neural Vocoders to Extreme Pitch Values]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Evaluating the Extrapolation Capabilities of Neural Vocoders to Extreme Pitch Values</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210256.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-3-1|PAPER Tue-M-O-3-1 — A Benchmark of Dynamical Variational Autoencoders Applied to Speech Spectrogram Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Benchmark of Dynamical Variational Autoencoders Applied to Speech Spectrogram Modeling</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211604.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-O-1-5|PAPER Thu-A-O-1-5 — Learning Robust Speech Representation with an Articulatory-Regularized Variational Autoencoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Robust Speech Representation with an Articulatory-Regularized Variational Autoencoder</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210275.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-2-1|PAPER Fri-M-O-2-1 — Alternate Endings: Improving Prosody for Incremental Neural TTS with Predicted Future Text Input]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Alternate Endings: Improving Prosody for Incremental Neural TTS with Predicted Future Text Input</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210983.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-1|PAPER Tue-A-SS-1-1 — Privacy-Preserving Voice Anti-Spoofing Using Secure Multi-Party Computation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Privacy-Preserving Voice Anti-Spoofing Using Secure Multi-Party Computation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211911.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-6|PAPER Tue-M-SS-1-6 — Deep-Learning-Based Central African Primate Species Classification with MixUp and SpecAugment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep-Learning-Based Central African Primate Species Classification with MixUp and SpecAugment</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212202.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-1-5|PAPER Fri-M-O-1-5 — Simulating Reading Mistakes for Child Speech Transformer-Based Phone Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Simulating Reading Mistakes for Child Speech Transformer-Based Phone Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211702.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-2|PAPER Tue-M-SS-1-2 — Transfer Learning-Based Cough Representations for Automatic Detection of COVID-19]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transfer Learning-Based Cough Representations for Automatic Detection of COVID-19</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211123.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-5-8|PAPER Thu-M-V-5-8 — A Prototypical Network Approach for Evaluating Generated Emotional Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Prototypical Network Approach for Evaluating Generated Emotional Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210473.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-1-1|PAPER Tue-M-O-1-1 — Conversion of Airborne to Bone-Conducted Speech with Deep Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Conversion of Airborne to Bone-Conducted Speech with Deep Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211583.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-2-3|PAPER Fri-M-O-2-3 — Ctrl-P: Temporal Control of Prosodic Variation for Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ctrl-P: Temporal Control of Prosodic Variation for Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211610.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-2-4|PAPER Fri-M-O-2-4 — ADEPT: A Dataset for Evaluating Prosody Transfer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ADEPT: A Dataset for Evaluating Prosody Transfer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210046.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-2|PAPER Tue-M-V-3-2 — Temporal Convolutional Network with Frequency Dimension Adaptive Attention for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Temporal Convolutional Network with Frequency Dimension Adaptive Attention for Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212246.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-10|PAPER Thu-A-V-3-10 — Improved Speech Separation with Time-and-Frequency Cross-Domain Feature Selection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improved Speech Separation with Time-and-Frequency Cross-Domain Feature Selection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210630.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-5|PAPER Fri-M-V-5-5 — Coughing-Based Recognition of Covid-19 with Spatial Attentive ConvLSTM Recurrent Neural Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Coughing-Based Recognition of Covid-19 with Spatial Attentive ConvLSTM Recurrent Neural Networks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210056.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-1-3|PAPER Thu-M-SS-1-3 — Dynamic Multi-Scale Convolution for Dialect Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dynamic Multi-Scale Convolution for Dialect Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210338.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-3|PAPER Thu-M-V-3-3 — Continuous Speech Separation Using Speaker Inventory for Long Recording]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Continuous Speech Separation Using Speaker Inventory for Long Recording</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211131.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-6|PAPER Thu-A-SS-1-6 — Toward Streaming ASR with Non-Autoregressive Insertion-Based Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Toward Streaming ASR with Non-Autoregressive Insertion-Based Model</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211556.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-9|PAPER Thu-A-SS-1-9 — Streaming End-to-End ASR Based on Blockwise Non-Autoregressive Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Streaming End-to-End ASR Based on Blockwise Non-Autoregressive Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210584.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-6|PAPER Fri-A-V-5-6 — Adaptive Text to Speech for Spontaneous Style]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adaptive Text to Speech for Spontaneous Style</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210355.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-4|PAPER Wed-E-V-3-4 — Model-Agnostic Fast Adaptive Multi-Objective Balancing Algorithm for Multilingual Automatic Speech Recognition Model Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Model-Agnostic Fast Adaptive Multi-Objective Balancing Algorithm for Multilingual Automatic Speech Recognition Model Training</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-6|PAPER Tue-A-V-2-6 — Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210936.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-3|PAPER Wed-E-SS-1-3 — Deep Noise Suppression with Non-Intrusive PESQNet Supervision Enabling the Use of Real Training Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Noise Suppression with Non-Intrusive PESQNet Supervision Enabling the Use of Real Training Data</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210555.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-1-3|PAPER Thu-M-O-1-3 — Multi-Encoder Learning and Stream Fusion for Transformer-Based End-to-End Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Encoder Learning and Stream Fusion for Transformer-Based End-to-End Automatic Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211590.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-1-4|PAPER Fri-A-SS-1-4 — Y²-Net FCRN for Acoustic Echo and Noise Suppression]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Y²-Net FCRN for Acoustic Echo and Noise Suppression</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218015.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-S&T-1-1|PAPER Thu-M-S&T-1-1 — MoM: Minutes of Meeting Bot]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MoM: Minutes of Meeting Bot</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210262.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-5|PAPER Tue-A-SS-1-5 — Privacy-Preserving Feature Extraction for Cloud-Based Wake Word Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Privacy-Preserving Feature Extraction for Cloud-Based Wake Word Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218019.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-S&T-1-5|PAPER Thu-M-S&T-1-5 — Live Subtitling for BigBlueButton with Open-Source Software]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Live Subtitling for BigBlueButton with Open-Source Software</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210610.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-1-5|PAPER Thu-A-V-1-5 — Stochastic Process Regression for Cross-Cultural Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Stochastic Process Regression for Cross-Cultural Speech Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210555.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-1-3|PAPER Thu-M-O-1-3 — Multi-Encoder Learning and Stream Fusion for Transformer-Based End-to-End Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Encoder Learning and Stream Fusion for Transformer-Based End-to-End Automatic Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218018.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-S&T-1-4|PAPER Thu-M-S&T-1-4 — Digital Einstein Experience: Fast Text-to-Speech for Conversational AI]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Digital Einstein Experience: Fast Text-to-Speech for Conversational AI</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211320.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-9|PAPER Thu-M-V-1-9 — Uncertainty-Aware COVID-19 Detection from Imbalanced Sound Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Uncertainty-Aware COVID-19 Detection from Imbalanced Sound Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211463.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-9|PAPER Tue-E-V-4-9 — Coreference Augmentation for Multi-Domain Task-Oriented Dialogue State Tracking]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Coreference Augmentation for Multi-Domain Task-Oriented Dialogue State Tracking</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210138.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-4-3|PAPER Tue-M-V-4-3 — Act-Aware Slot-Value Predicting in Multi-Domain Dialogue State Tracking]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Act-Aware Slot-Value Predicting in Multi-Domain Dialogue State Tracking</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210095.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-5|PAPER Tue-E-V-4-5 — A Context-Aware Hierarchical BERT Fusion Network for Multi-Turn Dialog Act Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Context-Aware Hierarchical BERT Fusion Network for Multi-Turn Dialog Act Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210137.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-1|PAPER Tue-E-V-6-1 — CVC: Contrastive Learning for Non-Parallel Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">CVC: Contrastive Learning for Non-Parallel Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210600.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-1-5|PAPER Tue-M-V-1-5 — Binary Neural Network for Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Binary Neural Network for Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211525.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-9|PAPER Wed-M-V-2-9 — Identifying Indicators of Vulnerability from Short Speech Segments Using Acoustic and Textual Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Identifying Indicators of Vulnerability from Short Speech Segments Using Acoustic and Textual Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210635.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-7-4|PAPER Tue-M-V-7-4 — The Four-Way Classification of Stops with Voicing and Aspiration for Non-Native Speech Evaluation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Four-Way Classification of Stops with Voicing and Aspiration for Non-Native Speech Evaluation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212000.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-7|PAPER Thu-A-V-6-7 — AusKidTalk: An Auditory-Visual Corpus of 3- to 12-Year-Old Australian Children’s Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AusKidTalk: An Auditory-Visual Corpus of 3- to 12-Year-Old Australian Children’s Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210556.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-5|PAPER Wed-M-O-1-5 — //LeBenchmark//: A Reproducible Framework for Assessing Self-Supervised Representation Learning from Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">//LeBenchmark//: A Reproducible Framework for Assessing Self-Supervised Representation Learning from Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211712.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-2-6|PAPER Wed-A-O-2-6 — Adversarial Disentanglement of Speaker Representation for Attribute-Driven Privacy Preservation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adversarial Disentanglement of Speaker Representation for Attribute-Driven Privacy Preservation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211027.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-1-4|PAPER Thu-M-O-1-4 — Conditional Independence for Pretext Task Selection in Self-Supervised Speech Representation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Conditional Independence for Pretext Task Selection in Self-Supervised Speech Representation Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210456.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-12|PAPER Fri-A-V-3-12 — The Energy and Carbon Footprint of Training End-to-End Speech Recognizers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Energy and Carbon Footprint of Training End-to-End Speech Recognizers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210633.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-3-1|PAPER Tue-A-V-3-1 — LACOPE: Latency-Constrained Pitch Estimation for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">LACOPE: Latency-Constrained Pitch Estimation for Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218010.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-S&T-1-3|PAPER Wed-A-S&T-1-3 — Autonomous Robot for Measuring Room Impulse Responses]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Autonomous Robot for Measuring Room Impulse Responses</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211329.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-3-5|PAPER Tue-A-V-3-5 — Cancellation of Local Competing Speaker with Near-Field Localization for Distributed ad-hoc Sensor Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cancellation of Local Competing Speaker with Near-Field Localization for Distributed ad-hoc Sensor Network</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210867.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-O-2-1|PAPER Thu-A-O-2-1 — End-to-End Optimized Multi-Stage Vector Quantization of Spectral Envelopes for Speech and Audio Coding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Optimized Multi-Stage Vector Quantization of Spectral Envelopes for Speech and Audio Coding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210336.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-2-3|PAPER Tue-A-O-2-3 — Extracting Different Levels of Speech Information from EEG Using an LSTM-Based Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Extracting Different Levels of Speech Information from EEG Using an LSTM-Based Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210661.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-8|PAPER Wed-A-V-2-8 — Token-Level Supervised Contrastive Learning for Punctuation Restoration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Token-Level Supervised Contrastive Learning for Punctuation Restoration</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210147.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-7|PAPER Fri-M-V-6-7 — A Meta-Learning Approach for User-Defined Spoken Term Classification with Varying Classes and Examples]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Meta-Learning Approach for User-Defined Spoken Term Classification with Varying Classes and Examples</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210817.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-11|PAPER Fri-M-V-6-11 — Auto-KWS 2021 Challenge: Task, Datasets, and Baselines]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Auto-KWS 2021 Challenge: Task, Datasets, and Baselines</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^{{$:/causal/NO-PDF Marker}}|^<div class="cpauthorindexpersoncardpapercode">[[Fri-Keynote|PAPER Fri-Keynote — Language Modeling and Artificial Intelligence]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Language Modeling and Artificial Intelligence</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210130.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-2-1|PAPER Wed-M-O-2-1 — Prosodic Accommodation in Face-to-Face and Telephone Dialogues]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prosodic Accommodation in Face-to-Face and Telephone Dialogues</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211180.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-6|PAPER Wed-M-V-4-6 — Data Quality as Predictor of Voice Anti-Spoofing Generalization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Data Quality as Predictor of Voice Anti-Spoofing Generalization</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211522.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-9|PAPER Fri-M-V-7-9 — Visualizing Classifier Adjacency Relations: A Case Study in Speaker Verification and Voice Anti-Spoofing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Visualizing Classifier Adjacency Relations: A Case Study in Speaker Verification and Voice Anti-Spoofing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210390.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-4|PAPER Wed-E-V-1-4 — Language and Speaker-Independent Feature Transformation for End-to-End Multilingual Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Language and Speaker-Independent Feature Transformation for End-to-End Multilingual Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210174.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-5|PAPER Tue-M-V-3-5 — Comparison of Remote Experiments Using Crowdsourcing and Laboratory Experiments on Speech Intelligibility]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparison of Remote Experiments Using Crowdsourcing and Laboratory Experiments on Speech Intelligibility</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210124.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-2|PAPER Wed-A-V-4-2 — PILOT: Introducing Transformers for Probabilistic Sound Event Localization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PILOT: Introducing Transformers for Probabilistic Sound Event Localization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210731.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-6|PAPER Tue-M-V-6-6 — Enrollment-Less Training for Personalized Voice Activity Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Enrollment-Less Training for Personalized Voice Activity Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211607.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-5-5|PAPER Tue-A-V-5-5 — Zero-Shot Joint Modeling of Multiple Spoken-Text-Style Conversion Tasks Using Switching Tokens]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Zero-Shot Joint Modeling of Multiple Spoken-Text-Style Conversion Tasks Using Switching Tokens</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210437.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-6|PAPER Wed-M-V-6-6 — Streaming End-to-End Speech Recognition for Hybrid RNN-T/Attention Architecture]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Streaming End-to-End Speech Recognition for Hybrid RNN-T/Attention Architecture</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-13|PAPER Wed-E-V-3-13 — Unified Autoregressive Modeling for Joint End-to-End Multi-Talker Overlapped Speech Recognition and Speaker Attribute Estimation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unified Autoregressive Modeling for Joint End-to-End Multi-Talker Overlapped Speech Recognition and Speaker Attribute Estimation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211992.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-6|PAPER Fri-M-V-3-6 — Cross-Modal Transformer-Based Neural Correction Models for Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Modal Transformer-Based Neural Correction Models for Automatic Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211981.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-11|PAPER Fri-A-V-1-11 — End-to-End Rich Transcription-Style Automatic Speech Recognition with Semi-Supervised Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Rich Transcription-Style Automatic Speech Recognition with Semi-Supervised Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212218.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-13|PAPER Tue-A-V-1-13 — Acoustic Event Detection with Classifier Chains]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Event Detection with Classifier Chains</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210896.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-3|PAPER Tue-M-V-2-3 — Sequence-to-Sequence Learning for Deep Gaussian Process Based Speech Synthesis Using Self-Attention GP Layer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sequence-to-Sequence Learning for Deep Gaussian Process Based Speech Synthesis Using Self-Attention GP Layer</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210897.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-6|PAPER Wed-M-V-3-6 — Cross-Lingual Speaker Adaptation Using Domain Adaptation and Speaker Consistency Loss for Text-To-Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Lingual Speaker Adaptation Using Domain Adaptation and Speaker Consistency Loss for Text-To-Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210583.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-4|PAPER Wed-A-V-5-4 — Harmonic WaveGAN: GAN-Based Speech Waveform Generation Model with Harmonic Structure Discriminator]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Harmonic WaveGAN: GAN-Based Speech Waveform Generation Model with Harmonic Structure Discriminator</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210208.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-2|PAPER Tue-E-V-6-2 — A Preliminary Study of a Two-Stage Paradigm for Preserving Speaker Identity in Dysarthric Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Preliminary Study of a Two-Stage Paradigm for Preserving Speaker Identity in Dysarthric Voice Conversion</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210517.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-3|PAPER Wed-A-V-5-3 — Unified Source-Filter GAN: Unified Source-Filter Network Based On Factorization of Quasi-Periodic Parallel WaveGAN]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unified Source-Filter GAN: Unified Source-Filter Network Based On Factorization of Quasi-Periodic Parallel WaveGAN</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211984.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-9|PAPER Wed-A-V-5-9 — High-Fidelity and Low-Latency Universal Neural Vocoder Based on Multiband WaveRNN with Data-Driven Linear Prediction for Discrete Waveform Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">High-Fidelity and Low-Latency Universal Neural Vocoder Based on Multiband WaveRNN with Data-Driven Linear Prediction for Discrete Waveform Modeling</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210806.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-9|PAPER Thu-A-V-5-9 — Relational Data Selection for Data Augmentation of Speaker-Dependent Multi-Band MelGAN Vocoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Relational Data Selection for Data Augmentation of Speaker-Dependent Multi-Band MelGAN Vocoder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210019.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-1|PAPER Tue-M-SS-1-1 — The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211320.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-9|PAPER Thu-M-V-1-9 — Uncertainty-Aware COVID-19 Detection from Imbalanced Sound Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Uncertainty-Aware COVID-19 Detection from Imbalanced Sound Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210720.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-11|PAPER Wed-M-V-6-11 — Mixture Model Attention: Flexible Streaming and Non-Streaming Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Mixture Model Attention: Flexible Streaming and Non-Streaming Automatic Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211429.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-7|PAPER Tue-E-V-3-7 — Raw Speech-to-Articulatory Inversion by Temporal Filtering and Decimation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Raw Speech-to-Articulatory Inversion by Temporal Filtering and Decimation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212073.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-6-9|PAPER Thu-M-V-6-9 — Mixture of Orthogonal Sequences Made from Extended Time-Stretched Pulses Enables Measurement of Involuntary Voice Fundamental Frequency Response to Pitch Perturbation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Mixture of Orthogonal Sequences Made from Extended Time-Stretched Pulses Enables Measurement of Involuntary Voice Fundamental Frequency Response to Pitch Perturbation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210174.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-5|PAPER Tue-M-V-3-5 — Comparison of Remote Experiments Using Crowdsourcing and Laboratory Experiments on Speech Intelligibility]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparison of Remote Experiments Using Crowdsourcing and Laboratory Experiments on Speech Intelligibility</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212073.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-6-9|PAPER Thu-M-V-6-9 — Mixture of Orthogonal Sequences Made from Extended Time-Stretched Pulses Enables Measurement of Involuntary Voice Fundamental Frequency Response to Pitch Perturbation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Mixture of Orthogonal Sequences Made from Extended Time-Stretched Pulses Enables Measurement of Involuntary Voice Fundamental Frequency Response to Pitch Perturbation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218021.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-S&T-1-1|PAPER Fri-A-S&T-1-1 — Interactive and Real-Time Acoustic Measurement Tools for Speech Data Acquisition and Presentation: Application of an Extended Member of Time Stretched Pulses]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Interactive and Real-Time Acoustic Measurement Tools for Speech Data Acquisition and Presentation: Application of an Extended Member of Time Stretched Pulses</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211581.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-2-1|PAPER Tue-M-O-2-1 — Acoustic Indicators of Speech Motor Coordination in Adults With and Without Traumatic Brain Injury]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Indicators of Speech Motor Coordination in Adults With and Without Traumatic Brain Injury</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210373.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-4-5|PAPER Wed-E-V-4-5 — Assessing the Use of Prosody in Constituency Parsing of Imperfect Transcripts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Assessing the Use of Prosody in Constituency Parsing of Imperfect Transcripts</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211574.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-3-7|PAPER Tue-A-V-3-7 — Clarity-2021 Challenges: Machine Learning Challenges for Advancing Hearing Aid Processing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Clarity-2021 Challenges: Machine Learning Challenges for Advancing Hearing Aid Processing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210340.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-6|PAPER Wed-A-V-2-6 — Lookup-Table Recurrent Language Models for Long Tail Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Lookup-Table Recurrent Language Models for Long Tail Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211403.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-3-4|PAPER Wed-A-O-3-4 — The Impact of Forced-Alignment Errors on Automatic Pronunciation Evaluation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Impact of Forced-Alignment Errors on Automatic Pronunciation Evaluation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218026.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-S&T-1-5|PAPER Fri-A-S&T-1-5 — Automatic Radiology Report Editing Through Voice]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Radiology Report Editing Through Voice</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210318.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-4|PAPER Tue-E-V-5-4 — Robust Continuous On-Device Personalization for Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Continuous On-Device Personalization for Automatic Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212253.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-10|PAPER Tue-E-V-2-10 — Should We Always Separate?: Switching Between Enhanced and Observed Signals for Overlapping Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Should We Always Separate?: Switching Between Enhanced and Observed Signals for Overlapping Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210437.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-6|PAPER Wed-M-V-6-6 — Streaming End-to-End Speech Recognition for Hybrid RNN-T/Attention Architecture]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Streaming End-to-End Speech Recognition for Hybrid RNN-T/Attention Architecture</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210124.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-2|PAPER Wed-A-V-4-2 — PILOT: Introducing Transformers for Probabilistic Sound Event Localization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PILOT: Introducing Transformers for Probabilistic Sound Event Localization</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211369.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-5|PAPER Thu-A-V-3-5 — Few-Shot Learning of New Sound Classes for Target Sound Extraction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Few-Shot Learning of New Sound Classes for Target Sound Extraction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210231.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-6-1|PAPER Thu-M-V-6-1 — A Simplified Model for the Vocal Tract of [s] with Inclined Incisors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Simplified Model for the Vocal Tract of [s] with Inclined Incisors</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210929.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-6-4|PAPER Thu-M-V-6-4 — Comparison Between Lumped-Mass Modeling and Flow Simulation of the Reed-Type Artificial Vocal Fold]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparison Between Lumped-Mass Modeling and Flow Simulation of the Reed-Type Artificial Vocal Fold</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210582.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-8|PAPER Tue-M-V-3-8 — Improving Perceptual Quality by Phone-Fortified Perceptual Loss Using Wasserstein Distance for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Perceptual Quality by Phone-Fortified Perceptual Loss Using Wasserstein Distance for Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210599.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-9|PAPER Tue-M-V-3-9 — MetricGAN+: An Improved Version of MetricGAN for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MetricGAN+: An Improved Version of MetricGAN for Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211377.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-7|PAPER Thu-A-V-4-7 — Robust End-to-End Speaker Diarization with Conformer and Additive Margin Penalty]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust End-to-End Speaker Diarization with Conformer and Additive Margin Penalty</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211679.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-7|PAPER Tue-E-V-5-7 — On-the-Fly Aligned Data Augmentation for Sequence-to-Sequence ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On-the-Fly Aligned Data Augmentation for Sequence-to-Sequence ASR</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211755.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-10|PAPER Wed-M-V-2-10 — The Zero Resource Speech Challenge 2021: Spoken Language Modelling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Zero Resource Speech Challenge 2021: Spoken Language Modelling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218026.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-S&T-1-5|PAPER Fri-A-S&T-1-5 — Automatic Radiology Report Editing Through Voice]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Radiology Report Editing Through Voice</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210216.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-2|PAPER Wed-E-V-1-2 — Efficient Weight Factorization for Multilingual Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Efficient Weight Factorization for Multilingual Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211705.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-11|PAPER Fri-M-V-5-11 — Generalized Spoofing Detection Inspired from Audio Generation Artifacts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Generalized Spoofing Detection Inspired from Audio Generation Artifacts</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210651.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-O-1-2|PAPER Fri-A-O-1-2 — The Application of Learnable STRF Kernels to the 2021 Fearless Steps Phase-03 SAD Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Application of Learnable STRF Kernels to the 2021 Fearless Steps Phase-03 SAD Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211775.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-9|PAPER Tue-E-V-3-9 — SUPERB: Speech Processing Universal PERformance Benchmark]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SUPERB: Speech Processing Universal PERformance Benchmark</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-2|PAPER Tue-M-V-6-2 — Noise-Tolerant Self-Supervised Learning for Audio-Visual Voice Activity Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Noise-Tolerant Self-Supervised Learning for Audio-Visual Voice Activity Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211241.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-1-3|PAPER Fri-M-O-1-3 — Age-Invariant Training for End-to-End Child Speech Recognition Using Adversarial Multi-Task Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Age-Invariant Training for End-to-End Child Speech Recognition Using Adversarial Multi-Task Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210694.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-5|PAPER Tue-E-V-2-5 — Personalized PercepNet: Real-Time, Low-Complexity Target Voice Separation and Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Personalized PercepNet: Real-Time, Low-Complexity Target Voice Separation and Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211249.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-10|PAPER Tue-A-SS-2-10 — Contrastive Learning of Cough Descriptors for Automatic COVID-19 Preliminary Diagnosis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Contrastive Learning of Cough Descriptors for Automatic COVID-19 Preliminary Diagnosis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211771.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-3-3|PAPER Tue-E-O-3-3 — Speaking Corona? Human and Machine Recognition of COVID-19 from Voice]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaking Corona? Human and Machine Recognition of COVID-19 from Voice</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211155.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-4|PAPER Tue-A-V-2-4 — Image-Based Assessment of Jaw Parameters and Jaw Kinematics for Articulatory Simulation: Preliminary Results]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Image-Based Assessment of Jaw Parameters and Jaw Kinematics for Articulatory Simulation: Preliminary Results</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211860.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-4|PAPER Wed-M-O-1-4 — SPGISpeech: 5,000 Hours of Transcribed Financial Audio for Fully Formatted End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SPGISpeech: 5,000 Hours of Transcribed Financial Audio for Fully Formatted End-to-End Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-6|PAPER Tue-A-V-2-6 — Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210635.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-7-4|PAPER Tue-M-V-7-4 — The Four-Way Classification of Stops with Voicing and Aspiration for Non-Native Speech Evaluation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Four-Way Classification of Stops with Voicing and Aspiration for Non-Native Speech Evaluation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210347.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-13|PAPER Tue-E-V-3-13 — Do Sound Event Representations Generalize to Other Audio Tasks? A Case Study in Audio Transfer Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Do Sound Event Representations Generalize to Other Audio Tasks? A Case Study in Audio Transfer Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211495.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-1-7|PAPER Wed-M-V-1-7 — E2E-Based Multi-Task Learning Approach to Joint Speech and Accent Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">E2E-Based Multi-Task Learning Approach to Joint Speech and Accent Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212138.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-12|PAPER Fri-M-V-5-12 — Overlapped Speech Detection Based on Spectral and Spatial Feature Fusion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Overlapped Speech Detection Based on Spectral and Spatial Feature Fusion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211272.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-1|PAPER Wed-A-V-3-1 — Dynamic Encoder Transducer: A Flexible Solution for Trading Off Accuracy for Latency]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dynamic Encoder Transducer: A Flexible Solution for Trading Off Accuracy for Latency</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210354.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-10|PAPER Fri-A-V-3-10 — Collaborative Training of Acoustic Encoders for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Collaborative Training of Acoustic Encoders for Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211787.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-4|PAPER Wed-A-V-2-4 — Phonetically Induced Subwords for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonetically Induced Subwords for End-to-End Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210645.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-3-2|PAPER Tue-M-O-3-2 — Fricative Phoneme Detection Using Deep Neural Networks and its Comparison to Traditional Methods]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fricative Phoneme Detection Using Deep Neural Networks and its Comparison to Traditional Methods</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210728.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-1|PAPER Thu-A-V-4-1 — LEAP Submission for the Third DIHARD Diarization Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">LEAP Submission for the Third DIHARD Diarization Challenge</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211208.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-6|PAPER Thu-A-V-4-6 — The Third DIHARD Diarization Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Third DIHARD Diarization Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211209.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-1|PAPER Fri-A-V-4-1 — Graph-Based Label Propagation for Semi-Supervised Speaker Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Graph-Based Label Propagation for Semi-Supervised Speaker Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210306.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-3|PAPER Wed-M-V-5-3 — Exploring the Potential of Lexical Paraphrases for Mitigating Noise-Induced Comprehension Errors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploring the Potential of Lexical Paraphrases for Mitigating Noise-Induced Comprehension Errors</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211837.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-9|PAPER Tue-A-V-1-9 — An Evaluation of Data Augmentation Methods for Sound Scene Geotagging]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Evaluation of Data Augmentation Methods for Sound Scene Geotagging</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218002.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-S&T-1-2|PAPER Tue-A-S&T-1-2 — Beey: More Than a Speech-to-Text Editor]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Beey: More Than a Speech-to-Text Editor</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211555.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-10|PAPER Wed-M-V-4-10 — Improving the Expressiveness of Neural Vocoding with Non-Affine Normalizing Flows]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving the Expressiveness of Neural Vocoding with Non-Affine Normalizing Flows</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210351.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-5-3|PAPER Tue-A-V-5-3 — Disfluency Detection with Unlabeled Data and Small BERT Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Disfluency Detection with Unlabeled Data and Small BERT Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211085.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-1-3|PAPER Wed-E-O-1-3 — Log-Likelihood-Ratio Cost Function as Objective Loss for Speaker Verification Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Log-Likelihood-Ratio Cost Function as Objective Loss for Speaker Verification Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212000.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-7|PAPER Thu-A-V-6-7 — AusKidTalk: An Auditory-Visual Corpus of 3- to 12-Year-Old Australian Children’s Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AusKidTalk: An Auditory-Visual Corpus of 3- to 12-Year-Old Australian Children’s Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211000.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-2-5|PAPER Fri-A-V-2-5 — Parametric Distributions to Model Numerical Emotion Labels]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Parametric Distributions to Model Numerical Emotion Labels</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212086.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-6-10|PAPER Tue-A-V-6-10 — Many-to-Many Voice Conversion Based Feature Disentanglement Using Variational Autoencoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Many-to-Many Voice Conversion Based Feature Disentanglement Using Variational Autoencoder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211378.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-7|PAPER Thu-A-V-3-7 — AvaTr: One-Shot Speaker Extraction with Transformers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AvaTr: One-Shot Speaker Extraction with Transformers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211966.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-5|PAPER Fri-M-V-6-5 — Few-Shot Keyword Spotting in Any Language]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Few-Shot Keyword Spotting in Any Language</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211537.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-10|PAPER Tue-E-V-4-10 — Rethinking End-to-End Evaluation of Decomposable Tasks: A Case Study on Spoken Language Understanding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Rethinking End-to-End Evaluation of Decomposable Tasks: A Case Study on Spoken Language Understanding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211578.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-9|PAPER Wed-E-V-1-9 — SRI-B End-to-End System for Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SRI-B End-to-End System for Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210953.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-7|PAPER Wed-E-V-5-7 — Whisper Speech Enhancement Using Joint Variational Autoencoder for Improved Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Whisper Speech Enhancement Using Joint Variational Autoencoder for Improved Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210354.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-10|PAPER Fri-A-V-3-10 — Collaborative Training of Acoustic Encoders for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Collaborative Training of Acoustic Encoders for Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211298.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-2|PAPER Wed-M-V-6-2 — Multiple Softmax Architecture for Streaming Multilingual End-to-End ASR Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multiple Softmax Architecture for Streaming Multilingual End-to-End ASR Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211403.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-3-4|PAPER Wed-A-O-3-4 — The Impact of Forced-Alignment Errors on Automatic Pronunciation Evaluation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Impact of Forced-Alignment Errors on Automatic Pronunciation Evaluation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211796.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-7|PAPER Wed-A-V-1-7 — Investigating the Interplay Between Affective, Phonatory and Motoric Subsystems in Autism Spectrum Disorder Using a Multimodal Dialogue Agent]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Interplay Between Affective, Phonatory and Motoric Subsystems in Autism Spectrum Disorder Using a Multimodal Dialogue Agent</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211801.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-2|PAPER Fri-A-SS-2-2 — Investigating the Utility of Multimodal Conversational Technology and Audiovisual Analytic Measures for the Assessment and Monitoring of Amyotrophic Lateral Sclerosis at Scale]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Utility of Multimodal Conversational Technology and Audiovisual Analytic Measures for the Assessment and Monitoring of Amyotrophic Lateral Sclerosis at Scale</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212006.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-15|PAPER Fri-A-SS-2-15 — Analysis and Tuning of a Voice Assistant System for Dysfluent Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis and Tuning of a Voice Assistant System for Dysfluent Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211094.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-11|PAPER Thu-A-V-6-11 — Towards Automatic Speech to Sign Language Generation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Automatic Speech to Sign Language Generation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210150.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-4|PAPER Tue-M-V-3-4 — Transfer Learning for Speech Intelligibility Improvement in Noisy Environments]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transfer Learning for Speech Intelligibility Improvement in Noisy Environments</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210098.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-5-2|PAPER Tue-A-V-5-2 — SmallER: Scaling Neural Entity Resolution for Edge Devices]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SmallER: Scaling Neural Entity Resolution for Edge Devices</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210267.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-7-2|PAPER Tue-M-V-7-2 — System Performance as a Function of Calibration Methods, Sample Size and Sampling Variability in Likelihood Ratio-Based Forensic Voice Comparison]]</div>|^<div class="cpauthorindexpersoncardpapertitle">System Performance as a Function of Calibration Methods, Sample Size and Sampling Variability in Likelihood Ratio-Based Forensic Voice Comparison</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211487.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-7-7|PAPER Tue-M-V-7-7 — A Comparison of the Accuracy of Dissen and Keshet’s (2016) DeepFormants and Traditional LPC Methods for Semi-Automatic Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparison of the Accuracy of Dissen and Keshet’s (2016) DeepFormants and Traditional LPC Methods for Semi-Automatic Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211267.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-3|PAPER Tue-A-SS-2-3 — Recognising Covid-19 from Coughing Using Ensembles of SVMs and LSTMs with Handcrafted and Deep Audio Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Recognising Covid-19 from Coughing Using Ensembles of SVMs and LSTMs with Handcrafted and Deep Audio Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210291.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-1|PAPER Wed-E-V-2-1 — Automatic Speech Recognition Systems Errors for Objective Sleepiness Detection Through Voice]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Speech Recognition Systems Errors for Objective Sleepiness Detection Through Voice</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211536.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-4-1|PAPER Tue-M-V-4-1 — User-Initiated Repetition-Based Recovery in Multi-Utterance Dialogue Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">User-Initiated Repetition-Based Recovery in Multi-Utterance Dialogue Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211862.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-4|PAPER Thu-M-V-1-4 — Phonetic Complexity, Speech Accuracy and Intelligibility Assessment of Italian Dysarthric Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonetic Complexity, Speech Accuracy and Intelligibility Assessment of Italian Dysarthric Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211758.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-10|PAPER Tue-M-V-5-10 — Rethinking Evaluation in ASR: Are Our Models Robust Enough?]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Rethinking Evaluation in ASR: Are Our Models Robust Enough?</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210236.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-4|PAPER Tue-A-V-4-4 — Robust wav2vec 2.0: Analyzing Domain Shift in Self-Supervised Pre-Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust wav2vec 2.0: Analyzing Domain Shift in Self-Supervised Pre-Training</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210307.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-3|PAPER Fri-A-V-5-3 — Emotional Prosody Control for Speech Generation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Emotional Prosody Control for Speech Generation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211428.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-4|PAPER Fri-M-V-6-4 — Streaming Transformer for Hardware Efficient Voice Trigger Detection and False Trigger Mitigation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Streaming Transformer for Hardware Efficient Voice Trigger Detection and False Trigger Mitigation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210184.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-O-1-1|PAPER Thu-A-O-1-1 — Towards the Prediction of the Vocal Tract Shape from the Sequence of Phonemes to be Articulated]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards the Prediction of the Vocal Tract Shape from the Sequence of Phonemes to be Articulated</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211339.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-7|PAPER Wed-E-V-1-7 — MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210074.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-1|PAPER Tue-A-SS-2-1 — DiCOVA Challenge: Dataset, Task, and Baseline System for COVID-19 Diagnosis Using Acoustics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DiCOVA Challenge: Dataset, Task, and Baseline System for COVID-19 Diagnosis Using Acoustics</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210223.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-2-3|PAPER Tue-M-O-2-3 — Distortion of Voiced Obstruents for Differential Diagnosis Between Parkinson’s Disease and Multiple System Atrophy]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Distortion of Voiced Obstruents for Differential Diagnosis Between Parkinson’s Disease and Multiple System Atrophy</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211889.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-13|PAPER Tue-M-V-3-13 — Restoring Degraded Speech via a Modified Diffusion Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Restoring Degraded Speech via a Modified Diffusion Model</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211403.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-3-4|PAPER Wed-A-O-3-4 — The Impact of Forced-Alignment Errors on Automatic Pronunciation Evaluation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Impact of Forced-Alignment Errors on Automatic Pronunciation Evaluation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211609.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-1|PAPER Wed-E-SS-1-1 — INTERSPEECH 2021 Deep Noise Suppression Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">INTERSPEECH 2021 Deep Noise Suppression Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218022.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-S&T-1-7|PAPER Thu-M-S&T-1-7 — ViSTAFAE: A Visual Speech-Training Aid with Feedback of Articulatory Efforts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ViSTAFAE: A Visual Speech-Training Aid with Feedback of Articulatory Efforts</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211718.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-10|PAPER Wed-M-V-5-10 — Towards the Explainability of Multimodal Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards the Explainability of Multimodal Speech Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211860.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-4|PAPER Wed-M-O-1-4 — SPGISpeech: 5,000 Hours of Transcribed Financial Audio for Fully Formatted End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SPGISpeech: 5,000 Hours of Transcribed Financial Audio for Fully Formatted End-to-End Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211599.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-8|PAPER Wed-E-V-6-8 — Hi-Fi Multi-Speaker English TTS Dataset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Hi-Fi Multi-Speaker English TTS Dataset</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211339.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-7|PAPER Wed-E-V-1-7 — MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211890.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-12|PAPER Wed-A-V-4-12 — On the Design of Deep Priors for Unsupervised Audio Restoration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On the Design of Deep Priors for Unsupervised Audio Restoration</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211583.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-2-3|PAPER Fri-M-O-2-3 — Ctrl-P: Temporal Control of Prosodic Variation for Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ctrl-P: Temporal Control of Prosodic Variation for Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211610.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-O-2-4|PAPER Fri-M-O-2-4 — ADEPT: A Dataset for Evaluating Prosody Transfer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ADEPT: A Dataset for Evaluating Prosody Transfer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218009.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-S&T-1-2|PAPER Wed-A-S&T-1-2 — Live TV Subtitling Through Respeaking]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Live TV Subtitling Through Respeaking</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211456.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-O-1-5|PAPER Fri-A-O-1-5 — EML Online Speech Activity Detection for the Fearless Steps Challenge Phase-III]]</div>|^<div class="cpauthorindexpersoncardpapertitle">EML Online Speech Activity Detection for the Fearless Steps Challenge Phase-III</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218018.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-S&T-1-4|PAPER Thu-M-S&T-1-4 — Digital Einstein Experience: Fast Text-to-Speech for Conversational AI]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Digital Einstein Experience: Fast Text-to-Speech for Conversational AI</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218026.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-S&T-1-5|PAPER Fri-A-S&T-1-5 — Automatic Radiology Report Editing Through Voice]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Radiology Report Editing Through Voice</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210340.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-6|PAPER Wed-A-V-2-6 — Lookup-Table Recurrent Language Models for Long Tail Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Lookup-Table Recurrent Language Models for Long Tail Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211467.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-10|PAPER Fri-M-V-1-10 — Transformer Based End-to-End Mispronunciation Detection and Diagnosis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transformer Based End-to-End Mispronunciation Detection and Diagnosis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210605.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-10|PAPER Tue-M-V-3-10 — A Spectro-Temporal Glimpsing Index (STGI) for Speech Intelligibility Prediction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Spectro-Temporal Glimpsing Index (STGI) for Speech Intelligibility Prediction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211239.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-3|PAPER Thu-M-V-1-3 — Vocalization Recognition of People with Profound Intellectual and Multiple Disabilities (PIMD) Using Machine Learning Algorithms]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Vocalization Recognition of People with Profound Intellectual and Multiple Disabilities (PIMD) Using Machine Learning Algorithms</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210056.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-1-3|PAPER Thu-M-SS-1-3 — Dynamic Multi-Scale Convolution for Dialect Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dynamic Multi-Scale Convolution for Dialect Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210557.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-10|PAPER Tue-E-V-6-10 — Non-Parallel Any-to-Many Voice Conversion by Replacing Speaker Statistics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Non-Parallel Any-to-Many Voice Conversion by Replacing Speaker Statistics</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210788.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-6|PAPER Fri-A-V-6-6 — Speak or Chat with Me: End-to-End Spoken Language Understanding System with Flexible Inputs]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speak or Chat with Me: End-to-End Spoken Language Understanding System with Flexible Inputs</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211187.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-13|PAPER Fri-M-V-7-13 — Partially-Connected Differentiable Architecture Search for Deepfake and Spoofing Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Partially-Connected Differentiable Architecture Search for Deepfake and Spoofing Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210223.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-2-3|PAPER Tue-M-O-2-3 — Distortion of Voiced Obstruents for Differential Diagnosis Between Parkinson’s Disease and Multiple System Atrophy]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Distortion of Voiced Obstruents for Differential Diagnosis Between Parkinson’s Disease and Multiple System Atrophy</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210965.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-SS-1-7|PAPER Wed-A-SS-1-7 — The Sogou System for Short-Duration Speaker Verification Challenge 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Sogou System for Short-Duration Speaker Verification Challenge 2021</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211974.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-3|PAPER Tue-E-V-5-3 — Low Resource German ASR with Untranscribed Data Spoken by Non-Native Children — INTERSPEECH 2021 Shared Task SPAPL System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Low Resource German ASR with Untranscribed Data Spoken by Non-Native Children — INTERSPEECH 2021 Shared Task SPAPL System</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212053.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-11|PAPER Tue-E-V-5-11 — Extending Pronunciation Dictionary with Automatically Detected Word Mispronunciations to Improve PAII’s System for Interspeech 2021 Non-Native Child English Close Track ASR Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Extending Pronunciation Dictionary with Automatically Detected Word Mispronunciations to Improve PAII’s System for Interspeech 2021 Non-Native Child English Close Track ASR Challenge</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211955.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-1|PAPER Thu-A-SS-1-1 — An Improved Single Step Non-Autoregressive Transformer for Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Improved Single Step Non-Autoregressive Transformer for Automatic Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210637.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-10|PAPER Wed-M-V-6-10 — Bridging the Gap Between Streaming and Non-Streaming ASR Systems by Distilling Ensembles of CTC and RNN-T Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bridging the Gap Between Streaming and Non-Streaming ASR Systems by Distilling Ensembles of CTC and RNN-T Models</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211668.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-8|PAPER Thu-A-V-2-8 — Exploring Targeted Universal Adversarial Perturbations to End-to-End ASR Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploring Targeted Universal Adversarial Perturbations to End-to-End ASR Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210206.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-4|PAPER Wed-M-V-6-4 — An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210212.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-8|PAPER Fri-A-V-3-8 — Tied & Reduced RNN-T Decoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Tied & Reduced RNN-T Decoder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211463.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-9|PAPER Tue-E-V-4-9 — Coreference Augmentation for Multi-Domain Task-Oriented Dialogue State Tracking]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Coreference Augmentation for Multi-Domain Task-Oriented Dialogue State Tracking</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210298.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-SS-1-4|PAPER Wed-M-SS-1-4 — Improving Channel Decorrelation for Multi-Channel Target Speech Extraction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Channel Decorrelation for Multi-Channel Target Speech Extraction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211162.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-1|PAPER Tue-E-V-5-1 — Semantic Data Augmentation for End-to-End Mandarin Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semantic Data Augmentation for End-to-End Mandarin Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211708.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-3|PAPER Wed-A-V-2-3 — Incorporating External POS Tagger for Punctuation Restoration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Incorporating External POS Tagger for Punctuation Restoration</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210097.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-2|PAPER Fri-M-V-6-2 — Paraphrase Label Alignment for Voice Application Retrieval in Spoken Language Understanding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Paraphrase Label Alignment for Voice Application Retrieval in Spoken Language Understanding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211189.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-1|PAPER Thu-M-V-1-1 — Speech Intelligibility of Dysarthric Speech: Human Scores and Acoustic-Phonetic Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Intelligibility of Dysarthric Speech: Human Scores and Acoustic-Phonetic Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211882.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-9|PAPER Tue-A-SS-1-9 — SynthASR: Unlocking Synthetic Data for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SynthASR: Unlocking Synthetic Data for Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211962.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-12|PAPER Wed-E-V-3-12 — 4-Bit Quantization of LSTM-Based Speech Recognition Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">4-Bit Quantization of LSTM-Based Speech Recognition Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210994.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-7-5|PAPER Thu-M-V-7-5 — Human-to-Human Conversation Dataset for Learning Fine-Grained Turn-Taking Action]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Human-to-Human Conversation Dataset for Learning Fine-Grained Turn-Taking Action</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211623.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-3-2|PAPER Thu-M-O-3-2 — Acoustic Data-Driven Subword Modeling for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Data-Driven Subword Modeling for End-to-End Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211671.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-3-3|PAPER Thu-M-O-3-3 — Equivalence of Segmental and Neural Transducer Modeling: A Proof of Concept]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Equivalence of Segmental and Neural Transducer Modeling: A Proof of Concept</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211402.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-10|PAPER Fri-A-V-1-10 — The Impact of ASR on the Automatic Analysis of Linguistic Complexity and Sophistication in Spontaneous L2 Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Impact of ASR on the Automatic Analysis of Linguistic Complexity and Sophistication in Spontaneous L2 Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211162.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-1|PAPER Tue-E-V-5-1 — Semantic Data Augmentation for End-to-End Mandarin Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semantic Data Augmentation for End-to-End Mandarin Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210391.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-4|PAPER Wed-M-V-2-4 — Speech SimCLR: Combining Contrastive and Reconstruction Objective for Self-Supervised Speech Representation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech SimCLR: Combining Contrastive and Reconstruction Objective for Self-Supervised Speech Representation Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211965.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-5|PAPER Thu-A-V-6-5 — GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211775.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-9|PAPER Tue-E-V-3-9 — SUPERB: Speech Processing Universal PERformance Benchmark]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SUPERB: Speech Processing Universal PERformance Benchmark</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212013.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-9|PAPER Wed-E-V-6-9 — Utilizing Self-Supervised Representations for MOS Prediction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Utilizing Self-Supervised Representations for MOS Prediction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210236.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-4|PAPER Tue-A-V-4-4 — Robust wav2vec 2.0: Analyzing Domain Shift in Self-Supervised Pre-Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust wav2vec 2.0: Analyzing Domain Shift in Self-Supervised Pre-Training</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210475.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-6|PAPER Thu-A-V-5-6 — Speech Resynthesis from Discrete Disentangled Self-Supervised Representations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Resynthesis from Discrete Disentangled Self-Supervised Representations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210807.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-1-5|PAPER Thu-M-SS-1-5 — Language Recognition Based on Unsupervised Pretrained Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Language Recognition Based on Unsupervised Pretrained Models</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211965.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-5|PAPER Thu-A-V-6-5 — GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211063.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-SS-1-5|PAPER Fri-M-SS-1-5 — The TNT Team System Descriptions of Cantonese and Mongolian for IARPA OpenASR20]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The TNT Team System Descriptions of Cantonese and Mongolian for IARPA OpenASR20</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210584.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-6|PAPER Fri-A-V-5-6 — Adaptive Text to Speech for Spontaneous Style]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adaptive Text to Speech for Spontaneous Style</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212013.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-9|PAPER Wed-E-V-6-9 — Utilizing Self-Supervised Representations for MOS Prediction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Utilizing Self-Supervised Representations for MOS Prediction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210817.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-11|PAPER Fri-M-V-6-11 — Auto-KWS 2021 Challenge: Task, Datasets, and Baselines]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Auto-KWS 2021 Challenge: Task, Datasets, and Baselines</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210557.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-10|PAPER Tue-E-V-6-10 — Non-Parallel Any-to-Many Voice Conversion by Replacing Speaker Statistics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Non-Parallel Any-to-Many Voice Conversion by Replacing Speaker Statistics</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210552.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-4|PAPER Wed-M-V-3-4 — Cross-Lingual Voice Conversion with Disentangled Universal Linguistic Representations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Lingual Voice Conversion with Disentangled Universal Linguistic Representations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211877.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-4|PAPER Tue-E-V-4-4 — DEXTER: Deep Encoding of External Knowledge for Named Entity Recognition in Virtual Assistants]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DEXTER: Deep Encoding of External Knowledge for Named Entity Recognition in Virtual Assistants</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212266.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-SS-1-7|PAPER Wed-M-SS-1-7 — Real-Time Multi-Channel Speech Enhancement Based on Neural Network Masking with Attention Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Real-Time Multi-Channel Speech Enhancement Based on Neural Network Masking with Attention Model</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212267.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-13|PAPER Wed-A-V-4-13 — Cramér-Rao Lower Bound for DOA Estimation with an Array of Directional Microphones in Reverberant Environments]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cramér-Rao Lower Bound for DOA Estimation with an Array of Directional Microphones in Reverberant Environments</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212138.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-12|PAPER Fri-M-V-5-12 — Overlapped Speech Detection Based on Spectral and Spatial Feature Fusion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Overlapped Speech Detection Based on Spectral and Spatial Feature Fusion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211989.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-3-10|PAPER Tue-A-V-3-10 — Minimum-Norm Differential Beamforming for Linear Array with Directional Microphones]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Minimum-Norm Differential Beamforming for Linear Array with Directional Microphones</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212266.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-SS-1-7|PAPER Wed-M-SS-1-7 — Real-Time Multi-Channel Speech Enhancement Based on Neural Network Masking with Attention Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Real-Time Multi-Channel Speech Enhancement Based on Neural Network Masking with Attention Model</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210747.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-2|PAPER Thu-A-V-4-2 — Investigation of Spatial-Acoustic Features for Overlapping Speech Detection in Multiparty Meetings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigation of Spatial-Acoustic Features for Overlapping Speech Detection in Multiparty Meetings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211769.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-1-4|PAPER Tue-A-O-1-4 — Multi-Task Neural Network for Robust Multiple Speaker Embedding Extraction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Task Neural Network for Robust Multiple Speaker Embedding Extraction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210235.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-2|PAPER Tue-E-V-1-2 — The DKU-Duke-Lenovo System Description for the Fearless Steps Challenge Phase III]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The DKU-Duke-Lenovo System Description for the Fearless Steps Challenge Phase III</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210433.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-4|PAPER Thu-M-V-3-4 — Crossfire Conditional Generative Adversarial Networks for Singing Voice Extraction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Crossfire Conditional Generative Adversarial Networks for Singing Voice Extraction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210548.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-4-6|PAPER Tue-M-V-4-6 — Semantic Transportation Prototypical Network for Few-Shot Intent Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semantic Transportation Prototypical Network for Few-Shot Intent Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210246.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-5-4|PAPER Tue-A-V-5-4 — Discriminative Self-Training for Punctuation Prediction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Discriminative Self-Training for Punctuation Prediction</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210234.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-6|PAPER Tue-E-V-4-6 — Pre-Training for Spoken Language Understanding with Joint Textual and Phonetic Representation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Pre-Training for Spoken Language Understanding with Joint Textual and Phonetic Representation Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210208.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-2|PAPER Tue-E-V-6-2 — A Preliminary Study of a Two-Stage Paradigm for Preserving Speaker Identity in Dysarthric Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Preliminary Study of a Two-Stage Paradigm for Preserving Speaker Identity in Dysarthric Voice Conversion</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210806.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-9|PAPER Thu-A-V-5-9 — Relational Data Selection for Data Augmentation of Speaker-Dependent Multi-Band MelGAN Vocoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Relational Data Selection for Data Augmentation of Speaker-Dependent Multi-Band MelGAN Vocoder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210158.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-1-2|PAPER Thu-A-V-1-2 — Learning Fine-Grained Cross Modality Excitement for Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Fine-Grained Cross Modality Excitement for Speech Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210458.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-9|PAPER Fri-M-V-6-9 — Energy-Friendly Keyword Spotting System Using Add-Based Convolution]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Energy-Friendly Keyword Spotting System Using Add-Based Convolution</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211281.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-5|PAPER Fri-M-V-7-5 — The Effect of Silence and Dual-Band Fusion in Anti-Spoofing System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Effect of Silence and Dual-Band Fusion in Anti-Spoofing System</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-7|PAPER Fri-A-V-4-7 — Adaptive Margin Circle Loss for Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adaptive Margin Circle Loss for Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211351.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-6-6|PAPER Tue-A-V-6-6 — Enriching Source Style Transfer in Recognition-Synthesis Based Non-Parallel Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Enriching Source Style Transfer in Recognition-Synthesis Based Non-Parallel Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212104.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-7-11|PAPER Tue-M-V-7-11 — Characterizing Voiced and Voiceless Nasals in Mizo]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Characterizing Voiced and Voiceless Nasals in Mizo</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210474.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-3|PAPER Wed-M-V-3-3 — Improve Cross-Lingual Text-To-Speech Synthesis on Monolingual Corpora with Pitch Contour Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improve Cross-Lingual Text-To-Speech Synthesis on Monolingual Corpora with Pitch Contour Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210189.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-3|PAPER Thu-A-V-5-3 — Zero-Shot Text-to-Speech for Text-Based Insertion in Audio Narration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Zero-Shot Text-to-Speech for Text-Based Insertion in Audio Narration</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211996.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-9|PAPER Wed-M-V-3-9 — Speech2Video: Cross-Modal Distillation for Speech to Video Generation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech2Video: Cross-Modal Distillation for Speech to Video Generation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210140.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-2|PAPER Tue-A-V-1-2 — SpecAugment++: A Hidden Space Data Augmentation Method for Acoustic Scene Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SpecAugment++: A Hidden Space Data Augmentation Method for Acoustic Scene Classification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210433.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-4|PAPER Thu-M-V-3-4 — Crossfire Conditional Generative Adversarial Networks for Singing Voice Extraction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Crossfire Conditional Generative Adversarial Networks for Singing Voice Extraction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210057.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-1|PAPER Thu-A-V-2-1 — Cross-Domain Speech Recognition with Unsupervised Character-Level Distribution Matching]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Domain Speech Recognition with Unsupervised Character-Level Distribution Matching</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212246.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-10|PAPER Thu-A-V-3-10 — Improved Speech Separation with Time-and-Frequency Cross-Domain Feature Selection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improved Speech Separation with Time-and-Frequency Cross-Domain Feature Selection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210229.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-7-2|PAPER Thu-M-V-7-2 — Injecting Descriptive Meta-Information into Pre-Trained Language Models with Hypernetworks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Injecting Descriptive Meta-Information into Pre-Trained Language Models with Hypernetworks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210238.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-6|PAPER Tue-M-V-3-6 — Know Your Enemy, Know Yourself: A Unified Two-Stage Framework for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Know Your Enemy, Know Yourself: A Unified Two-Stage Framework for Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211137.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-2|PAPER Wed-E-SS-1-2 — A Simultaneous Denoising and Dereverberation Framework with Target Decoupling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Simultaneous Denoising and Dereverberation Framework with Target Decoupling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210353.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-2|PAPER Wed-E-V-2-2 — Robust Laughter Detection in Noisy Environments]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Laughter Detection in Noisy Environments</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211846.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-2-13|PAPER Fri-M-V-2-13 — A New Vowel Normalization for Sociophonetics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A New Vowel Normalization for Sociophonetics</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211510.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-3|PAPER Wed-A-V-3-3 — Librispeech Transducer Model with Internal Language Model Prior Correction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Librispeech Transducer Model with Internal Language Model Prior Correction</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211255.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-1-5|PAPER Thu-M-O-1-5 — Investigating Methods to Improve Language Model Integration for Attention-Based Encoder-Decoder ASR Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating Methods to Improve Language Model Integration for Attention-Based Encoder-Decoder ASR Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211796.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-7|PAPER Wed-A-V-1-7 — Investigating the Interplay Between Affective, Phonatory and Motoric Subsystems in Autism Spectrum Disorder Using a Multimodal Dialogue Agent]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Interplay Between Affective, Phonatory and Motoric Subsystems in Autism Spectrum Disorder Using a Multimodal Dialogue Agent</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211370.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-4-8|PAPER Tue-M-V-4-8 — Leveraging ASR N-Best in Deep Entity Retrieval]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Leveraging ASR N-Best in Deep Entity Retrieval</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210337.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-3|PAPER Thu-A-SS-1-3 — Pushing the Limits of Non-Autoregressive Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Pushing the Limits of Non-Autoregressive Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211897.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-11|PAPER Thu-A-SS-1-11 — WaveGrad 2: Iterative Refinement for Text-to-Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">WaveGrad 2: Iterative Refinement for Text-to-Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210695.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-3-3|PAPER Wed-E-O-3-3 — Audiovisual Transfer Learning for Audio Tagging and Sound Event Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audiovisual Transfer Learning for Audio Tagging and Sound Event Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211821.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-11|PAPER Tue-M-SS-1-11 — Ensemble-Within-Ensemble Classification for Escalation Prediction from Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ensemble-Within-Ensemble Classification for Escalation Prediction from Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211040.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-12|PAPER Thu-A-V-6-12 — kosp2e: Korean Speech to English Translation Corpus]]</div>|^<div class="cpauthorindexpersoncardpapertitle">kosp2e: Korean Speech to English Translation Corpus</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211016.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-7|PAPER Wed-A-V-5-7 — UnivNet: A Neural Vocoder with Multi-Resolution Spectrogram Discriminators for High-Fidelity Waveform Generation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">UnivNet: A Neural Vocoder with Multi-Resolution Spectrogram Discriminators for High-Fidelity Waveform Generation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211428.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-4|PAPER Fri-M-V-6-4 — Streaming Transformer for Hardware Efficient Voice Trigger Detection and False Trigger Mitigation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Streaming Transformer for Hardware Efficient Voice Trigger Detection and False Trigger Mitigation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210249.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-SS-1-4|PAPER Wed-A-SS-1-4 — Team02 Text-Independent Speaker Verification System for SdSV Challenge 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Team02 Text-Independent Speaker Verification System for SdSV Challenge 2021</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210111.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-1-2|PAPER Tue-M-V-1-2 — Bidirectional Multiscale Feature Aggregation for Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bidirectional Multiscale Feature Aggregation for Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210405.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-3|PAPER Tue-E-V-1-3 — Improved Meta-Learning Training for Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improved Meta-Learning Training for Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210391.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-4|PAPER Wed-M-V-2-4 — Speech SimCLR: Combining Contrastive and Reconstruction Objective for Self-Supervised Speech Representation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech SimCLR: Combining Contrastive and Reconstruction Objective for Self-Supervised Speech Representation Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212260.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-12|PAPER Thu-A-V-3-12 — Neural Speaker Extraction with Speaker-Speech Cross-Attention Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Neural Speaker Extraction with Speaker-Speech Cross-Attention Network</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211378.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-7|PAPER Thu-A-V-3-7 — AvaTr: One-Shot Speaker Extraction with Transformers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AvaTr: One-Shot Speaker Extraction with Transformers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210256.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-3-1|PAPER Tue-M-O-3-1 — A Benchmark of Dynamical Variational Autoencoders Applied to Speech Spectrogram Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Benchmark of Dynamical Variational Autoencoders Applied to Speech Spectrogram Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211051.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-10|PAPER Wed-A-V-4-10 — Feature Fusion by Attention Networks for Robust DOA Estimation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Feature Fusion by Attention Networks for Robust DOA Estimation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211162.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-1|PAPER Tue-E-V-5-1 — Semantic Data Augmentation for End-to-End Mandarin Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semantic Data Augmentation for End-to-End Mandarin Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211525.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-9|PAPER Wed-M-V-2-9 — Identifying Indicators of Vulnerability from Short Speech Segments Using Acoustic and Textual Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Identifying Indicators of Vulnerability from Short Speech Segments Using Acoustic and Textual Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210037.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-1|PAPER Tue-M-V-6-1 — Attention-Based Cross-Modal Fusion for Audio-Visual Voice Activity Detection in Musical Video Streams]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Attention-Based Cross-Modal Fusion for Audio-Visual Voice Activity Detection in Musical Video Streams</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210676.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-11|PAPER Fri-A-SS-2-11 — Conformer Parrotron: A Faster and Stronger End-to-End Speech Conversion and Recognition Model for Atypical Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Conformer Parrotron: A Faster and Stronger End-to-End Speech Conversion and Recognition Model for Atypical Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210947.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-7|PAPER Fri-A-V-5-7 — Towards Multi-Scale Style Control for Expressive Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Multi-Scale Style Control for Expressive Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210883.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-5-5|PAPER Thu-M-V-5-5 — Fine-Grained Prosody Modeling in Neural Speech Synthesis Using ToBI Representation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fine-Grained Prosody Modeling in Neural Speech Synthesis Using ToBI Representation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211162.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-1|PAPER Tue-E-V-5-1 — Semantic Data Augmentation for End-to-End Mandarin Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semantic Data Augmentation for End-to-End Mandarin Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210391.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-4|PAPER Wed-M-V-2-4 — Speech SimCLR: Combining Contrastive and Reconstruction Objective for Self-Supervised Speech Representation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech SimCLR: Combining Contrastive and Reconstruction Objective for Self-Supervised Speech Representation Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211965.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-5|PAPER Thu-A-V-6-5 — GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211253.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-6-4|PAPER Tue-A-V-6-4 — An Improved StarGAN for Emotional Voice Conversion: Enhancing Voice Quality and Data Augmentation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Improved StarGAN for Emotional Voice Conversion: Enhancing Voice Quality and Data Augmentation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210905.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-9|PAPER Tue-A-V-4-9 — Phonetically Motivated Self-Supervised Speech Representation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonetically Motivated Self-Supervised Speech Representation Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210433.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-4|PAPER Thu-M-V-3-4 — Crossfire Conditional Generative Adversarial Networks for Singing Voice Extraction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Crossfire Conditional Generative Adversarial Networks for Singing Voice Extraction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211708.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-3|PAPER Wed-A-V-2-3 — Incorporating External POS Tagger for Punctuation Restoration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Incorporating External POS Tagger for Punctuation Restoration</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210142.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-1|PAPER Thu-M-V-3-1 — Ultra Fast Speech Separation Model with Teacher Student Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ultra Fast Speech Separation Model with Teacher Student Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210818.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-7|PAPER Fri-A-V-6-7 — End-to-End Cross-Lingual Spoken Language Understanding Model with Multilingual Pretraining]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Cross-Lingual Spoken Language Understanding Model with Multilingual Pretraining</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210894.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-9|PAPER Wed-E-V-3-9 — Emitting Word Timings with HMM-Free End-to-End System in Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Emitting Word Timings with HMM-Free End-to-End System in Automatic Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210283.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-5|PAPER Tue-E-V-6-5 — VQMIVC: Vector Quantization and Mutual Information-Based Unsupervised Speech Representation Disentanglement for One-Shot Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">VQMIVC: Vector Quantization and Mutual Information-Based Unsupervised Speech Representation Disentanglement for One-Shot Voice Conversion</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212139.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-10|PAPER Thu-M-V-1-10 — Unsupervised Domain Adaptation for Dysarthric Speech Detection via Domain Adversarial Training and Mutual Information Minimization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Domain Adaptation for Dysarthric Speech Detection via Domain Adversarial Training and Mutual Information Minimization</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210458.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-9|PAPER Fri-M-V-6-9 — Energy-Friendly Keyword Spotting System Using Add-Based Convolution]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Energy-Friendly Keyword Spotting System Using Add-Based Convolution</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211962.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-12|PAPER Wed-E-V-3-12 — 4-Bit Quantization of LSTM-Based Speech Recognition Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">4-Bit Quantization of LSTM-Based Speech Recognition Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211335.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-9|PAPER Tue-M-V-6-9 — End-to-End Transformer-Based Open-Vocabulary Keyword Spotting with Location-Guided Local Attention]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Transformer-Based Open-Vocabulary Keyword Spotting with Location-Guided Local Attention</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210182.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-2-1|PAPER Tue-A-O-2-1 — Prosodic Disambiguation Using Chironomic Stylization of Intonation with Native and Non-Native Speakers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Prosodic Disambiguation Using Chironomic Stylization of Intonation with Native and Non-Native Speakers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211092.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-4-7|PAPER Fri-M-V-4-7 — UnitNet-Based Hybrid Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">UnitNet-Based Hybrid Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210419.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-6|PAPER Tue-M-V-5-6 — Scaling Sparsemax Based Channel Selection for Speech Recognition with ad-hoc Microphone Arrays]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Scaling Sparsemax Based Channel Selection for Speech Recognition with ad-hoc Microphone Arrays</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210427.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-7|PAPER Wed-A-V-3-7 — Transformer-Based End-to-End Speech Recognition with Residual Gaussian-Based Self-Attention]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transformer-Based End-to-End Speech Recognition with Residual Gaussian-Based Self-Attention</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210922.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-6|PAPER Wed-E-V-5-6 — A Maximum Likelihood Approach to SNR-Progressive Learning Using Generalized Gaussian Distribution for LSTM-Based Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Maximum Likelihood Approach to SNR-Progressive Learning Using Generalized Gaussian Distribution for LSTM-Based Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210007.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-2|PAPER Fri-A-V-3-2 — Weakly Supervised Construction of ASR Systems from Massive Video Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Weakly Supervised Construction of ASR Systems from Massive Video Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211407.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-10|PAPER Fri-A-V-5-10 — Improving Performance of Seen and Unseen Speech Style Transfer in End-to-End Neural TTS]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Performance of Seen and Unseen Speech Style Transfer in End-to-End Neural TTS</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210587.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-9|PAPER Wed-M-V-6-9 — Reducing Exposure Bias in Training Recurrent Neural Network Transducers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reducing Exposure Bias in Training Recurrent Neural Network Transducers</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211962.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-12|PAPER Wed-E-V-3-12 — 4-Bit Quantization of LSTM-Based Speech Recognition Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">4-Bit Quantization of LSTM-Based Speech Recognition Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210238.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-6|PAPER Tue-M-V-3-6 — Know Your Enemy, Know Yourself: A Unified Two-Stage Framework for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Know Your Enemy, Know Yourself: A Unified Two-Stage Framework for Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211137.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-2|PAPER Wed-E-SS-1-2 — A Simultaneous Denoising and Dereverberation Framework with Target Decoupling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Simultaneous Denoising and Dereverberation Framework with Target Decoupling</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212022.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-1-5|PAPER Fri-A-SS-1-5 — Acoustic Echo Cancellation Using Deep Complex Neural Network with Nonlinear Magnitude Compression and Phase Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Echo Cancellation Using Deep Complex Neural Network with Nonlinear Magnitude Compression and Phase Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210944.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-3-3|PAPER Tue-A-V-3-3 — Microphone Array Generalization for Multichannel Narrowband Deep Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Microphone Array Generalization for Multichannel Narrowband Deep Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210220.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-3|PAPER Wed-E-V-5-3 — Human Listening and Live Captioning: Multi-Task Training for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Human Listening and Live Captioning: Multi-Task Training for Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210102.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-2|PAPER Thu-A-V-2-2 — Large-Scale Pre-Training of End-to-End Multi-Talker ASR for Meeting Transcription with Single Distant Microphone]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Large-Scale Pre-Training of End-to-End Multi-Talker ASR for Meeting Transcription with Single Distant Microphone</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210101.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-2|PAPER Fri-A-V-1-2 — End-to-End Speaker-Attributed ASR with Transformer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Speaker-Attributed ASR with Transformer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210552.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-4|PAPER Wed-M-V-3-4 — Cross-Lingual Voice Conversion with Disentangled Universal Linguistic Representations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Lingual Voice Conversion with Disentangled Universal Linguistic Representations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210687.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-11|PAPER Tue-E-V-6-11 — Cross-Lingual Voice Conversion with a Cycle Consistency Loss on Linguistic Representation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Lingual Voice Conversion with a Cycle Consistency Loss on Linguistic Representation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210296.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-4|PAPER Wed-E-SS-1-4 — DPCRN: Dual-Path Convolution Recurrent Network for Single Channel Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DPCRN: Dual-Path Convolution Recurrent Network for Single Channel Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210332.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-3|PAPER Thu-A-SS-2-3 — WavBERT: Exploiting Semantic and Non-Semantic Speech Using Wav2vec and BERT for Dementia Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">WavBERT: Exploiting Semantic and Non-Semantic Speech Using Wav2vec and BERT for Dementia Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211162.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-1|PAPER Tue-E-V-5-1 — Semantic Data Augmentation for End-to-End Mandarin Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semantic Data Augmentation for End-to-End Mandarin Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210482.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-4|PAPER Tue-E-V-1-4 — Variational Information Bottleneck Based Regularization for Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Variational Information Bottleneck Based Regularization for Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210379.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-2|PAPER Wed-A-V-2-2 — A Light-Weight Contextual Spelling Correction Model for Customizing Transducer-Based Speech Recognition Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Light-Weight Contextual Spelling Correction Model for Customizing Transducer-Based Speech Recognition Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210056.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-1-3|PAPER Thu-M-SS-1-3 — Dynamic Multi-Scale Convolution for Dialect Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dynamic Multi-Scale Convolution for Dialect Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211454.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-1|PAPER Tue-A-V-4-1 — Improving Streaming Transformer Based ASR Under a Framework of Self-Supervised Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Streaming Transformer Based ASR Under a Framework of Self-Supervised Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210777.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-5|PAPER Fri-A-V-1-5 — Explore wav2vec 2.0 for Mispronunciation Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Explore wav2vec 2.0 for Mispronunciation Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210229.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-7-2|PAPER Thu-M-V-7-2 — Injecting Descriptive Meta-Information into Pre-Trained Language Models with Hypernetworks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Injecting Descriptive Meta-Information into Pre-Trained Language Models with Hypernetworks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-7|PAPER Fri-A-V-4-7 — Adaptive Margin Circle Loss for Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adaptive Margin Circle Loss for Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211137.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-2|PAPER Wed-E-SS-1-2 — A Simultaneous Denoising and Dereverberation Framework with Target Decoupling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Simultaneous Denoising and Dereverberation Framework with Target Decoupling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212016.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-O-1-5|PAPER Tue-A-O-1-5 — ICSpk: Interpretable Complex Speaker Embedding Extractor from Raw Waveform]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ICSpk: Interpretable Complex Speaker Embedding Extractor from Raw Waveform</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212028.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-11|PAPER Tue-A-V-1-11 — Variational Information Bottleneck for Effective Low-Resource Audio Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Variational Information Bottleneck for Effective Low-Resource Audio Classification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211996.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-9|PAPER Wed-M-V-3-9 — Speech2Video: Cross-Modal Distillation for Speech to Video Generation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech2Video: Cross-Modal Distillation for Speech to Video Generation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212025.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-1-4|PAPER Wed-E-O-1-4 — Effective Phase Encoding for End-To-End Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Effective Phase Encoding for End-To-End Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212039.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-1|PAPER Thu-A-V-5-1 — Federated Learning with Dynamic Transformer for Text to Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Federated Learning with Dynamic Transformer for Text to Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210600.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-1-5|PAPER Tue-M-V-1-5 — Binary Neural Network for Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Binary Neural Network for Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210398.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-SS-1-5|PAPER Wed-A-SS-1-5 — Our Learned Lessons from Cross-Lingual Speaker Verification: The CRMI-DKU System Description for the Short-Duration Speaker Verification Challenge 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Our Learned Lessons from Cross-Lingual Speaker Verification: The CRMI-DKU System Description for the Short-Duration Speaker Verification Challenge 2021</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210602.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-10|PAPER Fri-M-V-6-10 — The 2020 Personalized Voice Trigger Challenge: Open Datasets, Evaluation Metrics, Baseline System and Results]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The 2020 Personalized Voice Trigger Challenge: Open Datasets, Evaluation Metrics, Baseline System and Results</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210256.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-3-1|PAPER Tue-M-O-3-1 — A Benchmark of Dynamical Variational Autoencoders Applied to Speech Spectrogram Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Benchmark of Dynamical Variational Autoencoders Applied to Speech Spectrogram Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211983.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-5|PAPER Fri-M-V-3-5 — WeNet: Production Oriented Streaming and Non-Streaming End-to-End Speech Recognition Toolkit]]</div>|^<div class="cpauthorindexpersoncardpapertitle">WeNet: Production Oriented Streaming and Non-Streaming End-to-End Speech Recognition Toolkit</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210097.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-2|PAPER Fri-M-V-6-2 — Paraphrase Label Alignment for Voice Application Retrieval in Spoken Language Understanding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Paraphrase Label Alignment for Voice Application Retrieval in Spoken Language Understanding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211017.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-10|PAPER Tue-A-V-4-10 — Improving RNN-T for Domain Scaling Using Semi-Supervised Training with Neural TTS]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving RNN-T for Domain Scaling Using Semi-Supervised Training with Neural TTS</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212075.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-14|PAPER Wed-E-V-3-14 — Minimum Word Error Rate Training with Language Model Fusion for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Minimum Word Error Rate Training with Language Model Fusion for End-to-End Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211457.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-SS-1-1|PAPER Wed-M-SS-1-1 — A Causal U-Net Based Neural Beamforming Network for Real-Time Multi-Channel Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Causal U-Net Based Neural Beamforming Network for Real-Time Multi-Channel Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211410.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-7|PAPER Wed-E-SS-1-7 — Low-Delay Speech Enhancement Using Perceptually Motivated Target and Loss]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Low-Delay Speech Enhancement Using Perceptually Motivated Target and Loss</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210067.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-SS-1-3|PAPER Fri-M-SS-1-3 — An Improved Wav2Vec 2.0 Pre-Training Approach Using Enhanced Local Dependency Modeling for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Improved Wav2Vec 2.0 Pre-Training Approach Using Enhanced Local Dependency Modeling for Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211983.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-5|PAPER Fri-M-V-3-5 — WeNet: Production Oriented Streaming and Non-Streaming End-to-End Speech Recognition Toolkit]]</div>|^<div class="cpauthorindexpersoncardpapertitle">WeNet: Production Oriented Streaming and Non-Streaming End-to-End Speech Recognition Toolkit</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210375.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-6|PAPER Wed-A-V-4-6 — Reliable Intensity Vector Selection for Multi-Source Direction-of-Arrival Estimation Using a Single Acoustic Vector Sensor]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reliable Intensity Vector Selection for Multi-Source Direction-of-Arrival Estimation Using a Single Acoustic Vector Sensor</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210056.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-1-3|PAPER Thu-M-SS-1-3 — Dynamic Multi-Scale Convolution for Dialect Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dynamic Multi-Scale Convolution for Dialect Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210702.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-1|PAPER Fri-M-V-7-1 — A Comparative Study on Recent Neural Spoofing Countermeasures for Synthetic Speech Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparative Study on Recent Neural Spoofing Countermeasures for Synthetic Speech Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210738.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-2|PAPER Fri-M-V-7-2 — An Initial Investigation for Detecting Partially Spoofed Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Initial Investigation for Detecting Partially Spoofed Audio</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211522.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-9|PAPER Fri-M-V-7-9 — Visualizing Classifier Adjacency Relations: A Case Study in Speaker Verification and Voice Anti-Spoofing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Visualizing Classifier Adjacency Relations: A Case Study in Speaker Verification and Voice Anti-Spoofing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210755.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-4|PAPER Wed-E-V-6-4 — AISHELL-3: A Multi-Speaker Mandarin TTS Corpus]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AISHELL-3: A Multi-Speaker Mandarin TTS Corpus</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211397.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-4|PAPER Thu-A-V-6-4 — AISHELL-4: An Open Source Dataset for Speech Enhancement, Separation, Recognition and Speaker Diarization in Conference Scenario]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AISHELL-4: An Open Source Dataset for Speech Enhancement, Separation, Recognition and Speaker Diarization in Conference Scenario</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211335.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-9|PAPER Tue-M-V-6-9 — End-to-End Transformer-Based Open-Vocabulary Keyword Spotting with Location-Guided Local Attention]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Transformer-Based Open-Vocabulary Keyword Spotting with Location-Guided Local Attention</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211996.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-9|PAPER Wed-M-V-3-9 — Speech2Video: Cross-Modal Distillation for Speech to Video Generation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech2Video: Cross-Modal Distillation for Speech to Video Generation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210259.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-7|PAPER Tue-M-V-3-7 — Speech Enhancement with Weakly Labelled Data from AudioSet]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Enhancement with Weakly Labelled Data from AudioSet</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210037.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-1|PAPER Tue-M-V-6-1 — Attention-Based Cross-Modal Fusion for Audio-Visual Voice Activity Detection in Musical Video Streams]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Attention-Based Cross-Modal Fusion for Audio-Visual Voice Activity Detection in Musical Video Streams</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210960.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-4|PAPER Fri-M-V-7-4 — Cross-Database Replay Detection in Terminal-Dependent Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Database Replay Detection in Terminal-Dependent Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210602.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-10|PAPER Fri-M-V-6-10 — The 2020 Personalized Voice Trigger Challenge: Open Datasets, Evaluation Metrics, Baseline System and Results]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The 2020 Personalized Voice Trigger Challenge: Open Datasets, Evaluation Metrics, Baseline System and Results</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210538.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-3|PAPER Tue-E-V-2-3 — Residual Echo and Noise Cancellation with Feature Attention Module and Multi-Domain Loss Function]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Residual Echo and Noise Cancellation with Feature Attention Module and Multi-Domain Loss Function</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211852.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-2-7|PAPER Fri-A-V-2-7 — Speech Emotion Recognition with Multi-Task Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Emotion Recognition with Multi-Task Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210374.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-1-4|PAPER Thu-M-SS-1-4 — An End-to-End Dialect Identification System with Transfer Learning from a Multilingual Automatic Speech Recognition Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An End-to-End Dialect Identification System with Transfer Learning from a Multilingual Automatic Speech Recognition Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210851.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-11|PAPER Thu-A-V-5-11 — Triple M: A Practical Text-to-Speech Synthesis System with Multi-Guidance Attention and Multi-Band Multi-Time LPCNet]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Triple M: A Practical Text-to-Speech Synthesis System with Multi-Guidance Attention and Multi-Band Multi-Time LPCNet</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211434.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-2|PAPER Tue-M-V-5-2 — Phoneme Recognition Through Fine Tuning of Phonetic Representations: A Case Study on Luhya Language Varieties]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phoneme Recognition Through Fine Tuning of Phonetic Representations: A Case Study on Luhya Language Varieties</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211803.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-10|PAPER Wed-E-V-1-10 — Hierarchical Phone Recognition with Compositional Phonetics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Hierarchical Phone Recognition with Compositional Phonetics</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211435.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-3|PAPER Thu-A-V-6-3 — Tusom2021: A Phonetically Transcribed Speech Dataset from an Endangered Language for Universal Phone Recognition Experiments]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Tusom2021: A Phonetically Transcribed Speech Dataset from an Endangered Language for Universal Phone Recognition Experiments</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210374.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-1-4|PAPER Thu-M-SS-1-4 — An End-to-End Dialect Identification System with Transfer Learning from a Multilingual Automatic Speech Recognition Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An End-to-End Dialect Identification System with Transfer Learning from a Multilingual Automatic Speech Recognition Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211457.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-SS-1-1|PAPER Wed-M-SS-1-1 — A Causal U-Net Based Neural Beamforming Network for Real-Time Multi-Channel Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Causal U-Net Based Neural Beamforming Network for Real-Time Multi-Channel Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211410.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-7|PAPER Wed-E-SS-1-7 — Low-Delay Speech Enhancement Using Perceptually Motivated Target and Loss]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Low-Delay Speech Enhancement Using Perceptually Motivated Target and Loss</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210520.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-4|PAPER Wed-E-V-5-4 — Multi-Stage Progressive Speech Enhancement Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Stage Progressive Speech Enhancement Network</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211990.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-6-9|PAPER Tue-A-V-6-9 — Adversarially Learning Disentangled Speech Representations for Robust Multi-Factor Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adversarially Learning Disentangled Speech Representations for Robust Multi-Factor Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210378.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-2|PAPER Fri-M-V-5-2 — A Multi-Branch Deep Learning Network for Automated Detection of COVID-19]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Multi-Branch Deep Learning Network for Automated Detection of COVID-19</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210082.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-1-1|PAPER Wed-M-V-1-1 — End-to-End Language Diarization for Bilingual Code-Switching Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Language Diarization for Bilingual Code-Switching Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211351.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-6-6|PAPER Tue-A-V-6-6 — Enriching Source Style Transfer in Recognition-Synthesis Based Non-Parallel Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Enriching Source Style Transfer in Recognition-Synthesis Based Non-Parallel Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211983.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-5|PAPER Fri-M-V-3-5 — WeNet: Production Oriented Streaming and Non-Streaming End-to-End Speech Recognition Toolkit]]</div>|^<div class="cpauthorindexpersoncardpapertitle">WeNet: Production Oriented Streaming and Non-Streaming End-to-End Speech Recognition Toolkit</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210415.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-11|PAPER Fri-A-V-3-11 — Efficient Conformer with Prob-Sparse Attention Mechanism for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Efficient Conformer with Prob-Sparse Attention Mechanism for End-to-End Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212267.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-13|PAPER Wed-A-V-4-13 — Cramér-Rao Lower Bound for DOA Estimation with an Array of Directional Microphones in Reverberant Environments]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cramér-Rao Lower Bound for DOA Estimation with an Array of Directional Microphones in Reverberant Environments</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212138.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-12|PAPER Fri-M-V-5-12 — Overlapped Speech Detection Based on Spectral and Spatial Feature Fusion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Overlapped Speech Detection Based on Spectral and Spatial Feature Fusion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211405.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-6|PAPER Tue-M-V-2-6 — Deliberation-Based Multi-Pass Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deliberation-Based Multi-Pass Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212121.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-13|PAPER Thu-A-SS-1-13 — VAENAR-TTS: Variational Auto-Encoder Based Non-AutoRegressive Text-to-Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">VAENAR-TTS: Variational Auto-Encoder Based Non-AutoRegressive Text-to-Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212125.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-12|PAPER Fri-M-V-7-12 — Channel-Wise Gated Res2Net: Towards Robust Detection of Synthetic Speech Attacks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Channel-Wise Gated Res2Net: Towards Robust Detection of Synthetic Speech Attacks</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210285.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-8|PAPER Fri-A-SS-2-8 — Learning Explicit Prosody Models and Deep Speaker Embeddings for Atypical Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Explicit Prosody Models and Deep Speaker Embeddings for Atypical Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210570.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-4|PAPER Tue-E-V-2-4 — MIMO Self-Attentive RNN Beamformer for Multi-Speaker Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MIMO Self-Attentive RNN Beamformer for Multi-Speaker Speech Separation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212121.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-13|PAPER Thu-A-SS-1-13 — VAENAR-TTS: Variational Auto-Encoder Based Non-AutoRegressive Text-to-Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">VAENAR-TTS: Variational Auto-Encoder Based Non-AutoRegressive Text-to-Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211343.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-6|PAPER Fri-M-V-7-6 — Pairing Weak with Strong: Twin Models for Defending Against Adversarial Attack on Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Pairing Weak with Strong: Twin Models for Defending Against Adversarial Attack on Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212125.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-12|PAPER Fri-M-V-7-12 — Channel-Wise Gated Res2Net: Towards Robust Detection of Synthetic Speech Attacks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Channel-Wise Gated Res2Net: Towards Robust Detection of Synthetic Speech Attacks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210057.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-1|PAPER Thu-A-V-2-1 — Cross-Domain Speech Recognition with Unsupervised Character-Level Distribution Matching]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Domain Speech Recognition with Unsupervised Character-Level Distribution Matching</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210584.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-6|PAPER Fri-A-V-5-6 — Adaptive Text to Speech for Spontaneous Style]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adaptive Text to Speech for Spontaneous Style</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211457.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-SS-1-1|PAPER Wed-M-SS-1-1 — A Causal U-Net Based Neural Beamforming Network for Real-Time Multi-Channel Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Causal U-Net Based Neural Beamforming Network for Real-Time Multi-Channel Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211410.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-7|PAPER Wed-E-SS-1-7 — Low-Delay Speech Enhancement Using Perceptually Motivated Target and Loss]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Low-Delay Speech Enhancement Using Perceptually Motivated Target and Loss</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210281.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-3|PAPER Tue-A-V-1-3 — An Effective Mutual Mean Teaching Based Domain Adaptation Method for Sound Event Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Effective Mutual Mean Teaching Based Domain Adaptation Method for Sound Event Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211775.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-9|PAPER Tue-E-V-3-9 — SUPERB: Speech Processing Universal PERformance Benchmark]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SUPERB: Speech Processing Universal PERformance Benchmark</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211503.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-8|PAPER Wed-M-V-2-8 — Speech Representation Learning Combining Conformer CPC with Deep Cluster for the ZeroSpeech Challenge 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Representation Learning Combining Conformer CPC with Deep Cluster for the ZeroSpeech Challenge 2021</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212155.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-2|PAPER Thu-A-SS-1-2 — Multi-Speaker ASR Combining Non-Autoregressive Conformer CTC and Conditional Speaker Chain]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Speaker ASR Combining Non-Autoregressive Conformer CTC and Conditional Speaker Chain</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211556.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-9|PAPER Thu-A-SS-1-9 — Streaming End-to-End ASR Based on Blockwise Non-Autoregressive Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Streaming End-to-End ASR Based on Blockwise Non-Autoregressive Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210661.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-8|PAPER Wed-A-V-2-8 — Token-Level Supervised Contrastive Learning for Punctuation Restoration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Token-Level Supervised Contrastive Learning for Punctuation Restoration</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211965.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-5|PAPER Thu-A-V-6-5 — GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210261.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-3|PAPER Thu-M-V-4-3 — Online Speaker Diarization Equipped with Discriminative Modeling and Guided Inference]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Online Speaker Diarization Equipped with Discriminative Modeling and Guided Inference</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211011.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-5|PAPER Tue-M-V-2-5 — Information Sieve: Content Leakage Reduction in End-to-End Prosody Transfer for Expressive Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Information Sieve: Content Leakage Reduction in End-to-End Prosody Transfer for Expressive Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211411.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-11|PAPER Wed-E-V-5-11 — Speech Enhancement with Topology-Enhanced Generative Adversarial Networks (GANs)]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Enhancement with Topology-Enhanced Generative Adversarial Networks (GANs)</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210812.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-3|PAPER Wed-M-V-4-3 — X-net: A Joint Scale Down and Scale Up Method for Voice Call]]</div>|^<div class="cpauthorindexpersoncardpapertitle">X-net: A Joint Scale Down and Scale Up Method for Voice Call</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211242.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-1|PAPER Tue-M-V-5-1 — End-to-End Spelling Correction Conditioned on Acoustic Feature for Code-Switching Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Spelling Correction Conditioned on Acoustic Feature for Code-Switching Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211176.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-8|PAPER Wed-E-V-2-8 — TDCA-Net: Time-Domain Channel Attention Network for Depression Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">TDCA-Net: Time-Domain Channel Attention Network for Depression Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211415.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-6|PAPER Thu-A-SS-2-6 — Alzheimer’s Disease Detection from Spontaneous Speech Through Combining Linguistic Complexity and (Dis)Fluency Features with Pretrained Language Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Alzheimer’s Disease Detection from Spontaneous Speech Through Combining Linguistic Complexity and (Dis)Fluency Features with Pretrained Language Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210899.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-SS-1-5|PAPER Wed-M-SS-1-5 — Inplace Gated Convolutional Recurrent Neural Network for Dual-Channel Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Inplace Gated Convolutional Recurrent Neural Network for Dual-Channel Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210743.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-SS-1-6|PAPER Wed-A-SS-1-6 — Investigation of IMU&Elevoc Submission for the Short-Duration Speaker Verification Challenge 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigation of IMU&Elevoc Submission for the Short-Duration Speaker Verification Challenge 2021</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211042.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-6|PAPER Wed-E-SS-1-6 — DBNet: A Dual-Branch Network Architecture Processing on Spectrum and Waveform for Single-Channel Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DBNet: A Dual-Branch Network Architecture Processing on Spectrum and Waveform for Single-Channel Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211977.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-11|PAPER Tue-M-V-6-11 — A Lightweight Framework for Online Voice Activity Detection in the Wild]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Lightweight Framework for Online Voice Activity Detection in the Wild</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210739.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-9|PAPER Wed-A-V-2-9 — BART Based Semantic Correction for Mandarin Automatic Speech Recognition System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">BART Based Semantic Correction for Mandarin Automatic Speech Recognition System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210440.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-5|PAPER Tue-A-V-2-5 — An Attention Self-Supervised Contrastive Learning Based Three-Stage Model for Hand Shape Feature Representation in Cued Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Attention Self-Supervised Contrastive Learning Based Three-Stage Model for Hand Shape Feature Representation in Cued Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210432.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-4|PAPER Thu-M-V-2-4 — Cross-Modal Knowledge Distillation Method for Automatic Cued Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Modal Knowledge Distillation Method for Automatic Cued Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210506.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-9|PAPER Tue-E-V-6-9 — Two-Pathway Style Embedding for Arbitrary Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Two-Pathway Style Embedding for Arbitrary Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210582.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-8|PAPER Tue-M-V-3-8 — Improving Perceptual Quality by Phone-Fortified Perceptual Loss Using Wasserstein Distance for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Perceptual Quality by Phone-Fortified Perceptual Loss Using Wasserstein Distance for Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210599.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-9|PAPER Tue-M-V-3-9 — MetricGAN+: An Improved Version of MetricGAN for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MetricGAN+: An Improved Version of MetricGAN for Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211075.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-2|PAPER Tue-E-V-5-2 — Layer-Wise Fast Adaptation for End-to-End Multi-Accent Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Layer-Wise Fast Adaptation for End-to-End Multi-Accent Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210506.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-9|PAPER Tue-E-V-6-9 — Two-Pathway Style Embedding for Arbitrary Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Two-Pathway Style Embedding for Arbitrary Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210283.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-5|PAPER Tue-E-V-6-5 — VQMIVC: Vector Quantization and Mutual Information-Based Unsupervised Speech Representation Disentanglement for One-Shot Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">VQMIVC: Vector Quantization and Mutual Information-Based Unsupervised Speech Representation Disentanglement for One-Shot Voice Conversion</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212139.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-10|PAPER Thu-M-V-1-10 — Unsupervised Domain Adaptation for Dysarthric Speech Detection via Domain Adversarial Training and Mutual Information Minimization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Domain Adaptation for Dysarthric Speech Detection via Domain Adversarial Training and Mutual Information Minimization</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212121.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-13|PAPER Thu-A-SS-1-13 — VAENAR-TTS: Variational Auto-Encoder Based Non-AutoRegressive Text-to-Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">VAENAR-TTS: Variational Auto-Encoder Based Non-AutoRegressive Text-to-Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212125.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-12|PAPER Fri-M-V-7-12 — Channel-Wise Gated Res2Net: Towards Robust Detection of Synthetic Speech Attacks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Channel-Wise Gated Res2Net: Towards Robust Detection of Synthetic Speech Attacks</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210060.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-4|PAPER Fri-A-SS-2-4 — Spectro-Temporal Deep Features for Disordered Speech Assessment and Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spectro-Temporal Deep Features for Disordered Speech Assessment and Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210168.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-6|PAPER Fri-A-SS-2-6 — Adversarial Data Augmentation for Disordered Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adversarial Data Augmentation for Disordered Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210173.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-7|PAPER Fri-A-SS-2-7 — Variational Auto-Encoder Based Variability Encoding for Dysarthric Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Variational Auto-Encoder Based Variability Encoding for Dysarthric Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210285.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-8|PAPER Fri-A-SS-2-8 — Learning Explicit Prosody Models and Deep Speaker Embeddings for Atypical Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Explicit Prosody Models and Deep Speaker Embeddings for Atypical Voice Conversion</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210289.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-9|PAPER Fri-A-SS-2-9 — Bayesian Parametric and Architectural Domain Adaptation of LF-MMI Trained TDNNs for Elderly and Dysarthric Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bayesian Parametric and Architectural Domain Adaptation of LF-MMI Trained TDNNs for Elderly and Dysarthric Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210060.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-4|PAPER Fri-A-SS-2-4 — Spectro-Temporal Deep Features for Disordered Speech Assessment and Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spectro-Temporal Deep Features for Disordered Speech Assessment and Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210168.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-6|PAPER Fri-A-SS-2-6 — Adversarial Data Augmentation for Disordered Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adversarial Data Augmentation for Disordered Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210173.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-7|PAPER Fri-A-SS-2-7 — Variational Auto-Encoder Based Variability Encoding for Dysarthric Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Variational Auto-Encoder Based Variability Encoding for Dysarthric Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210289.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-9|PAPER Fri-A-SS-2-9 — Bayesian Parametric and Architectural Domain Adaptation of LF-MMI Trained TDNNs for Elderly and Dysarthric Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bayesian Parametric and Architectural Domain Adaptation of LF-MMI Trained TDNNs for Elderly and Dysarthric Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210235.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-2|PAPER Tue-E-V-1-2 — The DKU-Duke-Lenovo System Description for the Fearless Steps Challenge Phase III]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The DKU-Duke-Lenovo System Description for the Fearless Steps Challenge Phase III</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210602.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-10|PAPER Fri-M-V-6-10 — The 2020 Personalized Voice Trigger Challenge: Open Datasets, Evaluation Metrics, Baseline System and Results]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The 2020 Personalized Voice Trigger Challenge: Open Datasets, Evaluation Metrics, Baseline System and Results</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211341.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-6|PAPER Tue-E-V-3-6 — An Attribute-Aligned Strategy for Learning Speech Representation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Attribute-Aligned Strategy for Learning Speech Representation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210405.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-3|PAPER Tue-E-V-1-3 — Improved Meta-Learning Training for Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improved Meta-Learning Training for Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210039.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-1|PAPER Wed-M-V-5-1 — Perception of Standard Arabic Synthetic Speech Rate]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Perception of Standard Arabic Synthetic Speech Rate</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212008.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-11|PAPER Thu-M-V-1-11 — Source and Vocal Tract Cues for Speech-Based Classification of Patients with Parkinson’s Disease and Healthy Subjects]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Source and Vocal Tract Cues for Speech-Based Classification of Patients with Parkinson’s Disease and Healthy Subjects</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211017.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-10|PAPER Tue-A-V-4-10 — Improving RNN-T for Domain Scaling Using Semi-Supervised Training with Neural TTS]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving RNN-T for Domain Scaling Using Semi-Supervised Training with Neural TTS</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211884.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-9|PAPER Tue-E-V-5-9 — Rapid Speaker Adaptation for Conformer Transducer: Attention and Bias Are All You Need]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Rapid Speaker Adaptation for Conformer Transducer: Attention and Bias Are All You Need</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210602.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-10|PAPER Fri-M-V-6-10 — The 2020 Personalized Voice Trigger Challenge: Open Datasets, Evaluation Metrics, Baseline System and Results]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The 2020 Personalized Voice Trigger Challenge: Open Datasets, Evaluation Metrics, Baseline System and Results</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212137.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-1-8|PAPER Tue-M-V-1-8 — Phoneme-Aware and Channel-Wise Attentive Learning for Text Dependent Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phoneme-Aware and Channel-Wise Attentive Learning for Text Dependent Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211167.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-1-6|PAPER Thu-M-SS-1-6 — Additive Phoneme-Aware Margin Softmax Loss for Language Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Additive Phoneme-Aware Margin Softmax Loss for Language Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212021.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-9|PAPER Fri-A-V-4-9 — Automatic Error Correction for Speaker Embedding Learning with Noisy Labels]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Error Correction for Speaker Embedding Learning with Noisy Labels</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211477.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-2|PAPER Wed-A-V-3-2 — Domain-Aware Self-Attention for Multi-Domain Neural Machine Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Domain-Aware Self-Attention for Multi-Domain Neural Machine Translation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210281.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-3|PAPER Tue-A-V-1-3 — An Effective Mutual Mean Teaching Based Domain Adaptation Method for Sound Event Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Effective Mutual Mean Teaching Based Domain Adaptation Method for Sound Event Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210776.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-1-3|PAPER Wed-M-V-1-3 — A Weight Moving Average Based Alternate Decoupled Learning Algorithm for Long-Tailed Language Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Weight Moving Average Based Alternate Decoupled Learning Algorithm for Long-Tailed Language Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211930.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-SS-1-1|PAPER Fri-M-SS-1-1 — OpenASR20: An Open Challenge for Automatic Speech Recognition of Conversational Telephone Speech in Low-Resource Languages]]</div>|^<div class="cpauthorindexpersoncardpapertitle">OpenASR20: An Open Challenge for Automatic Speech Recognition of Conversational Telephone Speech in Low-Resource Languages</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210501.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-5|PAPER Fri-A-V-6-5 — Three-Module Modeling For End-to-End Spoken Language Understanding Using Pre-Trained DNN-HMM-Based Acoustic-Phonetic Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Three-Module Modeling For End-to-End Spoken Language Understanding Using Pre-Trained DNN-HMM-Based Acoustic-Phonetic Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210559.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-1-4|PAPER Tue-M-V-1-4 — Improving Deep CNN Architectures with Variable-Length Training Samples for Text-Independent Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Deep CNN Architectures with Variable-Length Training Samples for Text-Independent Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210557.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-10|PAPER Tue-E-V-6-10 — Non-Parallel Any-to-Many Voice Conversion by Replacing Speaker Statistics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Non-Parallel Any-to-Many Voice Conversion by Replacing Speaker Statistics</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211705.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-11|PAPER Fri-M-V-5-11 — Generalized Spoofing Detection Inspired from Audio Generation Artifacts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Generalized Spoofing Detection Inspired from Audio Generation Artifacts</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211370.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-4-8|PAPER Tue-M-V-4-8 — Leveraging ASR N-Best in Deep Entity Retrieval]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Leveraging ASR N-Best in Deep Entity Retrieval</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210520.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-4|PAPER Wed-E-V-5-4 — Multi-Stage Progressive Speech Enhancement Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Stage Progressive Speech Enhancement Network</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210847.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-3|PAPER Fri-M-V-7-3 — Siamese Network with wav2vec Feature for Spoofing Speech Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Siamese Network with wav2vec Feature for Spoofing Speech Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211452.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-8|PAPER Fri-M-V-7-8 — Voting for the Right Answer: Adversarial Defense for Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voting for the Right Answer: Adversarial Defense for Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211843.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-8|PAPER Tue-E-V-5-8 — Zero-Shot Cross-Lingual Phonetic Recognition with External Language Embedding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Zero-Shot Cross-Lingual Phonetic Recognition with External Language Embedding</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211973.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-2|PAPER Wed-E-V-5-2 — Speech Denoising with Auditory Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Denoising with Auditory Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211599.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-8|PAPER Wed-E-V-6-8 — Hi-Fi Multi-Speaker English TTS Dataset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Hi-Fi Multi-Speaker English TTS Dataset</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211571.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-13|PAPER Fri-A-V-1-13 — NeMo Inverse Text Normalization: From Development to Production]]</div>|^<div class="cpauthorindexpersoncardpapertitle">NeMo Inverse Text Normalization: From Development to Production</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218024.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-S&T-1-3|PAPER Fri-A-S&T-1-3 — NeMo (Inverse) Text Normalization: From Development to Production]]</div>|^<div class="cpauthorindexpersoncardpapertitle">NeMo (Inverse) Text Normalization: From Development to Production</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210147.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-7|PAPER Fri-M-V-6-7 — A Meta-Learning Approach for User-Defined Spoken Term Classification with Varying Classes and Examples]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Meta-Learning Approach for User-Defined Spoken Term Classification with Varying Classes and Examples</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211655.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-8|PAPER Tue-M-V-2-8 — Transformer-Based Acoustic Modeling for Streaming Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transformer-Based Acoustic Modeling for Streaming Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211566.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-3|PAPER Wed-M-V-6-3 — Contextualized Streaming End-to-End Speech Recognition with Trie-Based Deep Biasing and Shallow Fusion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Contextualized Streaming End-to-End Speech Recognition with Trie-Based Deep Biasing and Shallow Fusion</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211272.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-1|PAPER Wed-A-V-3-1 — Dynamic Encoder Transducer: A Flexible Solution for Trading Off Accuracy for Latency]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dynamic Encoder Transducer: A Flexible Solution for Trading Off Accuracy for Latency</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211921.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-14|PAPER Wed-A-V-3-14 — Flexi-Transducer: Optimizing Latency, Accuracy and Compute for Multi-Domain On-Device Scenarios]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Flexi-Transducer: Optimizing Latency, Accuracy and Compute for Multi-Domain On-Device Scenarios</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211887.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-6|PAPER Fri-A-V-3-6 — Dissecting User-Perceived Latency of On-Device E2E Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dissecting User-Perceived Latency of On-Device E2E Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210354.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-10|PAPER Fri-A-V-3-10 — Collaborative Training of Acoustic Encoders for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Collaborative Training of Acoustic Encoders for Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211840.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-1-1|PAPER Thu-A-V-1-1 — Temporal Context in Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Temporal Context in Speech Emotion Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210651.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-O-1-2|PAPER Fri-A-O-1-2 — The Application of Learnable STRF Kernels to the 2021 Fearless Steps Phase-03 SAD Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Application of Learnable STRF Kernels to the 2021 Fearless Steps Phase-03 SAD Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210298.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-SS-1-4|PAPER Wed-M-SS-1-4 — Improving Channel Decorrelation for Multi-Channel Target Speech Extraction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Channel Decorrelation for Multi-Channel Target Speech Extraction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210721.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-7|PAPER Wed-M-V-5-7 — Relationships Between Perceptual Distinctiveness, Articulatory Complexity and Functional Load in Speech Communication]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Relationships Between Perceptual Distinctiveness, Articulatory Complexity and Functional Load in Speech Communication</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211075.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-2|PAPER Tue-E-V-5-2 — Layer-Wise Fast Adaptation for End-to-End Multi-Accent Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Layer-Wise Fast Adaptation for End-to-End Multi-Accent Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212119.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-2-4|PAPER Wed-A-O-2-4 — Knowledge Distillation from Multi-Modality to Single-Modality for Person Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Knowledge Distillation from Multi-Modality to Single-Modality for Person Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212173.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-10|PAPER Wed-A-V-5-10 — Basis-MelGAN: Efficient Neural Vocoder Based on Audio Decomposition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Basis-MelGAN: Efficient Neural Vocoder Based on Audio Decomposition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212136.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-SS-1-8|PAPER Wed-A-SS-1-8 — The SJTU System for Short-Duration Speaker Verification Challenge 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The SJTU System for Short-Duration Speaker Verification Challenge 2021</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212128.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-11|PAPER Thu-M-V-2-11 — Audio-Visual Multi-Talker Speech Recognition in a Cocktail Party]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio-Visual Multi-Talker Speech Recognition in a Cocktail Party</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211676.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-O-3-4|PAPER Tue-M-O-3-4 — Phoneme-to-Audio Alignment with Recurrent Neural Networks for Speaking and Singing Voice]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phoneme-to-Audio Alignment with Recurrent Neural Networks for Speaking and Singing Voice</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210298.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-SS-1-4|PAPER Wed-M-SS-1-4 — Improving Channel Decorrelation for Multi-Channel Target Speech Extraction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Channel Decorrelation for Multi-Channel Target Speech Extraction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210556.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-5|PAPER Wed-M-O-1-5 — //LeBenchmark//: A Reproducible Framework for Assessing Self-Supervised Representation Learning from Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">//LeBenchmark//: A Reproducible Framework for Assessing Self-Supervised Representation Learning from Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210608.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-2-1|PAPER Wed-E-O-2-1 — Impact of Encoding and Segmentation Strategies on End-to-End Simultaneous Speech Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Impact of Encoding and Segmentation Strategies on End-to-End Simultaneous Speech Translation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210660.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-8|PAPER Thu-A-V-5-8 — A Universal Multi-Speaker Multi-Style Text-to-Speech via Disentangled Representation Learning Based on Rényi Divergence Minimization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Universal Multi-Speaker Multi-Style Text-to-Speech via Disentangled Representation Learning Based on Rényi Divergence Minimization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210660.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-8|PAPER Thu-A-V-5-8 — A Universal Multi-Speaker Multi-Style Text-to-Speech via Disentangled Representation Learning Based on Rényi Divergence Minimization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Universal Multi-Speaker Multi-Style Text-to-Speech via Disentangled Representation Learning Based on Rényi Divergence Minimization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210379.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-2|PAPER Wed-A-V-2-2 — A Light-Weight Contextual Spelling Correction Model for Customizing Transducer-Based Speech Recognition Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Light-Weight Contextual Spelling Correction Model for Customizing Transducer-Based Speech Recognition Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211482.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-5|PAPER Wed-E-SS-1-5 — DCCRN+: Channel-Wise Subband DCCRN with SNR Estimation for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DCCRN+: Channel-Wise Subband DCCRN with SNR Estimation for Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211397.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-4|PAPER Thu-A-V-6-4 — AISHELL-4: An Open Source Dataset for Speech Enhancement, Separation, Recognition and Speaker Diarization in Conference Scenario]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AISHELL-4: An Open Source Dataset for Speech Enhancement, Separation, Recognition and Speaker Diarization in Conference Scenario</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211359.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-1-3|PAPER Fri-A-SS-1-3 — F-T-LSTM Based Complex Network for Joint Acoustic Echo Cancellation and Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">F-T-LSTM Based Complex Network for Joint Acoustic Echo Cancellation and Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210206.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-4|PAPER Wed-M-V-6-4 — An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211207.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-9|PAPER Fri-M-V-3-9 — Multi-Task Learning for End-to-End ASR Word and Utterance Confidence with Deletion Prediction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Task Learning for End-to-End ASR Word and Utterance Confidence with Deletion Prediction</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210204.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-3|PAPER Fri-M-V-6-3 — Personalized Keyphrase Detection Using Speaker and Environment Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Personalized Keyphrase Detection Using Speaker and Environment Information</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210212.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-8|PAPER Fri-A-V-3-8 — Tied & Reduced RNN-T Decoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Tied & Reduced RNN-T Decoder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211454.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-1|PAPER Tue-A-V-4-1 — Improving Streaming Transformer Based ASR Under a Framework of Self-Supervised Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Streaming Transformer Based ASR Under a Framework of Self-Supervised Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210755.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-4|PAPER Wed-E-V-6-4 — AISHELL-3: A Multi-Speaker Mandarin TTS Corpus]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AISHELL-3: A Multi-Speaker Mandarin TTS Corpus</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210526.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-5|PAPER Wed-A-V-6-5 — AlloST: Low-Resource Speech Translation Without Source Transcription]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AlloST: Low-Resource Speech Translation Without Source Transcription</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212067.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-2-10|PAPER Fri-A-V-2-10 — Time-Frequency Representation Learning with Graph Convolutional Network for Dialogue-Level Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Time-Frequency Representation Learning with Graph Convolutional Network for Dialogue-Level Speech Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211710.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-3|PAPER Wed-M-O-1-3 — Self-Supervised End-to-End ASR for Low Resource L2 Swedish]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Self-Supervised End-to-End ASR for Low Resource L2 Swedish</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210939.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-2-2|PAPER Tue-E-O-2-2 — Synchronic Fortition in Five Romance Languages? A Large Corpus-Based Study of Word-Initial Devoicing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Synchronic Fortition in Five Romance Languages? A Large Corpus-Based Study of Word-Initial Devoicing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210799.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-6|PAPER Tue-A-SS-2-6 — Classification of COVID-19 from Cough Using Autoregressive Predictive Coding Pretraining and Spectral Data Augmentation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Classification of COVID-19 from Cough Using Autoregressive Predictive Coding Pretraining and Spectral Data Augmentation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210102.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-2|PAPER Thu-A-V-2-2 — Large-Scale Pre-Training of End-to-End Multi-Talker ASR for Meeting Transcription with Single Distant Microphone]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Large-Scale Pre-Training of End-to-End Multi-Talker ASR for Meeting Transcription with Single Distant Microphone</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210101.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-2|PAPER Fri-A-V-1-2 — End-to-End Speaker-Attributed ASR with Transformer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Speaker-Attributed ASR with Transformer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211796.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-1-7|PAPER Wed-A-V-1-7 — Investigating the Interplay Between Affective, Phonatory and Motoric Subsystems in Autism Spectrum Disorder Using a Multimodal Dialogue Agent]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigating the Interplay Between Affective, Phonatory and Motoric Subsystems in Autism Spectrum Disorder Using a Multimodal Dialogue Agent</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212231.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-11|PAPER Thu-A-V-2-11 — Arabic Code-Switching Speech Recognition Using Monolingual Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Arabic Code-Switching Speech Recognition Using Monolingual Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-6|PAPER Tue-A-V-2-6 — Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211566.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-3|PAPER Wed-M-V-6-3 — Contextualized Streaming End-to-End Speech Recognition with Trie-Based Deep Biasing and Shallow Fusion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Contextualized Streaming End-to-End Speech Recognition with Trie-Based Deep Biasing and Shallow Fusion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210708.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-8|PAPER Thu-M-V-4-8 — Online Streaming End-to-End Neural Diarization Handling Overlapping Speech and Flexible Numbers of Speakers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Online Streaming End-to-End Neural Diarization Handling Overlapping Speech and Flexible Numbers of Speakers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210482.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-4|PAPER Tue-E-V-1-4 — Variational Information Bottleneck Based Regularization for Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Variational Information Bottleneck Based Regularization for Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211242.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-1|PAPER Tue-M-V-5-1 — End-to-End Spelling Correction Conditioned on Acoustic Feature for Code-Switching Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Spelling Correction Conditioned on Acoustic Feature for Code-Switching Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210794.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-7|PAPER Tue-A-SS-1-7 — Continual Learning for Fake Audio Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Continual Learning for Fake Audio Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210930.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-5|PAPER Wed-M-V-4-5 — Half-Truth: A Partially Fake Audio Detection Dataset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Half-Truth: A Partially Fake Audio Detection Dataset</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211367.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-1|PAPER Fri-M-V-3-1 — FSR: Accelerating the Inference Process of Transducer-Based Models by Applying Fast-Skip Regularization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">FSR: Accelerating the Inference Process of Transducer-Based Models by Applying Fast-Skip Regularization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211461.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-7|PAPER Tue-M-V-2-7 — Parallel Tacotron 2: A Non-Autoregressive Neural TTS Model with Differentiable Duration Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Parallel Tacotron 2: A Non-Autoregressive Neural TTS Model with Differentiable Duration Modeling</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211757.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-9|PAPER Tue-M-V-2-9 — PnG BERT: Augmented BERT on Phonemes and Graphemes for Neural TTS]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PnG BERT: Augmented BERT on Phonemes and Graphemes for Neural TTS</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210666.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-1-6|PAPER Thu-A-V-1-6 — Acted vs. Improvised: Domain Adaptation for Elicitation Approaches in Audio-Visual Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acted vs. Improvised: Domain Adaptation for Elicitation Approaches in Audio-Visual Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210670.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-2|PAPER Wed-M-V-4-2 — QISTA-Net-Audio: Audio Super-Resolution via Non-Convex ℓ,,q,,-Norm Minimization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">QISTA-Net-Audio: Audio Super-Resolution via Non-Convex ℓ,,q,,-Norm Minimization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212124.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-10|PAPER Wed-E-V-6-10 — KazakhTTS: An Open-Source Kazakh Text-to-Speech Synthesis Dataset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">KazakhTTS: An Open-Source Kazakh Text-to-Speech Synthesis Dataset</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218005.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-S&T-1-5|PAPER Tue-A-S&T-1-5 — The LIUM Human Active Correction Platform for Speaker Diarization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The LIUM Human Active Correction Platform for Speaker Diarization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211909.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-1|PAPER Thu-M-V-4-1 — End-to-End Neural Diarization: From Transformer to Conformer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Neural Diarization: From Transformer to Conformer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210894.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-9|PAPER Wed-E-V-3-9 — Emitting Word Timings with HMM-Free End-to-End System in Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Emitting Word Timings with HMM-Free End-to-End System in Automatic Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210338.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-3|PAPER Thu-M-V-3-3 — Continuous Speech Separation Using Speaker Inventory for Long Recording]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Continuous Speech Separation Using Speaker Inventory for Long Recording</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211158.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-10|PAPER Thu-M-V-3-10 — Implicit Filter-and-Sum Network for End-to-End Multi-Channel Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Implicit Filter-and-Sum Network for End-to-End Multi-Channel Speech Separation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211161.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-2|PAPER Thu-A-V-3-2 — Empirical Analysis of Generalized Iterative Speech Separation Networks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Empirical Analysis of Generalized Iterative Speech Separation Networks</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211372.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-6|PAPER Thu-A-V-3-6 — Binaural Speech Separation of Moving Speakers With Preserved Spatial Cues]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Binaural Speech Separation of Moving Speakers With Preserved Spatial Cues</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210892.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-4|PAPER Wed-M-V-4-4 — WSRGlow: A Glow-Based Waveform Generative Model for Audio Super-Resolution]]</div>|^<div class="cpauthorindexpersoncardpapertitle">WSRGlow: A Glow-Based Waveform Generative Model for Audio Super-Resolution</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211148.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-6|PAPER Wed-E-V-6-6 — EMOVIE: A Mandarin Emotion Speech Dataset with a Simple Emotional Text-to-Speech Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">EMOVIE: A Mandarin Emotion Speech Dataset with a Simple Emotional Text-to-Speech Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210502.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-4-5|PAPER Fri-M-V-4-5 — Polyphone Disambiguation in Mandarin Chinese with Semi-Supervised Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Polyphone Disambiguation in Mandarin Chinese with Semi-Supervised Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211422.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-6-6|PAPER Thu-M-V-6-6 — Model-Based Exploration of Linking Between Vowel Articulatory Space and Acoustic Space]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Model-Based Exploration of Linking Between Vowel Articulatory Space and Acoustic Space</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210187.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-2-2|PAPER Fri-M-V-2-2 — Segmental Alignment of English Syllables with Singleton and Cluster Onsets]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Segmental Alignment of English Syllables with Singleton and Cluster Onsets</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210387.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-6|PAPER Wed-A-V-3-6 — Deformable TDNN with Adaptive Receptive Fields for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deformable TDNN with Adaptive Receptive Fields for Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212250.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-11|PAPER Thu-A-V-3-11 — Robust Speaker Extraction Network Based on Iterative Refined Adaptation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Speaker Extraction Network Based on Iterative Refined Adaptation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211689.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-7-7|PAPER Thu-M-V-7-7 — Joint Retrieval-Extraction Training for Evidence-Aware Dialog Response Selection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Joint Retrieval-Extraction Training for Evidence-Aware Dialog Response Selection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210687.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-11|PAPER Tue-E-V-6-11 — Cross-Lingual Voice Conversion with a Cycle Consistency Loss on Linguistic Representation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Lingual Voice Conversion with a Cycle Consistency Loss on Linguistic Representation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210763.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-7|PAPER Thu-M-V-3-7 — Stabilizing Label Assignment for Speech Separation by Self-Supervised Pre-Training]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Stabilizing Label Assignment for Speech Separation by Self-Supervised Pre-Training</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210517.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-3|PAPER Wed-A-V-5-3 — Unified Source-Filter GAN: Unified Source-Filter Network Based On Factorization of Quasi-Periodic Parallel WaveGAN]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unified Source-Filter GAN: Unified Source-Filter Network Based On Factorization of Quasi-Periodic Parallel WaveGAN</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210806.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-9|PAPER Thu-A-V-5-9 — Relational Data Selection for Data Augmentation of Speaker-Dependent Multi-Band MelGAN Vocoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Relational Data Selection for Data Augmentation of Speaker-Dependent Multi-Band MelGAN Vocoder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210757.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-1|PAPER Tue-A-V-2-1 — Segment and Tone Production in Continuous Speech of Hearing and Hearing-Impaired Children]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Segment and Tone Production in Continuous Speech of Hearing and Hearing-Impaired Children</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210948.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-6-3|PAPER Tue-A-V-6-3 — Adversarial Voice Conversion Against Neural Spoofing Detectors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adversarial Voice Conversion Against Neural Spoofing Detectors</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211978.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-11|PAPER Tue-E-V-1-11 — Joint Feature Enhancement and Speaker Recognition with Multi-Objective Task-Oriented Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Joint Feature Enhancement and Speaker Recognition with Multi-Objective Task-Oriented Network</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210137.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-1|PAPER Tue-E-V-6-1 — CVC: Contrastive Learning for Non-Parallel Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">CVC: Contrastive Learning for Non-Parallel Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210402.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-4|PAPER Fri-A-V-6-4 — Knowledge Distillation from BERT Transformer to Speech Transformer for Intent Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Knowledge Distillation from BERT Transformer to Speech Transformer for Intent Classification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211017.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-10|PAPER Tue-A-V-4-10 — Improving RNN-T for Domain Scaling Using Semi-Supervised Training with Neural TTS]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving RNN-T for Domain Scaling Using Semi-Supervised Training with Neural TTS</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211884.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-9|PAPER Tue-E-V-5-9 — Rapid Speaker Adaptation for Conformer Transducer: Attention and Bias Are All You Need]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Rapid Speaker Adaptation for Conformer Transducer: Attention and Bias Are All You Need</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211298.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-2|PAPER Wed-M-V-6-2 — Multiple Softmax Architecture for Streaming Multilingual End-to-End ASR Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multiple Softmax Architecture for Streaming Multilingual End-to-End ASR Systems</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210207.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-5|PAPER Wed-M-V-6-5 — Streaming Multi-Talker Speech Recognition with Joint Speaker Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Streaming Multi-Talker Speech Recognition with Joint Speaker Identification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212075.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-14|PAPER Wed-E-V-3-14 — Minimum Word Error Rate Training with Language Model Fusion for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Minimum Word Error Rate Training with Language Model Fusion for End-to-End Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210161.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-3|PAPER Thu-A-V-2-3 — On Minimum Word Error Rate Training of the Hybrid Autoregressive Transducer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On Minimum Word Error Rate Training of the Hybrid Autoregressive Transducer</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211949.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-10|PAPER Thu-A-V-2-10 — Improving Multilingual Transformer Transducer Models by Reducing Language Confusions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Multilingual Transformer Transducer Models by Reducing Language Confusions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212128.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-11|PAPER Thu-M-V-2-11 — Audio-Visual Multi-Talker Speech Recognition in a Cocktail Party]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio-Visual Multi-Talker Speech Recognition in a Cocktail Party</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210356.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-1-3|PAPER Tue-M-V-1-3 — Improving Time Delay Neural Network Based Speaker Recognition with Convolutional Block and Feature Aggregation Methods]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Time Delay Neural Network Based Speaker Recognition with Convolutional Block and Feature Aggregation Methods</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211397.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-4|PAPER Thu-A-V-6-4 — AISHELL-4: An Open Source Dataset for Speech Enhancement, Separation, Recognition and Speaker Diarization in Conference Scenario]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AISHELL-4: An Open Source Dataset for Speech Enhancement, Separation, Recognition and Speaker Diarization in Conference Scenario</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211454.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-1|PAPER Tue-A-V-4-1 — Improving Streaming Transformer Based ASR Under a Framework of Self-Supervised Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Streaming Transformer Based ASR Under a Framework of Self-Supervised Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212246.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-10|PAPER Thu-A-V-3-10 — Improved Speech Separation with Time-and-Frequency Cross-Domain Feature Selection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improved Speech Separation with Time-and-Frequency Cross-Domain Feature Selection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210027.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-3|PAPER Tue-A-SS-1-3 — Adjunct-Emeritus Distillation for Semi-Supervised Language Model Adaptation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adjunct-Emeritus Distillation for Semi-Supervised Language Model Adaptation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210915.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-7|PAPER Thu-M-V-1-7 — Identifying Cognitive Impairment Using Sentence Representation Vectors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Identifying Cognitive Impairment Using Sentence Representation Vectors</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211519.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-7|PAPER Thu-A-SS-2-7 — Using the Outputs of Different Automatic Speech Recognition Paradigms for Acoustic- and BERT-Based Alzheimer’s Dementia Detection Through Spontaneous Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Using the Outputs of Different Automatic Speech Recognition Paradigms for Acoustic- and BERT-Based Alzheimer’s Dementia Detection Through Spontaneous Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212171.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-1-1|PAPER Thu-M-SS-1-1 — Oriental Language Recognition (OLR) 2020: Summary and Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Oriental Language Recognition (OLR) 2020: Summary and Analysis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212161.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-10|PAPER Fri-A-V-4-10 — An Integrated Framework for Two-Pass Personalized Voice Trigger]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Integrated Framework for Two-Pass Personalized Voice Trigger</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210504.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-5|PAPER Thu-M-V-3-5 — End-to-End Speech Separation Using Orthogonal Representation in Complex and Real Time-Frequency Domain]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Speech Separation Using Orthogonal Representation in Complex and Real Time-Frequency Domain</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211049.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-5-7|PAPER Thu-M-V-5-7 — Applying the Information Bottleneck Principle to Prosodic Representation Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Applying the Information Bottleneck Principle to Prosodic Representation Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211067.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-1-4|PAPER Wed-A-O-1-4 — On Sampling-Based Training Criteria for Neural Language Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On Sampling-Based Training Criteria for Neural Language Modeling</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210847.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-3|PAPER Fri-M-V-7-3 — Siamese Network with wav2vec Feature for Spoofing Speech Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Siamese Network with wav2vec Feature for Spoofing Speech Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210319.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-6|PAPER Tue-E-V-6-6 — StarGANv2-VC: A Diverse, Unsupervised, Non-Parallel Framework for Natural-Sounding Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">StarGANv2-VC: A Diverse, Unsupervised, Non-Parallel Framework for Natural-Sounding Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210677.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-7|PAPER Tue-A-V-4-7 — Semi-Supervision in ASR: Sequential MixMatch and Factorized TTS-Based Augmentation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semi-Supervision in ASR: Sequential MixMatch and Factorized TTS-Based Augmentation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210648.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-7|PAPER Wed-E-V-3-7 — Regularizing Word Segmentation by Creating Misspellings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Regularizing Word Segmentation by Creating Misspellings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210602.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-10|PAPER Fri-M-V-6-10 — The 2020 Personalized Voice Trigger Challenge: Open Datasets, Evaluation Metrics, Baseline System and Results]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The 2020 Personalized Voice Trigger Challenge: Open Datasets, Evaluation Metrics, Baseline System and Results</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210965.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-SS-1-7|PAPER Wed-A-SS-1-7 — The Sogou System for Short-Duration Speaker Verification Challenge 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Sogou System for Short-Duration Speaker Verification Challenge 2021</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211062.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-2-2|PAPER Tue-A-SS-2-2 — PANACEA Cough Sound-Based Diagnosis of COVID-19 for the DiCOVA 2021 Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PANACEA Cough Sound-Based Diagnosis of COVID-19 for the DiCOVA 2021 Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211569.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-3|PAPER Tue-E-V-4-3 — Sequential End-to-End Intent and Slot Label Classification and Localization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Sequential End-to-End Intent and Slot Label Classification and Localization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211356.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-6-7|PAPER Tue-A-V-6-7 — S2VC: A Framework for Any-to-Any Voice Conversion with Self-Supervised Pretrained Representations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">S2VC: A Framework for Any-to-Any Voice Conversion with Self-Supervised Pretrained Representations</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211775.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-9|PAPER Tue-E-V-3-9 — SUPERB: Speech Processing Universal PERformance Benchmark]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SUPERB: Speech Processing Universal PERformance Benchmark</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212013.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-9|PAPER Wed-E-V-6-9 — Utilizing Self-Supervised Representations for MOS Prediction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Utilizing Self-Supervised Representations for MOS Prediction</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210204.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-3|PAPER Fri-M-V-6-3 — Personalized Keyphrase Detection Using Speaker and Environment Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Personalized Keyphrase Detection Using Speaker and Environment Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211405.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-6|PAPER Tue-M-V-2-6 — Deliberation-Based Multi-Pass Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deliberation-Based Multi-Pass Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210819.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-5|PAPER Fri-A-V-3-5 — Extremely Low Footprint End-to-End ASR System for Smart Device]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Extremely Low Footprint End-to-End ASR System for Smart Device</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211882.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-9|PAPER Tue-A-SS-1-9 — SynthASR: Unlocking Synthetic Data for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SynthASR: Unlocking Synthetic Data for Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211122.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-2-5|PAPER Fri-M-V-2-5 — How Reliable Are Phonetic Data Collected Remotely? Comparison of Recording Devices and Environments on Acoustic Measurements]]</div>|^<div class="cpauthorindexpersoncardpapertitle">How Reliable Are Phonetic Data Collected Remotely? Comparison of Recording Devices and Environments on Acoustic Measurements</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210366.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-1|PAPER Tue-E-V-2-1 — Multi-Stream Gated and Pyramidal Temporal Convolutional Neural Networks for Audio-Visual Speech Separation in Multi-Talker Environments]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Stream Gated and Pyramidal Temporal Convolutional Neural Networks for Audio-Visual Speech Separation in Multi-Talker Environments</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210520.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-4|PAPER Wed-E-V-5-4 — Multi-Stage Progressive Speech Enhancement Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Stage Progressive Speech Enhancement Network</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211075.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-2|PAPER Tue-E-V-5-2 — Layer-Wise Fast Adaptation for End-to-End Multi-Accent Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Layer-Wise Fast Adaptation for End-to-End Multi-Accent Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211495.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-1-7|PAPER Wed-M-V-1-7 — E2E-Based Multi-Task Learning Approach to Joint Speech and Accent Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">E2E-Based Multi-Task Learning Approach to Joint Speech and Accent Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210889.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-6|PAPER Tue-E-V-2-6 — Scene-Agnostic Multi-Microphone Speech Dereverberation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Scene-Agnostic Multi-Microphone Speech Dereverberation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211012.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-5-6|PAPER Thu-M-V-5-6 — Intra-Sentential Speaking Rate Control in Neural Text-To-Speech for Automatic Dubbing]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Intra-Sentential Speaking Rate Control in Neural Text-To-Speech for Automatic Dubbing</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210398.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-SS-1-5|PAPER Wed-A-SS-1-5 — Our Learned Lessons from Cross-Lingual Speaker Verification: The CRMI-DKU System Description for the Short-Duration Speaker Verification Challenge 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Our Learned Lessons from Cross-Lingual Speaker Verification: The CRMI-DKU System Description for the Short-Duration Speaker Verification Challenge 2021</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210481.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-2|PAPER Tue-E-V-2-2 — TeCANet: Temporal-Contextual Attention Network for Environment-Aware Speech Dereverberation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">TeCANet: Temporal-Contextual Attention Network for Environment-Aware Speech Dereverberation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210570.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-4|PAPER Tue-E-V-2-4 — MIMO Self-Attentive RNN Beamformer for Multi-Speaker Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MIMO Self-Attentive RNN Beamformer for Multi-Speaker Speech Separation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210659.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-7|PAPER Wed-A-V-4-7 — MetricNet: Towards Improved Modeling For Non-Intrusive Speech Quality Assessment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MetricNet: Towards Improved Modeling For Non-Intrusive Speech Quality Assessment</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210430.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-11|PAPER Thu-M-V-3-11 — Generalized Spatio-Temporal RNN Beamformer for Target Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Generalized Spatio-Temporal RNN Beamformer for Target Speech Separation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210065.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-1-1|PAPER Tue-M-V-1-1 — Adaptive Convolutional Neural Network for Text-Independent Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adaptive Convolutional Neural Network for Text-Independent Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210538.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-3|PAPER Tue-E-V-2-3 — Residual Echo and Noise Cancellation with Feature Attention Module and Multi-Domain Loss Function]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Residual Echo and Noise Cancellation with Feature Attention Module and Multi-Domain Loss Function</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211265.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-7|PAPER Wed-M-V-3-7 — Incorporating Cross-Speaker Style Transfer for Multi-Language Text-to-Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Incorporating Cross-Speaker Style Transfer for Multi-Language Text-to-Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211192.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-4-9|PAPER Fri-M-V-4-9 — LinearSpeech: Parallel Text-to-Speech with Linear Complexity]]</div>|^<div class="cpauthorindexpersoncardpapertitle">LinearSpeech: Parallel Text-to-Speech with Linear Complexity</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211461.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-7|PAPER Tue-M-V-2-7 — Parallel Tacotron 2: A Non-Autoregressive Neural TTS Model with Differentiable Duration Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Parallel Tacotron 2: A Non-Autoregressive Neural TTS Model with Differentiable Duration Modeling</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211757.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-9|PAPER Tue-M-V-2-9 — PnG BERT: Augmented BERT on Phonemes and Graphemes for Neural TTS]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PnG BERT: Augmented BERT on Phonemes and Graphemes for Neural TTS</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211965.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-5|PAPER Thu-A-V-6-5 — GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211259.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-13|PAPER Thu-A-V-6-13 — speechocean762: An Open-Source Non-Native English Speech Corpus for Pronunciation Assessment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">speechocean762: An Open-Source Non-Native English Speech Corpus for Pronunciation Assessment</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212250.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-11|PAPER Thu-A-V-3-11 — Robust Speaker Extraction Network Based on Iterative Refined Adaptation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Robust Speaker Extraction Network Based on Iterative Refined Adaptation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210739.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-9|PAPER Wed-A-V-2-9 — BART Based Semantic Correction for Mandarin Automatic Speech Recognition System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">BART Based Semantic Correction for Mandarin Automatic Speech Recognition System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218006.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-S&T-1-6|PAPER Tue-A-S&T-1-6 — On-Device Streaming Transformer-Based End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On-Device Streaming Transformer-Based End-to-End Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212041.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-6|PAPER Thu-A-V-6-6 — Look Who’s Talking: Active Speaker Detection in the Wild]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Look Who’s Talking: Active Speaker Detection in the Wild</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210742.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-3-2|PAPER Tue-A-V-3-2 — Alpha-Stable Autoregressive Fast Multichannel Nonnegative Matrix Factorization for Joint Speech Enhancement and Dereverberation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Alpha-Stable Autoregressive Fast Multichannel Nonnegative Matrix Factorization for Joint Speech Enhancement and Dereverberation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210381.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-4-5|PAPER Tue-M-V-4-5 — Neural Spoken-Response Generation Using Prosodic and Linguistic Context for Conversational Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Neural Spoken-Response Generation Using Prosodic and Linguistic Context for Conversational Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211050.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-9|PAPER Wed-A-V-4-9 — Assessment of von Mises-Bernoulli Deep Neural Network in Sound Source Localization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Assessment of von Mises-Bernoulli Deep Neural Network in Sound Source Localization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210475.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-6|PAPER Thu-A-V-5-6 — Speech Resynthesis from Discrete Disentangled Self-Supervised Representations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Resynthesis from Discrete Disentangled Self-Supervised Representations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210571.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-5|PAPER Tue-A-V-4-5 — Momentum Pseudo-Labeling for Semi-Supervised Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Momentum Pseudo-Labeling for Semi-Supervised Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210958.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-8|PAPER Tue-M-V-5-8 — Data Augmentation Methods for End-to-End Speech Recognition on Distant-Talk Scenarios]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Data Augmentation Methods for End-to-End Speech Recognition on Distant-Talk Scenarios</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210775.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-11|PAPER Wed-A-V-3-11 — A Comparative Study on Neural Architectures and Training Methods for Japanese Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparative Study on Neural Architectures and Training Methods for Japanese Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210448.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-5|PAPER Thu-M-V-4-5 — Adapting Speaker Embeddings for Speaker Diarisation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adapting Speaker Embeddings for Speaker Diarisation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212041.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-6|PAPER Thu-A-V-6-6 — Look Who’s Talking: Active Speaker Detection in the Wild]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Look Who’s Talking: Active Speaker Detection in the Wild</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211820.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-11|PAPER Fri-M-V-7-11 — An Empirical Study on Channel Effects for Synthetic Voice Spoofing Countermeasure Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Empirical Study on Channel Effects for Synthetic Voice Spoofing Countermeasure Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211025.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-8|PAPER Wed-E-V-5-8 — DEMUCS-Mobile : On-Device Lightweight Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">DEMUCS-Mobile : On-Device Lightweight Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211091.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-8|PAPER Tue-M-V-6-8 — FastICARL: Fast Incremental Classifier and Representation Learning with Efficient Budget Allocation in Audio Sensing Applications]]</div>|^<div class="cpauthorindexpersoncardpapertitle">FastICARL: Fast Incremental Classifier and Representation Learning with Efficient Budget Allocation in Audio Sensing Applications</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210866.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-2|PAPER Tue-M-V-2-2 — FastPitchFormant: Source-Filter Based Decomposed Modeling for Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">FastPitchFormant: Source-Filter Based Decomposed Modeling for Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210239.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-1|PAPER Wed-M-V-3-1 — N-Singer: A Non-Autoregressive Korean Singing Voice Synthesis System for Pronunciation Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">N-Singer: A Non-Autoregressive Korean Singing Voice Synthesis System for Pronunciation Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210971.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-6|PAPER Wed-A-V-5-6 — GANSpeech: Adversarial Training for High-Fidelity Multi-Speaker Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">GANSpeech: Adversarial Training for High-Fidelity Multi-Speaker Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212111.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-2|PAPER Thu-M-V-1-2 — Analyzing Short Term Dynamic Speech Features for Understanding Behavioral Traits of Children with Autism Spectrum Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analyzing Short Term Dynamic Speech Features for Understanding Behavioral Traits of Children with Autism Spectrum Disorder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210471.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-5|PAPER Thu-A-V-5-5 — Hierarchical Context-Aware Transformers for Non-Autoregressive Text to Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Hierarchical Context-Aware Transformers for Non-Autoregressive Text to Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212151.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-13|PAPER Wed-M-V-4-13 — Development of a Psychoacoustic Loss Function for the Deep Neural Network (DNN)-Based Speech Coder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Development of a Psychoacoustic Loss Function for the Deep Neural Network (DNN)-Based Speech Coder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211204.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-7|PAPER Wed-M-V-4-7 — Coded Speech Enhancement Using Neural Network-Based Vector-Quantized Residual Features]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Coded Speech Enhancement Using Neural Network-Based Vector-Quantized Residual Features</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211270.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-5-9|PAPER Tue-A-V-5-9 — Enhancing Semantic Understanding with Self-Supervised Methods for Abstractive Dialogue Summarization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Enhancing Semantic Understanding with Self-Supervised Methods for Abstractive Dialogue Summarization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210149.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-2|PAPER Thu-M-V-4-2 — Three-Class Overlapped Speech Detection Using a Convolutional Recurrent Neural Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Three-Class Overlapped Speech Detection Using a Convolutional Recurrent Neural Network</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210448.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-5|PAPER Thu-M-V-4-5 — Adapting Speaker Embeddings for Speaker Diarisation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adapting Speaker Embeddings for Speaker Diarisation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212041.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-6|PAPER Thu-A-V-6-6 — Look Who’s Talking: Active Speaker Detection in the Wild]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Look Who’s Talking: Active Speaker Detection in the Wild</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210812.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-3|PAPER Wed-M-V-4-3 — X-net: A Joint Scale Down and Scale Up Method for Voice Call]]</div>|^<div class="cpauthorindexpersoncardpapertitle">X-net: A Joint Scale Down and Scale Up Method for Voice Call</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210332.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-3|PAPER Thu-A-SS-2-3 — WavBERT: Exploiting Semantic and Non-Semantic Speech Using Wav2vec and BERT for Dementia Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">WavBERT: Exploiting Semantic and Non-Semantic Speech Using Wav2vec and BERT for Dementia Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210438.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-3|PAPER Fri-M-V-5-3 — RW-Resnet: A Novel Speech Anti-Spoofing Model Using Raw Waveform]]</div>|^<div class="cpauthorindexpersoncardpapertitle">RW-Resnet: A Novel Speech Anti-Spoofing Model Using Raw Waveform</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210676.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-11|PAPER Fri-A-SS-2-11 — Conformer Parrotron: A Faster and Stronger End-to-End Speech Conversion and Recognition Model for Atypical Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Conformer Parrotron: A Faster and Stronger End-to-End Speech Conversion and Recognition Model for Atypical Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211436.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-1-6|PAPER Tue-M-V-1-6 — Mutual Information Enhanced Training for Speaker Embedding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Mutual Information Enhanced Training for Speaker Embedding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210502.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-4-5|PAPER Fri-M-V-4-5 — Polyphone Disambiguation in Mandarin Chinese with Semi-Supervised Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Polyphone Disambiguation in Mandarin Chinese with Semi-Supervised Learning</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210948.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-6-3|PAPER Tue-A-V-6-3 — Adversarial Voice Conversion Against Neural Spoofing Detectors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adversarial Voice Conversion Against Neural Spoofing Detectors</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210723.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-7|PAPER Thu-M-V-2-7 — Automatic Lip-Reading with Hierarchical Pyramidal Convolution and Self-Attention for Image Sequences with No Word Boundaries]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Automatic Lip-Reading with Hierarchical Pyramidal Convolution and Self-Attention for Image Sequences with No Word Boundaries</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210523.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-6|PAPER Thu-M-V-3-6 — Efficient and Stable Adversarial Learning Using Unpaired Data for Unsupervised Multichannel Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Efficient and Stable Adversarial Learning Using Unpaired Data for Unsupervised Multichannel Speech Separation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211415.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-6|PAPER Thu-A-SS-2-6 — Alzheimer’s Disease Detection from Spontaneous Speech Through Combining Linguistic Complexity and (Dis)Fluency Features with Pretrained Language Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Alzheimer’s Disease Detection from Spontaneous Speech Through Combining Linguistic Complexity and (Dis)Fluency Features with Pretrained Language Models</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211402.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-10|PAPER Fri-A-V-1-10 — The Impact of ASR on the Automatic Analysis of Linguistic Complexity and Sophistication in Spontaneous L2 Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Impact of ASR on the Automatic Analysis of Linguistic Complexity and Sophistication in Spontaneous L2 Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210283.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-5|PAPER Tue-E-V-6-5 — VQMIVC: Vector Quantization and Mutual Information-Based Unsupervised Speech Representation Disentanglement for One-Shot Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">VQMIVC: Vector Quantization and Mutual Information-Based Unsupervised Speech Representation Disentanglement for One-Shot Voice Conversion</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212139.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-10|PAPER Thu-M-V-1-10 — Unsupervised Domain Adaptation for Dysarthric Speech Detection via Domain Adversarial Training and Mutual Information Minimization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Domain Adaptation for Dysarthric Speech Detection via Domain Adversarial Training and Mutual Information Minimization</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210458.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-9|PAPER Fri-M-V-6-9 — Energy-Friendly Keyword Spotting System Using Add-Based Convolution]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Energy-Friendly Keyword Spotting System Using Add-Based Convolution</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210582.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-8|PAPER Tue-M-V-3-8 — Improving Perceptual Quality by Phone-Fortified Perceptual Loss Using Wasserstein Distance for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Perceptual Quality by Phone-Fortified Perceptual Loss Using Wasserstein Distance for Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210599.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-9|PAPER Tue-M-V-3-9 — MetricGAN+: An Improved Version of MetricGAN for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">MetricGAN+: An Improved Version of MetricGAN for Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210208.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-2|PAPER Tue-E-V-6-2 — A Preliminary Study of a Two-Stage Paradigm for Preserving Speaker Identity in Dysarthric Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Preliminary Study of a Two-Stage Paradigm for Preserving Speaker Identity in Dysarthric Voice Conversion</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210670.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-2|PAPER Wed-M-V-4-2 — QISTA-Net-Audio: Audio Super-Resolution via Non-Convex ℓ,,q,,-Norm Minimization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">QISTA-Net-Audio: Audio Super-Resolution via Non-Convex ℓ,,q,,-Norm Minimization</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210806.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-9|PAPER Thu-A-V-5-9 — Relational Data Selection for Data Augmentation of Speaker-Dependent Multi-Band MelGAN Vocoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Relational Data Selection for Data Augmentation of Speaker-Dependent Multi-Band MelGAN Vocoder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212075.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-14|PAPER Wed-E-V-3-14 — Minimum Word Error Rate Training with Language Model Fusion for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Minimum Word Error Rate Training with Language Model Fusion for End-to-End Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210142.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-1|PAPER Thu-M-V-3-1 — Ultra Fast Speech Separation Model with Teacher Student Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ultra Fast Speech Separation Model with Teacher Student Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210921.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-9|PAPER Thu-M-V-3-9 — Investigation of Practical Aspects of Single Channel Speech Separation for ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigation of Practical Aspects of Single Channel Speech Separation for ASR</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210102.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-2|PAPER Thu-A-V-2-2 — Large-Scale Pre-Training of End-to-End Multi-Talker ASR for Meeting Transcription with Single Distant Microphone]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Large-Scale Pre-Training of End-to-End Multi-Talker ASR for Meeting Transcription with Single Distant Microphone</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211949.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-10|PAPER Thu-A-V-2-10 — Improving Multilingual Transformer Transducer Models by Reducing Language Confusions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Multilingual Transformer Transducer Models by Reducing Language Confusions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211461.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-7|PAPER Tue-M-V-2-7 — Parallel Tacotron 2: A Non-Autoregressive Neural TTS Model with Differentiable Duration Modeling]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Parallel Tacotron 2: A Non-Autoregressive Neural TTS Model with Differentiable Duration Modeling</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211757.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-9|PAPER Tue-M-V-2-9 — PnG BERT: Augmented BERT on Phonemes and Graphemes for Neural TTS]]</div>|^<div class="cpauthorindexpersoncardpapertitle">PnG BERT: Augmented BERT on Phonemes and Graphemes for Neural TTS</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210677.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-7|PAPER Tue-A-V-4-7 — Semi-Supervision in ASR: Sequential MixMatch and Factorized TTS-Based Augmentation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semi-Supervision in ASR: Sequential MixMatch and Factorized TTS-Based Augmentation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211668.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-8|PAPER Thu-A-V-2-8 — Exploring Targeted Universal Adversarial Perturbations to End-to-End ASR Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploring Targeted Universal Adversarial Perturbations to End-to-End ASR Models</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210337.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-3|PAPER Thu-A-SS-1-3 — Pushing the Limits of Non-Autoregressive Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Pushing the Limits of Non-Autoregressive Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211897.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-11|PAPER Thu-A-SS-1-11 — WaveGrad 2: Iterative Refinement for Text-to-Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">WaveGrad 2: Iterative Refinement for Text-to-Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210690.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-8|PAPER Fri-M-V-3-8 — Residual Energy-Based Models for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Residual Energy-Based Models for End-to-End Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211207.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-9|PAPER Fri-M-V-3-9 — Multi-Task Learning for End-to-End ASR Word and Utterance Confidence with Deletion Prediction]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Task Learning for End-to-End ASR Word and Utterance Confidence with Deletion Prediction</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211936.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-4-1|PAPER Fri-M-V-4-1 — Unsupervised Learning of Disentangled Speech Content and Style Representation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Learning of Disentangled Speech Content and Style Representation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210055.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-6-2|PAPER Fri-A-V-6-2 — Augmenting Slot Values and Contexts for Spoken Language Understanding with Pretrained Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Augmenting Slot Values and Contexts for Spoken Language Understanding with Pretrained Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210698.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-7|PAPER Tue-A-V-1-7 — AST: Audio Spectrogram Transformer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AST: Audio Spectrogram Transformer</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210349.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-4|PAPER Thu-A-SS-1-4 — Non-Autoregressive Predictive Coding for Learning Speech Representations from Local Dependencies]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Non-Autoregressive Predictive Coding for Learning Speech Representations from Local Dependencies</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210545.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-9|PAPER Wed-A-V-3-9 — Online Compressive Transformer for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Online Compressive Transformer for End-to-End Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210208.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-2|PAPER Tue-E-V-6-2 — A Preliminary Study of a Two-Stage Paradigm for Preserving Speaker Identity in Dysarthric Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Preliminary Study of a Two-Stage Paradigm for Preserving Speaker Identity in Dysarthric Voice Conversion</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210858.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-8|PAPER Thu-M-V-3-8 — Dual-Path Filter Network: Speaker-Aware Modeling for Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dual-Path Filter Network: Speaker-Aware Modeling for Speech Separation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210806.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-9|PAPER Thu-A-V-5-9 — Relational Data Selection for Data Augmentation of Speaker-Dependent Multi-Band MelGAN Vocoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Relational Data Selection for Data Augmentation of Speaker-Dependent Multi-Band MelGAN Vocoder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210356.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-1-3|PAPER Tue-M-V-1-3 — Improving Time Delay Neural Network Based Speaker Recognition with Convolutional Block and Feature Aggregation Methods]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Time Delay Neural Network Based Speaker Recognition with Convolutional Block and Feature Aggregation Methods</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210358.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-SS-1-4|PAPER Fri-M-SS-1-4 — Systems for Low-Resource Speech Recognition Tasks in Open Automatic Speech Recognition and Formosa Speech Recognition Challenges]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Systems for Low-Resource Speech Recognition Tasks in Open Automatic Speech Recognition and Formosa Speech Recognition Challenges</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211341.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-6|PAPER Tue-E-V-3-6 — An Attribute-Aligned Strategy for Learning Speech Representation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Attribute-Aligned Strategy for Learning Speech Representation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210516.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-6|PAPER Thu-M-V-4-6 — Scenario-Dependent Speaker Diarization for DIHARD-III Challenge]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Scenario-Dependent Speaker Diarization for DIHARD-III Challenge</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210966.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-7|PAPER Tue-E-V-1-7 — AntVoice Neural Speaker Embedding System for FFSVC 2020]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AntVoice Neural Speaker Embedding System for FFSVC 2020</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211133.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-2-6|PAPER Fri-A-V-2-6 — Metric Learning Based Feature Representation with Gated Fusion Model for Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Metric Learning Based Feature Representation with Gated Fusion Model for Speech Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210698.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-7|PAPER Tue-A-V-1-7 — AST: Audio Spectrogram Transformer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AST: Audio Spectrogram Transformer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211566.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-3|PAPER Wed-M-V-6-3 — Contextualized Streaming End-to-End Speech Recognition with Trie-Based Deep Biasing and Shallow Fusion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Contextualized Streaming End-to-End Speech Recognition with Trie-Based Deep Biasing and Shallow Fusion</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211921.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-14|PAPER Wed-A-V-3-14 — Flexi-Transducer: Optimizing Latency, Accuracy and Compute for Multi-Domain On-Device Scenarios]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Flexi-Transducer: Optimizing Latency, Accuracy and Compute for Multi-Domain On-Device Scenarios</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211887.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-6|PAPER Fri-A-V-3-6 — Dissecting User-Perceived Latency of On-Device E2E Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dissecting User-Perceived Latency of On-Device E2E Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210584.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-6|PAPER Fri-A-V-5-6 — Adaptive Text to Speech for Spontaneous Style]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adaptive Text to Speech for Spontaneous Style</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210037.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-1|PAPER Tue-M-V-6-1 — Attention-Based Cross-Modal Fusion for Audio-Visual Voice Activity Detection in Musical Video Streams]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Attention-Based Cross-Modal Fusion for Audio-Visual Voice Activity Detection in Musical Video Streams</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210739.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-9|PAPER Wed-A-V-2-9 — BART Based Semantic Correction for Mandarin Automatic Speech Recognition System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">BART Based Semantic Correction for Mandarin Automatic Speech Recognition System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210734.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-11|PAPER Tue-M-V-3-11 — Self-Supervised Learning Based Phone-Fortified Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Self-Supervised Learning Based Phone-Fortified Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210482.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-4|PAPER Tue-E-V-1-4 — Variational Information Bottleneck Based Regularization for Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Variational Information Bottleneck Based Regularization for Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210189.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-3|PAPER Thu-A-V-5-3 — Zero-Shot Text-to-Speech for Text-Based Insertion in Audio Narration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Zero-Shot Text-to-Speech for Text-Based Insertion in Audio Narration</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210609.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-4-6|PAPER Fri-M-V-4-6 — A Neural-Network-Based Approach to Identifying Speakers in Novels]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Neural-Network-Based Approach to Identifying Speakers in Novels</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210474.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-3|PAPER Wed-M-V-3-3 — Improve Cross-Lingual Text-To-Speech Synthesis on Monolingual Corpora with Pitch Contour Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improve Cross-Lingual Text-To-Speech Synthesis on Monolingual Corpora with Pitch Contour Information</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210434.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-4-6|PAPER Wed-E-V-4-6 — Targeted and Targetless Neutral Tones in Taiwanese Southern Min]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Targeted and Targetless Neutral Tones in Taiwanese Southern Min</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211326.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-2-6|PAPER Fri-M-V-2-6 — A Cross-Dialectal Comparison of Apical Vowels in Beijing Mandarin, Northeastern Mandarin and Southwestern Mandarin: An EMA and Ultrasound Study]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Cross-Dialectal Comparison of Apical Vowels in Beijing Mandarin, Northeastern Mandarin and Southwestern Mandarin: An EMA and Ultrasound Study</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211860.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-4|PAPER Wed-M-O-1-4 — SPGISpeech: 5,000 Hours of Transcribed Financial Audio for Fully Formatted End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SPGISpeech: 5,000 Hours of Transcribed Financial Audio for Fully Formatted End-to-End Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210135.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-SS-1-2|PAPER Wed-M-SS-1-2 — A Partitioned-Block Frequency-Domain Adaptive Kalman Filter for Stereophonic Acoustic Echo Cancellation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Partitioned-Block Frequency-Domain Adaptive Kalman Filter for Stereophonic Acoustic Echo Cancellation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211454.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-1|PAPER Tue-A-V-4-1 — Improving Streaming Transformer Based ASR Under a Framework of Self-Supervised Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Streaming Transformer Based ASR Under a Framework of Self-Supervised Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210777.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-5|PAPER Fri-A-V-1-5 — Explore wav2vec 2.0 for Mispronunciation Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Explore wav2vec 2.0 for Mispronunciation Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210120.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-4-2|PAPER Tue-M-V-4-2 — Self-Supervised Dialogue Learning for Spoken Conversational Question Answering]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Self-Supervised Dialogue Learning for Spoken Conversational Question Answering</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210548.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-4-6|PAPER Tue-M-V-4-6 — Semantic Transportation Prototypical Network for Few-Shot Intent Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semantic Transportation Prototypical Network for Few-Shot Intent Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210140.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-1-2|PAPER Tue-A-V-1-2 — SpecAugment++: A Hidden Space Data Augmentation Method for Acoustic Scene Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SpecAugment++: A Hidden Space Data Augmentation Method for Acoustic Scene Classification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210300.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-2|PAPER Tue-E-V-3-2 — Unsupervised Multi-Target Domain Adaptation for Acoustic Scene Classification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Unsupervised Multi-Target Domain Adaptation for Acoustic Scene Classification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210110.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-7-1|PAPER Thu-M-V-7-1 — Contextualized Attention-Based Knowledge Transfer for Spoken Conversational Question Answering]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Contextualized Attention-Based Knowledge Transfer for Spoken Conversational Question Answering</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210136.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-6|PAPER Fri-M-V-6-6 — Text Anchor Based Metric Learning for Small-Footprint Keyword Spotting]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Text Anchor Based Metric Learning for Small-Footprint Keyword Spotting</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211950.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-9|PAPER Tue-E-V-2-9 — Joint Online Multichannel Acoustic Echo Cancellation, Speech Dereverberation and Source Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Joint Online Multichannel Acoustic Echo Cancellation, Speech Dereverberation and Source Separation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212004.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-2-9|PAPER Fri-A-V-2-9 — Learning Mutual Correlation in Multimodal Transformer for Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Mutual Correlation in Multimodal Transformer for Speech Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210557.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-10|PAPER Tue-E-V-6-10 — Non-Parallel Any-to-Many Voice Conversion by Replacing Speaker Statistics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Non-Parallel Any-to-Many Voice Conversion by Replacing Speaker Statistics</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210552.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-4|PAPER Wed-M-V-3-4 — Cross-Lingual Voice Conversion with Disentangled Universal Linguistic Representations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Lingual Voice Conversion with Disentangled Universal Linguistic Representations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210586.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-8|PAPER Wed-M-V-6-8 — HMM-Free Encoder Pre-Training for Streaming RNN Transducer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">HMM-Free Encoder Pre-Training for Streaming RNN Transducer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210852.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-1|PAPER Tue-M-V-2-1 — TacoLPCNet: Fast and Stable TTS by Conditioning LPCNet on Mel Spectrogram Predictions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">TacoLPCNet: Fast and Stable TTS by Conditioning LPCNet on Mel Spectrogram Predictions</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211935.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-10|PAPER Tue-E-V-1-10 — Scaling Effect of Self-Supervised Speech Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Scaling Effect of Self-Supervised Speech Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210010.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-1|PAPER Tue-M-V-3-1 — Funnel Deep Complex U-Net for Phase-Aware Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Funnel Deep Complex U-Net for Phase-Aware Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212004.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-2-9|PAPER Fri-A-V-2-9 — Learning Mutual Correlation in Multimodal Transformer for Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Mutual Correlation in Multimodal Transformer for Speech Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211259.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-13|PAPER Thu-A-V-6-13 — speechocean762: An Open-Source Non-Native English Speech Corpus for Pronunciation Assessment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">speechocean762: An Open-Source Non-Native English Speech Corpus for Pronunciation Assessment</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211105.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-6-9|PAPER Wed-A-V-6-9 — ASR Posterior-Based Loss for Multi-Task End-to-End Speech Translation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">ASR Posterior-Based Loss for Multi-Task End-to-End Speech Translation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211259.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-13|PAPER Thu-A-V-6-13 — speechocean762: An Open-Source Non-Native English Speech Corpus for Pronunciation Assessment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">speechocean762: An Open-Source Non-Native English Speech Corpus for Pronunciation Assessment</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211397.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-4|PAPER Thu-A-V-6-4 — AISHELL-4: An Open Source Dataset for Speech Enhancement, Separation, Recognition and Speaker Diarization in Conference Scenario]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AISHELL-4: An Open Source Dataset for Speech Enhancement, Separation, Recognition and Speaker Diarization in Conference Scenario</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210887.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-4-7|PAPER Tue-M-V-4-7 — Domain-Specific Multi-Agent Dialog Policy Learning in Multi-Domain Task-Oriented Scenarios]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Domain-Specific Multi-Agent Dialog Policy Learning in Multi-Domain Task-Oriented Scenarios</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210897.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-6|PAPER Wed-M-V-3-6 — Cross-Lingual Speaker Adaptation Using Domain Adaptation and Speaker Consistency Loss for Text-To-Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Lingual Speaker Adaptation Using Domain Adaptation and Speaker Consistency Loss for Text-To-Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210384.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-4|PAPER Thu-M-V-4-4 — Semi-Supervised Training with Pseudo-Labeling for End-To-End Neural Diarization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semi-Supervised Training with Pseudo-Labeling for End-To-End Neural Diarization</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210708.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-8|PAPER Thu-M-V-4-8 — Online Streaming End-to-End Neural Diarization Handling Overlapping Speech and Flexible Numbers of Speakers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Online Streaming End-to-End Neural Diarization Handling Overlapping Speech and Flexible Numbers of Speakers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211882.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-9|PAPER Tue-A-SS-1-9 — SynthASR: Unlocking Synthetic Data for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SynthASR: Unlocking Synthetic Data for Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210198.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-1-1|PAPER Wed-E-V-1-1 — Bootstrap an End-to-End ASR System by Multilingual Training, Transfer Learning, Text-to-Text Mapping and Synthetic Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bootstrap an End-to-End ASR System by Multilingual Training, Transfer Learning, Text-to-Text Mapping and Synthetic Audio</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211860.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-4|PAPER Wed-M-O-1-4 — SPGISpeech: 5,000 Hours of Transcribed Financial Audio for Fully Formatted End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SPGISpeech: 5,000 Hours of Transcribed Financial Audio for Fully Formatted End-to-End Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212217.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-1-10|PAPER Thu-A-V-1-10 — Acoustic Features and Neural Representations for Categorical Emotion Recognition from Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Features and Neural Representations for Categorical Emotion Recognition from Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210347.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-13|PAPER Tue-E-V-3-13 — Do Sound Event Representations Generalize to Other Audio Tasks? A Case Study in Audio Transfer Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Do Sound Event Representations Generalize to Other Audio Tasks? A Case Study in Audio Transfer Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211941.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-12|PAPER Wed-M-V-4-12 — A Two-Stage Approach to Speech Bandwidth Extension]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Two-Stage Approach to Speech Bandwidth Extension</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210739.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-9|PAPER Wed-A-V-2-9 — BART Based Semantic Correction for Mandarin Automatic Speech Recognition System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">BART Based Semantic Correction for Mandarin Automatic Speech Recognition System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210614.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-6|PAPER Wed-E-V-3-6 — Self-Adaptive Distillation for Multilingual Speech Recognition: Leveraging Student Independence]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Self-Adaptive Distillation for Multilingual Speech Recognition: Leveraging Student Independence</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210482.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-4|PAPER Tue-E-V-1-4 — Variational Information Bottleneck Based Regularization for Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Variational Information Bottleneck Based Regularization for Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211775.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-9|PAPER Tue-E-V-3-9 — SUPERB: Speech Processing Universal PERformance Benchmark]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SUPERB: Speech Processing Universal PERformance Benchmark</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211555.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-10|PAPER Wed-M-V-4-10 — Improving the Expressiveness of Neural Vocoding with Non-Affine Normalizing Flows]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving the Expressiveness of Neural Vocoding with Non-Affine Normalizing Flows</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211859.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-12|PAPER Wed-E-V-5-12 — Learning Speech Structure to Improve Time-Frequency Masks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Speech Structure to Improve Time-Frequency Masks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211974.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-3|PAPER Tue-E-V-5-3 — Low Resource German ASR with Untranscribed Data Spoken by Non-Native Children — INTERSPEECH 2021 Shared Task SPAPL System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Low Resource German ASR with Untranscribed Data Spoken by Non-Native Children — INTERSPEECH 2021 Shared Task SPAPL System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212024.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-12|PAPER Thu-A-SS-2-12 — Modular Multi-Modal Attention Network for Alzheimer’s Disease Detection Using Patient Audio and Language Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modular Multi-Modal Attention Network for Alzheimer’s Disease Detection Using Patient Audio and Language Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210721.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-7|PAPER Wed-M-V-5-7 — Relationships Between Perceptual Distinctiveness, Articulatory Complexity and Functional Load in Speech Communication]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Relationships Between Perceptual Distinctiveness, Articulatory Complexity and Functional Load in Speech Communication</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211082.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-9|PAPER Fri-M-V-1-9 — A Preliminary Study on Discourse Prosody Encoding in L1 and L2 English Spontaneous Narratives]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Preliminary Study on Discourse Prosody Encoding in L1 and L2 English Spontaneous Narratives</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211716.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-2|PAPER Fri-M-V-3-2 — LT-LM: A Novel Non-Autoregressive Language Model for Single-Shot Lattice Rescoring]]</div>|^<div class="cpauthorindexpersoncardpapertitle">LT-LM: A Novel Non-Autoregressive Language Model for Single-Shot Lattice Rescoring</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210021.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-1|PAPER Fri-M-V-1-1 — Cross-Linguistic Perception of the Japanese Singleton/Geminate Contrast: Korean, Mandarin and Mongolian Compared]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Linguistic Perception of the Japanese Singleton/Geminate Contrast: Korean, Mandarin and Mongolian Compared</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210384.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-4|PAPER Thu-M-V-4-4 — Semi-Supervised Training with Pseudo-Labeling for End-To-End Neural Diarization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semi-Supervised Training with Pseudo-Labeling for End-To-End Neural Diarization</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210708.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-4-8|PAPER Thu-M-V-4-8 — Online Streaming End-to-End Neural Diarization Handling Overlapping Speech and Flexible Numbers of Speakers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Online Streaming End-to-End Neural Diarization Handling Overlapping Speech and Flexible Numbers of Speakers</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210800.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-5|PAPER Wed-E-V-6-5 — Comparing Speech Enhancement Techniques for Voice Adaptation-Based Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Comparing Speech Enhancement Techniques for Voice Adaptation-Based Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210914.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-4|PAPER Tue-M-V-2-4 — Phonetic and Prosodic Information Estimation from Texts for Genuine Japanese End-to-End Text-to-Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phonetic and Prosodic Information Estimation from Texts for Genuine Japanese End-to-End Text-to-Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210827.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-2-3|PAPER Wed-E-V-2-3 — Impact of Emotional State on Estimation of Willingness to Buy from Advertising Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Impact of Emotional State on Estimation of Willingness to Buy from Advertising Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210826.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-5-4|PAPER Thu-M-V-5-4 — Phoneme Duration Modeling Using Speech Rhythm-Based Speaker Embeddings for Multi-Speaker Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phoneme Duration Modeling Using Speech Rhythm-Based Speaker Embeddings for Multi-Speaker Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210464.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-5|PAPER Wed-M-V-5-5 — VocalTurk: Exploring Feasibility of Crowdsourced Speaker Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">VocalTurk: Exploring Feasibility of Crowdsourced Speaker Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210807.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-1-5|PAPER Thu-M-SS-1-5 — Language Recognition Based on Unsupervised Pretrained Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Language Recognition Based on Unsupervised Pretrained Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210792.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-7|PAPER Tue-M-V-6-7 — Voice Activity Detection for Live Speech of Baseball Game Based on Tandem Connection with Speech/Noise Separation Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voice Activity Detection for Live Speech of Baseball Game Based on Tandem Connection with Speech/Noise Separation Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211397.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-4|PAPER Thu-A-V-6-4 — AISHELL-4: An Open Source Dataset for Speech Enhancement, Separation, Recognition and Speaker Diarization in Conference Scenario]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AISHELL-4: An Open Source Dataset for Speech Enhancement, Separation, Recognition and Speaker Diarization in Conference Scenario</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211359.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-1-3|PAPER Fri-A-SS-1-3 — F-T-LSTM Based Complex Network for Joint Acoustic Echo Cancellation and Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">F-T-LSTM Based Complex Network for Joint Acoustic Echo Cancellation and Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211281.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-5|PAPER Fri-M-V-7-5 — The Effect of Silence and Dual-Band Fusion in Anti-Spoofing System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The Effect of Silence and Dual-Band Fusion in Anti-Spoofing System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210883.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-5-5|PAPER Thu-M-V-5-5 — Fine-Grained Prosody Modeling in Neural Speech Synthesis Using ToBI Representation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fine-Grained Prosody Modeling in Neural Speech Synthesis Using ToBI Representation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212246.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-3-10|PAPER Thu-A-V-3-10 — Improved Speech Separation with Time-and-Frequency Cross-Domain Feature Selection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improved Speech Separation with Time-and-Frequency Cross-Domain Feature Selection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210812.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-3|PAPER Wed-M-V-4-3 — X-net: A Joint Scale Down and Scale Up Method for Voice Call]]</div>|^<div class="cpauthorindexpersoncardpapertitle">X-net: A Joint Scale Down and Scale Up Method for Voice Call</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210817.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-11|PAPER Fri-M-V-6-11 — Auto-KWS 2021 Challenge: Task, Datasets, and Baselines]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Auto-KWS 2021 Challenge: Task, Datasets, and Baselines</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210238.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-6|PAPER Tue-M-V-3-6 — Know Your Enemy, Know Yourself: A Unified Two-Stage Framework for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Know Your Enemy, Know Yourself: A Unified Two-Stage Framework for Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210259.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-7|PAPER Tue-M-V-3-7 — Speech Enhancement with Weakly Labelled Data from AudioSet]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Enhancement with Weakly Labelled Data from AudioSet</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210171.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-4-4|PAPER Tue-M-V-4-4 — Dialogue Situation Recognition for Everyday Conversation Using Multimodal Information]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Dialogue Situation Recognition for Everyday Conversation Using Multimodal Information</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210381.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-4-5|PAPER Tue-M-V-4-5 — Neural Spoken-Response Generation Using Prosodic and Linguistic Context for Conversational Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Neural Spoken-Response Generation Using Prosodic and Linguistic Context for Conversational Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211503.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-2-8|PAPER Wed-M-V-2-8 — Speech Representation Learning Combining Conformer CPC with Deep Cluster for the ZeroSpeech Challenge 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Representation Learning Combining Conformer CPC with Deep Cluster for the ZeroSpeech Challenge 2021</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211131.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-6|PAPER Thu-A-SS-1-6 — Toward Streaming ASR with Non-Autoregressive Insertion-Based Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Toward Streaming ASR with Non-Autoregressive Insertion-Based Model</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211556.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-9|PAPER Thu-A-SS-1-9 — Streaming End-to-End ASR Based on Blockwise Non-Autoregressive Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Streaming End-to-End ASR Based on Blockwise Non-Autoregressive Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210584.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-6|PAPER Fri-A-V-5-6 — Adaptive Text to Speech for Spontaneous Style]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adaptive Text to Speech for Spontaneous Style</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210184.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-O-1-1|PAPER Thu-A-O-1-1 — Towards the Prediction of the Vocal Tract Shape from the Sequence of Phonemes to be Articulated]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards the Prediction of the Vocal Tract Shape from the Sequence of Phonemes to be Articulated</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211297.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-8|PAPER Thu-M-V-1-8 — Parental Spoken Scaffolding and Narrative Skills in Crowd-Sourced Storytelling Samples of Young Children]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Parental Spoken Scaffolding and Narrative Skills in Crowd-Sourced Storytelling Samples of Young Children</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218007.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-S&T-1-7|PAPER Tue-A-S&T-1-7 — Advanced Semi-Blind Speaker Extraction and Tracking Implemented in Experimental Device with Revolving Dense Microphone Array]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Advanced Semi-Blind Speaker Extraction and Tracking Implemented in Experimental Device with Revolving Dense Microphone Array</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211572.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-8|PAPER Thu-A-SS-2-8 — Tackling the ADRESSO Challenge 2021: The MUET-RMIT System for Alzheimer’s Dementia Recognition from Spontaneous Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Tackling the ADRESSO Challenge 2021: The MUET-RMIT System for Alzheimer’s Dementia Recognition from Spontaneous Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218023.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-S&T-1-2|PAPER Fri-A-S&T-1-2 — Save Your Voice: Voice Banking and TTS for Anyone]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Save Your Voice: Voice Banking and TTS for Anyone</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218009.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-S&T-1-2|PAPER Wed-A-S&T-1-2 — Live TV Subtitling Through Respeaking]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Live TV Subtitling Through Respeaking</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211613.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-3-8|PAPER Tue-A-V-3-8 — Optimising Hearing Aid Fittings for Speech in Noise with a Differentiable Hearing Loss Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Optimising Hearing Aid Fittings for Speech in Noise with a Differentiable Hearing Loss Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210037.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-1|PAPER Tue-M-V-6-1 — Attention-Based Cross-Modal Fusion for Audio-Visual Voice Activity Detection in Musical Video Streams]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Attention-Based Cross-Modal Fusion for Audio-Visual Voice Activity Detection in Musical Video Streams</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210586.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-8|PAPER Wed-M-V-6-8 — HMM-Free Encoder Pre-Training for Streaming RNN Transducer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">HMM-Free Encoder Pre-Training for Streaming RNN Transducer</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210894.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-9|PAPER Wed-E-V-3-9 — Emitting Word Timings with HMM-Free End-to-End System in Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Emitting Word Timings with HMM-Free End-to-End System in Automatic Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210883.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-5-5|PAPER Thu-M-V-5-5 — Fine-Grained Prosody Modeling in Neural Speech Synthesis Using ToBI Representation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fine-Grained Prosody Modeling in Neural Speech Synthesis Using ToBI Representation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210165.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-4|PAPER Wed-A-V-3-4 — A Deliberation-Based Joint Acoustic and Text Decoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Deliberation-Based Joint Acoustic and Text Decoder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211265.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-7|PAPER Wed-M-V-3-7 — Incorporating Cross-Speaker Style Transfer for Multi-Language Text-to-Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Incorporating Cross-Speaker Style Transfer for Multi-Language Text-to-Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211192.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-4-9|PAPER Fri-M-V-4-9 — LinearSpeech: Parallel Text-to-Speech with Linear Complexity]]</div>|^<div class="cpauthorindexpersoncardpapertitle">LinearSpeech: Parallel Text-to-Speech with Linear Complexity</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210060.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-4|PAPER Fri-A-SS-2-4 — Spectro-Temporal Deep Features for Disordered Speech Assessment and Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spectro-Temporal Deep Features for Disordered Speech Assessment and Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210168.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-6|PAPER Fri-A-SS-2-6 — Adversarial Data Augmentation for Disordered Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adversarial Data Augmentation for Disordered Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210316.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-4|PAPER Fri-M-V-1-4 — Acquisition of Prosodic Focus Marking by Three- to Six-Year-Old Children Learning Mandarin Chinese]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acquisition of Prosodic Focus Marking by Three- to Six-Year-Old Children Learning Mandarin Chinese</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210003.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-4-2|PAPER Fri-A-V-4-2 — Fusion of Embeddings Networks for Robust Combination of Text Dependent and Independent Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Fusion of Embeddings Networks for Robust Combination of Text Dependent and Independent Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212227.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-O-3-5|PAPER Wed-E-O-3-5 — Audio Retrieval with Natural Language Queries]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio Retrieval with Natural Language Queries</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210994.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-7-5|PAPER Thu-M-V-7-5 — Human-to-Human Conversation Dataset for Learning Fine-Grained Turn-Taking Action]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Human-to-Human Conversation Dataset for Learning Fine-Grained Turn-Taking Action</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211950.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-9|PAPER Tue-E-V-2-9 — Joint Online Multichannel Acoustic Echo Cancellation, Speech Dereverberation and Source Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Joint Online Multichannel Acoustic Echo Cancellation, Speech Dereverberation and Source Separation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211771.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-O-3-3|PAPER Tue-E-O-3-3 — Speaking Corona? Human and Machine Recognition of COVID-19 from Voice]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speaking Corona? Human and Machine Recognition of COVID-19 from Voice</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210478.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-8|PAPER Wed-A-V-3-8 — SpeechMoE: Scaling to Large Acoustic Models with Dynamic Routing Mixture of Experts]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SpeechMoE: Scaling to Large Acoustic Models with Dynamic Routing Mixture of Experts</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211965.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-5|PAPER Thu-A-V-6-5 — GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211160.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-4-11|PAPER Wed-A-V-4-11 — Far-Field Speaker Localization and Adaptive GLMB Tracking]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Far-Field Speaker Localization and Adaptive GLMB Tracking</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211640.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-2-10|PAPER Fri-M-V-2-10 — Speakers Coarticulate Less When Facing Real and Imagined Communicative Difficulties: An Analysis of Read and Spontaneous Speech from the LUCID Corpus]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speakers Coarticulate Less When Facing Real and Imagined Communicative Difficulties: An Analysis of Read and Spontaneous Speech from the LUCID Corpus</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210677.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-7|PAPER Tue-A-V-4-7 — Semi-Supervision in ASR: Sequential MixMatch and Factorized TTS-Based Augmentation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semi-Supervision in ASR: Sequential MixMatch and Factorized TTS-Based Augmentation</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210676.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-11|PAPER Fri-A-SS-2-11 — Conformer Parrotron: A Faster and Stronger End-to-End Speech Conversion and Recognition Model for Atypical Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Conformer Parrotron: A Faster and Stronger End-to-End Speech Conversion and Recognition Model for Atypical Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210948.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-6-3|PAPER Tue-A-V-6-3 — Adversarial Voice Conversion Against Neural Spoofing Detectors]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adversarial Voice Conversion Against Neural Spoofing Detectors</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210609.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-4-6|PAPER Fri-M-V-4-6 — A Neural-Network-Based Approach to Identifying Speakers in Novels]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Neural-Network-Based Approach to Identifying Speakers in Novels</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211092.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-4-7|PAPER Fri-M-V-4-7 — UnitNet-Based Hybrid Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">UnitNet-Based Hybrid Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210557.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-10|PAPER Tue-E-V-6-10 — Non-Parallel Any-to-Many Voice Conversion by Replacing Speaker Statistics]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Non-Parallel Any-to-Many Voice Conversion by Replacing Speaker Statistics</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210552.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-4|PAPER Wed-M-V-3-4 — Cross-Lingual Voice Conversion with Disentangled Universal Linguistic Representations]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Lingual Voice Conversion with Disentangled Universal Linguistic Representations</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210847.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-3|PAPER Fri-M-V-7-3 — Siamese Network with wav2vec Feature for Spoofing Speech Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Siamese Network with wav2vec Feature for Spoofing Speech Detection</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211983.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-5|PAPER Fri-M-V-3-5 — WeNet: Production Oriented Streaming and Non-Streaming End-to-End Speech Recognition Toolkit]]</div>|^<div class="cpauthorindexpersoncardpapertitle">WeNet: Production Oriented Streaming and Non-Streaming End-to-End Speech Recognition Toolkit</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210097.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-6-2|PAPER Fri-M-V-6-2 — Paraphrase Label Alignment for Voice Application Retrieval in Spoken Language Understanding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Paraphrase Label Alignment for Voice Application Retrieval in Spoken Language Understanding</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212137.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-1-8|PAPER Tue-M-V-1-8 — Phoneme-Aware and Channel-Wise Attentive Learning for Text Dependent Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Phoneme-Aware and Channel-Wise Attentive Learning for Text Dependent Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212171.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-1-1|PAPER Thu-M-SS-1-1 — Oriental Language Recognition (OLR) 2020: Summary and Analysis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Oriental Language Recognition (OLR) 2020: Summary and Analysis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211167.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-1-6|PAPER Thu-M-SS-1-6 — Additive Phoneme-Aware Margin Softmax Loss for Language Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Additive Phoneme-Aware Margin Softmax Loss for Language Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210771.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-5|PAPER Wed-M-V-3-5 — EfficientSing: A Chinese Singing Voice Synthesis System Using Duration-Free Acoustic Model and HiFi-GAN Vocoder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">EfficientSing: A Chinese Singing Voice Synthesis System Using Duration-Free Acoustic Model and HiFi-GAN Vocoder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212004.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-2-9|PAPER Fri-A-V-2-9 — Learning Mutual Correlation in Multimodal Transformer for Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Mutual Correlation in Multimodal Transformer for Speech Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211297.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-1-8|PAPER Thu-M-V-1-8 — Parental Spoken Scaffolding and Narrative Skills in Crowd-Sourced Storytelling Samples of Young Children]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Parental Spoken Scaffolding and Narrative Skills in Crowd-Sourced Storytelling Samples of Young Children</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211242.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-1|PAPER Tue-M-V-5-1 — End-to-End Spelling Correction Conditioned on Acoustic Feature for Code-Switching Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Spelling Correction Conditioned on Acoustic Feature for Code-Switching Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210794.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-SS-1-7|PAPER Tue-A-SS-1-7 — Continual Learning for Fake Audio Detection]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Continual Learning for Fake Audio Detection</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210930.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-5|PAPER Wed-M-V-4-5 — Half-Truth: A Partially Fake Audio Detection Dataset]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Half-Truth: A Partially Fake Audio Detection Dataset</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211367.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-1|PAPER Fri-M-V-3-1 — FSR: Accelerating the Inference Process of Transducer-Based Models by Applying Fast-Skip Regularization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">FSR: Accelerating the Inference Process of Transducer-Based Models by Applying Fast-Skip Regularization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211242.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-1|PAPER Tue-M-V-5-1 — End-to-End Spelling Correction Conditioned on Acoustic Feature for Code-Switching Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Spelling Correction Conditioned on Acoustic Feature for Code-Switching Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211367.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-1|PAPER Fri-M-V-3-1 — FSR: Accelerating the Inference Process of Transducer-Based Models by Applying Fast-Skip Regularization]]</div>|^<div class="cpauthorindexpersoncardpapertitle">FSR: Accelerating the Inference Process of Transducer-Based Models by Applying Fast-Skip Regularization</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212173.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-5-10|PAPER Wed-A-V-5-10 — Basis-MelGAN: Efficient Neural Vocoder Based on Audio Decomposition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Basis-MelGAN: Efficient Neural Vocoder Based on Audio Decomposition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212119.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-O-2-4|PAPER Wed-A-O-2-4 — Knowledge Distillation from Multi-Modality to Single-Modality for Person Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Knowledge Distillation from Multi-Modality to Single-Modality for Person Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212136.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-SS-1-8|PAPER Wed-A-SS-1-8 — The SJTU System for Short-Duration Speaker Verification Challenge 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The SJTU System for Short-Duration Speaker Verification Challenge 2021</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210555.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-1-3|PAPER Thu-M-O-1-3 — Multi-Encoder Learning and Stream Fusion for Transformer-Based End-to-End Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Encoder Learning and Stream Fusion for Transformer-Based End-to-End Automatic Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212100.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-10|PAPER Tue-M-V-2-10 — Speed up Training with Variable Length Inputs by Efficient Batching Strategies]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speed up Training with Variable Length Inputs by Efficient Batching Strategies</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212039.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-1|PAPER Thu-A-V-5-1 — Federated Learning with Dynamic Transformer for Text to Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Federated Learning with Dynamic Transformer for Text to Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210230.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-5|PAPER Tue-M-V-5-5 — IR-GAN: Room Impulse Response Generator for Far-Field Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">IR-GAN: Room Impulse Response Generator for Far-Field Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210037.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-1|PAPER Tue-M-V-6-1 — Attention-Based Cross-Modal Fusion for Audio-Visual Voice Activity Detection in Musical Video Streams]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Attention-Based Cross-Modal Fusion for Audio-Visual Voice Activity Detection in Musical Video Streams</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211351.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-6-6|PAPER Tue-A-V-6-6 — Enriching Source Style Transfer in Recognition-Synthesis Based Non-Parallel Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Enriching Source Style Transfer in Recognition-Synthesis Based Non-Parallel Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210819.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-3-5|PAPER Fri-A-V-3-5 — Extremely Low Footprint End-to-End ASR System for Smart Device]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Extremely Low Footprint End-to-End ASR System for Smart Device</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210506.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-9|PAPER Tue-E-V-6-9 — Two-Pathway Style Embedding for Arbitrary Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Two-Pathway Style Embedding for Arbitrary Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211265.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-3-7|PAPER Wed-M-V-3-7 — Incorporating Cross-Speaker Style Transfer for Multi-Language Text-to-Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Incorporating Cross-Speaker Style Transfer for Multi-Language Text-to-Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210504.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-5|PAPER Thu-M-V-3-5 — End-to-End Speech Separation Using Orthogonal Representation in Complex and Real Time-Frequency Domain]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Speech Separation Using Orthogonal Representation in Complex and Real Time-Frequency Domain</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211192.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-4-9|PAPER Fri-M-V-4-9 — LinearSpeech: Parallel Text-to-Speech with Linear Complexity]]</div>|^<div class="cpauthorindexpersoncardpapertitle">LinearSpeech: Parallel Text-to-Speech with Linear Complexity</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210482.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-4|PAPER Tue-E-V-1-4 — Variational Information Bottleneck Based Regularization for Speaker Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Variational Information Bottleneck Based Regularization for Speaker Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210387.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-6|PAPER Wed-A-V-3-6 — Deformable TDNN with Adaptive Receptive Fields for Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deformable TDNN with Adaptive Receptive Fields for Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210747.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-2|PAPER Thu-A-V-4-2 — Investigation of Spatial-Acoustic Features for Overlapping Speech Detection in Multiparty Meetings]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigation of Spatial-Acoustic Features for Overlapping Speech Detection in Multiparty Meetings</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211075.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-2|PAPER Tue-E-V-5-2 — Layer-Wise Fast Adaptation for End-to-End Multi-Accent Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Layer-Wise Fast Adaptation for End-to-End Multi-Accent Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212136.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-SS-1-8|PAPER Wed-A-SS-1-8 — The SJTU System for Short-Duration Speaker Verification Challenge 2021]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The SJTU System for Short-Duration Speaker Verification Challenge 2021</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210966.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-1-7|PAPER Tue-E-V-1-7 — AntVoice Neural Speaker Embedding System for FFSVC 2020]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AntVoice Neural Speaker Embedding System for FFSVC 2020</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211655.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-2-8|PAPER Tue-M-V-2-8 — Transformer-Based Acoustic Modeling for Streaming Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Transformer-Based Acoustic Modeling for Streaming Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211063.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-SS-1-5|PAPER Fri-M-SS-1-5 — The TNT Team System Descriptions of Cantonese and Mongolian for IARPA OpenASR20]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The TNT Team System Descriptions of Cantonese and Mongolian for IARPA OpenASR20</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211259.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-13|PAPER Thu-A-V-6-13 — speechocean762: An Open-Source Non-Native English Speech Corpus for Pronunciation Assessment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">speechocean762: An Open-Source Non-Native English Speech Corpus for Pronunciation Assessment</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211707.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-1-7|PAPER Tue-M-V-1-7 — Y-Vector: Multiscale Waveform Encoder for Speaker Embedding]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Y-Vector: Multiscale Waveform Encoder for Speaker Embedding</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211820.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-11|PAPER Fri-M-V-7-11 — An Empirical Study on Channel Effects for Synthetic Voice Spoofing Countermeasure Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">An Empirical Study on Channel Effects for Synthetic Voice Spoofing Countermeasure Systems</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211990.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-6-9|PAPER Tue-A-V-6-9 — Adversarially Learning Disentangled Speech Representations for Robust Multi-Factor Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Adversarially Learning Disentangled Speech Representations for Robust Multi-Factor Voice Conversion</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212121.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-1-13|PAPER Thu-A-SS-1-13 — VAENAR-TTS: Variational Auto-Encoder Based Non-AutoRegressive Text-to-Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">VAENAR-TTS: Variational Auto-Encoder Based Non-AutoRegressive Text-to-Speech Synthesis</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211452.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-8|PAPER Fri-M-V-7-8 — Voting for the Right Answer: Adversarial Defense for Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Voting for the Right Answer: Adversarial Defense for Speaker Verification</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210947.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-5-7|PAPER Fri-A-V-5-7 — Towards Multi-Scale Style Control for Expressive Speech Synthesis]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Towards Multi-Scale Style Control for Expressive Speech Synthesis</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211965.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-5|PAPER Thu-A-V-6-5 — GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211259.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-13|PAPER Thu-A-V-6-13 — speechocean762: An Open-Source Non-Native English Speech Corpus for Pronunciation Assessment]]</div>|^<div class="cpauthorindexpersoncardpapertitle">speechocean762: An Open-Source Non-Native English Speech Corpus for Pronunciation Assessment</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211232.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-4-4|PAPER Fri-M-V-4-4 — Improving Polyphone Disambiguation for Mandarin Chinese by Combining Mix-Pooling Strategy and Window-Based Attention]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Polyphone Disambiguation for Mandarin Chinese by Combining Mix-Pooling Strategy and Window-Based Attention</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211343.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-7-6|PAPER Fri-M-V-7-6 — Pairing Weak with Strong: Twin Models for Defending Against Adversarial Attack on Speaker Verification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Pairing Weak with Strong: Twin Models for Defending Against Adversarial Attack on Speaker Verification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211162.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-1|PAPER Tue-E-V-5-1 — Semantic Data Augmentation for End-to-End Mandarin Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Semantic Data Augmentation for End-to-End Mandarin Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210189.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-5-3|PAPER Thu-A-V-5-3 — Zero-Shot Text-to-Speech for Text-Based Insertion in Audio Narration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Zero-Shot Text-to-Speech for Text-Based Insertion in Audio Narration</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211280.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-1-5|PAPER Wed-M-V-1-5 — Exploring wav2vec 2.0 on Speaker Verification and Language Identification]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploring wav2vec 2.0 on Speaker Verification and Language Identification</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211668.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-8|PAPER Thu-A-V-2-8 — Exploring Targeted Universal Adversarial Perturbations to End-to-End ASR Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Exploring Targeted Universal Adversarial Perturbations to End-to-End ASR Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210687.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-6-11|PAPER Tue-E-V-6-11 — Cross-Lingual Voice Conversion with a Cycle Consistency Loss on Linguistic Representation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Lingual Voice Conversion with a Cycle Consistency Loss on Linguistic Representation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210734.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-3-11|PAPER Tue-M-V-3-11 — Self-Supervised Learning Based Phone-Fortified Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Self-Supervised Learning Based Phone-Fortified Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211017.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-10|PAPER Tue-A-V-4-10 — Improving RNN-T for Domain Scaling Using Semi-Supervised Training with Neural TTS]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving RNN-T for Domain Scaling Using Semi-Supervised Training with Neural TTS</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212075.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-14|PAPER Wed-E-V-3-14 — Minimum Word Error Rate Training with Language Model Fusion for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Minimum Word Error Rate Training with Language Model Fusion for End-to-End Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210102.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-2|PAPER Thu-A-V-2-2 — Large-Scale Pre-Training of End-to-End Multi-Talker ASR for Meeting Transcription with Single Distant Microphone]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Large-Scale Pre-Training of End-to-End Multi-Talker ASR for Meeting Transcription with Single Distant Microphone</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210161.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-3|PAPER Thu-A-V-2-3 — On Minimum Word Error Rate Training of the Hybrid Autoregressive Transducer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On Minimum Word Error Rate Training of the Hybrid Autoregressive Transducer</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211949.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-10|PAPER Thu-A-V-2-10 — Improving Multilingual Transformer Transducer Models by Reducing Language Confusions]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Multilingual Transformer Transducer Models by Reducing Language Confusions</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210101.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-2|PAPER Fri-A-V-1-2 — End-to-End Speaker-Attributed ASR with Transformer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Speaker-Attributed ASR with Transformer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211104.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-5-6|PAPER Tue-E-V-5-6 — The TAL System for the INTERSPEECH2021 Shared Task on Automatic Speech Recognition for Non-Native Childrens Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">The TAL System for the INTERSPEECH2021 Shared Task on Automatic Speech Recognition for Non-Native Childrens Speech</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212128.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-11|PAPER Thu-M-V-2-11 — Audio-Visual Multi-Talker Speech Recognition in a Cocktail Party]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio-Visual Multi-Talker Speech Recognition in a Cocktail Party</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210807.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-SS-1-5|PAPER Thu-M-SS-1-5 — Language Recognition Based on Unsupervised Pretrained Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Language Recognition Based on Unsupervised Pretrained Models</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210158.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-1-2|PAPER Thu-A-V-1-2 — Learning Fine-Grained Cross Modality Excitement for Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Fine-Grained Cross Modality Excitement for Speech Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210892.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-4-4|PAPER Wed-M-V-4-4 — WSRGlow: A Glow-Based Waveform Generative Model for Audio Super-Resolution]]</div>|^<div class="cpauthorindexpersoncardpapertitle">WSRGlow: A Glow-Based Waveform Generative Model for Audio Super-Resolution</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211148.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-6-6|PAPER Wed-E-V-6-6 — EMOVIE: A Mandarin Emotion Speech Dataset with a Simple Emotional Text-to-Speech Model]]</div>|^<div class="cpauthorindexpersoncardpapertitle">EMOVIE: A Mandarin Emotion Speech Dataset with a Simple Emotional Text-to-Speech Model</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211708.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-3|PAPER Wed-A-V-2-3 — Incorporating External POS Tagger for Punctuation Restoration]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Incorporating External POS Tagger for Punctuation Restoration</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210654.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-4-6|PAPER Tue-A-V-4-6 — A Comparison of Supervised and Unsupervised Pre-Training of End-to-End Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Comparison of Supervised and Unsupervised Pre-Training of End-to-End Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210721.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-5-7|PAPER Wed-M-V-5-7 — Relationships Between Perceptual Distinctiveness, Articulatory Complexity and Functional Load in Speech Communication]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Relationships Between Perceptual Distinctiveness, Articulatory Complexity and Functional Load in Speech Communication</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211082.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-1-9|PAPER Fri-M-V-1-9 — A Preliminary Study on Discourse Prosody Encoding in L1 and L2 English Spontaneous Narratives]]</div>|^<div class="cpauthorindexpersoncardpapertitle">A Preliminary Study on Discourse Prosody Encoding in L1 and L2 English Spontaneous Narratives</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210220.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-3|PAPER Wed-E-V-5-3 — Human Listening and Live Captioning: Multi-Task Training for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Human Listening and Live Captioning: Multi-Task Training for Speech Enhancement</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210142.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-1|PAPER Thu-M-V-3-1 — Ultra Fast Speech Separation Model with Teacher Student Learning]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Ultra Fast Speech Separation Model with Teacher Student Learning</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210338.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-3|PAPER Thu-M-V-3-3 — Continuous Speech Separation Using Speaker Inventory for Long Recording]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Continuous Speech Separation Using Speaker Inventory for Long Recording</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210921.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-9|PAPER Thu-M-V-3-9 — Investigation of Practical Aspects of Single Channel Speech Separation for ASR]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Investigation of Practical Aspects of Single Channel Speech Separation for ASR</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210102.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-2-2|PAPER Thu-A-V-2-2 — Large-Scale Pre-Training of End-to-End Multi-Talker ASR for Meeting Transcription with Single Distant Microphone]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Large-Scale Pre-Training of End-to-End Multi-Talker ASR for Meeting Transcription with Single Distant Microphone</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210750.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-3|PAPER Thu-A-V-4-3 — Target-Speaker Voice Activity Detection with Improved i-Vector Estimation for Unknown Number of Speaker]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Target-Speaker Voice Activity Detection with Improved i-Vector Estimation for Unknown Number of Speaker</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211397.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-6-4|PAPER Thu-A-V-6-4 — AISHELL-4: An Open Source Dataset for Speech Enhancement, Separation, Recognition and Speaker Diarization in Conference Scenario]]</div>|^<div class="cpauthorindexpersoncardpapertitle">AISHELL-4: An Open Source Dataset for Speech Enhancement, Separation, Recognition and Speaker Diarization in Conference Scenario</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210101.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-V-1-2|PAPER Fri-A-V-1-2 — End-to-End Speaker-Attributed ASR with Transformer]]</div>|^<div class="cpauthorindexpersoncardpapertitle">End-to-End Speaker-Attributed ASR with Transformer</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211969.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-SS-1-9|PAPER Tue-M-SS-1-9 — Multi-Attentive Detection of the Spider Monkey Whinny in the (Actual) Wild]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Multi-Attentive Detection of the Spider Monkey Whinny in the (Actual) Wild</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210430.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-3-11|PAPER Thu-M-V-3-11 — Generalized Spatio-Temporal RNN Beamformer for Target Speech Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Generalized Spatio-Temporal RNN Beamformer for Target Speech Separation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211983.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-3-5|PAPER Fri-M-V-3-5 — WeNet: Production Oriented Streaming and Non-Streaming End-to-End Speech Recognition Toolkit]]</div>|^<div class="cpauthorindexpersoncardpapertitle">WeNet: Production Oriented Streaming and Non-Streaming End-to-End Speech Recognition Toolkit</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210060.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-4|PAPER Fri-A-SS-2-4 — Spectro-Temporal Deep Features for Disordered Speech Assessment and Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Spectro-Temporal Deep Features for Disordered Speech Assessment and Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210289.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-9|PAPER Fri-A-SS-2-9 — Bayesian Parametric and Architectural Domain Adaptation of LF-MMI Trained TDNNs for Elderly and Dysarthric Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Bayesian Parametric and Architectural Domain Adaptation of LF-MMI Trained TDNNs for Elderly and Dysarthric Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211329.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-3-5|PAPER Tue-A-V-3-5 — Cancellation of Local Competing Speaker with Near-Field Localization for Distributed ad-hoc Sensor Network]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cancellation of Local Competing Speaker with Near-Field Localization for Distributed ad-hoc Sensor Network</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212006.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-SS-2-15|PAPER Fri-A-SS-2-15 — Analysis and Tuning of a Voice Assistant System for Dysfluent Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Analysis and Tuning of a Voice Assistant System for Dysfluent Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210592.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-6-5|PAPER Tue-M-V-6-5 — Audio-Visual Information Fusion Using Cross-Modal Teacher-Student Learning for Voice Activity Detection in Realistic Environments]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Audio-Visual Information Fusion Using Cross-Modal Teacher-Student Learning for Voice Activity Detection in Realistic Environments</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212127.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-3-6|PAPER Thu-M-O-3-6 — Training Hybrid Models on Noisy Transliterated Transcripts for Code-Switched Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Training Hybrid Models on Noisy Transliterated Transcripts for Code-Switched Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211775.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-3-9|PAPER Tue-E-V-3-9 — SUPERB: Speech Processing Universal PERformance Benchmark]]</div>|^<div class="cpauthorindexpersoncardpapertitle">SUPERB: Speech Processing Universal PERformance Benchmark</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210750.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-4-3|PAPER Thu-A-V-4-3 — Target-Speaker Voice Activity Detection with Improved i-Vector Estimation for Unknown Number of Speaker]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Target-Speaker Voice Activity Detection with Improved i-Vector Estimation for Unknown Number of Speaker</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210229.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-7-2|PAPER Thu-M-V-7-2 — Injecting Descriptive Meta-Information into Pre-Trained Language Models with Hypernetworks]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Injecting Descriptive Meta-Information into Pre-Trained Language Models with Hypernetworks</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210187.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-2-2|PAPER Fri-M-V-2-2 — Segmental Alignment of English Syllables with Singleton and Cluster Onsets]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Segmental Alignment of English Syllables with Singleton and Cluster Onsets</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210220.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-5-3|PAPER Wed-E-V-5-3 — Human Listening and Live Captioning: Multi-Task Training for Speech Enhancement]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Human Listening and Live Captioning: Multi-Task Training for Speech Enhancement</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210158.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-V-1-2|PAPER Thu-A-V-1-2 — Learning Fine-Grained Cross Modality Excitement for Speech Emotion Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Learning Fine-Grained Cross Modality Excitement for Speech Emotion Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211950.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-2-9|PAPER Tue-E-V-2-9 — Joint Online Multichannel Acoustic Echo Cancellation, Speech Dereverberation and Source Separation]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Joint Online Multichannel Acoustic Echo Cancellation, Speech Dereverberation and Source Separation</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211301.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-6-5|PAPER Tue-A-V-6-5 — TVQVC: Transformer Based Vector Quantized Variational Autoencoder with CTC Loss for Voice Conversion]]</div>|^<div class="cpauthorindexpersoncardpapertitle">TVQVC: Transformer Based Vector Quantized Variational Autoencoder with CTC Loss for Voice Conversion</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210556.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-O-1-5|PAPER Wed-M-O-1-5 — //LeBenchmark//: A Reproducible Framework for Assessing Self-Supervised Representation Learning from Speech]]</div>|^<div class="cpauthorindexpersoncardpapertitle">//LeBenchmark//: A Reproducible Framework for Assessing Self-Supervised Representation Learning from Speech</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210936.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-SS-1-3|PAPER Wed-E-SS-1-3 — Deep Noise Suppression with Non-Intrusive PESQNet Supervision Enabling the Use of Real Training Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Deep Noise Suppression with Non-Intrusive PESQNet Supervision Enabling the Use of Real Training Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210432.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-V-2-4|PAPER Thu-M-V-2-4 — Cross-Modal Knowledge Distillation Method for Automatic Cued Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Cross-Modal Knowledge Distillation Method for Automatic Cued Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211460.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-E-V-4-8|PAPER Tue-E-V-4-8 — Integrating Dialog History into End-to-End Spoken Language Understanding Systems]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Integrating Dialog History into End-to-End Spoken Language Understanding Systems</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210587.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-M-V-6-9|PAPER Wed-M-V-6-9 — Reducing Exposure Bias in Training Recurrent Neural Network Transducers]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Reducing Exposure Bias in Training Recurrent Neural Network Transducers</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211656.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-2-11|PAPER Wed-A-V-2-11 — Improving Customization of Neural Transducers by Mitigating Acoustic Mismatch of Synthesized Audio]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Improving Customization of Neural Transducers by Mitigating Acoustic Mismatch of Synthesized Audio</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210211.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-A-V-3-5|PAPER Wed-A-V-3-5 — On the Limit of English Conversational Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">On the Limit of English Conversational Speech Recognition</div> |
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211962.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-12|PAPER Wed-E-V-3-12 — 4-Bit Quantization of LSTM-Based Speech Recognition Models]]</div>|^<div class="cpauthorindexpersoncardpapertitle">4-Bit Quantization of LSTM-Based Speech Recognition Models</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212024.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-A-SS-2-12|PAPER Thu-A-SS-2-12 — Modular Multi-Modal Attention Network for Alzheimer’s Disease Detection Using Patient Audio and Language Data]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Modular Multi-Modal Attention Network for Alzheimer’s Disease Detection Using Patient Audio and Language Data</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210894.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Wed-E-V-3-9|PAPER Wed-E-V-3-9 — Emitting Word Timings with HMM-Free End-to-End System in Automatic Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Emitting Word Timings with HMM-Free End-to-End System in Automatic Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210438.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-M-V-5-3|PAPER Fri-M-V-5-3 — RW-Resnet: A Novel Speech Anti-Spoofing Model Using Raw Waveform]]</div>|^<div class="cpauthorindexpersoncardpapertitle">RW-Resnet: A Novel Speech Anti-Spoofing Model Using Raw Waveform</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210053.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-M-V-5-3|PAPER Tue-M-V-5-3 — Speech Acoustic Modelling Using Raw Source and Filter Components]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Speech Acoustic Modelling Using Raw Source and Filter Components</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218032.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Fri-A-S&T-1-7|PAPER Fri-A-S&T-1-7 — Duplex Conversation in Outbound Agent System]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Duplex Conversation in Outbound Agent System</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Tue-A-V-2-6|PAPER Tue-A-V-2-6 — Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cptablecelltopbottomspace2|k
|cpaidxauthortable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211623.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in author view}}</a> |^<div class="cpauthorindexpersoncardpapercode">[[Thu-M-O-3-2|PAPER Thu-M-O-3-2 — Acoustic Data-Driven Subword Modeling for End-to-End Speech Recognition]]</div>|^<div class="cpauthorindexpersoncardpapertitle">Acoustic Data-Driven Subword Modeling for End-to-End Speech Recognition</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cpconfinfotable|k
|^<a href="./IS2021/HTML/ABSBOOK.PDF#page1" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in confinfo view}}</a>|^Program and Abstract Book |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}}
</p></div>
<div class="cpcopyrightpage">{{$:/causal/publication/Copyright Statement}}</div>
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} ////
</span></p></div>
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} ////
</span></p></div>
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} ////
</span></p></div>
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} ////
</span></p></div>
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} ////
</span></p></div>
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} ////
</span></p></div>
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} ////
</span></p></div>
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} ////
</span></p></div>
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} ////
</span></p></div>
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} ////
</span></p></div>
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} ////
</span></p></div>
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} ////
</span></p></div>
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} ////
</span></p></div>
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} ////
</span></p></div>
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} ////
</span></p></div>
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} ////
</span></p></div>
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} ////
</span></p></div>
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} ////
</span></p></div>
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} ////
</span></p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Pablo Gimeno|AUTHOR Pablo Gimeno]], [[Alfonso Ortega|AUTHOR Alfonso Ortega]], [[Antonio Miguel|AUTHOR Antonio Miguel]], [[Eduardo Lleida|AUTHOR Eduardo Lleida]]
</p><p class="cpabstractcardaffiliationlist">Universidad de Zaragoza, Spain</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4359–4363
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we describe the ViVoLab speech activity detection (SAD) system submitted to the Fearless Steps Challenge Phase III. This series of challenges have proposed a number of speech processing task dealing with audio from Apollo space missions over the last few years. The focus in this edition is set on the generalisation capabilities of the systems, with new evaluation data from different channels. Our proposed submission is based on the use of the unsupervised representation learning paradigm, seeking to obtain a new and more discriminative audio representation than traditional perceptual features such as log Mel-filterbank energies. These new features are used to train different variations of a convolutional recurrent neural network (CRNN). Experimental results show that features learned via unsupervised learning provide a much more robust representation, significantly reducing the mismatch observed between development and evaluation partition results. Obtained results largely outperform the organisation baseline, achieving a DCF metric of 2.98% on the evaluation set and ranking third among all the participant teams.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tyler Vuong|AUTHOR Tyler Vuong]], [[Yangyang Xia|AUTHOR Yangyang Xia]], [[Richard M. Stern|AUTHOR Richard M. Stern]]
</p><p class="cpabstractcardaffiliationlist">Carnegie Mellon University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4364–4368
</span></p></div>
<div class="cpabstractcardabstract"><p>We describe a deep-learning-based system developed for the Fearless Steps Phase-03 Speech Activity Detection (SAD) challenge. The system includes both learnable spectro-temporal receptive fields (STRFs) and unconstrained 2-dimensional convolutional kernels in the first layer. Experiments show that the inclusion of learnable STRFs in the first layer increases the system’s robustness to additive noise. Additionally, we found that utilizing SpecAugment during training improves generalization on unseen data. By incorporating these enhancements and others our system achieved the best score in the official SAD challenge.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Seyyed Saeed Sarfjoo|AUTHOR Seyyed Saeed Sarfjoo]], [[Srikanth Madikeri|AUTHOR Srikanth Madikeri]], [[Petr Motlicek|AUTHOR Petr Motlicek]]
</p><p class="cpabstractcardaffiliationlist">Idiap Research Institute, Switzerland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4369–4373
</span></p></div>
<div class="cpabstractcardabstract"><p>To better model the contextual information and increase the generalization ability of the Speech Activity Detection (SAD) system, this paper leverages a multilingual Automatic Speech Recognition (ASR) system to perform SAD. Sequence-discriminative training of Acoustic Model (AM) using Lattice-Free Maximum Mutual Information (LF-MMI) loss function, effectively extracts the contextual information of the input acoustic frame. Multilingual AM training causes the robustness to noise and language variabilities. The index of maximum output posterior is considered as a frame-level speech/non-speech decision function. Majority voting and logistic regression are applied to fuse the language-dependent decisions. The multilingual ASR is trained on 18 languages of BABEL datasets and the built SAD is evaluated on 3 different languages. On out-of-domain datasets, the proposed SAD model shows significantly better performance with respect to baseline models. On the Ester2 dataset, without using any in-domain data, this model outperforms the WebRTC, phoneme recognizer based VAD (Phn_Rec), and Pyannote baselines (respectively by 7.1, 1.7, and 2.7% absolute) in Detection Error Rate (DetER) metrics. Similarly, on the LiveATC dataset, this model outperforms the WebRTC, Phn_Rec, and Pyannote baselines (respectively by 6.4, 10.0, and 3.7% absolutely) in DetER metrics.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jarrod Luckenbaugh|AUTHOR Jarrod Luckenbaugh]]^^1^^
, [[Samuel Abplanalp|AUTHOR Samuel Abplanalp]]^^2^^
, [[Rachel Gonzalez|AUTHOR Rachel Gonzalez]]^^3^^
, [[Daniel Fulford|AUTHOR Daniel Fulford]]^^2^^
, [[David Gard|AUTHOR David Gard]]^^3^^
, [[Carlos Busso|AUTHOR Carlos Busso]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Texas at Dallas, USA; ^^2^^Boston University, USA; ^^3^^San Francisco State University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4374–4378
</span></p></div>
<div class="cpabstractcardabstract"><p>Transfer learning is a promising approach to increase performance for many speech-based systems, including //voice activity detection// (VAD). Domain adaptation, a subfield of transfer learning, often improves model conditioning in the presence of a mismatch between train-test conditions. This study proposes a formulation for VAD based on the teacher-student training, where the teacher model, trained with clean data, transfers knowledge to the student model trained with a noisy, paired version of the corpus resembling the test conditions. The models leverage temporal information using //recurrent neural networks// (RNN), implemented with either //bidirectional long short term memory// (BLSTM) or the modern, continuous-state Hopfield network. We provide evidence that in-domain noise emulation for domain adaptation is viable under unconstrained audio channel conditions for VAD “in the wild.” Our application domain is in healthcare, where multimodal sensors, including microphones, from portable devices are used to automatically predict social isolation in patients affected by schizophrenia. We empirically show positive results for domain emulation when the training conditions are similar to the target domain. We also show that the Hopfield network outperforms our best BLSTM for VAD on real-world benchmarks.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Omid Ghahabi|AUTHOR Omid Ghahabi]], [[Volker Fischer|AUTHOR Volker Fischer]]
</p><p class="cpabstractcardaffiliationlist">EML Speech Technology, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4379–4382
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech Activity Detection (SAD), locating speech segments within an audio recording, is a main part of most speech technology applications. Robust SAD is usually more difficult in noisy conditions with varying signal-to-noise ratios (SNR). The Fearless Steps challenge has recently provided such data from the NASA Apollo-11 mission for different speech processing tasks including SAD. Most audio recordings are degraded by different kinds and levels of noise varying within and between channels. This paper describes the EML online algorithm for the most recent phase of this challenge. The proposed algorithm can be trained both in a supervised and unsupervised manner and assigns speech and non-speech labels at runtime approximately every 0.1 sec. The experimental results show a competitive accuracy on both development and evaluation datasets with a real-time factor of about 0.002 using a single CPU machine.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kuba Łopatka|AUTHOR Kuba Łopatka]], [[Katarzyna Kaszuba-Miotke|AUTHOR Katarzyna Kaszuba-Miotke]], [[Piotr Klinke|AUTHOR Piotr Klinke]], [[Paweł Trella|AUTHOR Paweł Trella]]
</p><p class="cpabstractcardaffiliationlist">Intel, Poland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4383–4387
</span></p></div>
<div class="cpabstractcardabstract"><p>Keyword spotting (KWS) is required to operate in device playback conditions in which the device itself plays interfering signals. We propose a new method to augment the training set and adapt the acoustic model to the playback environment. It is based on acoustic simulation which models the coupling between the device’s loudspeakers and microphones. The employed model involves frequency response of the device, as well as room impulse response and nonlinear distortions introduced in the playback path. Finally, we pass the simulated signals through Acoustic Echo Cancellation (AEC) to model the artifacts introduced by AEC algorithm. The proposed method reduces False Rejection Rate in device playback noise by 25–60% for a Time-Delay Neural Network-based KWS engine. It is shown that the introduction of device characteristics and nonlinear filtration is necessary to achieve improvement in playback conditions. The augmentation scheme is highly independent of the architecture of the KWS system.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Bolaji Yusuf|AUTHOR Bolaji Yusuf]], [[Alican Gok|AUTHOR Alican Gok]], [[Batuhan Gundogdu|AUTHOR Batuhan Gundogdu]], [[Murat Saraclar|AUTHOR Murat Saraclar]]
</p><p class="cpabstractcardaffiliationlist">Boğaziçi Üniversitesi, Turkey</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4388–4392
</span></p></div>
<div class="cpabstractcardabstract"><p>Recently, neural approaches to spoken content retrieval have become popular. However, they tend to be restricted in their vocabulary or in their ability to deal with imbalanced test settings. These restrictions limit their applicability in keyword search, where the set of queries is not known beforehand, and where the system should return not just whether an utterance contains a query but the exact location of any such occurrences. In this work, we propose a model directly optimized for keyword search. The model takes a query and an utterance as input and returns a sequence of probabilities for each frame of the utterance of the query having occurred in that frame. Experiments show that the proposed model not only outperforms similar end-to-end models on a task where the ratio of positive and negative trials is artificially balanced, but it is also able to deal with the far more challenging task of keyword search with its inherent imbalance. Furthermore, using our system to rescore the outputs an LVCSR-based keyword search system leads to significant improvements on the latter.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Danny Merkx|AUTHOR Danny Merkx]], [[Stefan L. Frank|AUTHOR Stefan L. Frank]], [[Mirjam Ernestus|AUTHOR Mirjam Ernestus]]
</p><p class="cpabstractcardaffiliationlist">Radboud Universiteit, The Netherlands</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4393–4397
</span></p></div>
<div class="cpabstractcardabstract"><p>This study addresses the question whether visually grounded speech recognition (VGS) models learn to capture sentence semantics without access to any prior linguistic knowledge. We produce synthetic and natural spoken versions of a well known semantic textual similarity database and show that our VGS model produces embeddings that correlate well with human semantic similarity judgements. Our results show that a model trained on a small image-caption database outperforms two models trained on much larger databases, indicating that database size is not all that matters. We also investigate the importance of having multiple captions per image and find that this is indeed helpful even if the total number of images is lower, suggesting that paraphrasing is a valuable learning signal. While the general trend in the field is to create ever larger datasets to train models on, our findings indicate other characteristics of the database can just as important.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jan Švec|AUTHOR Jan Švec]], [[Luboš Šmídl|AUTHOR Luboš Šmídl]], [[Josef V. Psutka|AUTHOR Josef V. Psutka]], [[Aleš Pražák|AUTHOR Aleš Pražák]]
</p><p class="cpabstractcardaffiliationlist">University of West Bohemia, Czechia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4398–4402
</span></p></div>
<div class="cpabstractcardabstract"><p>The paper describes a novel approach to Spoken Term Detection (STD) in large spoken archives using deep LSTM networks. The work is based on the previous approach of using Siamese neural networks for STD and naturally extends it to directly localize a spoken term and estimate its relevance score. The phoneme confusion network generated by a phoneme recognizer is processed by the deep LSTM network which projects each segment of the confusion network into an embedding space. The searched term is projected into the same embedding space using another deep LSTM network. The relevance score is then computed using a simple dot-product in the embedding space and calibrated using a sigmoid function to predict the probability of occurrence. The location of the searched term is then estimated from the sequence of output probabilities. The deep LSTM networks are trained in a self-supervised manner from paired recognition hypotheses on word and phoneme levels. The method is experimentally evaluated on MALACH data in English and Czech languages.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[François Buet|AUTHOR François Buet]], [[François Yvon|AUTHOR François Yvon]]
</p><p class="cpabstractcardaffiliationlist">LISN (UMR 9015), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4403–4407
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper studies the generation of intralingual closed captions from automatic speech transcripts, with the aim to assess techniques for multi-genre captioning. Captions and subtitles greatly vary in form and content depending on the programs genres and subtitling styles, resulting for instance in significantly different compression rates and lexical content. Borrowing ideas from the multi-domain machine translation literature, we implement and contrast several adaptation methods on a diverse set of programs broadcast on the French public TV. Our results show that such multi-domain adaption techniques are effective and help to improve our automatic subtitling system.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hideki Kawahara|AUTHOR Hideki Kawahara]]^^1^^
, [[Kohei Yatabe|AUTHOR Kohei Yatabe]]^^2^^
, [[Ken-Ichi Sakakibara|AUTHOR Ken-Ichi Sakakibara]]^^3^^
, [[Mitsunori Mizumachi|AUTHOR Mitsunori Mizumachi]]^^4^^
, [[Masanori Morise|AUTHOR Masanori Morise]]^^5^^
, [[Hideki Banno|AUTHOR Hideki Banno]]^^6^^
, [[Toshio Irino|AUTHOR Toshio Irino]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Wakayama University, Japan; ^^2^^Waseda University, Japan; ^^3^^HSUH, Japan; ^^4^^Kyutech, Japan; ^^5^^Meiji University, Japan; ^^6^^Meijo University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4853–4854
</span></p></div>
<div class="cpabstractcardabstract"><p>Objective measurements of speech data acquisition and presentation processes are crucial for assuring reproducibility and reusability of experimental results and acquired materials. We introduce setting and measurement examples of those conditions using an interactive and real-time acoustic measurement tool based on an extended time-stretched pulse. We also introduce supporting tools.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Daniel Tihelka|AUTHOR Daniel Tihelka]], [[Markéta Řezáčková|AUTHOR Markéta Řezáčková]], [[Martin Grůber|AUTHOR Martin Grůber]], [[Zdeněk Hanzlíček|AUTHOR Zdeněk Hanzlíček]], [[Jakub Vít|AUTHOR Jakub Vít]], [[Jindřich Matoušek|AUTHOR Jindřich Matoušek]]
</p><p class="cpabstractcardaffiliationlist">University of West Bohemia, Czechia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4855–4856
</span></p></div>
<div class="cpabstractcardabstract"><p>The paper describes the process of automatic building of a personalized TTS system. The system was primarily developed for people facing the threat of voice loss; however, it can be used by anyone who wants to save his/her voice for any reason. Regarding the target group of users, the whole system is designed to be as simple to use as possible while still being fully autonomous.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yang Zhang|AUTHOR Yang Zhang]], [[Evelina Bakhturina|AUTHOR Evelina Bakhturina]], [[Boris Ginsburg|AUTHOR Boris Ginsburg]]
</p><p class="cpabstractcardaffiliationlist">NVIDIA, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4857–4859
</span></p></div>
<div class="cpabstractcardabstract"><p>We introduce the NeMo Text Processing (NTP) toolkit — an open-source Python library for text normalization (TN) and inverse text normalization (ITN) based on weighted finite-state transducers (WFSTs). The English grammars provided within NTP can be seamlessly deployed to the C++ Sparrowhawk framework for production.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Corentin Hembise|AUTHOR Corentin Hembise]], [[Lucile Gelin|AUTHOR Lucile Gelin]], [[Morgane Daniel|AUTHOR Morgane Daniel]]
</p><p class="cpabstractcardaffiliationlist">Lalilo, France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4860–4861
</span></p></div>
<div class="cpabstractcardabstract"><p>Lalilo is a reading assistant intended to help kindergarten to second grade students to master their reading skills. Students progress at their own pace thanks to an adaptive learning system that differentiates instructions. Teachers can access data on their students’ progression. Among other exercises, a read-aloud exercise is provided for students to practice their reading. This exercise uses a reading mistake detection system based on speech recognition to offer automatic feedback on the child’s reading. Since speech recognition on children learning to read is highly challenging, we overcome potential inaccurate thus damageable feedback with an uncertainty estimation leading to a neutral feedback.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Manh Hung Nguyen|AUTHOR Manh Hung Nguyen]]^^1^^
, [[Vu Hoang|AUTHOR Vu Hoang]]^^1^^
, [[Tu Anh Nguyen|AUTHOR Tu Anh Nguyen]]^^1^^
, [[Trung H. Bui|AUTHOR Trung H. Bui]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^VinBrain, Vietnam; ^^2^^Independent Researcher, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4862–4863
</span></p></div>
<div class="cpabstractcardabstract"><p>We present a system that allows radiologists to edit the radiology report through their voices. This is a function in our bigger system at VinBrain LLC that uses AI algorithms to assist radiologists with chest x-ray diagnosis, the system can suggest the abnormalities, then bases on the radiologist’s confirmations or conclusions to automatically generate the report using predefined templates. We then allow the radiologist to freely edit the report using voice. The system combines two components, the first is the Speech Recognition System (SRS), and the second is the Natural Language Understanding System (NLUS) that executes the user’s command. The user can delete, modify or add an arbitrary whole sentence. In addition, we successfully developed an SRS for such a non-mainstream language as Vietnamese and adapted it for the radiology domain.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ke Shi|AUTHOR Ke Shi]], [[Kye Min Tan|AUTHOR Kye Min Tan]], [[Huayun Zhang|AUTHOR Huayun Zhang]], [[Siti Umairah Md. Salleh|AUTHOR Siti Umairah Md. Salleh]], [[Shikang Ni|AUTHOR Shikang Ni]], [[Nancy F. Chen|AUTHOR Nancy F. Chen]]
</p><p class="cpabstractcardaffiliationlist">A*STAR, Singapore</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4864–4865
</span></p></div>
<div class="cpabstractcardabstract"><p>We present //WittyKiddy//, a spoken language learning system for children, developed at the Institute for Infocomm Research (I2R), A*STAR, Singapore. Our system automatically evaluates a student’s oral proficiency by scoring pronunciation, fluency and intonation of a spoken utterance. We demonstrate the technical capabilities of the system via reading aloud exercises and oral cloze tests in English and Malay. Both quantitative and qualitative feedback are given to the student. Our work helps support multilingual education for children.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Chunxiang Jin|AUTHOR Chunxiang Jin]], [[Minghui Yang|AUTHOR Minghui Yang]], [[Zujie Wen|AUTHOR Zujie Wen]]
</p><p class="cpabstractcardaffiliationlist">Ant, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4866–4867
</span></p></div>
<div class="cpabstractcardabstract"><p>Intelligent outbound is a popular way to contact customers. The traditional outbound agents communicate with users in a simplex way. The user and the agent cannot speak at the same time, and the user cannot actively interrupt the conversation while the agent is playing audio generated by TTS. The traditional solution is based on the output of the VAD module, once the user voice is detected, the agent will immediately stop talking. However, the user sometimes expresses the short answer at will, not to interrupt the agent, and it will cause the agent to be frequently interrupted. In addition, when users say named entity nouns(numbers, locations, company names, etc), their speech speed is slow and the pause time between words is longer, and they may be interrupted by the agent unreasonably. We propose a method to identify user’s interruption requests and discontinuous expressions by analyzing the semantic information of the user’s utterance. As a result, fluency of the dialogue is improved.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sathvik Udupa|AUTHOR Sathvik Udupa]], [[Anwesha Roy|AUTHOR Anwesha Roy]], [[Abhayjeet Singh|AUTHOR Abhayjeet Singh]], [[Aravind Illa|AUTHOR Aravind Illa]], [[Prasanta Kumar Ghosh|AUTHOR Prasanta Kumar Ghosh]]
</p><p class="cpabstractcardaffiliationlist">Indian Institute of Science, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4868–4869
</span></p></div>
<div class="cpabstractcardabstract"><p>We release a web interface to visualise estimated articulatory movements in speech production from different modalities — acoustics and text. We allow the use of various trained models for this purpose. This tool also serves the purpose of comparing the predicted articulatory movements from different modalities and visually understanding the effect of noise in speech.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ross Cutler|AUTHOR Ross Cutler]], [[Ando Saabas|AUTHOR Ando Saabas]], [[Tanel Parnamaa|AUTHOR Tanel Parnamaa]], [[Markus Loide|AUTHOR Markus Loide]], [[Sten Sootla|AUTHOR Sten Sootla]], [[Marju Purin|AUTHOR Marju Purin]], [[Hannes Gamper|AUTHOR Hannes Gamper]], [[Sebastian Braun|AUTHOR Sebastian Braun]], [[Karsten Sorensen|AUTHOR Karsten Sorensen]], [[Robert Aichner|AUTHOR Robert Aichner]], [[Sriram Srinivasan|AUTHOR Sriram Srinivasan]]
</p><p class="cpabstractcardaffiliationlist">Microsoft, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4748–4752
</span></p></div>
<div class="cpabstractcardabstract"><p>The INTERSPEECH 2021 Acoustic Echo Cancellation Challenge is intended to stimulate research in the area of acoustic echo cancellation (AEC), which is an important part of speech enhancement and still a top issue in audio communication. Many recent AEC studies report good performance on synthetic datasets where the training and testing data may come from the same underlying distribution. However, AEC performance often degrades significantly on real recordings. Also, most of the conventional objective metrics such as echo return loss enhancement and perceptual evaluation of speech quality do not correlate well with subjective speech quality tests in the presence of background noise and reverberation found in realistic environments. In this challenge, we open source two large datasets to train AEC models under both single talk and double talk scenarios. These datasets consist of recordings from more than 5,000 real audio devices and human speakers in real environments, as well as a synthetic dataset. We also open source an online subjective test framework and provide an online objective metric service for researchers to quickly test their results. The winners of this challenge are selected based on the average Mean Opinion Score achieved across all different single talk and double talk scenarios.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Lukas Pfeifenberger|AUTHOR Lukas Pfeifenberger]]^^1^^
, [[Matthias Zoehrer|AUTHOR Matthias Zoehrer]]^^1^^
, [[Franz Pernkopf|AUTHOR Franz Pernkopf]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Evolve, Austria; ^^2^^Technische Universität Graz, Austria</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4753–4757
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper proposes the Cross-Domain Echo-Controller (CDEC), submitted to the Interspeech 2021 AEC-Challenge. The algorithm consists of three building blocks: (i) a Time-Delay Compensation (TDC) module, (ii) a frequency-domain block-based Acoustic Echo Canceler (AEC), and (iii) a Time-Domain Neural-Network (TD-NN) used as a post-processor. Our system achieves an overall MOS score of 3.80, while only using 2.1 million parameters at a system latency of 32ms.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shimin Zhang|AUTHOR Shimin Zhang]], [[Yuxiang Kong|AUTHOR Yuxiang Kong]], [[Shubo Lv|AUTHOR Shubo Lv]], [[Yanxin Hu|AUTHOR Yanxin Hu]], [[Lei Xie|AUTHOR Lei Xie]]
</p><p class="cpabstractcardaffiliationlist">Northwestern Polytechnical University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4758–4762
</span></p></div>
<div class="cpabstractcardabstract"><p>With the increasing demand for audio communication and online conference, ensuring the robustness of Acoustic Echo Cancellation (AEC) under the complicated acoustic scenario including noise, reverberation and nonlinear distortion has become a top issue. Although there have been some traditional methods that consider nonlinear distortion, they are still inefficient for echo suppression and the performance will be attenuated when noise is present. In this paper, we present a real-time AEC approach using complex neural network to better modeling the important phase information and frequency-time-LSTMs (F-T-LSTM), which scan both frequency and time axis, for better temporal modeling. Moreover, we utilize modified SI-SNR as cost function to make the model to have better echo cancellation and noise suppression (NS) performance. With only 1.4M parameters, the proposed approach outperforms the AEC-challenge baseline by 0.27 in terms of Mean Opinion Score (MOS).</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ernst Seidel|AUTHOR Ernst Seidel]], [[Jan Franzen|AUTHOR Jan Franzen]], [[Maximilian Strake|AUTHOR Maximilian Strake]], [[Tim Fingscheidt|AUTHOR Tim Fingscheidt]]
</p><p class="cpabstractcardaffiliationlist">Technische Universität Braunschweig, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4763–4767
</span></p></div>
<div class="cpabstractcardabstract"><p>In recent years, deep neural networks (DNNs) were studied as an alternative to traditional acoustic echo cancellation (AEC) algorithms. The proposed models achieved remarkable performance for the separate tasks of AEC and residual echo suppression (RES). A promising network topology is a fully convolutional recurrent network (FCRN) structure, which has already proven its performance on both noise suppression and AEC tasks, individually. However, the combination of AEC, postfiltering, and noise suppression to a single network typically leads to a noticeable decline in the quality of the near-end speech component due to the lack of a separate loss for echo estimation. In this paper, we propose a two-stage model (Y²-Net) which consists of two FCRNs, each with two inputs and one output (Y-Net). The first stage (AEC) yields an echo estimate, which — as a novelty for a DNN AEC model — is further used by the second stage to perform RES and noise suppression. While the subjective listening test of the Interspeech 2021 AEC Challenge mostly yielded results close to the baseline, the proposed method scored an average improvement of 0.46 points over the baseline on the blind testset in double-talk on the instrumental metric DECMOS, provided by the challenge organizers.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Renhua Peng|AUTHOR Renhua Peng]], [[Linjuan Cheng|AUTHOR Linjuan Cheng]], [[Chengshi Zheng|AUTHOR Chengshi Zheng]], [[Xiaodong Li|AUTHOR Xiaodong Li]]
</p><p class="cpabstractcardaffiliationlist">CAS, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4768–4772
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper describes a two-stage acoustic echo cancellation (AEC) and suppression framework for the INTERSPEECH2021 AEC Challenge. In the first stage, four parallel partitioned block frequency domain adaptive filters are used to cancel the linear echo components, where the far-end signal is delayed 0ms, 320ms, 640ms and 960ms for these four adaptive filters, respectively, thus a maximum 1280 ms time delay can be well handled in the blind test dataset. The error signal with minimum energy and its corresponding reference signal are chosen as the input for the second stage, where a gate complex convolutional recurrent neural network (GCCRN) is trained to further suppress the residual echo, late reverberation and environmental noise simultaneously. To improve the performance of GCCRN, we compress both the magnitude of the error signal and that of the far-end reference signal, and then the two compressed magnitudes are combined with the phase of the error signal to regenerate the complex spectra as the input features of GCCRN. Numerous experimental results show that the proposed framework is robust to the blind test dataset, and achieves a promising result with the P.808 evaluation.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Amir Ivry|AUTHOR Amir Ivry]], [[Israel Cohen|AUTHOR Israel Cohen]], [[Baruch Berdugo|AUTHOR Baruch Berdugo]]
</p><p class="cpabstractcardaffiliationlist">Technion, Israel</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4773–4777
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose a nonlinear acoustic echo cancellation system, which aims to model the echo path from the far-end signal to the near-end microphone in two parts. Inspired by the physical behavior of modern hands-free devices, we first introduce a novel neural network architecture that is specifically designed to model the nonlinear distortions these devices induce between receiving and playing the far-end signal. To account for variations between devices, we construct this network with trainable memory length and nonlinear activation functions that are not parameterized in advance, but are rather optimized during the training stage using the training data. Second, the network is succeeded by a standard adaptive linear filter that constantly tracks the echo path between the loudspeaker output and the microphone. During training, the network and filter are jointly optimized to learn the network parameters. This system requires 17 thousand parameters that consume 500 Million floating-point operations per second and 40 Kilo-bytes of memory. It also satisfies hands-free communication timing requirements on a standard neural processor, which renders it adequate for embedding on hands-free communication devices. Using 280 hours of real and synthetic data, experiments show advantageous performance compared to competing methods.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jordan R. Green|AUTHOR Jordan R. Green]]^^1^^
, [[Robert L. MacDonald|AUTHOR Robert L. MacDonald]]^^2^^
, [[Pan-Pan Jiang|AUTHOR Pan-Pan Jiang]]^^2^^
, [[Julie Cattiau|AUTHOR Julie Cattiau]]^^2^^
, [[Rus Heywood|AUTHOR Rus Heywood]]^^2^^
, [[Richard Cave|AUTHOR Richard Cave]]^^3^^
, [[Katie Seaver|AUTHOR Katie Seaver]]^^4^^
, [[Marilyn A. Ladewig|AUTHOR Marilyn A. Ladewig]]^^5^^
, [[Jimmy Tobin|AUTHOR Jimmy Tobin]]^^2^^
, [[Michael P. Brenner|AUTHOR Michael P. Brenner]]^^2^^
, [[Philip C. Nelson|AUTHOR Philip C. Nelson]]^^2^^
, [[Katrin Tomanek|AUTHOR Katrin Tomanek]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^MGH Institute of Health Professions, USA; ^^2^^Google, USA; ^^3^^MND Association, UK; ^^4^^MGH Institute of Health Professions, USA; ^^5^^Cerebral Palsy Associations of New York State, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4778–4782
</span></p></div>
<div class="cpabstractcardabstract"><p>This study evaluated the accuracy of personalized automatic speech recognition (ASR) for recognizing disordered speech from a large cohort of individuals with a wide range of underlying etiologies using an open vocabulary. The performance of these models was benchmarked relative to that of expert human transcribers and two different speaker-independent ASR models trained on typical speech. 432 individuals with self-reported disordered speech recorded at least 300 short phrases using a web-based application. Word error rates (WERs) were estimated for three different ASR models and for human transcribers. Metadata were collected to evaluate the potential impact of participants, atypical speech characteristics, and technical factors on recognition accuracy. Personalized models outperformed human transcribers with median and max recognition accuracy gains of 9% and 80%, respectively. The accuracies of personalized models were high (median WER: 4.6%) and better than those of speaker-independent models (median WER: 31%). The most significant improvements were for the most severely affected speakers. Low signal-to-noise ratio and fewer training utterances were associated with poor word recognition, even for speakers with mild speech impairments. Our results demonstrate the efficacy of personalized ASR models in recognizing a wide range of speech impairments and severities and using an open vocabulary.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shanqing Cai|AUTHOR Shanqing Cai]], [[Lisie Lillianfeld|AUTHOR Lisie Lillianfeld]], [[Katie Seaver|AUTHOR Katie Seaver]], [[Jordan R. Green|AUTHOR Jordan R. Green]], [[Michael P. Brenner|AUTHOR Michael P. Brenner]], [[Philip C. Nelson|AUTHOR Philip C. Nelson]], [[D. Sculley|AUTHOR D. Sculley]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4823–4827
</span></p></div>
<div class="cpabstractcardabstract"><p>Severe speech impairments limit the precision and range of producible speech sounds. As a result, generic automatic speech recognition (ASR) and keyword spotting (KWS) systems fail to accurately recognize the utterances produced by individuals with severe speech impairments. This paper describes an approach in a simple speech sound, namely isolated open vowel (/a/), is used in lieu of more motorically-demanding utterances. A neural network (NN) is trained to detect the isolated open vowel uttered by impaired speakers. The NN is trained with a two-phase approach. The pre-training phase uses samples from unimpaired speakers along with samples of background noises and unrelated speech; then the fine-tuning phase uses samples of vowel samples collected from individuals with speech impairments. This model can be built into an experimental mobile app to act as a switch that allows users to activate preconfigured actions such as alerting caregivers. Preliminary user testing indicates the vowel spotter has the potential to be a useful and flexible emergency communication channel for motor- and speech-impaired individuals.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zhehuai Chen|AUTHOR Zhehuai Chen]], [[Bhuvana Ramabhadran|AUTHOR Bhuvana Ramabhadran]], [[Fadi Biadsy|AUTHOR Fadi Biadsy]], [[Xia Zhang|AUTHOR Xia Zhang]], [[Youzheng Chen|AUTHOR Youzheng Chen]], [[Liyang Jiang|AUTHOR Liyang Jiang]], [[Fang Chu|AUTHOR Fang Chu]], [[Rohan Doshi|AUTHOR Rohan Doshi]], [[Pedro J. Moreno|AUTHOR Pedro J. Moreno]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4828–4832
</span></p></div>
<div class="cpabstractcardabstract"><p>Parrotron is an end-to-end personalizable model that enables many-to-one voice conversion (VC) and automated speech recognition (ASR) simultaneously for atypical speech. In this work, we present the next-generation Parrotron model with improvements in overall accuracy, training and inference speeds. The proposed architecture builds on the recent Conformer encoder comprising of convolution and attention layer based blocks used in ASR. We introduce architectural modifications that subsamples encoder activations to achieve speed-ups in training and inference. In order to jointly improve ASR and voice conversion quality, we show that this requires a corresponding upsampling after the Conformer encoder blocks. We provide an in-depth analysis on how the proposed approach can maximize the efficiency of a speech-to-speech conversion model in the context of atypical speech. Experiments on both many-to-one and one-to-one dysarthric speech conversion tasks show that we can achieve up to 7× speedup and 35% relative reduction in WER over the previous best Transformer Parrotron.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Robert L. MacDonald|AUTHOR Robert L. MacDonald]]^^1^^
, [[Pan-Pan Jiang|AUTHOR Pan-Pan Jiang]]^^1^^
, [[Julie Cattiau|AUTHOR Julie Cattiau]]^^1^^
, [[Rus Heywood|AUTHOR Rus Heywood]]^^1^^
, [[Richard Cave|AUTHOR Richard Cave]]^^2^^
, [[Katie Seaver|AUTHOR Katie Seaver]]^^3^^
, [[Marilyn A. Ladewig|AUTHOR Marilyn A. Ladewig]]^^4^^
, [[Jimmy Tobin|AUTHOR Jimmy Tobin]]^^1^^
, [[Michael P. Brenner|AUTHOR Michael P. Brenner]]^^1^^
, [[Philip C. Nelson|AUTHOR Philip C. Nelson]]^^1^^
, [[Jordan R. Green|AUTHOR Jordan R. Green]]^^5^^
, [[Katrin Tomanek|AUTHOR Katrin Tomanek]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Google, USA; ^^2^^MND Association, UK; ^^3^^MGH Institute of Health Professions, USA; ^^4^^Cerebral Palsy Associations of New York State, USA; ^^5^^MGH Institute of Health Professions, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4833–4837
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech samples from over 1000 individuals with impaired speech have been submitted for Project Euphonia, aimed at improving automated speech recognition systems for disordered speech. We provide an overview of the corpus, which recently passed 1 million utterances (>1300 hours), and review key lessons learned from this project. The reasoning behind decisions such as phrase set composition, prompted vs extemporaneous speech, metadata and data quality efforts are explained based on findings from both technical and user-facing research.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Eun Jung Yeo|AUTHOR Eun Jung Yeo]], [[Sunhee Kim|AUTHOR Sunhee Kim]], [[Minhwa Chung|AUTHOR Minhwa Chung]]
</p><p class="cpabstractcardaffiliationlist">Seoul National University, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4838–4842
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper proposes an automatic severity classification method for Korean dysarthric speech by using two types of phoneme-level pronunciation features. The first type is the percentage of correct phonemes, which consists of percentage of correct consonants, percentage of correct vowels, and percentage of total correct phonemes. The second type is related to the degree of vowel distortion, such as vowel space area, formant centralized ratio, vowel articulatory index, and F2-ratio. The baseline experiments use features from our previous study, consisting of MFCCs, voice quality features, and prosody features. Compared to the baseline, experiments including phoneme-level pronunciation features achieve a relative percentage increase of 32.55% and 33.84% in F1-score for support vector machine and feed-forward neural network classifiers, respectively. Our best performance reaches an F1-score of 77.38%, which is a relative percentage increase of 10.39% compared to the best previous results conducted on the Korean dysarthric QoLT corpus. Furthermore, with feature selection applied, all seven phoneme-level pronunciation features are chosen, accounting for the highest percentage of the selected feature set by both recursive feature elimination and extra trees classifier feature selection algorithms. Results indicate that phoneme-level pronunciation features are useful in enhancing the performance for automatic severity classification of dysarthric speech.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Subhashini Venugopalan|AUTHOR Subhashini Venugopalan]]^^1^^
, [[Joel Shor|AUTHOR Joel Shor]]^^2^^
, [[Manoj Plakal|AUTHOR Manoj Plakal]]^^1^^
, [[Jimmy Tobin|AUTHOR Jimmy Tobin]]^^1^^
, [[Katrin Tomanek|AUTHOR Katrin Tomanek]]^^1^^
, [[Jordan R. Green|AUTHOR Jordan R. Green]]^^3^^
, [[Michael P. Brenner|AUTHOR Michael P. Brenner]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Google, USA; ^^2^^Google, Japan; ^^3^^MGH Institute of Health Professions, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4843–4847
</span></p></div>
<div class="cpabstractcardabstract"><p>Automatic classification of disordered speech can provide an objective tool for identifying the presence and severity of a speech impairment. Classification approaches can also help identify hard-to-recognize speech samples to teach ASR systems about the variable manifestations of impaired speech. Here, we develop and compare different deep learning techniques to classify the intelligibility of disordered speech on selected phrases. We collected samples from a diverse set of 661 speakers with a variety of self-reported disorders speaking 29 words or phrases, which were rated by speech-language pathologists for their overall intelligibility using a five-point Likert scale. We then evaluated classifiers developed using 3 approaches: (1) a convolutional neural network (CNN) trained for the task, (2) classifiers trained on non-semantic speech representations from CNNs that used an unsupervised objective [1], and (3) classifiers trained on the acoustic (encoder) embeddings from an ASR system trained on typical speech [2]. We found that the ASR encoder’s embeddings considerably outperform the other two on detecting and classifying disordered speech. Further analysis shows that the ASR embeddings cluster speech by the spoken phrase, while the non-semantic embeddings cluster speech by speaker. Also, longer phrases are more indicative of intelligibility deficits than single words.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Vikramjit Mitra|AUTHOR Vikramjit Mitra]], [[Zifang Huang|AUTHOR Zifang Huang]], [[Colin Lea|AUTHOR Colin Lea]], [[Lauren Tooley|AUTHOR Lauren Tooley]], [[Sarah Wu|AUTHOR Sarah Wu]], [[Darren Botten|AUTHOR Darren Botten]], [[Ashwini Palekar|AUTHOR Ashwini Palekar]], [[Shrinath Thelapurath|AUTHOR Shrinath Thelapurath]], [[Panayiotis Georgiou|AUTHOR Panayiotis Georgiou]], [[Sachin Kajarekar|AUTHOR Sachin Kajarekar]], [[Jefferey Bigham|AUTHOR Jefferey Bigham]]
</p><p class="cpabstractcardaffiliationlist">Apple, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4848–4852
</span></p></div>
<div class="cpabstractcardabstract"><p>Dysfluencies and variations in speech pronunciation can severely degrade speech recognition performance, and for many individuals with moderate-to-severe speech disorders, voice operated systems do not work. Current speech recognition systems are trained primarily with data from fluent speakers and as a consequence do not generalize well to speech with dysfluencies such as sound or word repetitions, sound prolongations, or audible blocks. The focus of this work is on quantitative analysis of a consumer speech recognition system on individuals who stutter and production-oriented approaches for improving performance for common voice assistant tasks (i.e., “what is the weather?”). At baseline, this system introduces a significant number of insertion and substitution errors resulting in intended speech Word Error Rates (isWER) that are 13.64% worse (absolute) for individuals with fluency disorders. We show that by simply tuning the decoding parameters in an existing hybrid speech recognition system one can improve isWER by 24% (relative) for individuals with fluency disorders. Tuning these parameters translates to 3.6% better domain recognition and 1.7% better intent recognition relative to the default setup for the 18 study participants across all stuttering severities.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Michael Neumann|AUTHOR Michael Neumann]]^^1^^
, [[Oliver Roesler|AUTHOR Oliver Roesler]]^^1^^
, [[Jackson Liscombe|AUTHOR Jackson Liscombe]]^^1^^
, [[Hardik Kothare|AUTHOR Hardik Kothare]]^^1^^
, [[David Suendermann-Oeft|AUTHOR David Suendermann-Oeft]]^^1^^
, [[David Pautler|AUTHOR David Pautler]]^^1^^
, [[Indu Navar|AUTHOR Indu Navar]]^^2^^
, [[Aria Anvar|AUTHOR Aria Anvar]]^^2^^
, [[Jochen Kumm|AUTHOR Jochen Kumm]]^^3^^
, [[Raquel Norel|AUTHOR Raquel Norel]]^^4^^
, [[Ernest Fraenkel|AUTHOR Ernest Fraenkel]]^^5^^
, [[Alexander V. Sherman|AUTHOR Alexander V. Sherman]]^^6^^
, [[James D. Berry|AUTHOR James D. Berry]]^^6^^
, [[Gary L. Pattee|AUTHOR Gary L. Pattee]]^^7^^
, [[Jun Wang|AUTHOR Jun Wang]]^^8^^
, [[Jordan R. Green|AUTHOR Jordan R. Green]]^^6^^
, [[Vikram Ramanarayanan|AUTHOR Vikram Ramanarayanan]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Modality.AI, USA; ^^2^^Peter Cohen Foundation, USA; ^^3^^Pr3vent, USA; ^^4^^IBM, USA; ^^5^^MIT, USA; ^^6^^MGH Institute of Health Professions, USA; ^^7^^University of Nebraska, USA; ^^8^^University of Texas at Austin, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4783–4787
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose a cloud-based multimodal dialog platform for the remote assessment and monitoring of Amyotrophic Lateral Sclerosis (ALS) at scale. This paper presents our vision, technology setup, and an initial investigation of the efficacy of the various acoustic and visual speech metrics automatically extracted by the platform. 82 healthy controls and 54 people with ALS (pALS) were instructed to interact with the platform and completed a battery of speaking tasks designed to probe the acoustic, articulatory, phonatory, and respiratory aspects of their speech. We find that multiple acoustic (rate, duration, voicing) and visual (higher order statistics of the jaw and lip) speech metrics show statistically significant differences between controls, bulbar symptomatic and bulbar pre-symptomatic patients. We report on the sensitivity and specificity of these metrics using five-fold cross-validation. We further conducted a LASSO-LARS regression analysis to uncover the relative contributions of various acoustic and visual features in predicting the severity of patients’ ALS (as measured by their self-reported ALSFRSR scores). Our results provide encouraging evidence of the utility of automatically extracted audiovisual analytics for scalable remote patient assessment and monitoring in ALS.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Enno Hermann|AUTHOR Enno Hermann]], [[Mathew Magimai-Doss|AUTHOR Mathew Magimai-Doss]]
</p><p class="cpabstractcardaffiliationlist">Idiap Research Institute, Switzerland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4788–4792
</span></p></div>
<div class="cpabstractcardabstract"><p>Developing automatic speech recognition (ASR) systems that recognise dysarthric speech as well as control speech from unimpaired speakers remains challenging. Including more highly variable dysarthric speech during training can also negatively affect the performance on control speakers, which is not desirable when developing speech recognisers for a wider audience. In this work, we analyse how the acoustic variability of dysarthric speech affects ASR systems and propose the combination of multiple acoustic models trained on different subsets of speakers to mitigate this effect. This approach shows improvements for both dysarthric and control speakers on the Torgo and UA-Speech corpora.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mengzhe Geng|AUTHOR Mengzhe Geng]]^^1^^
, [[Shansong Liu|AUTHOR Shansong Liu]]^^1^^
, [[Jianwei Yu|AUTHOR Jianwei Yu]]^^1^^
, [[Xurong Xie|AUTHOR Xurong Xie]]^^2^^
, [[Shoukang Hu|AUTHOR Shoukang Hu]]^^1^^
, [[Zi Ye|AUTHOR Zi Ye]]^^1^^
, [[Zengrui Jin|AUTHOR Zengrui Jin]]^^1^^
, [[Xunying Liu|AUTHOR Xunying Liu]]^^1^^
, [[Helen Meng|AUTHOR Helen Meng]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^CUHK, China; ^^2^^CAS, China; ^^3^^CUHK, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4793–4797
</span></p></div>
<div class="cpabstractcardabstract"><p>Automatic recognition of disordered speech remains a highly challenging task to date. Sources of variability commonly found in normal speech including accent, age or gender, when further compounded with the underlying causes of speech impairment and varying severity levels, create large diversity among speakers. To this end, speaker adaptation techniques play a vital role in current speech recognition systems. Motivated by the spectro-temporal level differences between disordered and normal speech that systematically manifest in articulatory imprecision, decreased volume and clarity, slower speaking rates and increased dysfluencies, novel spectro-temporal subspace basis embedding deep features derived by SVD decomposition of speech spectrum are proposed to facilitate both accurate speech intelligibility assessment and auxiliary feature based speaker adaptation of state-of-the-art hybrid DNN and end-to-end disordered speech recognition systems. Experiments conducted on the UASpeech corpus suggest the proposed spectro-temporal deep feature adapted systems consistently outperformed baseline i-Vector adaptation by up to 2.63% absolute (8.6% relative) reduction in word error rate (WER) with or without data augmentation. Learning hidden unit contribution (LHUC) based speaker adaptation was further applied. The final speaker adapted system using the proposed spectral basis embedding features gave an overall WER of 25.6% on the UASpeech test set of 16 dysarthric speakers.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sarah E. Gutz|AUTHOR Sarah E. Gutz]]^^1^^
, [[Hannah P. Rowe|AUTHOR Hannah P. Rowe]]^^2^^
, [[Jordan R. Green|AUTHOR Jordan R. Green]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Harvard University, USA; ^^2^^MGH Institute of Health Professions, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4798–4802
</span></p></div>
<div class="cpabstractcardabstract"><p>The increasing prevalence of face masks in the United States due to the COVID-19 pandemic necessitates serious consideration of the functional impact of wearing a mask on speech. This study considers how the presence of a KN95 mask affects the performance of a commercial ASR system, Google Cloud Speech. We present evidence that wearing a mask does not impact ASR performance at the sentence level. Moreover, speakers may be naturally adapting to the mask by increasing their vowel space area. However, when speakers intentionally altered their speech by speaking clearly or loudly (though not slowly), ASR performance improved. These findings suggest that ASR users can employ speech strategies to achieve better ASR results when wearing a mask. Beyond healthy speakers, our study has implications for mask-wearing ASR users with otherwise reduced speech intelligibility.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zengrui Jin|AUTHOR Zengrui Jin]]^^1^^
, [[Mengzhe Geng|AUTHOR Mengzhe Geng]]^^1^^
, [[Xurong Xie|AUTHOR Xurong Xie]]^^2^^
, [[Jianwei Yu|AUTHOR Jianwei Yu]]^^1^^
, [[Shansong Liu|AUTHOR Shansong Liu]]^^1^^
, [[Xunying Liu|AUTHOR Xunying Liu]]^^1^^
, [[Helen Meng|AUTHOR Helen Meng]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^CUHK, China; ^^2^^CAS, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4803–4807
</span></p></div>
<div class="cpabstractcardabstract"><p>Automatic recognition of disordered speech remains a highly challenging task to date. The underlying neuro-motor conditions, often compounded with co-occurring physical disabilities, lead to the difficulty in collecting large quantities of impaired speech required for ASR system development. To this end, data augmentation techniques play a vital role in current disordered speech recognition systems. In contrast to existing data augmentation techniques only modifying the speaking rate or overall shape of spectral contour, fine-grained spectro-temporal differences between disordered and normal speech are modelled using deep convolutional generative adversarial networks (DCGAN) during data augmentation to modify normal speech spectra into those closer to disordered speech. Experiments conducted on the UASpeech corpus suggest the proposed adversarial data augmentation approach consistently outperformed the baseline augmentation methods using tempo or speed perturbation on a state-of-the-art hybrid DNN system. An overall word error rate (WER) reduction up to 3.05% (9.7% relative) was obtained over the baseline system using no data augmentation. The final learning hidden unit contribution (LHUC) speaker adapted system using the best adversarial augmentation approach gives an overall WER of 25.89% on the UASpeech test set of 16 dysarthric speakers.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xurong Xie|AUTHOR Xurong Xie]]^^1^^
, [[Rukiye Ruzi|AUTHOR Rukiye Ruzi]]^^1^^
, [[Xunying Liu|AUTHOR Xunying Liu]]^^2^^
, [[Lan Wang|AUTHOR Lan Wang]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^CAS, China; ^^2^^CUHK, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4808–4812
</span></p></div>
<div class="cpabstractcardabstract"><p>Dysarthric speech recognition is a challenging task due to acoustic variability and limited amount of available data. Diverse conditions of dysarthric speakers account for the acoustic variability, which make the variability difficult to be modeled precisely. This paper presents a variational auto-encoder based variability encoder (VAEVE) to explicitly encode such variability for dysarthric speech. The VAEVE makes use of both phoneme information and low-dimensional latent variable to reconstruct the input acoustic features, thereby the latent variable is forced to encode the phoneme-independent variability. Stochastic gradient variational Bayes algorithm is applied to model the distribution for generating variability encodings, which are further used as auxiliary features for DNN acoustic modeling. Experiment results conducted on the UASpeech corpus show that the VAEVE based variability encodings have complementary effect to the learning hidden unit contributions (LHUC) speaker adaptation. The systems using variability encodings consistently outperform the comparable baseline systems without using them, and obtain absolute word error rate (WER) reduction by up to 2.2% on dysarthric speech with “Very low” intelligibility level, and up to 2% on the “Mixed” type of dysarthric speech with diverse or uncertain conditions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Disong Wang|AUTHOR Disong Wang]]^^1^^
, [[Songxiang Liu|AUTHOR Songxiang Liu]]^^1^^
, [[Lifa Sun|AUTHOR Lifa Sun]]^^2^^
, [[Xixin Wu|AUTHOR Xixin Wu]]^^3^^
, [[Xunying Liu|AUTHOR Xunying Liu]]^^1^^
, [[Helen Meng|AUTHOR Helen Meng]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^CUHK, China; ^^2^^SpeechX, China; ^^3^^University of Cambridge, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4813–4817
</span></p></div>
<div class="cpabstractcardabstract"><p>Though significant progress has been made for the voice conversion (VC) of typical speech, VC for atypical speech, e.g., dysarthric and second-language (L2) speech, remains a challenge, since it involves correcting for atypical prosody while maintaining speaker identity. To address this issue, we propose a VC system with explicit prosodic modelling and deep speaker embedding (DSE) learning. First, a speech-encoder strives to extract robust phoneme embeddings from atypical speech. Second, a prosody corrector takes in phoneme embeddings to infer typical phoneme duration and pitch values. Third, a conversion model takes phoneme embeddings and typical prosody features as inputs to generate the converted speech, conditioned on the target DSE that is learned via speaker encoder or speaker adaptation. Extensive experiments demonstrate that speaker adaptation can achieve higher speaker similarity, and the speaker encoder based conversion model can greatly reduce dysarthric and non-native pronunciation patterns with improved speech intelligibility. A comparison of speech recognition results between the original dysarthric speech and converted speech show that absolute reduction of 47.6% character error rate (CER) and 29.3% word error rate (WER) can be achieved.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jiajun Deng|AUTHOR Jiajun Deng]]^^1^^
, [[Fabian Ritter Gutierrez|AUTHOR Fabian Ritter Gutierrez]]^^1^^
, [[Shoukang Hu|AUTHOR Shoukang Hu]]^^1^^
, [[Mengzhe Geng|AUTHOR Mengzhe Geng]]^^1^^
, [[Xurong Xie|AUTHOR Xurong Xie]]^^2^^
, [[Zi Ye|AUTHOR Zi Ye]]^^1^^
, [[Shansong Liu|AUTHOR Shansong Liu]]^^1^^
, [[Jianwei Yu|AUTHOR Jianwei Yu]]^^1^^
, [[Xunying Liu|AUTHOR Xunying Liu]]^^1^^
, [[Helen Meng|AUTHOR Helen Meng]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^CUHK, China; ^^2^^CAS, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4818–4822
</span></p></div>
<div class="cpabstractcardabstract"><p>Automatic recognition of elderly and disordered speech remains a highly challenging task to date. Such data is not only difficult to collect in large quantities, but also exhibits a significant mismatch against normal speech trained ASR systems. To this end, conventional deep neural network model adaptation approaches only consider parameter fine-tuning on limited target domain data. In this paper, a novel Bayesian parametric and neural architectural domain adaptation approach is proposed. Both the standard model parameters and architectural hyper-parameters (hidden layer L/R context offsets) of two lattice-free MMI (LF-MMI) factored TDNN systems separately trained using large quantities of normal speech from the English LibriSpeech and Cantonese SpeechOcean corpora were domain adapted to two tasks: a) 16-hour DementiaBank elderly speech corpus; and b) 14-hour CUDYS dysarthric speech database. A Bayesian differentiable architectural search (DARTS) super-network was designed to allow both efficient search over up to 7^^28^^ different TDNN structures during domain adaptation, and robust modelling of parameter uncertainty given limited target domain data. Absolute recognition error rate reductions of 1.82% and 2.93% (13.2% and 8.3% relative) were obtained over the baseline systems performing model parameter fine-tuning only. Consistent performance improvements were retained after data augmentation and learning hidden unit contribution (LHUC) based speaker adaptation was performed.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Daniel Korzekwa|AUTHOR Daniel Korzekwa]]^^1^^
, [[Jaime Lorenzo-Trueba|AUTHOR Jaime Lorenzo-Trueba]]^^2^^
, [[Thomas Drugman|AUTHOR Thomas Drugman]]^^2^^
, [[Shira Calamaro|AUTHOR Shira Calamaro]]^^2^^
, [[Bozena Kostek|AUTHOR Bozena Kostek]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Amazon, Poland; ^^2^^Amazon, UK; ^^3^^Gdansk University of Technology, Poland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4408–4412
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose a weakly-supervised model for word-level mispronunciation detection in non-native (L2) English speech. To train this model, phonetically transcribed L2 speech is not required and we only need to mark mispronounced words. The lack of phonetic transcriptions for L2 speech means that the model has to learn only from a weak signal of word-level mispronunciations. Because of that and due to the limited amount of mispronounced L2 speech, the model is more likely to overfit. To limit this risk, we train it in a multi-task setup. In the first task, we estimate the probabilities of word-level mispronunciation. For the second task, we use a phoneme recognizer trained on phonetically transcribed L1 speech that is easily accessible and can be automatically annotated. Compared to state-of-the-art approaches, we improve the accuracy of detecting word-level pronunciation errors in AUC metric by 30% on the GUT Isle Corpus of L2 Polish speakers, and by 21.5% on the Isle Corpus of L2 German and Italian speakers.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yu Qiao|AUTHOR Yu Qiao]], [[Wei Zhou|AUTHOR Wei Zhou]], [[Elma Kerz|AUTHOR Elma Kerz]], [[Ralf Schlüter|AUTHOR Ralf Schlüter]]
</p><p class="cpabstractcardaffiliationlist">RWTH Aachen University, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4453–4457
<a href="./IS2021/MEDIA/1402" class="externallinkbutton" target="_blank">{{$:/causal/ZIP Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>In recent years, automated approaches to assessing linguistic complexity in second language (L2) writing have made significant progress in gauging learner performance, predicting human ratings of the quality of learner productions, and benchmarking L2 development. In contrast, there is comparatively little work in the area of speaking, particularly with respect to fully automated approaches to assessing L2 spontaneous speech. While the importance of a well-performing ASR system is widely recognized, little research has been conducted to investigate the impact of its performance on subsequent automatic text analysis. In this paper, we focus on this issue and examine the impact of using a state-of-the-art ASR system for subsequent automatic analysis of linguistic complexity in spontaneously produced L2 speech. A set of 30 selected measures were considered, falling into four categories: syntactic, lexical, n-gram frequency, and information-theoretic measures. The agreement between the scores for these measures obtained on the basis of ASR-generated vs. manual transcriptions was determined through correlation analysis. A more differential effect of ASR performance on specific types of complexity measures when controlling for task type effects is also presented.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tomohiro Tanaka|AUTHOR Tomohiro Tanaka]], [[Ryo Masumura|AUTHOR Ryo Masumura]], [[Mana Ihori|AUTHOR Mana Ihori]], [[Akihiko Takashima|AUTHOR Akihiko Takashima]], [[Shota Orihashi|AUTHOR Shota Orihashi]], [[Naoki Makishima|AUTHOR Naoki Makishima]]
</p><p class="cpabstractcardaffiliationlist">NTT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4458–4462
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose a semi-supervised learning method for building end-to-end rich transcription-style automatic speech recognition (RT-ASR) systems from small-scale rich transcription-style and large-scale common transcription-style datasets. In spontaneous speech tasks, various speech phenomena such as fillers, word fragments, laughter and coughs, etc. are often included. While common transcriptions do not give special awareness to these phenomena, rich transcriptions explicitly convert them into special phenomenon tokens as well as textual tokens. In previous studies, the textual and phenomenon tokens were simultaneously estimated in an end-to-end manner. However, it is difficult to build accurate RT-ASR systems because large-scale rich transcription-style datasets are often unavailable. To solve this problem, our training method uses a limited rich transcription-style dataset and common transcription-style dataset simultaneously. The Key process in our semi-supervised learning is to convert the common transcription-style dataset into a pseudo-rich transcription-style dataset. To this end, we introduce style tokens which control phenomenon tokens are generated or not into transformer-based autoregressive modeling. We use this modeling for generating the pseudo-rich transcription-style datasets and for building RT-ASR system from the pseudo and original datasets. Our experiments on spontaneous ASR tasks showed the effectiveness of the proposed method.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ronald Cumbal|AUTHOR Ronald Cumbal]]^^1^^
, [[Birger Moell|AUTHOR Birger Moell]]^^1^^
, [[José Lopes|AUTHOR José Lopes]]^^2^^
, [[Olov Engwall|AUTHOR Olov Engwall]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^KTH, Sweden; ^^2^^Heriot-Watt University, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4463–4467
</span></p></div>
<div class="cpabstractcardabstract"><p>The performance of Automatic Speech Recognition (ASR) systems has constantly increased in state-of-the-art development. However, performance tends to decrease considerably in more challenging conditions (e.g., background noise, multiple speaker social conversations) and with more atypical speakers (e.g., children, non-native speakers or people with speech disorders), which signifies that general improvements do not necessarily transfer to applications that rely on ASR, e.g., educational software for younger students or language learners. In this study, we focus on the gap in performance between recognition results for native and non-native, read and spontaneous, Swedish utterances transcribed by different ASR services. We compare the recognition results using Word Error Rate and analyze the linguistic factors that may generate the observed transcription errors.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yang Zhang|AUTHOR Yang Zhang]]^^1^^
, [[Evelina Bakhturina|AUTHOR Evelina Bakhturina]]^^1^^
, [[Kyle Gorman|AUTHOR Kyle Gorman]]^^2^^
, [[Boris Ginsburg|AUTHOR Boris Ginsburg]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^NVIDIA, USA; ^^2^^CUNY Graduate Center, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4468–4472
</span></p></div>
<div class="cpabstractcardabstract"><p>Inverse text normalization (ITN) converts spoken-domain automatic speech recognition (ASR) output into written-domain text to improve the readability of the ASR output. Many state-of-the-art ITN systems use hand-written weighted finite-state transducer (WFST) grammars since this task has extremely low tolerance to unrecoverable errors. We introduce an open-source Python WFST-based library for ITN which enables a seamless path from development to production. We describe the specification of ITN grammar rules for English, but the library can be adapted for other languages. It can also be used for written-to-spoken text normalization. We evaluate the NeMo ITN library using a modified version of the Google Text normalization dataset.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Satsuki Naijo|AUTHOR Satsuki Naijo]], [[Akinori Ito|AUTHOR Akinori Ito]], [[Takashi Nose|AUTHOR Takashi Nose]]
</p><p class="cpabstractcardaffiliationlist">Tohoku University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4473–4477
</span></p></div>
<div class="cpabstractcardabstract"><p>The current Computer-Assisted Pronunciation Training (CAPT) system uses DNN-based speech recognition results to evaluate learner’s pronunciation with high accuracy when using many utterances for the evaluation. However, when we use only a few utterances, the accuracy of the CAPT system deteriorates. One reason for the deterioration is that the score calculated by a CAPT system is biased depending on the pronunciation difficulty of the sentences when using a small number of utterances. In this study, we developed a CAPT system that takes the sentence speakability (pronunciation difficulty of sentences) into account. As a result, the correlation coefficient between the human evaluation and the machine score was 0.46 in the conventional method, while it improved to 0.57 with the proposed method.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Naoyuki Kanda|AUTHOR Naoyuki Kanda]], [[Guoli Ye|AUTHOR Guoli Ye]], [[Yashesh Gaur|AUTHOR Yashesh Gaur]], [[Xiaofei Wang|AUTHOR Xiaofei Wang]], [[Zhong Meng|AUTHOR Zhong Meng]], [[Zhuo Chen|AUTHOR Zhuo Chen]], [[Takuya Yoshioka|AUTHOR Takuya Yoshioka]]
</p><p class="cpabstractcardaffiliationlist">Microsoft, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4413–4417
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents our recent effort on end-to-end speaker-attributed automatic speech recognition, which jointly performs speaker counting, speech recognition and speaker identification for monaural multi-talker audio. Firstly, we thoroughly update the model architecture that was previously designed based on a long short-term memory (LSTM)-based attention encoder decoder by applying transformer architectures. Secondly, we propose a speaker deduplication mechanism to reduce speaker identification errors in highly overlapped regions. Experimental results on the LibriSpeechMix dataset shows that the transformer-based architecture is especially good at counting the speakers and that the proposed model reduces the speaker-attributed word error rate by 47% over the LSTM-based baseline. Furthermore, for the LibriCSS dataset, which consists of real recordings of overlapped speech, the proposed model achieves concatenated minimum-permutation word error rates of 11.9% and 16.3% with and without target speaker profiles, respectively, both of which are the state-of-the-art results for LibriCSS with the monaural setting.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hagen Soltau|AUTHOR Hagen Soltau]], [[Mingqiu Wang|AUTHOR Mingqiu Wang]], [[Izhak Shafran|AUTHOR Izhak Shafran]], [[Laurent El Shafey|AUTHOR Laurent El Shafey]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4418–4422
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we describe novel components for extracting clinically relevant information from medical conversations which will be available as Google APIs. We describe a transformer-based Recurrent Neural Network Transducer (RNN-T) model tailored for long-form audio, which can produce rich transcriptions including speaker segmentation, speaker role labeling, punctuation and capitalization. On a representative test set, we compare performance of RNN-T models with different encoders, units and streaming constraints. Our transformer-based streaming model performs at about 20% WER on the ASR task, 6% WDER on the diarization task, 43% SER on periods, 52% SER on commas, 43% SER on question marks and 30% SER on capitalization. Our recognizer is paired with a confidence model that utilizes both acoustic and lexical features from the recognizer. The model performs at about 0.37 NCE. Finally, we describe a RNN-T based tagging model. The performance of the model depends on the ontologies, with F-scores of 0.90 for medications, 0.76 for symptoms, 0.75 for conditions, 0.76 for diagnosis, and 0.61 for treatments. While there is still room for improvement, our results suggest that these models are sufficiently accurate for practical applications.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jazmín Vidal|AUTHOR Jazmín Vidal]]^^1^^
, [[Cyntia Bonomi|AUTHOR Cyntia Bonomi]]^^1^^
, [[Marcelo Sancinetti|AUTHOR Marcelo Sancinetti]]^^1^^
, [[Luciana Ferrer|AUTHOR Luciana Ferrer]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^UBA, Argentina; ^^2^^UBA-CONICET ICC, Argentina</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4423–4427
</span></p></div>
<div class="cpabstractcardabstract"><p>In today’s globalized world being able to communicate in English is crucial to many people. Computer assisted pronunciation training (CAPT) systems can help students achieve English proficiency by providing an accessible way to practice, offering personalized feedback. However, phone-level pronunciation scoring is still a very challenging task, with performance far from that of human annotators. In this paper we compare and present results on the Spanish subset of the L2-ARCTIC corpus and the new Epa-DB database, both containing non-native English speech by native Spanish speakers and intended for the development of pronunciation scoring systems. We show the most frequent errors in each database and compare performance of a state-of-the-art goodness of pronunciation (GOP) system. Results show that both databases have similar error patterns and that performance is similar for most phones, despite differences in recording conditions. For the EpaDB database we also present an analysis of the errors per target phone. This study validates the EpaDB collection and annotations, providing initial results and contributing to the advancement of a challenging low-resource task.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xiaoshuo Xu|AUTHOR Xiaoshuo Xu]], [[Yueteng Kang|AUTHOR Yueteng Kang]], [[Songjun Cao|AUTHOR Songjun Cao]], [[Binghuai Lin|AUTHOR Binghuai Lin]], [[Long Ma|AUTHOR Long Ma]]
</p><p class="cpabstractcardaffiliationlist">Tencent, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4428–4432
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents an initial attempt to use self-supervised learning for Mispronunciation Detection. Unlike existing methods that use speech recognition corpus to train models, we exploit unlabeled data and utilize a self-supervised learning technique, Wav2vec 2.0, for pretraining. After the pretraining process, the training process only requires a little pronunciation-labeled data for finetuning. Formulating Mispronunciation Detection as a binary classification task, we add convolutional and pooling layers on the top of the pretrained model to detect mispronunciations of the given prompted texts within the alignment segmentations. The training process is simple and effective. Several experiments are conducted to validate the effectiveness of the pretrained method. Our approach outperforms existing methods on a public dataset L2-ARCTIC with a F1 value of 0.610.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shintaro Ando|AUTHOR Shintaro Ando]], [[Nobuaki Minematsu|AUTHOR Nobuaki Minematsu]], [[Daisuke Saito|AUTHOR Daisuke Saito]]
</p><p class="cpabstractcardaffiliationlist">University of Tokyo, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4433–4437
</span></p></div>
<div class="cpabstractcardabstract"><p>In L2 pronunciation, what kind of phonetic errors are more influential to intelligibility reduction? Teachers say that learners’ utterances become unintelligible when words are pronounced with such errors that make the words misidentified as others. In this paper, we focus on Japanese English (JE), where the number of phonemes of the L1 (Japanese) is much smaller than that of the L2 (American English, AE). Since learners often substitute L1 phonemes when speaking in L2, some words are expected to be pronounced not distinctively enough in JE, which may result in word misidentification. This implies that words of JE will exist phonetically closer to each other in a space where words are distributed. In this paper, lexical density analysis of JE and AE is carried out using acoustic word embeddings. Word productions in JE and AE, extracted from the ERJ corpus, are mapped as points in an acoustic word embedding space obtained by network training with the WSJ corpus. Experiments show that significantly higher density is found in JE than in AE and it is also found in poor learners than in good learners.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Binghuai Lin|AUTHOR Binghuai Lin]], [[Liyuan Wang|AUTHOR Liyuan Wang]]
</p><p class="cpabstractcardaffiliationlist">Tencent, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4438–4442
</span></p></div>
<div class="cpabstractcardabstract"><p>Automatic pronunciation assessment is commonly developed to evaluate pronunciation quality of second language (L2) learners. Traditional methods for automatic pronunciation assessment normally utilize speech features such as Goodness of pronunciation (GOP), which may not provide sufficient information for the pronunciation proficiency assessment [1]. In this paper, we propose a transfer learning method for automatic pronunciation assessment. We directly utilize the deep features from the acoustic model instead of traditional features such as GOP, and transfer the acoustic knowledge from ASR to a specific scoring module. The scoring module is designed to consider the relationship among different granularities in an utterance based on an attention mechanism. Only this module is updated for faster transfer and adaptation of various pronunciation assessment tasks. Experimental results based on the dataset recorded by Chinese English-as-second-language (ESL) learners and the Speechocean762 dataset demonstrate that the proposed method outperforms the traditional GOP-based baselines in Pearson correlation coefficient (PCC) and yields parameter-efficient transfer for different pronunciation assessment tasks.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Huayun Zhang|AUTHOR Huayun Zhang]], [[Ke Shi|AUTHOR Ke Shi]], [[Nancy F. Chen|AUTHOR Nancy F. Chen]]
</p><p class="cpabstractcardaffiliationlist">A*STAR, Singapore</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4443–4447
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech evaluation is an essential component in computer-assisted language learning (CALL). While speech evaluation on English has been popular, automatic speech scoring on low resource languages remains challenging. Work in this area has focused on monolingual specific designs and handcrafted features stemming from resource-rich languages like English. Such approaches are often difficult to generalize to other languages, especially if we also want to consider suprasegmental qualities such as rhythm. In this work, we examine three different languages that possess distinct rhythm patterns: English (stress-timed), Malay (syllable-timed), and Tamil (mora-timed). We exploit robust feature representations inspired by music processing and vector representation learning. Empirical validations show consistent gains for all three languages when predicting pronunciation, rhythm and intonation performance.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Linkai Peng|AUTHOR Linkai Peng]]^^1^^
, [[Kaiqi Fu|AUTHOR Kaiqi Fu]]^^1^^
, [[Binghuai Lin|AUTHOR Binghuai Lin]]^^2^^
, [[Dengfeng Ke|AUTHOR Dengfeng Ke]]^^1^^
, [[Jinsong Zhan|AUTHOR Jinsong Zhan]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^BLCU, China; ^^2^^Tencent, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4448–4452
</span></p></div>
<div class="cpabstractcardabstract"><p>Mispronunciation detection and diagnosis (MDD) technology is a key component of computer-assisted pronunciation training system (CAPT). The mainstream method is based on deep neural network automatic speech recognition. Unfortunately, the technique requires massive human-annotated speech recordings for training. Due to the huge variations in mother tongue, age, and proficiency level among second language learners, it is difficult to gather a large amount of matching data for acoustic model training, which greatly limits the model performance. In this paper, we explore the use of Self-Supervised Pretraining (SSP) model wav2vec2.0 for MDD tasks. SSP utilizes a large unlabelled dataset to learn general representation and can be applied in downstream tasks. We conduct experiments using two publicly available datasets (TIMIT, L2-arctic) and our best system achieves 60.44% f1-score. Moreover, our method is able to achieve 55.52% f1-score with 3 times less data, which demonstrates the effectiveness of SSP on MDD.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Fasih Haider|AUTHOR Fasih Haider]], [[Saturnino Luz|AUTHOR Saturnino Luz]]
</p><p class="cpabstractcardaffiliationlist">University of Edinburgh, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4478–4482
</span></p></div>
<div class="cpabstractcardabstract"><p>An approach to the categorization of voice samples according to emotions expressed by the speaker is proposed which uses Multi-Resolution Cochleagram (MRCG) and scalogram features in a novel way. Audio recordings from the EmoDB, EMOVO and Savee Data-sets are employed in training and testing of predictive models consisting of different sets of speech features. This study systematically evaluates the performance of the feature sets most commonly used in computational paralinguistic tasks (i.e. //emobase, eGeMAPS// and //ComParE//) in addition to MRCG- and scalogram-derived features and their fusion, across five different classifiers. The datasets used in this evaluation include speech in three different languages (German, Italian and English). MRCG features outperform the feature sets most commonly used in computational paralinguistic tasks, including //emobase, eGeMAPS// and //ComParE//, for the EmoDB (unweighted average recall, UAR = 59.15%) and SAVEE (UAR = 36.12%) datasets, while //eGeMAPS// provides the best overall UAR (33.84%) for the EMOVO dataset. A support vector machine (SVM) classifier yields the best UAR for EmoDB (80.05%) through fusion of //emobase, eGeMAPS, ComParE// and MRCG, and for EMOVO (40.31%), through fusion of //emobase//, //eGeMAPS// and //ComParE//. For SAVEE, random forests provide the best result (46.55%) using the //ComParE// feature set.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jiaxing Liu|AUTHOR Jiaxing Liu]], [[Yaodong Song|AUTHOR Yaodong Song]], [[Longbiao Wang|AUTHOR Longbiao Wang]], [[Jianwu Dang|AUTHOR Jianwu Dang]], [[Ruiguo Yu|AUTHOR Ruiguo Yu]]
</p><p class="cpabstractcardaffiliationlist">Tianjin University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4523–4527
</span></p></div>
<div class="cpabstractcardabstract"><p>With the development of speech emotion recognition (SER), dialogue-level SER (DSER) is more aligned with actual scenarios. In this paper, we propose a DSER approach that includes two stages of representation learning: intra-utterance representation learning and inter-utterance representation learning. In the intra-utterance representation learning stage, traditional convolutional neural network (CNN) has demonstrated great success. However, the basic design of a CNN restricts its ability to model the local and global information in the spectrogram. Therefore, we propose a novel local-global representation learning method for the intra-utterance stage. The local information is learned by a time-frequency convolutional neural network (TFCNN), which we published previously. Here, we propose a time-frequency capsule neural network (TFCap) to model global information that can extract more stable global time-frequency information directly from spectrograms. In the inter-utterance stage, a graph convolutional network (GCN) is introduced to explore the relations between utterances in a dialog. Our proposed methods were evaluated on the IEMOCAP database. The proposed time-frequency based method in the intra-utterance stage achieves an absolute increase of 9.35% compared to CNN. By integrating GCN in the inter-utterance stage, the proposed approach achieves an absolute increase of 4.05% compared to the model in the previous stage.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jiawang Liu|AUTHOR Jiawang Liu]], [[Haoxiang Wang|AUTHOR Haoxiang Wang]]
</p><p class="cpabstractcardaffiliationlist">SCUT, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4483–4487
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech emotion recognition (SER) plays an important role in human-machine interaction (HMI). Various methods have been proposed for the SER task. However, a common problem in most of the previous studies is some specific emotions are grossly misclassified. In this paper, we propose a novel SER framework aiming at discriminating the confusions by utilizing triplet loss and data augmentation to enforce a CNN-LSTM model to emphasize more on these emotions which are hard to be correctly classified. Ablation experiments demonstrate the effectiveness of the proposed framework. On Interactive Emotional Dyadic Motion Capture (IEMOCAP) dataset, our framework can achieve 79.52% of Weighted Accuracy (WA) and 78.30% of Unweighted Accuracy (UA). Compared to the other state-of-the-art models, our framework obtains more than 3.34% and 1.94% improvement on WA and UA respectively.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ruichen Li|AUTHOR Ruichen Li]], [[Jinming Zhao|AUTHOR Jinming Zhao]], [[Qin Jin|AUTHOR Qin Jin]]
</p><p class="cpabstractcardaffiliationlist">Renmin University of China, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4488–4492
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech emotion recognition faces the problem that most of the existing speech corpora are limited in scale and diversity due to the high annotation cost and label ambiguity. In this work, we explore the task of learning robust speech emotion representations based on large unlabeled speech data. Under a simple assumption that the internal emotional states across different modalities are similar, we propose a method called Multi-level Cross-modal Emotion Distillation (MCED), which trains the speech emotion model without any labeled speech emotion data by transferring emotion knowledge from a pretrained text emotion model. Extensive experiments on two benchmark datasets, IEMOCAP and MELD, show that our proposed MCED can help learn effective speech emotion representations which generalize well on downstream speech emotion recognition tasks.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Koichiro Ito|AUTHOR Koichiro Ito]], [[Takuya Fujioka|AUTHOR Takuya Fujioka]], [[Qinghua Sun|AUTHOR Qinghua Sun]], [[Kenji Nagamatsu|AUTHOR Kenji Nagamatsu]]
</p><p class="cpabstractcardaffiliationlist">Hitachi, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4493–4497
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we propose an audio-visual speech emotion recognition (AV-SER) that can suppress the disturbance from an identity attribute by disentangling an emotion attribute and an identity one. We developed a model that first disentangles both attributes for each modality. In order to achieve the disentanglement, we introduce a co-attention module to our model. Our model disentangles the emotion attribute by giving the identity attribute as conditional features to the module. Conversely, the identity attribute is also obtained with the emotion attribute as a condition. Our model then makes a prediction for each attribute from these disentangled features by considering both modalities. In addition, to ensure the disentanglement capacity of our model, we train the model with an identification task as the auxiliary task and an SER task as the primary task alternately, and we update only the part of parameters responsible for each task. The experimental result shows the effectiveness of our method with the wild CMU-MOSEI dataset.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Deboshree Bose|AUTHOR Deboshree Bose]], [[Vidhyasaharan Sethu|AUTHOR Vidhyasaharan Sethu]], [[Eliathamby Ambikairajah|AUTHOR Eliathamby Ambikairajah]]
</p><p class="cpabstractcardaffiliationlist">UNSW Sydney, Australia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4498–4502
</span></p></div>
<div class="cpabstractcardabstract"><p>It is common to represent emotional states as values on a set of numerical scales corresponding to attributes such as arousal and valence. Often these labels are obtained from multiple annotators who record their perception of emotion in terms of these attributes. Combining these multiple annotations by taking the mean, as is typical in affective computing systems ignores the inherent ambiguity in the labels. Recently it has been recognised that this ambiguity carries useful information and systems that employ distributions over the numerical scales to represent emotional states have been proposed. In this paper we show that the common and widespread assumption that this distribution is Gaussian may not be suitable since the underlying numerical scales are bounded. We then compare a range of well-known distributions defined on bounded domains to ascertain which of them would be the most suitable alternative. Statistical measures are proposed to enable quantifiable comparisons and the results are reported. All comparisons reported in the paper were carried out on the RECOLA dataset.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yuan Gao|AUTHOR Yuan Gao]], [[Jiaxing Liu|AUTHOR Jiaxing Liu]], [[Longbiao Wang|AUTHOR Longbiao Wang]], [[Jianwu Dang|AUTHOR Jianwu Dang]]
</p><p class="cpabstractcardaffiliationlist">Tianjin University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4503–4507
</span></p></div>
<div class="cpabstractcardabstract"><p>Due to the lack of sufficient speech emotional data, the recognition performance of existing speech emotion recognition (SER) approaches is relatively low and requires further improvement to meet the needs of real-life applications. For the problem of data scarcity, an increasingly popular solution is to transfer emotional information through pre-training models and extract additional features. However, the feature representation needs further compression because the training object of unsupervised learning is to reconstruct input, making the latent representation contain non-affective information. In this paper, we introduce deep metric learning to constrain the feature distribution of the pre-training model. Specifically, we propose a triplet loss to modify the representation extraction model as a pseudo-siamese network and achieve more efficient knowledge transfer for emotion recognition. Furthermore, we propose a gated fusion method to learn the connection of features extracted from the pre-training model and supervised feature extraction model. We conduct experiments on the common benchmarking dataset IEMOCAP to verify the performance of the proposed model. The experimental results demonstrate the advantages of our model, outperforming the unsupervised transfer learning system by 3.7% and 3.88% in weighted accuracy and unweighted accuracy, respectively.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xingyu Cai|AUTHOR Xingyu Cai]], [[Jiahong Yuan|AUTHOR Jiahong Yuan]], [[Renjie Zheng|AUTHOR Renjie Zheng]], [[Liang Huang|AUTHOR Liang Huang]], [[Kenneth Church|AUTHOR Kenneth Church]]
</p><p class="cpabstractcardaffiliationlist">Baidu, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4508–4512
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech emotion recognition (SER) classifies speech into emotion categories such as: //Happy, Angry, Sad// and //Neutral//. Recently, deep learning has been applied to the SER task. This paper proposes a multi-task learning (MTL) framework to simultaneously perform speech-to-text recognition and emotion classification, with an end-to-end deep neural model based on wav2vec-2.0. Experiments on the IEMOCAP benchmark show that the proposed method achieves the state-of-the-art performance on the SER task. In addition, an ablation study establishes the effectiveness of the proposed MTL framework.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Nadee Seneviratne|AUTHOR Nadee Seneviratne]], [[Carol Espy-Wilson|AUTHOR Carol Espy-Wilson]]
</p><p class="cpabstractcardaffiliationlist">University of Maryland at College Park, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4513–4517
</span></p></div>
<div class="cpabstractcardabstract"><p>Depression detection using vocal biomarkers is a highly researched area. Articulatory coordination features (ACFs) are developed based on the changes in neuromotor coordination due to psychomotor slowing, a key feature of Major Depressive Disorder. However findings of existing studies are mostly validated on a single database which limits the generalizability of results. Variability across different depression databases adversely affects the results in cross corpus evaluations (CCEs). We propose to develop a generalized classifier for depression detection using a dilated Convolutional Neural Network which is trained on ACFs extracted from two depression databases. We show that ACFs derived from Vocal Tract Variables (TVs) show promise as a robust set of features for depression detection. Our model achieves relative accuracy improvements of ~10% compared to CCEs performed on models trained on a single database. We extend the study to show that fusing TVs and Mel-Frequency Cepstral Coefficients can further improve the performance of this classifier.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yuhua Wang|AUTHOR Yuhua Wang]], [[Guang Shen|AUTHOR Guang Shen]], [[Yuezhu Xu|AUTHOR Yuezhu Xu]], [[Jiahang Li|AUTHOR Jiahang Li]], [[Zhengdao Zhao|AUTHOR Zhengdao Zhao]]
</p><p class="cpabstractcardaffiliationlist">Harbin Engineering University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4518–4522
</span></p></div>
<div class="cpabstractcardabstract"><p>Various studies have confirmed the necessity and benefits of leveraging multimodal features for SER, and the latest research results show that the temporal information captured by the transformer is very useful for improving multimodal speech emotion recognition. However, the dependency between different modalities and high-level temporal-feature learning using a deeper transformer is yet to be investigated. Thus, we propose a multimodal transformer with sharing weights for speech emotion recognition. The proposed network shares the weights across the modalities in each transformer layer to learn the correlation among multiple modalities. In addition, since the emotion contained in a speech generally include audio and text features, both of which have not only internal dependence but also mutual dependence, we design a deep multimodal attention mechanism to capture these two kinds of emotional dependence. We evaluated our model on the publicly available IEMOCAP dataset. The experimental results demonstrate that the proposed model yielded a promising result.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Gonçalo Mordido|AUTHOR Gonçalo Mordido]]^^1^^
, [[Matthijs Van keirsbilck|AUTHOR Matthijs Van keirsbilck]]^^2^^
, [[Alexander Keller|AUTHOR Alexander Keller]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^HPI, Germany; ^^2^^NVIDIA, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4528–4532
</span></p></div>
<div class="cpabstractcardabstract"><p>We demonstrate that 1×1-convolutions in 1D time-channel separable convolutions may be replaced by constant, sparse random ternary matrices with weights in -1, 0, +1. Such layers do not perform any multiplications and do not require training. Moreover, the matrices may be generated on the chip during computation and therefore do not require any memory access. With the same parameter budget, we can afford deeper and more expressive models, improving the Pareto frontiers of existing models on several tasks. For command recognition on Google Speech Commands v1, we improve the state-of-the-art accuracy from 97.21% to 97.41% at the same network size. Alternatively, we can lower the cost of existing models. For speech recognition on Librispeech, we halve the number of weights to be trained while only sacrificing about 1% of the floating-point baseline’s word error rate.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Varun Nagaraja|AUTHOR Varun Nagaraja]], [[Yangyang Shi|AUTHOR Yangyang Shi]], [[Ganesh Venkatesh|AUTHOR Ganesh Venkatesh]], [[Ozlem Kalinli|AUTHOR Ozlem Kalinli]], [[Michael L. Seltzer|AUTHOR Michael L. Seltzer]], [[Vikas Chandra|AUTHOR Vikas Chandra]]
</p><p class="cpabstractcardaffiliationlist">Facebook, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4573–4577
</span></p></div>
<div class="cpabstractcardabstract"><p>On-device speech recognition requires training models of different sizes for deploying on devices with various computational budgets. When building such different models, we can benefit from training them jointly to take advantage of the knowledge shared between them. Joint training is also efficient since it reduces the redundancy in the training procedure’s data handling operations. We propose a method for collaboratively training acoustic encoders of different sizes for speech recognition. We use a sequence transducer setup where different acoustic encoders share a common predictor and joiner modules. The acoustic encoders are also trained using co-distillation through an auxiliary task for frame level chenone prediction, along with the transducer loss. We perform experiments using the LibriSpeech corpus and demonstrate that the collaboratively trained acoustic encoders can provide up to a 11% relative improvement in the word error rate on both the test partitions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xiong Wang|AUTHOR Xiong Wang]]^^1^^
, [[Sining Sun|AUTHOR Sining Sun]]^^2^^
, [[Lei Xie|AUTHOR Lei Xie]]^^1^^
, [[Long Ma|AUTHOR Long Ma]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Northwestern Polytechnical University, China; ^^2^^Tencent, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4578–4582
</span></p></div>
<div class="cpabstractcardabstract"><p>End-to-end models are favored in automatic speech recognition (ASR) because of their simplified system structure and superior performance. Among these models, Transformer and Conformer have achieved state-of-the-art recognition accuracy in which self-attention plays a vital role in capturing important global information. However, the time and memory complexity of self-attention increases squarely with the length of the sentence. In this paper, a prob-sparse self-attention mechanism is introduced into Conformer to sparse the computing process of self-attention in order to accelerate inference speed and reduce space consumption. Specifically, we adopt a Kullback-Leibler divergence based sparsity measurement for each query to decide whether we compute the attention function on this query. By using the prob-sparse attention mechanism, we achieve impressively 8% to 45% inference speed-up and 15% to 45% memory usage reduction of the self-attention module of Conformer Transducer while maintaining the same level of error rate.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Titouan Parcollet|AUTHOR Titouan Parcollet]]^^1^^
, [[Mirco Ravanelli|AUTHOR Mirco Ravanelli]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^LIA (EA 4128), France; ^^2^^Mila, Canada</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4583–4587
</span></p></div>
<div class="cpabstractcardabstract"><p>Deep learning contributes to reaching higher levels of artificial intelligence. Due to its pervasive adoption, however, growing concerns on the environmental impact of this technology have been raised. In particular, the energy consumed at training and inference time by modern neural networks is far from being negligible and will increase even further due to the deployment of ever larger models.
This work investigates for the first time the carbon cost of end-to-end automatic speech recognition (ASR). First, it quantifies the amount of CO₂ emitted while training state-of-the-art (SOTA) ASR systems on a university-scale cluster. Then, it shows that a tiny performance improvement comes at an extremely high carbon cost. For instance, the conducted experiments reveal that a SOTA Transformer emits 50% of its total training CO₂ solely to achieve a final decrease of 0.3 in word error rate. With this study, we hope to raise awareness of this crucial topic, and we provide guidelines, insights, and estimates enabling researchers to better assess the environmental impact of training speech technologies.</p></div>
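For readers who want a rough sense of how such CO₂ figures are derived, a back-of-the-envelope estimate multiplies the energy drawn by the carbon intensity of the grid; every number below is a placeholder, not a figure from the paper.

```python
# Every number here is a placeholder, not a value reported in the paper.
gpu_power_kw   = 0.3    # average draw per GPU, kW
num_gpus       = 4
hours          = 120
pue            = 1.5    # data-centre power usage effectiveness
grid_intensity = 400    # gCO2 emitted per kWh, depends on the local energy mix

energy_kwh = gpu_power_kw * num_gpus * hours * pue
co2_kg = energy_kwh * grid_intensity / 1000
print(f"{energy_kwh:.0f} kWh -> about {co2_kg:.0f} kg of CO2")
```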
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mengli Cheng|AUTHOR Mengli Cheng]], [[Chengyu Wang|AUTHOR Chengyu Wang]], [[Jun Huang|AUTHOR Jun Huang]], [[Xiaobo Wang|AUTHOR Xiaobo Wang]]
</p><p class="cpabstractcardaffiliationlist">Alibaba, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4533–4537
</span></p></div>
<div class="cpabstractcardabstract"><p>Despite the rapid development of deep learning models, for real-world applications, building large-scale Automatic Speech Recognition (ASR) systems from scratch is still significantly challenging, mostly due to the time-consuming and financially-expensive process of annotating a large amount of audio data with transcripts. Although several self-supervised pre-training models have been proposed to learn speech representations, applying such models directly might be sub-optimal if more labeled, training data could be obtained without a large cost.
In this paper, we present VideoASR, a weakly supervised framework for constructing ASR systems from massive video data. As user-generated videos often contain human-speech audio roughly aligned with subtitles, we consider videos as an important knowledge source, and propose an effective approach to extract high-quality audio aligned with transcripts from videos based on text detection and Optical Character Recognition. The underlying ASR models can be fine-tuned to fit any domain-specific target training datasets after weakly supervised pre-training on automatically generated datasets. Extensive experiments show that VideoASR can easily produce state-of-the-art results on six public datasets for Mandarin speech recognition. In addition, the VideoASR framework has been deployed on the cloud to support various industrial-scale applications.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Byeonggeun Kim|AUTHOR Byeonggeun Kim]], [[Simyung Chang|AUTHOR Simyung Chang]], [[Jinkyu Lee|AUTHOR Jinkyu Lee]], [[Dooyong Sung|AUTHOR Dooyong Sung]]
</p><p class="cpabstractcardaffiliationlist">Qualcomm, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4538–4542
</span></p></div>
<div class="cpabstractcardabstract"><p>Keyword spotting is an important research field because it plays a key role in device wake-up and user interaction on smart devices. However, it is challenging to minimize errors while operating efficiently in devices with limited resources such as mobile phones. We present a //broadcasted residual learning// method to achieve high accuracy with small model size and computational load. Our method configures most of the residual functions as 1D temporal convolution while still allows 2D convolution together using a broadcasted-residual connection that expands temporal output to frequency-temporal dimension. This residual mapping enables the network to effectively represent useful audio features with much less computation than conventional convolutional neural networks. We also propose a novel network architecture, Broadcasting-residual network (BC-ResNet), based on broadcasted residual learning and describe how to scale up the model according to the target device’s resources. BC-ResNets achieve state-of-the-art 98.0% and 98.7% top-1 accuracy on Google speech command datasets v1 and v2, respectively, and consistently outperform previous approaches, using fewer computations and parameters.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Rupak Vignesh Swaminathan|AUTHOR Rupak Vignesh Swaminathan]], [[Brian King|AUTHOR Brian King]], [[Grant P. Strimel|AUTHOR Grant P. Strimel]], [[Jasha Droppo|AUTHOR Jasha Droppo]], [[Athanasios Mouchtaris|AUTHOR Athanasios Mouchtaris]]
</p><p class="cpabstractcardaffiliationlist">Amazon, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4543–4547
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose a simple yet effective method to compress an RNN-Transducer (RNN-T) through the well-known knowledge distillation paradigm. We show that the transducer’s encoder outputs naturally have a high entropy and contain rich information about acoustically similar word-piece confusions. This rich information is suppressed when combined with the lower entropy decoder outputs to produce the joint network logits. Consequently, we introduce an auxiliary loss to distill the encoder logits from a teacher transducer’s encoder, and explore training strategies where this encoder distillation works effectively. We find that tandem training of teacher and student encoders with an inplace encoder distillation outperforms the use of a pre-trained and static teacher transducer. We also report an interesting phenomenon we refer to as implicit distillation, that occurs when the teacher and student encoders share the same decoder. Our experiments show 5.37–8.4% relative word error rate reductions (WERR) on in-house test sets, and 5.05–6.18% relative WERRs on LibriSpeech test sets.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zhifu Gao|AUTHOR Zhifu Gao]]^^1^^
, [[Yiwu Yao|AUTHOR Yiwu Yao]]^^1^^
, [[Shiliang Zhang|AUTHOR Shiliang Zhang]]^^1^^
, [[Jun Yang|AUTHOR Jun Yang]]^^1^^
, [[Ming Lei|AUTHOR Ming Lei]]^^1^^
, [[Ian McLoughlin|AUTHOR Ian McLoughlin]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Alibaba, China; ^^2^^SIT, Singapore</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4548–4552
</span></p></div>
<div class="cpabstractcardabstract"><p>Recently, end-to-end (E2E) speech recognition has become popular, since it can integrate the acoustic, pronunciation and language models into a single neural network, which outperforms conventional models. Among E2E approaches, attention-based models, e.g. Transformer, have emerged as being superior. Such models have opened the door to deployment of ASR on smart devices, however they still suffer from requiring a large number of model parameters. We propose an extremely low footprint E2E ASR system for smart devices, to achieve the goal of satisfying resource constraints without sacrificing recognition accuracy. We design cross-layer weight sharing to improve parameter efficiency and further exploit model compression methods including sparsification and quantization, to reduce memory storage and boost decoding efficiency. We evaluate our approaches on the public AISHELL-1 and AISHELL-2 benchmarks. On the AISHELL-2 task, the proposed method achieves more than 10× compression (model size reduces from 248 to 24MB), at the cost of only minor performance loss (CER reduces from 6.49% to 6.92%).</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yuan Shangguan|AUTHOR Yuan Shangguan]], [[Rohit Prabhavalkar|AUTHOR Rohit Prabhavalkar]], [[Hang Su|AUTHOR Hang Su]], [[Jay Mahadeokar|AUTHOR Jay Mahadeokar]], [[Yangyang Shi|AUTHOR Yangyang Shi]], [[Jiatong Zhou|AUTHOR Jiatong Zhou]], [[Chunyang Wu|AUTHOR Chunyang Wu]], [[Duc Le|AUTHOR Duc Le]], [[Ozlem Kalinli|AUTHOR Ozlem Kalinli]], [[Christian Fuegen|AUTHOR Christian Fuegen]], [[Michael L. Seltzer|AUTHOR Michael L. Seltzer]]
</p><p class="cpabstractcardaffiliationlist">Facebook, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4553–4557
</span></p></div>
<div class="cpabstractcardabstract"><p>As speech-enabled devices such as smartphones and smart speakers become increasingly ubiquitous, there is growing interest in building automatic speech recognition (ASR) systems that can run directly on-device; end-to-end (E2E) speech recognition models such as recurrent neural network transducers and their variants have recently emerged as prime candidates for this task. Apart from being accurate and compact, such systems need to decode speech with low user-perceived latency (UPL), producing words as soon as they are spoken. This work examines the impact of various techniques — model architectures, training criteria, decoding hyperparameters, and endpointer parameters — on UPL. Our analyses suggest that measures of model size (parameters, input chunk sizes), or measures of computation (e.g., FLOPS, RTF) that reflect the model’s ability to process input frames are not always strongly correlated with observed UPL. Thus, conventional algorithmic latency measurements might be inadequate in accurately capturing latency observed when models are deployed on embedded devices. Instead, we find that factors affecting token emission latency, and endpointing behavior have a larger impact on UPL. We achieve the best trade-off between latency and word error rate when performing ASR jointly with endpointing, while utilizing the recently proposed alignment regularization mechanism.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jonathan Macoskey|AUTHOR Jonathan Macoskey]], [[Grant P. Strimel|AUTHOR Grant P. Strimel]], [[Jinru Su|AUTHOR Jinru Su]], [[Ariya Rastrow|AUTHOR Ariya Rastrow]]
</p><p class="cpabstractcardaffiliationlist">Amazon, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4558–4562
</span></p></div>
<div class="cpabstractcardabstract"><p>We introduce Amortized Neural Networks (AmNets), a compute cost- and latency-aware network architecture particularly well-suited for sequence modeling tasks. We apply AmNets to the Recurrent Neural Network Transducer (RNN-T) to reduce compute cost and latency for an automatic speech recognition (ASR) task. The AmNets RNN-T architecture enables the network to dynamically switch between encoder branches on a frame-by-frame basis. Branches are constructed with variable levels of compute cost and model capacity. Here, we achieve variable compute for two well-known candidate techniques: one using sparse pruning and the other using matrix factorization. Frame-by-frame switching is determined by an arbitrator network that requires negligible compute overhead. We present results using both architectures on LibriSpeech data and show that our proposed architecture can reduce inference cost by up to 45% and latency to nearly real-time without incurring a loss in accuracy.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Rami Botros|AUTHOR Rami Botros]], [[Tara N. Sainath|AUTHOR Tara N. Sainath]], [[Robert David|AUTHOR Robert David]], [[Emmanuel Guzman|AUTHOR Emmanuel Guzman]], [[Wei Li|AUTHOR Wei Li]], [[Yanzhang He|AUTHOR Yanzhang He]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4563–4567
</span></p></div>
<div class="cpabstractcardabstract"><p>Previous works on the Recurrent Neural Network-Transducer (RNN-T) models have shown that, under some conditions, it is possible to simplify its prediction network with little or no loss in recognition accuracy [1, 2, 3]. This is done by limiting the context size of previous labels and/or using a simpler architecture for its layers instead of LSTMs. The benefits of such changes include reduction in model size, faster inference and power savings, which are all useful for on-device applications.
In this work, we study ways to make the RNN-T decoder (prediction network + joint network) smaller and faster without degradation in recognition performance. Our prediction network performs a simple weighted averaging of the input embeddings, and shares its embedding matrix weights with the joint network’s output layer (a.k.a. weight tying, commonly used in language modeling [4]). This simple design, when used in conjunction with additional Edit-based Minimum Bayes Risk (EMBR) training, reduces the RNN-T Decoder from 23M parameters to just 2M, without affecting word-error rate (WER).</p></div>
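The decoder simplification described above can be sketched as below: the prediction network is a learned weighted average of the embeddings of the last few labels, and the joint network's output projection reuses the embedding matrix (weight tying). Dimensions, context size, and the joint structure are assumptions for illustration.

```python
import torch
import torch.nn as nn
import torch.nn.functional as F

class TinyRNNTDecoder(nn.Module):
    """Illustrative embedding-averaging prediction network with a tied output layer."""

    def __init__(self, vocab_size=4096, dim=320, context=2):
        super().__init__()
        self.embed = nn.Embedding(vocab_size, dim)
        self.context_weights = nn.Parameter(torch.ones(context) / context)  # learned averaging weights
        self.joint = nn.Linear(2 * dim, dim)

    def predict(self, prev_labels):              # prev_labels: (batch, context) label ids
        emb = self.embed(prev_labels)            # (batch, context, dim)
        w = torch.softmax(self.context_weights, dim=0)
        return (emb * w[None, :, None]).sum(dim=1)   # weighted average -> (batch, dim)

    def forward(self, enc_frame, prev_labels):   # enc_frame: (batch, dim)
        h = torch.tanh(self.joint(torch.cat([enc_frame, self.predict(prev_labels)], dim=-1)))
        # weight tying: the output layer reuses the embedding matrix
        return F.linear(h, self.embed.weight)    # (batch, vocab_size) logits

# usage: TinyRNNTDecoder()(torch.randn(2, 320), torch.randint(0, 4096, (2, 2))).shape == (2, 4096)
```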
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jangho Kim|AUTHOR Jangho Kim]]^^1^^
, [[Simyung Chang|AUTHOR Simyung Chang]]^^1^^
, [[Nojun Kwak|AUTHOR Nojun Kwak]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Qualcomm, Korea; ^^2^^Seoul National University, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4568–4572
</span></p></div>
<div class="cpabstractcardabstract"><p>As edge devices become prevalent, deploying Deep Neural Networks (DNN) on edge devices has become a critical issue. However, DNN requires a high computational resource which is rarely available for edge devices. To handle this, we propose a novel model compression method for the devices with limited computational resources, called //PQK// consisting of pruning, quantization, and knowledge distillation (KD) processes. Unlike traditional pruning and KD, PQK makes use of unimportant weights pruned in the pruning process to make a teacher network for training a better student network without pre-training the teacher model. PQK has two phases. Phase 1 exploits iterative pruning and quantization-aware training to make a lightweight and power-efficient model. In phase 2, we make a teacher network by adding unimportant weights unused in phase 1 to a pruned network. By using this teacher network, we train the pruned network as a student network. In doing so, we do not need a pre-trained teacher network for the KD framework because the teacher and the student networks coexist within the same network (See Fig. 1). We apply our method to the recognition model and verify the effectiveness of PQK on keyword spotting (KWS) and image recognition.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Long Chen|AUTHOR Long Chen]], [[Venkatesh Ravichandran|AUTHOR Venkatesh Ravichandran]], [[Andreas Stolcke|AUTHOR Andreas Stolcke]]
</p><p class="cpabstractcardaffiliationlist">Amazon, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4588–4592
</span></p></div>
<div class="cpabstractcardabstract"><p>Speaker identification in the household scenario (e.g., for smart speakers) is typically based on only a few enrollment utterances but a much larger set of unlabeled data, suggesting semi-supervised learning to improve speaker profiles. We propose a graph-based semi-supervised learning approach for speaker identification in the household scenario, to leverage the unlabeled speech samples. In contrast to most of the works in speaker recognition that focus on speaker-discriminative embeddings, this work focuses on speaker label inference (scoring). Given a pre-trained embedding extractor, graph-based learning allows us to integrate information about both labeled and unlabeled utterances. Considering each utterance as a graph node, we represent pairwise utterance similarity scores as edge weights. Graphs are constructed per household, and speaker identities are propagated to unlabeled nodes to optimize a global consistency criterion. We show in experiments on the VoxCeleb dataset that this approach makes effective use of unlabeled data and improves speaker identification accuracy compared to two state-of-the-art scoring methods as well as their semi-supervised variants based on pseudo-labels.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Dexin Liao|AUTHOR Dexin Liao]], [[Jing Li|AUTHOR Jing Li]], [[Yiming Zhi|AUTHOR Yiming Zhi]], [[Song Li|AUTHOR Song Li]], [[Qingyang Hong|AUTHOR Qingyang Hong]], [[Lin Li|AUTHOR Lin Li]]
</p><p class="cpabstractcardaffiliationlist">Xiamen University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4633–4637
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we present the XMUSPEECH system for Task 1 of 2020 Personalized Voice Trigger Challenge (PVTC2020). Task 1 is a joint wake-up word detection with speaker verification on close talking data. The whole system consists of a keyword spotting (KWS) sub-system and a speaker verification (SV) sub-system. For the KWS system, we applied a Temporal Depthwise Separable Convolution Residual Network (TDSC-ResNet) to improve the system’s performance. For the SV system, we proposed a multi-task learning network, where phonetic branch is trained with the character label of the utterance, and speaker branch is trained with the label of the speaker. Phonetic branch is optimized with connectionist temporal classification (CTC) loss, which is treated as an auxiliary module for speaker branch. Experiments show that our system gets significant improvements compared with baseline system.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jiachen Lian|AUTHOR Jiachen Lian]], [[Aiswarya Vinod Kumar|AUTHOR Aiswarya Vinod Kumar]], [[Hira Dhamyal|AUTHOR Hira Dhamyal]], [[Bhiksha Raj|AUTHOR Bhiksha Raj]], [[Rita Singh|AUTHOR Rita Singh]]
</p><p class="cpabstractcardaffiliationlist">Carnegie Mellon University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4638–4642
</span></p></div>
<div class="cpabstractcardabstract"><p>Open-set speaker recognition can be regarded as a metric learning problem, which is to maximize inter-class variance and minimize intra-class variance. Supervised metric learning can be categorized into pair-based learning and proxy-based learning [1]. Most of the existing metric learning objectives belong to the former division, the performance of which is either highly dependent on sample mining strategy or restricted by insufficient label information in the mini-batch. Proxy-based losses mitigate both shortcomings, however, fine-grained connections among entities are either not or indirectly leveraged. This paper proposes a Masked Proxy (MP) loss which directly incorporates both proxy-based relationship and pair-based relationship. We further propose Multinomial Masked Proxy (MMP) loss to leverage the hardness of speaker pairs. These methods have been applied to evaluate on VoxCeleb test set and reach state-of-the-art Equal Error Rate (EER).</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ruirui Li|AUTHOR Ruirui Li]], [[Chelsea J.-T. Ju|AUTHOR Chelsea J.-T. Ju]], [[Zeya Chen|AUTHOR Zeya Chen]], [[Hongda Mao|AUTHOR Hongda Mao]], [[Oguz Elibol|AUTHOR Oguz Elibol]], [[Andreas Stolcke|AUTHOR Andreas Stolcke]]
</p><p class="cpabstractcardaffiliationlist">Amazon, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4593–4597
</span></p></div>
<div class="cpabstractcardabstract"><p>By implicitly recognizing a user based on his/her speech input, speaker identification enables many downstream applications, such as personalized system behavior and expedited shopping checkouts. Based on whether the speech content is constrained or not, both text-dependent (TD) and text-independent (TI) speaker recognition models may be used. We wish to combine the advantages of both types of models through an ensemble system to make more reliable predictions. However, any such combined approach has to be robust to incomplete inputs, i.e., when either TD or TI input is missing. As a solution we propose a __f__usion __o__f __e__mbeddings __net__work (FOEnet) architecture, combining joint learning with neural attention. We compare FOEnet with four competitive baseline methods on a dataset of voice assistant inputs, and show that it achieves higher accuracy than the baseline and score fusion methods, especially in the presence of incomplete inputs.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sandro Cumani|AUTHOR Sandro Cumani]], [[Salvatore Sarni|AUTHOR Salvatore Sarni]]
</p><p class="cpabstractcardaffiliationlist">Politecnico di Torino, Italy</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4598–4602
</span></p></div>
<div class="cpabstractcardabstract"><p>In this work we introduce a generative score calibration model for speaker verification systems able to explicitly account for utterance-dependent miscalibration sources, with a focus on segment duration. The model is theoretically motivated by an analysis of the effects of distribution mismatch on the scores produced by Probabilistic Linear Discriminant Analysis (PLDA), and extends our previous investigation on the distribution of well-calibrated PLDA log-likelihood ratios. We characterize target and non-target scores by means of Variance-Gamma densities, whose parameters represent effective between and within-class variabilities. Experimental results on SRE 2019 show that the proposed method improves both calibration and verification accuracy with respect to duration-agnostic models and to duration-aware discriminative methods.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jason Pelecanos|AUTHOR Jason Pelecanos]], [[Quan Wang|AUTHOR Quan Wang]], [[Ignacio Lopez Moreno|AUTHOR Ignacio Lopez Moreno]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4603–4607
</span></p></div>
<div class="cpabstractcardabstract"><p>Many neural network speaker recognition systems model each speaker using a fixed-dimensional embedding vector. These embeddings are generally compared using either linear or 2nd-order scoring and, until recently, do not handle utterance-specific uncertainty. In this work we propose scoring these representations in a way that can capture uncertainty, enroll/test asymmetry and additional non-linear information. This is achieved by incorporating a 2nd-stage neural network (known as a decision network) as part of an end-to-end training regimen. In particular, we propose the concept of decision residual networks which involves the use of a compact decision network to leverage cosine scores and to model the residual signal that’s needed. Additionally, we present a modification to the generalized end-to-end softmax loss function to target the separation of same/different speaker scores. We observed significant performance gains for the two techniques.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Saurabh Kataria|AUTHOR Saurabh Kataria]]^^1^^
, [[Shi-Xiong Zhang|AUTHOR Shi-Xiong Zhang]]^^2^^
, [[Dong Yu|AUTHOR Dong Yu]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Johns Hopkins University, USA; ^^2^^Tencent, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4608–4612
</span></p></div>
<div class="cpabstractcardabstract"><p>To improve speaker verification in real scenarios with interference speakers, noise, and reverberation, we propose to bring together advancements made in multi-channel speech features. Specifically, we combine //spectral//, //spatial//, and //directional// features, which includes inter-channel phase difference, multichannel //sinc// convolutions, directional power ratio features, and angle features. To maximally leverage supervised learning, our framework is also equipped with multi-channel speech enhancement and voice activity detection. On all simulated, replayed, and real recordings, we observe large and consistent improvements at various degradation levels. On real recordings of multi-talker speech, we achieve a 36% relative reduction in equal error rate w.r.t. single-channel baseline. We find the improvements from speaker-dependent //directional// features more consistent in multi-talker conditions than clean. Lastly, we investigate if the learned multi-channel speaker embedding space can be made more discriminative through a contrastive loss-based fine-tuning. With a simple choice of Triplet loss, we observe a further 8.3% relative reduction in EER.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Dirk Padfield|AUTHOR Dirk Padfield]], [[Daniel J. Liebling|AUTHOR Daniel J. Liebling]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4613–4617
</span></p></div>
<div class="cpabstractcardabstract"><p>Diarization partitions an audio stream into segments based on the voices of the speakers. Real-time diarization systems that include an enrollment step should limit enrollment training samples to reduce user interaction time. Although training on a small number of samples yields poor performance, we show that the accuracy can be improved dramatically using a chronological self-training approach. We studied the tradeoff between training time and classification performance and found that 1 second is sufficient to reach over 95% accuracy. We evaluated on 700 audio conversation files of about 10 minutes each from 6 different languages and demonstrated average diarization error rates as low as 10%.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Runqiu Xiao|AUTHOR Runqiu Xiao]]^^1^^
, [[Xiaoxiao Miao|AUTHOR Xiaoxiao Miao]]^^1^^
, [[Wenchao Wang|AUTHOR Wenchao Wang]]^^1^^
, [[Pengyuan Zhang|AUTHOR Pengyuan Zhang]]^^1^^
, [[Bin Cai|AUTHOR Bin Cai]]^^2^^
, [[Liuping Luo|AUTHOR Liuping Luo]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^CAS, China; ^^2^^Guangdong PSD, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4618–4622
</span></p></div>
<div class="cpabstractcardabstract"><p>Deep-Neural-Network (DNN) based speaker verification systems use the angular softmax loss with margin penalties to enhance the intra-class compactness of speaker embeddings, which achieved remarkable performance. In this paper, we propose a novel angular loss function called adaptive margin circle loss for speaker verification. The stage-based margin and chunk-based margin are applied to improve the angular discrimination of circle loss on the training set. The analysis on gradients shows that, compared with the previous angular loss like Additive Margin Softmax(Am-Softmax), circle loss has flexible optimization and definite convergence status. Experiments are carried out on the Voxceleb and SITW. By applying adaptive margin circle loss, our best system achieves 1.31%EER on Voxceleb1 and 2.13% on SITW core-core.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Benjamin O’Brien|AUTHOR Benjamin O’Brien]], [[Christine Meunier|AUTHOR Christine Meunier]], [[Alain Ghio|AUTHOR Alain Ghio]]
</p><p class="cpabstractcardaffiliationlist">LPL (UMR 7309), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4623–4627
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper details our evaluations and comparisons of speaker identification (SID) performance by listeners across different tasks. Experiment 1 participants completed traditional target-lineup (1-out-of-N speakers or out-of-set speaker) and binary (speaker verification) tasks. Experiment 2 participants completed trials online by using a //clustering// method by grouping speech recordings into speaker-specific clusters. Both studies employed similar speech recordings from the PTSVOX corpus. Our results showed participants who completed the binary and clustering tasks had higher accuracy than those who completed the target-lineup task. We also observed that independent of the tasks participants found some speakers significantly more difficult to identify relative to their foils. Pearson correlation procedures showed significant negative correlations between accuracy and task-dependent temporal-based metrics across tasks, where an increase in time required to make determinations yielded a decrease in perceptual SID performance. These findings underscored the important role of SID task design and the process of selecting speech recordings. Future work aims to examine the relationship between different perceptual SID task performances and scores generated by automatic speaker verification systems.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Fuchuan Tong|AUTHOR Fuchuan Tong]], [[Yan Liu|AUTHOR Yan Liu]], [[Song Li|AUTHOR Song Li]], [[Jie Wang|AUTHOR Jie Wang]], [[Lin Li|AUTHOR Lin Li]], [[Qingyang Hong|AUTHOR Qingyang Hong]]
</p><p class="cpabstractcardaffiliationlist">Xiamen University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4628–4632
</span></p></div>
<div class="cpabstractcardabstract"><p>Despite the superior performance deep neural networks have achieved in speaker verification tasks, much of their success benefits from the availability of large-scale and carefully labeled datasets. However, noisy labels often occur during data collection. In this paper, we propose an automatic error correction method for deep speaker embedding learning with noisy labels. Specifically, a label noise correction loss is proposed that leverages a model’s generalization capability to correct noisy labels during training. In addition, we improve the vanilla AM-Softmax to estimate a more robust speaker posterior by introducing sub-centers. When applied on the VoxCeleb dataset, the proposed method performs gracefully when noisy labels are introduced. Moreover, when combining with the Bayesian estimation of PLDA with noisy training labels at the back-end, the whole system performs better under conditions in which noisy labels are present.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Keon Lee|AUTHOR Keon Lee]], [[Kyumin Park|AUTHOR Kyumin Park]], [[Daeyoung Kim|AUTHOR Daeyoung Kim]]
</p><p class="cpabstractcardaffiliationlist">KAIST, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4643–4647
</span></p></div>
<div class="cpabstractcardabstract"><p>Previous works on neural text-to-speech (TTS) have been addressed on limited speed in training and inference time, robustness for difficult synthesis conditions, expressiveness, and controllability. Although several approaches resolve some limitations, there has been no attempt to solve all weaknesses at once. In this paper, we propose STYLER, an expressive and controllable TTS framework with high-speed and robust synthesis. Our novel audio-text aligning method called Mel Calibrator and excluding autoregressive decoding enable rapid training and inference and robust synthesis on unseen data. Also, disentangled style factor modeling under supervision enlarges the controllability in synthesizing process leading to expressive TTS. On top of it, a novel noise modeling pipeline using domain adversarial training and Residual Decoding empowers noise-robust style transfer, decomposing the noise without any additional label. Various experiments demonstrate that STYLER is more effective in speed and robustness than expressive TTS with autoregressive decoding and more expressive and controllable than reading style non-autoregressive TTS. Synthesis samples and experiment results are provided via our demo page, and code is available publicly.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xiaochun An|AUTHOR Xiaochun An]]^^1^^
, [[Frank K. Soong|AUTHOR Frank K. Soong]]^^2^^
, [[Lei Xie|AUTHOR Lei Xie]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Northwestern Polytechnical University, China; ^^2^^Microsoft, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4688–4692
</span></p></div>
<div class="cpabstractcardabstract"><p>End-to-end neural TTS training has shown improved performance in speech style transfer. However, the improvement is still limited by the training data in both target styles and speakers. Inadequate style transfer performance occurs when the trained TTS tries to transfer the speech to a target style from a new speaker with an unknown, arbitrary style. In this paper, we propose a new approach to style transfer for both seen and unseen styles, with disjoint, multi-style datasets, i.e., datasets of different styles are recorded, each individual style is by one speaker with multiple utterances. To encode the style information, we adopt an inverse autoregressive flow (IAF) structure to improve the variational inference. The whole system is optimized to minimize a weighed sum of four different loss functions: 1) a reconstruction loss to measure the distortions in both source and target reconstructions; 2) an adversarial loss to “fool” a well-trained discriminator; 3) a style distortion loss to measure the expected style loss after the transfer; 4) a cycle consistency loss to preserve the speaker identity of the source after the transfer. Experiments demonstrate, both objectively and subjectively, the effectiveness of the proposed approach for seen and unseen style transfer tasks. The performance of the new approach is better and more robust than those of four baseline systems of the prior art.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Slava Shechtman|AUTHOR Slava Shechtman]]^^1^^
, [[Raul Fernandez|AUTHOR Raul Fernandez]]^^2^^
, [[Alexander Sorin|AUTHOR Alexander Sorin]]^^1^^
, [[David Haws|AUTHOR David Haws]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^IBM, Israel; ^^2^^IBM, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4693–4697
</span></p></div>
<div class="cpabstractcardabstract"><p>Although Sequence-to-Sequence (S2S) architectures have become state-of-the-art in speech synthesis, the best models benefit from access to moderate-to-large amounts of training data, posing a resource bottleneck when we are interested in generating speech in a variety of expressive styles. In this work we explore a S2S architecture variant that is capable of generating a variety of stylistic expressive variations observed in a limited amount of training data, and of transplanting that style to a neutral target speaker for whom no labeled expressive resources exist. The architecture is furthermore controllable, allowing the user to select an operating point that conveys a desired level of expressiveness. We evaluate this proposal against a classically supervised baseline via perceptual listening tests, and demonstrate that i) it is able to outperform the baseline in terms of its generalizability to neutral speakers, ii) it is strongly preferred in terms of its ability to convey expressiveness, and iii) it provides a reasonable trade-off between expressiveness and naturalness, allowing the user to tune it to the particular demands of a given application.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Rui Liu|AUTHOR Rui Liu]]^^1^^
, [[Berrak Sisman|AUTHOR Berrak Sisman]]^^1^^
, [[Haizhou Li|AUTHOR Haizhou Li]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^SUTD, Singapore; ^^2^^NUS, Singapore</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4648–4652
</span></p></div>
<div class="cpabstractcardabstract"><p>Emotional text-to-speech synthesis (ETTS) has seen much progress in recent years. However, the generated voice is often not perceptually identifiable by its intended emotion category. To address this problem, we propose a new interactive training paradigm for ETTS, denoted as //i-ETTS//, which seeks to directly improve the emotion discriminability by interacting with a speech emotion recognition (SER) model. Moreover, we formulate an iterative training strategy with reinforcement learning to ensure the quality of //i-ETTS// optimization. Experimental results demonstrate that the proposed //i-ETTS// outperforms the state-of-the-art baselines by rendering speech with more accurate emotion style. To our best knowledge, this is the first study of reinforcement learning in emotional text-to-speech synthesis.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sarath Sivaprasad|AUTHOR Sarath Sivaprasad]], [[Saiteja Kosgi|AUTHOR Saiteja Kosgi]], [[Vineet Gandhi|AUTHOR Vineet Gandhi]]
</p><p class="cpabstractcardaffiliationlist">IIIT Hyderabad, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4653–4657
</span></p></div>
<div class="cpabstractcardabstract"><p>Machine-generated speech is characterized by its limited or unnatural emotional variation. Current text to speech systems generates speech with either a flat emotion, emotion selected from a predefined set, average variation learned from prosody sequences in training data or transferred from a source style. We propose a text to speech (TTS) system, where a user can choose the emotion of generated speech from a continuous and meaningful emotion space (Arousal-Valence space). The proposed TTS system can generate speech from the text in any speaker’s style, with fine control of emotion. We show that the system works on emotion unseen during training and can scale to previously unseen speakers given his/her speech sample. Our work expands the horizon of the state-of-the-art FastSpeech2 backbone to a multi-speaker setting and gives it much-coveted continuous (and interpretable) affective control, without any observable degradation in the quality of the synthesized speech. Audio samples are publicly available.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jian Cong|AUTHOR Jian Cong]]^^1^^
, [[Shan Yang|AUTHOR Shan Yang]]^^2^^
, [[Na Hu|AUTHOR Na Hu]]^^2^^
, [[Guangzhi Li|AUTHOR Guangzhi Li]]^^2^^
, [[Lei Xie|AUTHOR Lei Xie]]^^1^^
, [[Dan Su|AUTHOR Dan Su]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Northwestern Polytechnical University, China; ^^2^^Tencent, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4658–4662
</span></p></div>
<div class="cpabstractcardabstract"><p>In spoken conversations, spontaneous behaviors like filled pause and prolongations always happen. Conversational partner tends to align features of their speech with their interlocutor which is known as entrainment. To produce human-like conversations, we propose a unified controllable spontaneous conversational speech synthesis framework to model the above two phenomena. Specifically, we use explicit labels to represent two typical spontaneous behaviors //filled-pause// and //prolongation// in the acoustic model and develop a neural network based predictor to predict the occurrences of the two behaviors from text. We subsequently develop an algorithm based on the predictor to control the occurrence frequency of the behaviors, making the synthesized speech vary from less disfluent to more disfluent. To model the speech entrainment at acoustic level, we utilize a context acoustic encoder to extract a global style embedding from the previous speech conditioning on the synthesizing of current speech. Furthermore, since the current and previous utterances belong to the different speakers in a conversation, we add a domain adversarial training module to eliminate the speaker-related information in the acoustic encoder while maintaining the style-related information. Experiments show that our proposed approach can synthesize realistic conversations and control the occurrences of the spontaneous behaviors naturally.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Minchan Kim|AUTHOR Minchan Kim]]^^1^^
, [[Sung Jun Cheon|AUTHOR Sung Jun Cheon]]^^1^^
, [[Byoung Jin Choi|AUTHOR Byoung Jin Choi]]^^1^^
, [[Jong Jin Kim|AUTHOR Jong Jin Kim]]^^2^^
, [[Nam Soo Kim|AUTHOR Nam Soo Kim]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Seoul National University, Korea; ^^2^^SK Telecom, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4663–4667
</span></p></div>
<div class="cpabstractcardabstract"><p>As recent text-to-speech (TTS) systems have been rapidly improved in speech quality and generation speed, many researchers now focus on a more challenging issue: expressive TTS. To control speaking styles, existing expressive TTS models use categorical style index or reference speech as style input. In this work, we propose StyleTagging-TTS (ST-TTS), a novel expressive TTS model that utilizes a style tag written in natural language. Using a style-tagged TTS dataset and a pre-trained language model, we modeled the relationship between linguistic embedding and speaking style domain, which enables our model to work even with style tags unseen during training. As style tag is written in natural language, it can control speaking style in a more intuitive, interpretable, and scalable way compared with style index or reference speech. In addition, in terms of model architecture, we propose an efficient non-autoregressive (NAR) TTS architecture with single-stage training. The experimental result shows that ST-TTS outperforms the existing expressive TTS model, Tacotron2-GST in speech quality and expressiveness.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yuzi Yan|AUTHOR Yuzi Yan]]^^1^^
, [[Xu Tan|AUTHOR Xu Tan]]^^2^^
, [[Bohan Li|AUTHOR Bohan Li]]^^2^^
, [[Guangyan Zhang|AUTHOR Guangyan Zhang]]^^3^^
, [[Tao Qin|AUTHOR Tao Qin]]^^2^^
, [[Sheng Zhao|AUTHOR Sheng Zhao]]^^2^^
, [[Yuan Shen|AUTHOR Yuan Shen]]^^1^^
, [[Wei-Qiang Zhang|AUTHOR Wei-Qiang Zhang]]^^4^^
, [[Tie-Yan Liu|AUTHOR Tie-Yan Liu]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Tsinghua University, China; ^^2^^Microsoft, China; ^^3^^CUHK, China; ^^4^^Tsinghua University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4668–4672
</span></p></div>
<div class="cpabstractcardabstract"><p>While recent text to speech (TTS) models perform very well in synthesizing reading-style (e.g., audiobook) speech, it is still challenging to synthesize spontaneous-style speech (e.g., podcast or conversation), mainly because of two reasons: 1) the lack of training data for spontaneous speech; 2) the difficulty in modeling the filled pauses (//um// and //uh//) and diverse rhythms in spontaneous speech. In this paper, we develop AdaSpeech 3, an adaptive TTS system that fine-tunes a well-trained reading-style TTS model for spontaneous-style speech. Specifically, 1) to insert filled pauses (FP) in the text sequence appropriately, we introduce an FP predictor to the TTS model; 2) to model the varying rhythms, we introduce a duration predictor based on mixture of experts (MoE), which contains three experts responsible for the generation of fast, medium and slow speech respectively, and fine-tune it as well as the pitch predictor for rhythm adaptation; 3) to adapt to other speaker timbre, we fine-tune some parameters in the decoder with few speech data. To address the challenge of lack of training data, we mine a spontaneous speech dataset to support our research this work and facilitate future research on spontaneous TTS. Experiments show that AdaSpeech 3 synthesizes speech with natural FP and rhythms in spontaneous styles, and achieves much better MOS and SMOS scores than previous adaptive TTS systems.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xiang Li|AUTHOR Xiang Li]], [[Changhe Song|AUTHOR Changhe Song]], [[Jingbei Li|AUTHOR Jingbei Li]], [[Zhiyong Wu|AUTHOR Zhiyong Wu]], [[Jia Jia|AUTHOR Jia Jia]], [[Helen Meng|AUTHOR Helen Meng]]
</p><p class="cpabstractcardaffiliationlist">Tsinghua University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4673–4677
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper introduces a multi-scale speech style modeling method for end-to-end expressive speech synthesis. The proposed method employs a multi-scale reference encoder to extract both the global-scale utterance-level and the local-scale quasi-phoneme-level style features of the target speech, which are then fed into the speech synthesis model as an extension to the input phoneme sequence. During training time, the multi-scale style model could be jointly trained with the speech synthesis model in an end-to-end fashion. By applying the proposed method to style transfer task, experimental results indicate that the controllability of the multi-scale speech style model and the expressiveness of the synthesized speech are greatly improved. Moreover, by assigning different reference speeches to extraction of style on each scale, the flexibility of the proposed method is further revealed.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shifeng Pan|AUTHOR Shifeng Pan]], [[Lei He|AUTHOR Lei He]]
</p><p class="cpabstractcardaffiliationlist">Microsoft, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4678–4682
</span></p></div>
<div class="cpabstractcardabstract"><p>Cross-speaker style transfer is crucial to the applications of multi-style and expressive speech synthesis at scale. It does not require the target speakers to be experts in expressing all styles and to collect corresponding recordings for model training. However, the performances of existing style transfer methods are still far behind real application needs. The root causes are mainly twofold. Firstly, the style embedding extracted from single reference speech can hardly provide fine-grained and appropriate prosody information for arbitrary text to synthesize. Secondly, in these models the content/text, prosody, and speaker timbre are usually highly entangled, it’s therefore not realistic to expect a satisfied result when freely combining these components, such as to transfer speaking style between speakers. In this paper, we propose a cross-speaker style transfer text-to-speech (TTS) model with explicit prosody bottleneck. The prosody bottleneck builds up the kernels accounting for speaking style robustly, and disentangles the prosody from content and speaker timbre, therefore guarantees high quality cross-speaker style transfer. Evaluation result shows the proposed method even achieves on-par performance with source speaker’s speaker-dependent (SD) model in objective measurement of prosody, and significantly outperforms the cycle consistency and GMVAE-based baselines in objective and subjective evaluations.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Daxin Tan|AUTHOR Daxin Tan]], [[Tan Lee|AUTHOR Tan Lee]]
</p><p class="cpabstractcardaffiliationlist">CUHK, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4683–4687
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents a novel design of neural network system for fine-grained style modeling, transfer and prediction in expressive text-to-speech (TTS) synthesis. Fine-grained modeling is realized by extracting style embeddings from the mel-spectrograms of phone-level speech segments. Collaborative learning and adversarial learning strategies are applied in order to achieve effective disentanglement of content and style factors in speech and alleviate the “content leakage” problem in style modeling. The proposed system can be used for varying-content speech style transfer in the single-speaker scenario. The results of objective and subjective evaluation show that our system performs better than other fine-grained speech style transfer models, especially in the aspect of content preservation. By incorporating a style predictor, the proposed system can also be used for text-to-speech synthesis. Audio samples are provided for system demonstration.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mai Hoang Dao|AUTHOR Mai Hoang Dao]], [[Thinh Hung Truong|AUTHOR Thinh Hung Truong]], [[Dat Quoc Nguyen|AUTHOR Dat Quoc Nguyen]]
</p><p class="cpabstractcardaffiliationlist">VinAI Research, Vietnam</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4698–4702
</span></p></div>
<div class="cpabstractcardabstract"><p>Intent detection and slot filling are important tasks in spoken and natural language understanding. However, Vietnamese is a low-resource language in these research topics. In this paper, we present the //first// public intent detection and slot filling dataset for Vietnamese. In addition, we also propose a joint model for intent detection and slot filling, that extends the recent state-of-the-art JointBERT+CRF model [1] with an intent-slot attention layer to explicitly incorporate intent context information into slot filling via “soft” intent label embedding. Experimental results on our Vietnamese dataset show that our proposed model significantly outperforms JointBERT+CRF. We publicly release our dataset and the implementation of our model.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Soyeon Caren Han|AUTHOR Soyeon Caren Han]], [[Siqu Long|AUTHOR Siqu Long]], [[Huichun Li|AUTHOR Huichun Li]], [[Henry Weld|AUTHOR Henry Weld]], [[Josiah Poon|AUTHOR Josiah Poon]]
</p><p class="cpabstractcardaffiliationlist">University of Sydney, Australia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4743–4747
</span></p></div>
<div class="cpabstractcardabstract"><p>Intent classification and slot filling are two critical tasks for natural language understanding. Traditionally the two tasks proceeded independently. However, more recently joint models for intent classification and slot filling have achieved state-of-the-art performance, and have proved that there exists a strong relationship between the two tasks. In this paper, we propose a bi-directional joint model for intent classification and slot filling, which includes a multi-stage hierarchical process via BERT and bi-directional joint natural language understanding mechanisms, including intent2slot and slot2intent, to obtain mutual performance enhancement between intent classification and slot filling. The evaluations show that our model achieves state-of-the-art results on intent classification accuracy, slot filling F1, and significantly improves sentence-level semantic frame accuracy when applied to publicly available benchmark datasets, ATIS (88.6%) and SNIPS (92.8%).</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Haitao Lin|AUTHOR Haitao Lin]], [[Lu Xiang|AUTHOR Lu Xiang]], [[Yu Zhou|AUTHOR Yu Zhou]], [[Jiajun Zhang|AUTHOR Jiajun Zhang]], [[Chengqing Zong|AUTHOR Chengqing Zong]]
</p><p class="cpabstractcardaffiliationlist">CAS, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4703–4707
</span></p></div>
<div class="cpabstractcardabstract"><p>Spoken Language Understanding (SLU) is one essential step in building a dialogue system. Due to the expensive cost of obtaining the labeled data, SLU suffers from the data scarcity problem. Therefore, in this paper, we focus on data augmentation for slot filling task in SLU. To achieve that, we aim at generating more diverse data based on existing data. Specifically, we try to exploit the latent language knowledge from pretrained language models by finetuning them. We propose two strategies for finetuning process: value-based and context-based augmentation. Experimental results on two public SLU datasets have shown that compared with existing data augmentation methods, our proposed method can generate more diverse sentences and significantly improve the performance on SLU.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Judith Gaspers|AUTHOR Judith Gaspers]], [[Quynh Do|AUTHOR Quynh Do]], [[Daniil Sorokin|AUTHOR Daniil Sorokin]], [[Patrick Lehnen|AUTHOR Patrick Lehnen]]
</p><p class="cpabstractcardaffiliationlist">Amazon, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4708–4712
</span></p></div>
<div class="cpabstractcardabstract"><p>With the expanding role of voice-controlled devices, bootstrapping spoken language understanding models from little labeled data becomes essential. Semi-supervised learning is a common technique to improve model performance when labeled data is scarce. In a real-world production system, the labeled data and the online test data often may come from different distributions. In this work, we use semi-supervised learning based on pseudo-labeling with an auxiliary task on incoming unlabeled noisy data, which is closer to the test distribution. We demonstrate empirically that our approach can mitigate negative effects arising from training with non-representative labeled data as well as the negative impacts of noises in the data, which are introduced by pseudo-labeling and automatic speech recognition.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yidi Jiang|AUTHOR Yidi Jiang]], [[Bidisha Sharma|AUTHOR Bidisha Sharma]], [[Maulik Madhavi|AUTHOR Maulik Madhavi]], [[Haizhou Li|AUTHOR Haizhou Li]]
</p><p class="cpabstractcardaffiliationlist">NUS, Singapore</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4713–4717
</span></p></div>
<div class="cpabstractcardabstract"><p>End-to-end intent classification using speech has numerous advantages compared to the conventional pipeline approach using automatic speech recognition (ASR), followed by natural language processing modules. It attempts to predict intent from speech without using an intermediate ASR module. However, such end-to-end framework suffers from the unavailability of large speech resources with higher acoustic variation in spoken language understanding. In this work, we exploit the scope of the transformer distillation method that is specifically designed for knowledge distillation from a transformer based language model to a transformer based speech model. In this regard, we leverage the reliable and widely used bidirectional encoder representations from transformers (BERT) model as a language model and transfer the knowledge to build an acoustic model for intent classification using the speech. In particular, a multilevel transformer based teacher-student model is designed, and knowledge distillation is performed across attention and hidden sub-layers of different transformer layers of the student and teacher models. We achieve an intent classification accuracy of 99.10% and 88.79% for Fluent speech corpus and ATIS database, respectively. Further, the proposed method demonstrates better performance and robustness in acoustically degraded condition compared to the baseline method.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Nick J.C. Wang|AUTHOR Nick J.C. Wang]], [[Lu Wang|AUTHOR Lu Wang]], [[Yandan Sun|AUTHOR Yandan Sun]], [[Haimei Kang|AUTHOR Haimei Kang]], [[Dejun Zhang|AUTHOR Dejun Zhang]]
</p><p class="cpabstractcardaffiliationlist">Ping An Technology, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4718–4722
</span></p></div>
<div class="cpabstractcardabstract"><p>In spoken language understanding (SLU), what the user says is converted to his/her intent. Recent work on end-to-end SLU has shown that accuracy can be improved via pre-training approaches. We revisit ideas presented by Lugosch et al. using speech pre-training and three-module modeling; however, to ease construction of the end-to-end SLU model, we use as our phoneme module an open-source acoustic-phonetic model from a DNN-HMM hybrid automatic speech recognition (ASR) system instead of training one from scratch. Hence we fine-tune on speech only for the word module, and we apply multi-target learning (MTL) on the word and intent modules to jointly optimize SLU performance. MTL yields a relative reduction of 40% in intent-classification error rates (from 1.0% to 0.6%). Note that our three-module model is a streaming method. The final outcome of the proposed three-module modeling approach yields an intent accuracy of 99.4% on FluentSpeech, an intent error rate reduction of 50% compared to that of Lugosch et al. Although we focus on real-time streaming methods, we also list non-streaming methods for comparison.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sujeong Cha|AUTHOR Sujeong Cha]]^^1^^
, [[Wangrui Hou|AUTHOR Wangrui Hou]]^^1^^
, [[Hyun Jung|AUTHOR Hyun Jung]]^^1^^
, [[My Phung|AUTHOR My Phung]]^^1^^
, [[Michael Picheny|AUTHOR Michael Picheny]]^^1^^
, [[Hong-Kwang J. Kuo|AUTHOR Hong-Kwang J. Kuo]]^^2^^
, [[Samuel Thomas|AUTHOR Samuel Thomas]]^^2^^
, [[Edmilson Morais|AUTHOR Edmilson Morais]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^NYU, USA; ^^2^^IBM, USA; ^^3^^IBM, Brazil</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4723–4727
</span></p></div>
<div class="cpabstractcardabstract"><p>A major focus of recent research in spoken language understanding (SLU) has been on the end-to-end approach where a single model can predict intents directly from speech inputs without intermediate transcripts. However, this approach presents some challenges. First, since speech can be considered as personally identifiable information, in some cases only automatic speech recognition (ASR) transcripts are accessible. Second, intent-labeled speech data is scarce. To address the first challenge, we propose a novel system that can predict intents from flexible types of inputs: speech, ASR transcripts, or both. We demonstrate strong performance for either modality separately, and when both speech and ASR transcripts are available, through system combination, we achieve better results than using a single input modality. To address the second challenge, we leverage a semantically robust pre-trained BERT model and adopt a cross-modal system that co-trains text embeddings and acoustic embeddings in a shared latent space. We further enhance this system by utilizing an acoustic module pre-trained on LibriSpeech and domain-adapting the text module on our target datasets. Our experiments show significant advantages for these pre-training and fine-tuning strategies, resulting in a system that achieves competitive intent-classification performance on Snips SLU and Fluent Speech Commands datasets.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xianwei Zhang|AUTHOR Xianwei Zhang]], [[Liang He|AUTHOR Liang He]]
</p><p class="cpabstractcardaffiliationlist">Tsinghua University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4728–4732
</span></p></div>
<div class="cpabstractcardabstract"><p>The spoken language understanding (SLU) plays an essential role in the field of human-computer interaction. Most of the current SLU systems are cascade systems of automatic speech recognition (ASR) and natural language understanding (NLU). Error propagation and scarcity of annotated speech data are two common difficulties for resource-poor languages. To solve them, we propose a simple but effective end-to-end cross-lingual spoken language understanding model based on XLSR-53, which is a pretrained model in 53 languages by the Facebook research team. The end-to-end approach avoids error propagation and the multilingual pretraining reduces data annotation requirements. Our proposed method achieves 99.71% on the Fluent Speech Commands (FSC) English database and 79.89% on the CATSLU-MAP Chinese database, in intent classification accuracy. To the best of our knowledge, the former is the reported best result on the FSC database.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hamidreza Saghir|AUTHOR Hamidreza Saghir]]^^1^^
, [[Samridhi Choudhary|AUTHOR Samridhi Choudhary]]^^2^^
, [[Sepehr Eghbali|AUTHOR Sepehr Eghbali]]^^1^^
, [[Clement Chung|AUTHOR Clement Chung]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Amazon, Canada; ^^2^^Amazon, Canada</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4733–4737
</span></p></div>
<div class="cpabstractcardabstract"><p>Fine-tuning transformer-based models have shown to outperform other methods for many Natural Language Understanding (NLU) tasks. Recent studies to reduce the size of transformer models have achieved reductions of > 80%, making on-device inference on powerful devices possible. However, other resource-constrained devices, like those enabling voice assistants (VAs), require much further reductions. In this work, we propose factorization-aware training (FAT), wherein we factorize the linear mappings of an already compressed transformer model (DistilBERT) and train jointly on NLU tasks. We test this method on three different NLU datasets and show our method outperforms naive application of factorization after training by 10%–440% across various compression rates. Additionally, We introduce a new metric called factorization gap and use it to analyze the need for FAT across various model components. We also present results for training subsets of factorized components to enable faster training, re-usability and maintainability for multiple on-device models. We further demonstrate the trade-off between memory, inference speed and performance at a given compression-rate for a on-device implementation of a factorized model. Our best performing factorized model, achieves a relative size reduction of 84% with ~10% relative degradation in NLU error rate compared to a non-factorized model on our internal dataset.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Michael Saxon|AUTHOR Michael Saxon]], [[Samridhi Choudhary|AUTHOR Samridhi Choudhary]], [[Joseph P. McKenna|AUTHOR Joseph P. McKenna]], [[Athanasios Mouchtaris|AUTHOR Athanasios Mouchtaris]]
</p><p class="cpabstractcardaffiliationlist">Amazon, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4738–4742
</span></p></div>
<div class="cpabstractcardabstract"><p>End-to-end (E2E) spoken language understanding (SLU) systems predict utterance semantics directly from speech using a single model. Previous work in this area has focused on targeted tasks in fixed domains, where the output semantic structure is assumed a priori and the input speech is of limited complexity. In this work we present our approach to developing an E2E model for generalized SLU in commercial voice assistants (VAs). We propose a fully differentiable, transformer-based, hierarchical system that can be pretrained at both the ASR and NLU levels. This is then fine-tuned on both transcription and semantic classification losses to handle a diverse set of intent and argument combinations. This leads to an SLU system that achieves significant improvements over baselines on a complex internal generalized VA dataset with a 43% improvement in accuracy, while still meeting the 99% accuracy benchmark on the popular Fluent Speech Commands dataset. We further evaluate our model on a hard test set, exclusively containing slot arguments unseen in training, and demonstrate a nearly 20% improvement, showing the efficacy of our approach in truly demanding VA scenarios.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tomáš Mikolov|AUTHOR Tomáš Mikolov]]
</p><p class="cpabstractcardaffiliationlist">CIIRC CTU, Czechia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} ////
</span></p></div>
<div class="cpabstractcardabstract"><p>Statistical language modeling has been labeled as an AI-complete problem by many famous researchers of the past. However, despite all the progress made in the last decade, it remains unclear how much progress towards truly intelligent language models we made.
In this talk, I will present my view on what has been accomplished so far, and what scientific challenges are still in front of us. We need to focus more on developing new mathematical models with certain properties, such as the ability to learn continually and without explicit supervision, generalize to novel tasks from limited amounts of data, and the ability to form non-trivial long-term memory. I will describe some of our attempts to develop such models within the framework of complex systems.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Rong Gong|AUTHOR Rong Gong]]^^1^^
, [[Carl Quillen|AUTHOR Carl Quillen]]^^2^^
, [[Dushyant Sharma|AUTHOR Dushyant Sharma]]^^2^^
, [[Andrew Goderre|AUTHOR Andrew Goderre]]^^2^^
, [[José Laínez|AUTHOR José Laínez]]^^3^^
, [[Ljubomir Milanović|AUTHOR Ljubomir Milanović]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Nuance Communications, Austria; ^^2^^Nuance Communications, USA; ^^3^^Nuance Communications, Spain</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3840–3844
</span></p></div>
<div class="cpabstractcardabstract"><p>When a sufficiently large far-field training data is presented, jointly optimizing a multichannel frontend and an end-to-end (E2E) Automatic Speech Recognition (ASR) backend shows promising results. Recent literature has shown traditional beamformer designs, such as MVDR (Minimum Variance Distortionless Response) or fixed beamformers can be successfully integrated as the frontend into an E2E ASR system with learnable parameters. In this work, we propose the self-attention channel combinator (SACC) ASR frontend, which leverages the self-attention mechanism to combine multichannel audio signals in the magnitude spectral domain. Experiments conducted on a multichannel playback test data shows that the SACC achieved a 9.3% WERR compared to a state-of-the-art fixed beamformer-based frontend, both jointly optimized with a ContextNet-based ASR backend. We also demonstrate the connection between the SACC and the traditional beamformers, and analyze the intermediate outputs of the SACC.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[R. Gretter|AUTHOR R. Gretter]]^^1^^
, [[Marco Matassoni|AUTHOR Marco Matassoni]]^^1^^
, [[D. Falavigna|AUTHOR D. Falavigna]]^^1^^
, [[A. Misra|AUTHOR A. Misra]]^^2^^
, [[C.W. Leong|AUTHOR C.W. Leong]]^^2^^
, [[K. Knill|AUTHOR K. Knill]]^^3^^
, [[L. Wang|AUTHOR L. Wang]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^FBK, Italy; ^^2^^Educational Testing Service, USA; ^^3^^University of Cambridge, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3845–3849
</span></p></div>
<div class="cpabstractcardabstract"><p>The paper presents the Second ASR Challenge for Non-native Children’s Speech proposed as a Special Session at Interspeech 2021, following the successful first challenge at Interspeech 2020. The goal of the challenge is to advance research on non-native children’s speech recognition technology, as speech technology still struggles when applied to both children and non-native speakers. The audio data consists of spoken responses provided by L2 students in the context of both English and German speaking proficiency examinations, the latter language added for 2021. Additional training data and a new evaluation set was released for L2 English recorded by speakers of different native languages. Participants could build systems for one or both languages. Each had a closed track where a predetermined set of audio and linguistic resources were selected, and an open track where additional data was allowed. After a description of the released corpora, the paper analyzes the results achieved by the participating systems. Some issues suggested from these results are discussed.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Lars Rumberg|AUTHOR Lars Rumberg]], [[Hanna Ehlert|AUTHOR Hanna Ehlert]], [[Ulrike Lüdtke|AUTHOR Ulrike Lüdtke]], [[Jörn Ostermann|AUTHOR Jörn Ostermann]]
</p><p class="cpabstractcardaffiliationlist">Leibniz Universität Hannover, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3850–3854
</span></p></div>
<div class="cpabstractcardabstract"><p>Automatic speech recognition for children’s speech is a challenging task mainly due to scarcity of publicly available child speech corpora and wide inter- and intra-speaker variability in terms of acoustic and linguistic characteristics of children’s speech. We propose a framework for age-invariant training of the acoustic model of end-to-end speech recognition systems based on adversarial multi-task learning. We use age information additionally to just differentiating between the child and adult domains and thus force the acoustic model to learn age invariant features. Our results on publicly available data sets show that this leads to better leveraging of existing data during training. We further show that usage of adversarial multitask learning should not necessarily be regarded as a substitute for traditional feature space adaptation methods, but that both should be used together for best performance.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Samuele Cornell|AUTHOR Samuele Cornell]]^^1^^
, [[Alessio Brutti|AUTHOR Alessio Brutti]]^^2^^
, [[Marco Matassoni|AUTHOR Marco Matassoni]]^^2^^
, [[Stefano Squartini|AUTHOR Stefano Squartini]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Università Politecnica delle Marche, Italy; ^^2^^FBK, Italy</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3855–3859
</span></p></div>
<div class="cpabstractcardabstract"><p>Fully exploiting ad-hoc microphone networks for distant speech recognition is still an open issue. Empirical evidence shows that being able to select the best microphone leads to significant improvements in recognition without any additional effort on front-end processing. Current channel selection techniques either rely on signal, decoder or posterior-based features. Signal-based features are inexpensive to compute but do not always correlate with recognition performance. Instead decoder and posterior-based features exhibit better correlation but require substantial computational resources.
In this work, we tackle the channel selection problem by proposing MicRank, a learning to rank framework where a neural network is trained to rank the available channels using directly the recognition performance on the training set. The proposed approach is agnostic with respect to the array geometry and type of recognition back-end. We investigate different learning to rank strategies using a synthetic dataset developed on purpose and the CHiME-6 data. Results show that the proposed approach considerably improves over previous selection techniques, reaching comparable and in some instances better performance than oracle signal-based measures.</p></div>
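One way to train such a ranker, assumed here purely for illustration, is a listwise objective that pushes the per-channel scores toward a distribution derived from recognition quality (for example, negative WER per channel); MicRank’s actual ranking losses may differ.

```python
# Minimal sketch of a listwise ranking loss over channel scores.
import torch.nn.functional as F

def listwise_rank_loss(scores, relevance):
    """scores: (B, n_channels) network outputs;
    relevance: (B, n_channels) recognition-derived target, e.g. negative WER per channel."""
    return F.kl_div(scores.log_softmax(dim=-1),
                    relevance.softmax(dim=-1),
                    reduction="batchmean")
```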
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Lucile Gelin|AUTHOR Lucile Gelin]]^^1^^
, [[Thomas Pellegrini|AUTHOR Thomas Pellegrini]]^^2^^
, [[Julien Pinquier|AUTHOR Julien Pinquier]]^^2^^
, [[Morgane Daniel|AUTHOR Morgane Daniel]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^IRIT (UMR 5505), France; ^^2^^IRIT (UMR 5505), France; ^^3^^Lalilo, France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3860–3864
</span></p></div>
<div class="cpabstractcardabstract"><p>Current performance of automatic speech recognition (ASR) for children is below that of the latest systems dedicated to adult speech. Child speech is particularly difficult to recognise, and substantial corpora are missing to train acoustic models. Furthermore, in the scope of our reading assistant for 5–8-year-old children learning to read, models need to cope with disfluencies and reading mistakes, which remain considerable challenges even for state-of-the-art ASR systems. In this paper, we adapt an end-to-end Transformer acoustic model to speech from children learning to read. Transfer learning (TL) with a small amount of child speech improves the phone error rate (PER) by 48.7% relative over an adult model and outperforms a TL-adapted DNN-HMM model by 21.0% relative PER. Multi-objective training with a Connectionist Temporal Classification (CTC) function further reduces the PER by 4.8% relative. We propose a method of reading mistakes data augmentation, where we simulate word-level repetitions and substitutions with phonetically or graphically close words. Combining these two types of reading mistakes reaches a 19.9% PER, with a 13.1% relative improvement over the baseline. A detailed analysis shows that both the CTC multi-objective training and the augmentation with synthetic repetitions help the attention mechanisms better detect children’s disfluencies.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Brooke Stephenson|AUTHOR Brooke Stephenson]]^^1^^
, [[Thomas Hueber|AUTHOR Thomas Hueber]]^^1^^
, [[Laurent Girin|AUTHOR Laurent Girin]]^^1^^
, [[Laurent Besacier|AUTHOR Laurent Besacier]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^GIPSA-lab (UMR 5216), France; ^^2^^LIG (UMR 5217), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3865–3869
</span></p></div>
<div class="cpabstractcardabstract"><p>Inferring the prosody of a word in text-to-speech synthesis requires information about its surrounding context. In incremental text-to-speech synthesis, where the synthesizer produces an output before it has access to the complete input, the full context is often unknown which can result in a loss of naturalness. In this paper, we investigate whether the use of predicted future text from a transformer language model can attenuate this loss in a neural TTS system. We compare several test conditions of next future word: (a) unknown (zero-word), (b) language model predicted, (c) randomly predicted and (d) ground-truth. We measure the prosodic features (pitch, energy and duration) and find that predicted text provides significant improvements over a zero-word lookahead, but only slight gains over random-word lookahead. We confirm these results with a perceptive test.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Pol van Rijn|AUTHOR Pol van Rijn]]^^1^^
, [[Silvan Mertes|AUTHOR Silvan Mertes]]^^2^^
, [[Dominik Schiller|AUTHOR Dominik Schiller]]^^2^^
, [[Peter M.C. Harrison|AUTHOR Peter M.C. Harrison]]^^1^^
, [[Pauline Larrouy-Maestri|AUTHOR Pauline Larrouy-Maestri]]^^1^^
, [[Elisabeth André|AUTHOR Elisabeth André]]^^2^^
, [[Nori Jacoby|AUTHOR Nori Jacoby]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^MPI for Empirical Aesthetics, Germany; ^^2^^Universität Augsburg, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3870–3874
</span></p></div>
<div class="cpabstractcardabstract"><p>Recent TTS systems are able to generate prosodically varied and realistic speech. However, it is unclear how this prosodic variation contributes to the perception of speakers’ emotional states. Here we use the recent psychological paradigm ‘Gibbs Sampling with People’ to search the prosodic latent space in a trained Global Style Token Tacotron model to explore prototypes of emotional prosody. Participants are recruited online and collectively manipulate the latent space of the generative speech model in a sequentially adaptive way so that the stimulus presented to one group of participants is determined by the response of the previous groups. We demonstrate that (1) particular regions of the model’s latent space are reliably associated with particular emotions, (2) the resulting emotional prototypes are well-recognized by a separate group of human raters, and (3) these emotional prototypes can be effectively transferred to new sentences. Collectively, these experiments demonstrate a novel approach to the understanding of emotional speech by providing a tool to explore the relation between the latent space of generative models and human semantics.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Devang S. Ram Mohan|AUTHOR Devang S. Ram Mohan]], [[Vivian Hu|AUTHOR Vivian Hu]], [[Tian Huey Teh|AUTHOR Tian Huey Teh]], [[Alexandra Torresquintero|AUTHOR Alexandra Torresquintero]], [[Christopher G.R. Wallis|AUTHOR Christopher G.R. Wallis]], [[Marlene Staib|AUTHOR Marlene Staib]], [[Lorenzo Foglianti|AUTHOR Lorenzo Foglianti]], [[Jiameng Gao|AUTHOR Jiameng Gao]], [[Simon King|AUTHOR Simon King]]
</p><p class="cpabstractcardaffiliationlist">Papercup Technologies, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3875–3879
</span></p></div>
<div class="cpabstractcardabstract"><p>Text does not fully specify the spoken form, so text-to-speech models must be able to learn from speech data that vary in ways not explained by the corresponding text. One way to reduce the amount of unexplained variation in training data is to provide acoustic information as an additional learning signal. When generating speech, modifying this acoustic information enables multiple distinct renditions of a text to be produced.
Since much of the unexplained variation is in the prosody, we propose a model that generates speech explicitly conditioned on the three primary acoustic correlates of prosody: F₀, energy and duration. The model is flexible about how the values of these features are specified: they can be externally provided, or predicted from text, or predicted then subsequently modified.
Compared to a model that employs a variational auto-encoder to learn unsupervised latent features, our model provides more interpretable, temporally-precise, and disentangled control. When automatically predicting the acoustic features from text, it generates speech that is more natural than that from a Tacotron 2 model with reference encoder. Subsequent human-in-the-loop modification of the predicted acoustic features can significantly further increase naturalness.</p></div>
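A minimal sketch of this kind of explicit acoustic conditioning is shown below, in the spirit of a variance-adaptor-style module; this framing, the layer sizes, and the simple additive conditioning are assumptions rather than the authors’ exact model.

```python
# Minimal sketch: predict per-phone F0, energy and duration, and allow external
# or edited values to override the predictions before decoding.
import torch
import torch.nn as nn

class ProsodyConditioner(nn.Module):
    def __init__(self, hidden):
        super().__init__()
        self.predict = nn.Linear(hidden, 3)   # (f0, energy, log-duration) per phone
        self.embed = nn.Linear(3, hidden)     # map prosody values back into the states

    def forward(self, phone_states, prosody=None):
        # phone_states: (B, L, H); prosody: optional (B, L, 3) externally provided values
        predicted = self.predict(phone_states)
        used = prosody if prosody is not None else predicted
        # In a full model, the duration track would additionally drive upsampling
        # of phone states to frame rate; here we only inject the values.
        return phone_states + self.embed(used), predicted
```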
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Alexandra Torresquintero|AUTHOR Alexandra Torresquintero]], [[Tian Huey Teh|AUTHOR Tian Huey Teh]], [[Christopher G.R. Wallis|AUTHOR Christopher G.R. Wallis]], [[Marlene Staib|AUTHOR Marlene Staib]], [[Devang S. Ram Mohan|AUTHOR Devang S. Ram Mohan]], [[Vivian Hu|AUTHOR Vivian Hu]], [[Lorenzo Foglianti|AUTHOR Lorenzo Foglianti]], [[Jiameng Gao|AUTHOR Jiameng Gao]], [[Simon King|AUTHOR Simon King]]
</p><p class="cpabstractcardaffiliationlist">Papercup Technologies, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3880–3884
</span></p></div>
<div class="cpabstractcardabstract"><p>Text-to-speech is now able to achieve near-human naturalness and research focus has shifted to increasing expressivity. One popular method is to transfer the prosody from a reference speech sample. There have been considerable advances in using prosody transfer to generate more expressive speech, but the field lacks a clear definition of what successful prosody transfer means and a method for measuring it. We introduce a dataset of prosodically-varied reference natural speech samples for evaluating prosody transfer. The samples include global variations reflecting emotion and interpersonal attitude, and local variations reflecting topical emphasis, propositional attitude, syntactic phrasing and marked tonicity. The corpus only includes prosodic variations that listeners are able to distinguish with reasonable accuracy, and we report these figures as a benchmark against which text-to-speech prosody transfer can be compared. We conclude the paper with a demonstration of our proposed evaluation methodology, using the corpus to evaluate two text-to-speech models that perform prosody transfer.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Nguyen Thi Thu Trang|AUTHOR Nguyen Thi Thu Trang]]^^1^^
, [[Nguyen Hoang Ky|AUTHOR Nguyen Hoang Ky]]^^1^^
, [[Albert Rilliard|AUTHOR Albert Rilliard]]^^2^^
, [[Christophe d’Alessandro|AUTHOR Christophe d’Alessandro]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Hanoi University of Science & Technology, Vietnam; ^^2^^LISN (UMR 9015), France; ^^3^^∂’Alembert (UMR 7190), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3885–3889
</span></p></div>
<div class="cpabstractcardabstract"><p>This research aims to build a prosodic boundary prediction model for improving the naturalness of Vietnamese speech synthesis. This model can be used directly to predict prosodic boundaries in the synthesis phase of the statistical parametric or end-to-end speech systems. Beside conventional features related to Part-Of-Speech (POS), this paper proposes two efficient features to predict prosodic boundaries: syntactic blocks and syntactic links, based on a thorough analysis of a Vietnamese dataset. Syntactic blocks are syntactic phrases whose sizes are bounded in their constituent syntactic tree. A syntactic link of two adjacent words is calculated based on the distance between them in the syntax tree. The experimental results show that the two proposed predictors improve the quality of the boundary prediction model using a decision tree classification algorithm, about 36.4% (F1 score) higher than the model with only POS features. The final boundary prediction model with POS, syntactic block, and syntactic link features using the LightGBM algorithm gives the best F1-score results at 87.0% in test data. The proposed model helps the TTS systems, developed by either HMM-based, DNN-based, or End-to-end speech synthesis techniques, improve about 0.3 MOS points (i.e. 6 to 10%) compared to the ones without the proposed model.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shaked Dovrat|AUTHOR Shaked Dovrat]], [[Eliya Nachmani|AUTHOR Eliya Nachmani]], [[Lior Wolf|AUTHOR Lior Wolf]]
</p><p class="cpabstractcardaffiliationlist">Tel Aviv University, Israel</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3890–3894
</span></p></div>
<div class="cpabstractcardabstract"><p>Single channel speech separation has experienced great progress in the last few years. However, training neural speech separation for a large number of speakers (e.g., more than 10 speakers) is out of reach for the current methods, which rely on the Permutation Invariant Training (PIT). In this work, we present a permutation invariant training that employs the Hungarian algorithm in order to train with an O(C³) time complexity, where C is the number of speakers, in comparison to O(C!) of PIT based methods. Furthermore, we present a modified architecture that can handle the increased number of speakers. Our approach separates up to 20 speakers and improves the previous results for large C by a wide margin.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mieszko Fraś|AUTHOR Mieszko Fraś]], [[Marcin Witkowski|AUTHOR Marcin Witkowski]], [[Konrad Kowalczyk|AUTHOR Konrad Kowalczyk]]
</p><p class="cpabstractcardaffiliationlist">AGH UST, Poland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3895–3899
</span></p></div>
<div class="cpabstractcardabstract"><p>Sound source separation (SS) from the microphone signals capturing speech in reverberant conditions is a formidable task. This paper addresses the problem of joint separation and dereverberation of speech using the multichannel Wiener filter (MWF) that is tailored to the sub-source modeling of each speech source with a full-rank mixing matrix. Specifically, the parameters of the proposed sub-source-weighted (SSW) spatial filter are estimated using the sub-source based expectation maximization (EM) algorithm with multiplicative updates (MU) and the localization prior distribution (LP) on the mixing matrix (SSEM-MU-LP). In addition, we strengthen dereverberation by incorporating a Generalized Weighted Prediction Error (GWPE) algorithm. The proposed method is evaluated using a large dataset of two-channel recordings of clean speech convolved with both real and synthesized impulse responses. The results of the experiments show the superior performance of the proposed method in reverberant conditions in comparison to using the standard NTF-based separation with the vanilla MWF in terms of signal-to-distortion ratio (improvement of 3–5.6 dB) and other commonly used sound separation metrics.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Martin Strauss|AUTHOR Martin Strauss]]^^1^^
, [[Jouni Paulus|AUTHOR Jouni Paulus]]^^1^^
, [[Matteo Torcoli|AUTHOR Matteo Torcoli]]^^2^^
, [[Bernd Edler|AUTHOR Bernd Edler]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^AudioLabs, Germany; ^^2^^Fraunhofer IIS, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3900–3904
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper describes a hands-on comparison on using state-of-the-art music source separation deep neural networks (DNNs) before and after task-specific fine-tuning for separating speech content from non-speech content in broadcast audio (i.e., dialog separation). The music separation models are selected as they share the number of channels (2) and sampling rate (44.1 kHz or higher) with the considered broadcast content, and vocals separation in music is considered as a parallel for dialog separation in the target application domain. These similarities are assumed to enable transfer learning between the tasks. Three models pre-trained on music (Open-Unmix, Spleeter, and Conv-TasNet) are considered in the experiments, and fine-tuned with real broadcast data. The performance of the models is evaluated before and after fine-tuning with computational evaluation metrics (SI-SIRi, SI-SDRi, 2f-model), as well as with a listening test simulating an application where the non-speech signal is partially attenuated, e.g., for better speech intelligibility. The evaluations include two reference systems specifically developed for dialog separation. The results indicate that pre-trained music source separation models can be used for dialog separation to some degree, and that they benefit from the fine-tuning, reaching a performance close to task-specific solutions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Marvin Borsdorf|AUTHOR Marvin Borsdorf]]^^1^^
, [[Chenglin Xu|AUTHOR Chenglin Xu]]^^2^^
, [[Haizhou Li|AUTHOR Haizhou Li]]^^2^^
, [[Tanja Schultz|AUTHOR Tanja Schultz]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universität Bremen, Germany; ^^2^^NUS, Singapore</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3905–3909
</span></p></div>
<div class="cpabstractcardabstract"><p>Monaural speech separation has been well studied on various databases. However, these databases mostly concern English speech. Research in multi-speaker scenarios, such as speech recognition, speaker recognition, speaker diarization, and speech separation calls for speaker mixtures databases comprising multiple languages. In this paper, we propose a new extensive multilingual database for speech separation tasks derived from the GlobalPhone 2000 Speaker Package, called “GlobalPhone Mix-to-Separate out of 2” (GlobalPhoneMS2). We describe the construction of the database and conduct speech separation experiments in monolingual and multilingual as well as seen and unseen languages settings. When trained on a multilingual dataset, the networks improve their performances for unseen languages, and across almost all seen languages. We show that replacing a monolingual dataset with a trilingual one, while keeping the data size roughly the same, helps to improve the performance in most cases. We attribute this to a larger diversity in speech, language, speaker, and recording characteristics. Based on the GlobalPhoneMS2 database, speech separation results for two-speaker mixing scenarios are reported in 22 spoken languages for the first time.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kay Peterson|AUTHOR Kay Peterson]]^^1^^
, [[Audrey Tong|AUTHOR Audrey Tong]]^^1^^
, [[Yan Yu|AUTHOR Yan Yu]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^NIST, USA; ^^2^^Dakota Consulting, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4324–4328
</span></p></div>
<div class="cpabstractcardabstract"><p>In 2020, the National Institute of Standards and Technology (NIST), in cooperation with the Intelligence Advanced Research Project Activity (IARPA), conducted an open challenge on automatic speech recognition (ASR) technology for low-resource languages on a challenging data type — conversational telephone speech. The OpenASR20 Challenge was offered for ten low-resource languages — Amharic, Cantonese, Guarani, Javanese, Kurmanji Kurdish, Mongolian, Pashto, Somali, Tamil, and Vietnamese. A total of nine teams from five countries fully participated, and 128 valid submissions were scored. This paper gives an overview of the challenge setup and procedures, as well as a summary of the results. The results show overall high word error rate (WER), with the best results on a severely constrained training data condition ranging from 0.4 to 0.65, depending on the language. ASR with such limited resources remains a challenging problem. Providing a computing platform may be a way to level the playing field and encourage wider participation in challenges like OpenASR.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Srikanth Madikeri|AUTHOR Srikanth Madikeri]], [[Petr Motlicek|AUTHOR Petr Motlicek]], [[Hervé Bourlard|AUTHOR Hervé Bourlard]]
</p><p class="cpabstractcardaffiliationlist">Idiap Research Institute, Switzerland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4329–4333
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we develop Automatic Speech Recognition (ASR) systems for multi-genre speech recognition of low-resource languages where training data is predominantly conversational speech but test data can be in one of the following genres: news broadcast, topical broadcast and conversational speech. ASR for low-resource languages is often developed by adapting a pre-trained model to a target language. When training data is predominantly from one genre and limited, the system’s performance for other genres suffer. To handle such out-of-domain scenarios, we employ multitask adaptation by using auxiliary conversational speech data from other languages in addition to the target-language data. We aim to (1) improve adaptation through implicit data augmentation by adding other languages as auxiliary tasks, and (2) prevent the acoustic model from overfitting to the dominant genre in the training set. Pre-trained parameters are obtained from a multilingual model trained with data from 18 languages using the Lattice-Free Maximum Mutual Information (LF-MMI) criterion. The adaptation is performed with the LF-MMI criterion. We present results on MATERIAL datasets for three languages: Kazakh and Farsi and Pashto.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Qiu-shi Zhu|AUTHOR Qiu-shi Zhu]]^^1^^
, [[Jie Zhang|AUTHOR Jie Zhang]]^^1^^
, [[Ming-hui Wu|AUTHOR Ming-hui Wu]]^^2^^
, [[Xin Fang|AUTHOR Xin Fang]]^^1^^
, [[Li-Rong Dai|AUTHOR Li-Rong Dai]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^USTC, China; ^^2^^iFLYTEK, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4334–4338
</span></p></div>
<div class="cpabstractcardabstract"><p>wav2vec 2.0 is a recently proposed self-supervised pre-training framework for learning speech representation. It utilizes a transformer to learn global contextual representation, which is effective especially in low-resource scenarios. Besides, it was shown that combining convolution neural network and transformer to model both local and global dependencies is beneficial for e.g., automatic speech recognition (ASR), natural language processing (NLP). However, how to model the local and global dependence in pre-training models is still an open question in the speech domain. In this paper, we therefore propose a new transformer encoder for enhancing the local dependency by combining convolution and self-attention modules. The transformer encoder first parallels the convolution and self-attention modules, and then serialized with another convolution module, sandwiched by a pair of feed forward modules. Experimental results show that the pre-trained model using the proposed method can reduce the word error rate (WER) compared to the reproduced wav2vec 2.0 at the cost of slightly increasing the size of training parameters.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hung-Pang Lin|AUTHOR Hung-Pang Lin]], [[Yu-Jia Zhang|AUTHOR Yu-Jia Zhang]], [[Chia-Ping Chen|AUTHOR Chia-Ping Chen]]
</p><p class="cpabstractcardaffiliationlist">National Sun Yat-sen University, Taiwan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4339–4343
</span></p></div>
<div class="cpabstractcardabstract"><p>We, in the team name of NSYSU-MITLab, have participated in low-resource speech recognition of the Open Automatic Speech Recognition Challenge 2020 (OpenASR20) and Formosa Speech Recognition Challenge 2020 (FSR-2020). For the tasks in the challenges, we build and compare end-to-end (E2E) systems and Deep Neural Network Hidden Markov Model (DNN-HMM) systems. In E2E systems, we implement an encoder with Conformer architecture and a decoder with Transformer architecture. In addition, a speaker classifier with a gradient reversal layer is included in the training phase to improve the robustness to speaker variation. In DNN-HMM systems, we implement the Time-Restricted Self-Attention and Factorized Time Delay Neural Networks for the DNN front-end acoustic representation learning. In OpenASR20, the best word error rates we achieved are 61.45% for Cantonese and 74.61% for Vietnamese. In FSR-2020, the best character error rate we achieved is 43.4% for Taiwanese Southern Min Recommended Characters and the best syllable error rate is 25.4% for Taiwan Minnanyu Luomazi Pinyin.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jing Zhao|AUTHOR Jing Zhao]]^^1^^
, [[Zhiqiang Lv|AUTHOR Zhiqiang Lv]]^^2^^
, [[Ambyera Han|AUTHOR Ambyera Han]]^^2^^
, [[Guan-Bo Wang|AUTHOR Guan-Bo Wang]]^^3^^
, [[Guixin Shi|AUTHOR Guixin Shi]]^^1^^
, [[Jian Kang|AUTHOR Jian Kang]]^^2^^
, [[Jinghao Yan|AUTHOR Jinghao Yan]]^^2^^
, [[Pengfei Hu|AUTHOR Pengfei Hu]]^^2^^
, [[Shen Huang|AUTHOR Shen Huang]]^^2^^
, [[Wei-Qiang Zhang|AUTHOR Wei-Qiang Zhang]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Tsinghua University, China; ^^2^^Tencent, China; ^^3^^Tsinghua University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4344–4348
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents our work for OpenASR20 Challenge. We describe our Automatic Speech Recognition (ASR) systems for Cantonese and Mongolian under both constrained and unconstrained conditions. For constrained condition, a hybrid NN-HMM ASR system play the main role, while for unconstrained condition, an end-to-end ASR system outperforms traditional hybrid systems significantly due to adequate training data. Besides, we adapt to the challenging PSTN conditions using publicly available wideband dictated speech with similar accent, respectively for the two languages. Furthermore, data cleanup, language tailored features, multi-band training, data augmentation, pre-training and system fusions are incorporated. Our submitted systems have achieved excellent performances for the two conditions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tanel Alumäe|AUTHOR Tanel Alumäe]], [[Jiaming Kong|AUTHOR Jiaming Kong]]
</p><p class="cpabstractcardaffiliationlist">Tallinn University of Technology, Estonia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4349–4353
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper describes the TalTech team submission to the OpenASR20 Challenge. OpenASR20 evaluated low-resource speech recognition technologies across 10 languages, using only 10 hours of training data in the constrained condition. Our ASR systems used hybrid CNN-TDNNF-based acoustic models, trained with different data augmentation strategies. We used language model adaptation, recurrent neural network language models and lattice combination for improving first pass results. The scores of our submissions were the best across all teams in six out of ten languages. The paper also describes post-evaluation experiments that focused on the unconstrained condition. We show that optimized N-best list combination of a CNN-TDNNF based system and a finetuned multilingual XLSR-53 model results in large reductions in word error rate. Using BABEL data and the combination of hybrid and end-to-end systems gives 12–22% relative improvement over the constrained condition results.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ethan Morris|AUTHOR Ethan Morris]]^^1^^
, [[Robbie Jimerson|AUTHOR Robbie Jimerson]]^^1^^
, [[Emily Prud’hommeaux|AUTHOR Emily Prud’hommeaux]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Rochester Institute of Technology, USA; ^^2^^Boston College, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4354–4358
</span></p></div>
<div class="cpabstractcardabstract"><p>The application of deep neural networks to the task of acoustic modeling for automatic speech recognition has resulted in dramatic decreases in ASR word error rates, enabling the use of this technology for interacting with smart phones and personal home assistants in high-resource languages. Developing ASR models of this caliber, however, requires hundreds or thousands of hours of transcribed speech recordings, which presents challenges for the vast majority of the world’s languages. In this paper, we investigate the utility of three distinct architectures that have previously been used for ASR in languages with limited training resources. We train and test these systems on publicly available ASR datasets for several typologically and orthographically diverse languages, which were produced under a variety of conditions using different speech collection strategies, practices, and equipment. Although these corpora are comparable in size, we find that no single ASR architecture outperforms all others. In addition, word error rates vary significantly, in some cases within the range of those typically reported for high-resource languages. Our results point to the importance of considering language-specific and corpus-specific factors and experimenting with multiple approaches when developing ASR systems for languages with limited training resources.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kimiko Tsukada|AUTHOR Kimiko Tsukada]]^^1^^
, [[Yurong|AUTHOR Yurong]]^^2^^
, [[Joo-Yeon Kim|AUTHOR Joo-Yeon Kim]]^^3^^
, [[Jeong-Im Han|AUTHOR Jeong-Im Han]]^^3^^
, [[John Hajek|AUTHOR John Hajek]]^^4^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Macquarie University, Australia; ^^2^^Inner Mongolia University, China; ^^3^^Konkuk University, Korea; ^^4^^University of Melbourne, Australia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3910–3914
</span></p></div>
<div class="cpabstractcardabstract"><p>The perception of Japanese consonant length contrasts (i.e. short/singleton vs long/geminate) by native and non-native speakers was compared to examine the extent to which difficult foreign language (FL) sounds are processed accurately. Three groups of participants had Korean, Mandarin or Mongolian as their first language (L1) and had no experience with Japanese. Unlike Japanese, Mandarin and Mongolian do not use consonant length contrastively. The phonemic status of consonant length in Korean is debatable. Further, unlike Japanese and Mandarin which predominantly use open syllables and restrict the occurrence of consonants in coda position, Korean and Mongolian permit a wide range of consonants in that syllable position. Via the AXB task, the participants’ discrimination accuracy of Japanese consonant length contrasts was assessed and compared to that of a group of 10 native Japanese speakers who served as controls. The Japanese group was at near ceiling with little individual variation. The Mongolian (but not Korean and Mandarin) group did not significantly differ from the control group when the target token (X) contained a geminate. All non-native groups were significantly less accurate than the control group when X contained a singleton. These results were interpreted as reflecting the participants’ L1 quantity system.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Minglin Wu|AUTHOR Minglin Wu]]^^1^^
, [[Kun Li|AUTHOR Kun Li]]^^2^^
, [[Wai-Kim Leung|AUTHOR Wai-Kim Leung]]^^1^^
, [[Helen Meng|AUTHOR Helen Meng]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^CUHK, China; ^^2^^SpeechX, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3954–3958
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper introduces two Transformer-based architectures for Mispronunciation Detection and Diagnosis (MDD). The first Transformer architecture (T-1) is a standard setup with an encoder, a decoder, a projection part and the Cross Entropy (CE) loss. T-1 takes in Mel-Frequency Cepstral Coefficients (MFCC) as input. The second architecture (T-2) is based on wav2vec 2.0, a pretraining framework. T-2 is composed of a CNN feature encoder, several Transformer blocks capturing contextual speech representations, a projection part and the Connectionist Temporal Classification (CTC) loss. Unlike T-1, T-2 takes in raw audio data as input. Both models are trained in an end-to-end manner. Experiments are conducted on the CU-CHLOE corpus, where T-1 achieves a Phone Error Rate (PER) of 8.69% and F-measure of 77.23%; and T-2 achieves a PER of 5.97% and F-measure of 80.98%. Both models significantly outperform the previously proposed AGPM and CNN-RNN-CTC models, with PERs at 11.1% and 12.1% respectively, and F-measures at 72.61% and 74.65% respectively.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Calbert Graham|AUTHOR Calbert Graham]]
</p><p class="cpabstractcardaffiliationlist">University of Cambridge, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3959–3963
</span></p></div>
<div class="cpabstractcardabstract"><p>It is well-known that the characteristics of L2 speech are highly influenced by the speakers’ L1. The main objective of this study was to uncover discriminative speech features to identify the L1 background of a speaker from their L2 English speech. Traditional phonetic approaches tend to compare speakers based on a pre-selected set of acoustic features, which may not be sufficient to capture all the unique traces of the L1 in the L2 speech for forensic speaker profiling purposes. Convolutional Neural Network (CNN) has the potential to remedy this issue through the automatic processing of the visual spectrogram.
This paper reports a series of CNN classification experiments modelled on spectrogram images. The classification problem consisted of determining whether English speech samples are spoken by a native speaker of English, Japanese, Dutch, French, or Polish. Both phonetically transcribed and untranscribed speech data were used.
Overall, results showed that the CNN achieved a high level of accuracy in identifying the speakers’ L1s from spectrogram images without explicit phonetic segmentation. However, the results also showed that training the classifiers on certain combinations of phonetically modelled spectrogram images, which makes the features more transparent, can achieve comparable accuracy rates.</p></div>
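As a rough illustration of the classification setup described in this abstract, the sketch below shows a toy CNN operating on spectrogram "images" and predicting one of five L1 backgrounds; the layer sizes and input dimensions are assumptions for demonstration only, not the architecture used in the paper.

```python
# Toy CNN over spectrogram "images" predicting one of five L1 backgrounds.
# Layer sizes and input dimensions are assumptions for demonstration only,
# not the architecture used in the paper.
import torch
import torch.nn as nn


class SpectrogramL1Classifier(nn.Module):
    def __init__(self, n_classes=5):
        super().__init__()
        self.features = nn.Sequential(
            nn.Conv2d(1, 16, 3, padding=1), nn.ReLU(), nn.MaxPool2d(2),
            nn.Conv2d(16, 32, 3, padding=1), nn.ReLU(), nn.MaxPool2d(2),
            nn.AdaptiveAvgPool2d(1))                # global pooling over freq/time
        self.classifier = nn.Linear(32, n_classes)

    def forward(self, x):                           # x: (batch, 1, freq, time)
        return self.classifier(self.features(x).flatten(1))


if __name__ == "__main__":
    specs = torch.randn(4, 1, 128, 300)             # batch of log-spectrograms
    print(SpectrogramL1Classifier()(specs).shape)   # torch.Size([4, 5])
```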
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Daniel Korzekwa|AUTHOR Daniel Korzekwa]]^^1^^
, [[Roberto Barra-Chicote|AUTHOR Roberto Barra-Chicote]]^^2^^
, [[Szymon Zaporowski|AUTHOR Szymon Zaporowski]]^^3^^
, [[Grzegorz Beringer|AUTHOR Grzegorz Beringer]]^^1^^
, [[Jaime Lorenzo-Trueba|AUTHOR Jaime Lorenzo-Trueba]]^^2^^
, [[Alicja Serafinowicz|AUTHOR Alicja Serafinowicz]]^^1^^
, [[Jasha Droppo|AUTHOR Jasha Droppo]]^^4^^
, [[Thomas Drugman|AUTHOR Thomas Drugman]]^^2^^
, [[Bozena Kostek|AUTHOR Bozena Kostek]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Amazon, Poland; ^^2^^Amazon, UK; ^^3^^Gdansk University of Technology, Poland; ^^4^^Amazon, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3915–3919
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper describes two novel complementary techniques that improve the detection of lexical stress errors in non-native (L2) English speech: attention-based feature extraction and data augmentation based on Neural Text-To-Speech (TTS). In a classical approach, audio features are usually extracted from fixed regions of speech such as the syllable nucleus. We propose an attention-based deep learning model that automatically derives optimal syllable-level representation from frame-level and phoneme-level audio features. Training this model is challenging because of the limited amount of incorrect stress patterns. To solve this problem, we propose to augment the training set with incorrectly stressed words generated with Neural TTS. Combining both techniques achieves 94.8% precision and 49.2% recall for the detection of incorrectly stressed words in L2 English speech of Slavic and Baltic speakers.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Bettina Braun|AUTHOR Bettina Braun]]^^1^^
, [[Nicole Dehé|AUTHOR Nicole Dehé]]^^1^^
, [[Marieke Einfeldt|AUTHOR Marieke Einfeldt]]^^1^^
, [[Daniela Wochner|AUTHOR Daniela Wochner]]^^1^^
, [[Katharina Zahner-Ritter|AUTHOR Katharina Zahner-Ritter]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universität Konstanz, Germany; ^^2^^Universität Trier, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3920–3924
</span></p></div>
<div class="cpabstractcardabstract"><p>Many studies relate acoustic voice quality measures to perceptual classification. We extend this line of research by training a classifier on a balanced set of perceptually annotated voice quality categories with high inter-rater agreement, and test it on speech samples from a different language and on a different speech style. Annotations were done on continuous speech from different laboratory settings. In Experiment 1, we trained a random forest with Standard Chinese and German recordings labelled as modal, breathy, or glottalized. The model had an accuracy of 78.7% on unseen data from the same sample (most important variables were harmonics-to-noise ratio, cepstral-peak prominence, and H1-A2). This model was then used to classify data from a different language (Icelandic, Experiment 2) and to classify a different speech style (German infant-directed speech (IDS), Experiment 3). Cross-linguistic generalizability was high for Icelandic (78.6% accuracy), but lower for German IDS (71.7% accuracy). Accuracy of recordings of adult-directed speech from the same speakers as in Experiment 3 (77%, Experiment 4) suggests that it is the special speech style of IDS, rather than the recording setting that led to lower performance. Results are discussed in terms of efficiency of coding and generalizability across languages and speech styles.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Qianyutong Zhang|AUTHOR Qianyutong Zhang]], [[Kexin Lyu|AUTHOR Kexin Lyu]], [[Zening Chen|AUTHOR Zening Chen]], [[Ping Tang|AUTHOR Ping Tang]]
</p><p class="cpabstractcardaffiliationlist">NJUST, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3925–3928
</span></p></div>
<div class="cpabstractcardabstract"><p>Prosodic focus plays an important role during speech communication, delivering speakers’ pragmatical intention to emphasize key information, especially in contrastive scenarios. Previous studies exploring children’s acquisition of prosodic focus have generally focused on Germanic and Romance languages, while it was unclear when children learning Mandarin Chinese were able to correctly interpret the pragmatic meaning of prosodic focus and integrate it into speech comprehension. The current study explored Mandarin-learning 3–6-year-olds’ online interpretation of prosodic focus to identify contrastive referents. Twenty 3–4-year-olds, 23 5–6-year-olds, and 22 adult controls were tested. The visual-world paradigm was adopted, where participants were instructed to search for target pictures while listening to contrastive objects in discourse sequences, e.g., //Find the red cat. Now, find the PURPLE/purple cat//, where the second adjective was produced with or without prosodic focus. Participants’ fixation patterns were recorded via eye-trackers. The results showed that while adults and 5–6 years showed faster fixation toward target pictures in the presence of prosodic focus, this was not the case for 3–4 years. These results indicated that Mandarin-learning children at 5–6 years have acquired the pragmatic meaning of prosodic focus and utilize it to guide their identification of contrastive referents.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Maryam Sadat Mirzaei|AUTHOR Maryam Sadat Mirzaei]], [[Kourosh Meshgi|AUTHOR Kourosh Meshgi]]
</p><p class="cpabstractcardaffiliationlist">RIKEN, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3929–3933
</span></p></div>
<div class="cpabstractcardabstract"><p>Teaching listening skills to those learning a second language (L2) is one of the most challenging tasks mainly because predicting L2 listening difficulties is not always straightforward. Complex processes are involved in decoding connected speech, constructing meaning, and comprehending the audio material. Many studies have attempted to identify the significant factors leading to listening difficulties, yet, a comprehensive model is to be constructed. We argue that an automatic speech recognition (ASR) system with limited training can be viewed as a rough model for an L2 listener with particular language proficiency. We proposed a method to select the training samples for the ASR system to match the mistakes of L2 listeners when listening to the authentic listening materials. This model can predict the learners’ listening difficulties, thus allowing for generating tailored captions to assist them with L2 listening.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hongwei Ding|AUTHOR Hongwei Ding]]^^1^^
, [[Binghuai Lin|AUTHOR Binghuai Lin]]^^2^^
, [[Liyuan Wang|AUTHOR Liyuan Wang]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^SJTU, China; ^^2^^Tencent, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3934–3938
</span></p></div>
<div class="cpabstractcardabstract"><p>Prosodic speech characteristics are important in the evaluation of both intelligibility and naturalness of oral proficiency for learners of English as a Second Language (ESL). Different f₀ movement patterns between native and Mandarin Chinese learners have been an important research topic for second-language (L2) English speech learning. However, previous studies have seldom examined f₀ movement patterns between lower-level and higher-level Mandarin ESL learners. The current study compared f₀ change patterns extracted from the same 20 English sentences read by 20 lower- and 20 higher- level Mandarin ESL learners, and 20 native English speakers from a speech database. Appropriate procedures were applied to ensure a more accurate estimation of f₀ values and to catch characteristic deviation in f₀ movement patterns of ESL learners. The results showed that lower-level Mandarin speakers displayed more frequent f₀ fluctuations and smaller standard deviation of intervals between f₀ peaks than both native speakers and higher-level learners. The special characteristic of many smaller “ripples” on pitch contours of lower-level L2 English speech resembles Mandarin Chinese f₀ movements, which suggests a negative transfer from the first language (L1) Mandarin. The findings can shed light on the assessment and learning of L2 English prosody by Mandarin ESL learners.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Binghuai Lin|AUTHOR Binghuai Lin]], [[Liyuan Wang|AUTHOR Liyuan Wang]]
</p><p class="cpabstractcardaffiliationlist">Tencent, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3939–3943
</span></p></div>
<div class="cpabstractcardabstract"><p>Automatic pronunciation assessment plays an important role in computer-assisted pronunciation training (CAPT). Goodness of pronunciation (GOP) based on automatic speech recognition (ASR) has been commonly used in pronunciation assessment. It has been found that GOP normally shows deteriorating performance under noisy conditions. Traditional noise compensation methods, which compensate distorted GOP under noisy situations based on the Gaussian mixture model (GMM) or other simple mapping functions, ignore contextual influence and phonemic attributes of the utterance. This usually leads to a lack of robustness with changed conditions. In this paper, we adopt a bidirectional long short-term (BLSTM) network combining phonemic attributes to conduct the compensation for distorted GOP under noisy conditions. We evaluate the model performance based on English words recorded by Chinese learners in clean and noisy situations. Experimental results show the proposed model outperforms the traditional baselines in Pearson correlation coefficient (PCC) and accuracy for pronunciation assessment under various noisy conditions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jacek Kudera|AUTHOR Jacek Kudera]], [[Philip Georgis|AUTHOR Philip Georgis]], [[Bernd Möbius|AUTHOR Bernd Möbius]], [[Tania Avgustinova|AUTHOR Tania Avgustinova]], [[Dietrich Klakow|AUTHOR Dietrich Klakow]]
</p><p class="cpabstractcardaffiliationlist">Universität des Saarlandes, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3944–3948
</span></p></div>
<div class="cpabstractcardabstract"><p>This study reveals the relation between surprisal, phonetic distance, and latency based on a multilingual, short-term priming framework. Four Slavic languages (Bulgarian, Czech, Polish, and Russian) are investigated across two priming conditions: associative and phonetic priming, involving true cognates and near-homophones, respectively. This research is grounded in the methodology of information theory and proposes new methods for quantifying differences between meaningful lexical primes and targets for closely related languages. It also outlines the influence of phonetic distance between cognate and noncognate pairs of primes and targets on response times in a cross-lingual lexical decision task. The experimental results show that phonetic distance moderates response times only in Polish and Czech, whereas the surprisal-based correspondence effect is an accurate predictor of latency for all tested languages. The information-theoretic approach of quantifying feature-based alternations between Slavic cognates and near-homophones appears to be a valid method for latency moderation in the auditory modality. The outcomes of this study suggest that the surprisal-based (un)expectedness of spoken stimuli is an accurate predictor of human performance in multilingual lexical decision tasks.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yuqing Zhang|AUTHOR Yuqing Zhang]]^^1^^
, [[Zhu Li|AUTHOR Zhu Li]]^^1^^
, [[Binghuai Lin|AUTHOR Binghuai Lin]]^^2^^
, [[Jinsong Zhang|AUTHOR Jinsong Zhang]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^BLCU, China; ^^2^^Tencent, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3949–3953
</span></p></div>
<div class="cpabstractcardabstract"><p>Relatively little attention has been devoted to the discourse-level prosodic encoding and speech planning in second language (L2) speech. This study reports a preliminary study on learners’ discourse prosody encoding pattern and makes a comparison with that of native speakers. Using a corpus of spontaneously produced picture story narratives, we analyzed general characteristics of prosodic units (PUs) and explored relationships between pitch encoding (cross-boundary f0 heights and f0 slopes) of PUs and the semantic completeness of PUs in English spontaneous speech by native speakers, beginning learners, and advanced learners. The results indicated that beginning learners showed neither sensitivity to semantic units in discourse (DUs) in their f0 encoding nor distinct signs of pitch-related preplanning based on DUs, suggesting improper phrasing of the least proficient non-native speakers. Both native speakers and advanced learners were sensitive to the initiation and termination of DUs in their prosodic encoding; however, only native speakers showed clear signs of DU-based preplanning. We argue that the observed between-group differences in L1 and L2 speech might be attributed to differences in the scope of speech planning, i.e., compared with native speakers, who mostly produce complete semantic units, learners’ speech is produced step by step with pauses between phrases.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Miran Oh|AUTHOR Miran Oh]], [[Dani Byrd|AUTHOR Dani Byrd]], [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]]
</p><p class="cpabstractcardaffiliationlist">University of Southern California, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3964–3968
</span></p></div>
<div class="cpabstractcardabstract"><p>Velum actions are critical to differentiating oral and nasal sounds in spoken language; specifically in the latter, the velum is lowered to open the nasal port and allow nasal airflow. However, details on how the velum is lowered for nasal production in speech are scarce. State-of-the-art real-time Magnetic Resonance Imaging (rtMRI) can directly image the entirety of the moving vocal tract, providing spatiotemporal kinematic data of articulatory actions. Most instrumental studies of speech production explore oral constriction actions such as lip or tongue movements. RtMRI makes possible a quantitative assessment of non-oral and non-constriction actions, such as velum (and larynx) dynamics. This paper illustrates articulatory aspects of consonant nasality, which have previously been inferred from acoustic or aerodynamic data. Velum actions are quantified in spatial and temporal domains: i) vertical and horizontal velum positions during nasal consonant production are quantified to measure, respectively, the degree of velum lowering and velic opening, and ii) duration intervals for velum lowering, plateau, and raising are obtained to understand which portion of the velum action is lengthened to generate phonologically long nasality. Findings demonstrate that velum action tracking using rtMRI can illuminate linguistic modulations of nasality strength and length.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zhe-chen Guo|AUTHOR Zhe-chen Guo]], [[Rajka Smiljanic|AUTHOR Rajka Smiljanic]]
</p><p class="cpabstractcardaffiliationlist">University of Texas at Austin, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4009–4013
</span></p></div>
<div class="cpabstractcardabstract"><p>This study investigated coarticulation of read and spontaneous speech in different communicative contexts from the LUCID corpus. Spontaneous speech samples were from Southern British English speakers who completed an interactive spot-the-differences task with no communicative barrier (NB), with their voice vocoded (VOC), and with a partner who heard their speech in babble (BABBLE) or was a non-native English speaker (L2). The same speakers also read sentences in a casual (READ-CO) and clear (READ-CL) speaking style. Tokens of a pre-defined set of keywords were extracted from the speech samples and consonant-vowel sequences in these tokens were analyzed using a whole-spectrum measure of coarticulation. Results showed that coarticulatory resistance in the six communicative contexts from highest to lowest was: BABBLE > VOC, L2, READ-CL > NB, READ-CO. Thus, in response to communicative barriers, be they real or imaginary, speakers coarticulated less, in line with the models of targeted speaker adaptations (the H&H theory [1] and Adaptive Speaker Framework [2]).</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Einar Meister|AUTHOR Einar Meister]], [[Lya Meister|AUTHOR Lya Meister]]
</p><p class="cpabstractcardaffiliationlist">Tallinn University of Technology, Estonia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4014–4018
</span></p></div>
<div class="cpabstractcardabstract"><p>The paper explores the developmental changes of vowel acoustics in Estonian adolescents as a function of age and gender. Formant frequencies F1–F4 and the duration of vowels were measured from read speech samples of 305 native Estonian subjects (173 girls and 132 boys) aged from 10 to 18 years. GAM framework was applied for the statistical analysis. The results show that both the formant frequencies and the vowel space area decrease gradually from 10 to 15 years in both gender groups and the quality of vowels stabilizes at the age of 15–18 years, whereas gender-specific differences emerge around the age of 12–13. Age-related change in the duration of vowels shows similar patterns with formants, however, with no gender difference. The findings are in line with the results reported for adolescent speech in other languages. The analysis results based on speech samples of the subjects with normal linguistic development can be considered reference data for distinguishing between normal and abnormal speech development.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sonia d’Apolito|AUTHOR Sonia d’Apolito]], [[Barbara Gili Fivela|AUTHOR Barbara Gili Fivela]]
</p><p class="cpabstractcardaffiliationlist">Università del Salento, Italy</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4019–4023
</span></p></div>
<div class="cpabstractcardabstract"><p>Accuracy in production of non-native sounds is analyzed by considering the influence of L1, context and co-text on Italian L2 speech. While the L1 influence is often described in the literature, careful investigations on how production accuracy may change in different contexts and co-texts are needed. This paper describes two experiments on how French learners of Italian as L2 (advanced/beginners) realize geminates depending on different contexts (the global contexts, e.g., the tasks) and co-texts (the amount of information available syntagmatically).
Acoustic data acquired by recording 4 advanced and 4 beginner Italian-L2 learners (and 3 Italian natives as controls) were analyzed for the duration of the target consonant and the preceding vowel, as well as speech articulation rate, taken as indices of geminate production accuracy.
Results confirm the strongest influence of L1 in beginners’ production and depict a complex interplay of context and co-text. Adding information in the co-text may induce different effects on speech production, depending on the local context, that is, on the speakers’ communication needs during speech production. Specifically, a “rich” co-text may favor a decrease in production accuracy or, on the contrary, an increase, depending on the speaker’s need to highlight or contrast information.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Wilbert Heeringa|AUTHOR Wilbert Heeringa]], [[Hans Van de Velde|AUTHOR Hans Van de Velde]]
</p><p class="cpabstractcardaffiliationlist">Fryske Akademy, The Netherlands</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4024–4028
</span></p></div>
<div class="cpabstractcardabstract"><p>Several studies have shown that in sociophonetic research Lobanov’s speaker normalization method outperforms other methods for normalizing vowel formants of speakers. An advantage of Lobanov’s method compared to the method that was introduced by Watt & Fabricius in 2002 is that it is independent of the shape of the vowel space area, and also normalizes to the dispersion of the vowels. However, it does depend on the distribution of the vowels within the vowel space. When using Lobanov normalization the formant values are converted to z-scores. We present a method where the µ in the z-score formula is replaced by the center of the convex hull that encloses the vowels, and the σ is obtained on the basis of the points that constitute the convex hull. When normalizing measurements of two real data sets, and of a series of randomly generated data sets, we found that our method improved in matching vowel spaces in size and overlap.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Rosey Billington|AUTHOR Rosey Billington]]^^1^^
, [[Hywel Stoakes|AUTHOR Hywel Stoakes]]^^2^^
, [[Nick Thieberger|AUTHOR Nick Thieberger]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^ANU, Australia; ^^2^^University of Melbourne, Australia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4029–4033
</span></p></div>
<div class="cpabstractcardabstract"><p>For most of the world’s languages, detailed phonetic analyses across different aspects of the sound system do not exist, due in part to limitations in available speech data and tools for efficiently processing such data for low-resource languages. Archival language documentation collections offer opportunities to extend the scope and scale of phonetic research on low-resource languages, and developments in methods for automatic recognition and alignment of speech facilitate the preparation of phonetic corpora based on these collections. We present a case study applying speech modelling and forced alignment methods to narrative data for Nafsan, an Oceanic language of central Vanuatu. We examine the accuracy of the forced-aligned phonetic labelling based on limited speech data used in the modelling process, and compare acoustic and durational measures of 17,851 vowel tokens for 11 speakers with previous experimental phonetic data for Nafsan. Results point to the suitability of archival data for large-scale studies of phonetic variation in low-resource languages, and also suggest that this approach can feasibly be used as a starting point in expanding to phonetic comparisons across closely-related Oceanic languages.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zirui Liu|AUTHOR Zirui Liu]], [[Yi Xu|AUTHOR Yi Xu]]
</p><p class="cpabstractcardaffiliationlist">University College London, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3969–3973
</span></p></div>
<div class="cpabstractcardabstract"><p>Recent research has shown fresh evidence that consonant and vowel are synchronised at the syllable onset, as predicted by a number of theoretical models. The finding was made by using a minimal contrast paradigm to determine segment onset in Mandarin CV syllables, which differed from the conventional method of detecting gesture onset with a velocity threshold [1]. It has remained unclear, however, if CV co-onset also occurs between the nucleus vowel and a consonant cluster, as predicted by the articulatory syllable model [2]. This study applied the minimal contrast paradigm to British English in both CV and clusterV (CLV) syllables, and analysed the spectral patterns with signal chopping in conjunction with recurrent neural networks (RNN) with long short-term memory (LSTM) [3]. Results show that vowel onset is synchronised with the onset of the first consonant in a cluster, thus supporting the articulatory syllable model.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Míša Hejná|AUTHOR Míša Hejná]]
</p><p class="cpabstractcardaffiliationlist">Aarhus University, Denmark</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3974–3978
</span></p></div>
<div class="cpabstractcardabstract"><p>This study investigates how widespread pre-aspiration and local breathiness are in English spoken in Wales, by speakers identifying as Welsh. While the main purpose is to establish whether the phenomenon is generally present in Welsh English, the data also enables us to explore whether pre-aspiration might be conditioned by sex/gender, age, and the ability to speak Welsh. An acoustic corpus of 45 speakers producing word-final plosives and fricatives is analysed.
Pre-aspiration and local breathiness are produced by all speakers, representing 32 towns and 16 areas included in the analyses. Pre-aspiration and breathiness are more frequent and longer in L1 and L2 Welsh speakers than those who do not speak Welsh at all. In general, no statistically significant sex and age effects emerge.
In addition, gradient allophony between pre-aspiration and glottalisation is reported for all speakers in the plosive context: the more frequently they glottalise, the less frequent the pre-aspiration. In fricatives, most speakers do not glottalise. Among those who do, some display no relationship between pre-aspiration and glottalisation, while a minority display either an indication of gradient allophony between the two or a positive correlation.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Beeke Muhlack|AUTHOR Beeke Muhlack]], [[Mikey Elmers|AUTHOR Mikey Elmers]], [[Heiner Drenhaus|AUTHOR Heiner Drenhaus]], [[Jürgen Trouvain|AUTHOR Jürgen Trouvain]], [[Marjolein van Os|AUTHOR Marjolein van Os]], [[Raphael Werner|AUTHOR Raphael Werner]], [[Margarita Ryzhova|AUTHOR Margarita Ryzhova]], [[Bernd Möbius|AUTHOR Bernd Möbius]]
</p><p class="cpabstractcardaffiliationlist">Universität des Saarlandes, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3979–3983
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper reports on two experiments that partially replicate an experiment by Fraundorf and Watson (2011, J Mem. Lang.) on the recall effect of filler particles. Their subjects listened to three passages of a story, either with or without filler particles, which they had to retell afterwards. They analysed the subjects’ retelling in terms of whether important plot points were remembered or not. For their English data, they found that filler particles facilitate the recall of the plot points significantly compared to stories that did not include filler particles. As this seems to be a convincing experimental design, we aimed at evaluating this method as a web-based experiment which may, if found to be suitable, easily be applied to other languages. Furthermore, we investigated whether their results are found in German as well (Experiment 1), and evaluated whether filler duration has an effect on recall performance (Experiment 2). Our results could not replicate the findings of the original study: in fact, the opposite effect was found for German. In Experiment 1, participants performed better on recall in the fluent condition, while no significant results were found for English in Experiment 2.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Chunyu Ge|AUTHOR Chunyu Ge]], [[Yixuan Xiong|AUTHOR Yixuan Xiong]], [[Peggy Mok|AUTHOR Peggy Mok]]
</p><p class="cpabstractcardaffiliationlist">CUHK, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3984–3988
</span></p></div>
<div class="cpabstractcardabstract"><p>The COVID-19 pandemic posed an unprecedented challenge to phonetic research. On-site collection of speech data is difficult, if not impossible. The advancement of technology in mobile devices and online conference platforms offers the opportunity to collect data remotely. This paper aims to answer the question of how reliable speech data collected remotely are based on controlled speech. Seven devices, including smartphones and laptops, were used to record speech simultaneously, locally or on the cloud using ZOOM, both in a sound-attenuated lab and a conference room. Common acoustic measurements were made on these recordings. Local recordings proved to be reliable in duration, but not for recordings made on the cloud. Different devices have comparable performances in F0 and F1. The values acquired by different devices differ a lot for F2 and higher formants, spectral moments, and voice quality measures. These differences can lead to erroneous interpretation of segmental and voice quality contrasts. The recordings made remotely by smartphones and locally using ZOOM can be useful in studying prosody, but should be used with care for segments.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jing Huang|AUTHOR Jing Huang]], [[Feng-fan Hsieh|AUTHOR Feng-fan Hsieh]], [[Yueh-chin Chang|AUTHOR Yueh-chin Chang]]
</p><p class="cpabstractcardaffiliationlist">National Tsing Hua University, Taiwan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3989–3993
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper is a comparative study of the articulation of the “apical vowels” in three Mandarin dialects: Beijing Mandarin (BJM), Northeastern Mandarin (NEM), and Southwestern Mandarin (SWM), using co-registered EMA and ultrasound. Data from 5 BJM speakers, 5 NEM speakers and 4 SWM speakers in their twenties were analyzed and discussed. Our recording materials include the dental and retroflex apical vowels, and their //er//-suffixed forms. Results suggest that distinct lingual configurations are found among the three dialects of Mandarin, even though these apical vowels are not perceptually distinguishable. Specifically, the dental apical vowel [ɿ] has a grooved tongue shape in BJM, a retracted tongue dorsum in NEM, and a relatively flat tongue shape in SWM. The retroflex apical vowel [ʅ] has a domed tongue shape as well as a bunched tongue body in NEM, while a slightly domed tongue posture is found in SWM. Moreover, the retroflex apical vowel [ʅ] is, articulatorily speaking, very similar to the //er//-suffix in BJM (cf. [10]). In sum, we observed yet another instance of the articulatory-acoustic mismatch.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mark Gibson|AUTHOR Mark Gibson]]^^1^^
, [[Oihane Muxika|AUTHOR Oihane Muxika]]^^1^^
, [[Marianne Pouplier|AUTHOR Marianne Pouplier]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universidad de Navarra, Spain; ^^2^^LMU München, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3994–3998
</span></p></div>
<div class="cpabstractcardabstract"><p>We capitalize on previously recorded kinematic and acoustic data for three languages (Georgian (GE), Spanish (SP) and Moroccan Arabic (MA)) that exhibit open articulatory transitions between the consonants in clusters in order to dissect the aero-acoustic parameters of the transitions in each language. These particular languages are of interest because they show similar patterns of interarticulatory timing in clusters, offering the unique opportunity to examine the acoustics of open transitions cross-linguistically. Our analysis centers on word initial clusters (/kl/ and /gl/), from which we extract relativized temporal values relevant to clusters and spectral parameters related to open articulatory transitions. We report baseline results using linear mixed effects models, then train a Random Forest model in a supervised learning environment on the significant variables. After training, test tokens are introduced in order to test whether the model can categorize the language based on the spectral and temporal parameters, and rank variables in terms of their feature importance. The results show that the model can categorize the data to the correct language with a 95.59% accuracy rate, where normalized zero-crossing (nzcr), modifications of the amplitude envelope (ΔE), and intensity ratio ranked highest in feature importance.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Amelia J. Gully|AUTHOR Amelia J. Gully]]
</p><p class="cpabstractcardaffiliationlist">University of York, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3999–4003
</span></p></div>
<div class="cpabstractcardabstract"><p>The shape of the vocal tract varies considerably between individuals. The relationship between detailed variation in vocal tract shape and the acoustics of speech is not yet well understood, despite its potential for increasing understanding in the fields of voice biometrics, forensic speech science, and personalised speech synthesis. One reason that this topic has not yet been extensively explored is that 3D vocal tract shape is difficult to quantify robustly. Geometric morphometrics is a technique developed in evolutionary biology for statistically valid quantification and comparison of anatomical shapes. This study makes use of 3D magnetic resonance imaging data of the vocal tracts of eight individuals, and accompanying audio recordings, combined with geometric morphometric techniques to determine whether the method offers useful information for speech science. The results suggest a linear relationship between the shapes of the vocal tract and output spectra, and there is evidence of possible sexual dimorphism and allometry (a systematic variation of shape with size) in the vocal tract, although due to the limited sample size the results did not reach statistical significance. The results suggest that geometric morphometrics can provide useful information about the vocal tract, and justify further study using this technique.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Adriana Guevara-Rukoz|AUTHOR Adriana Guevara-Rukoz]]^^1^^
, [[Shi Yu|AUTHOR Shi Yu]]^^2^^
, [[Sharon Peperkamp|AUTHOR Sharon Peperkamp]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^LSCP (UMR 8554), France; ^^2^^LPP (UMR 7018), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4004–4008
</span></p></div>
<div class="cpabstractcardabstract"><p>Japanese allows for almost no consonants in syllable codas. In loanwords, illegal codas are transformed into onsets by means of vowel epenthesis. The default epenthetic vowel in loanwords is [ɯ], and previous work has shown that this [ɯ]-epenthesis reflects Japanese listeners’ perception of illegal coda consonants. Here, we focus on one of the non-default cases: following coda [ç] and [x] the epenthetic vowel is a copy of the preceding vowel. Using an identification and a discrimination task, we provide evidence for the perceptual origin of this copy vowel phenomenon: After [ç] and [x], Japanese listeners perceive more often an epenthetic copy vowel than the default vowel [ɯ], whereas after [k] it is the reverse.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zhengkun Tian|AUTHOR Zhengkun Tian]], [[Jiangyan Yi|AUTHOR Jiangyan Yi]], [[Ye Bai|AUTHOR Ye Bai]], [[Jianhua Tao|AUTHOR Jianhua Tao]], [[Shuai Zhang|AUTHOR Shuai Zhang]], [[Zhengqi Wen|AUTHOR Zhengqi Wen]]
</p><p class="cpabstractcardaffiliationlist">CAS, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4034–4038
</span></p></div>
<div class="cpabstractcardabstract"><p>Transducer-based models, such as RNN-Transducer and transformer-transducer, have achieved great success in speech recognition. A typical transducer model decodes the output sequence conditioned on the current acoustic state and previously predicted tokens step by step. Statistically, The number of blank tokens in the prediction results accounts for nearly 90% of all tokens. It takes a lot of computation and time to predict the blank tokens, but only the non-blank tokens will appear in the final output sequence. Therefore, we propose a method named fast-skip regularization, which tries to align the blank position predicted by a transducer with that predicted by a connectionist temporal classification (CTC) model. During the inference, the transducer model can predict the blank tokens in advance by a simple CTC project layer without many complicated forward calculations of the transducer decoder and then skip them, which will reduce the computation and improve the inference speed greatly. All experiments are conducted on a public Chinese mandarin dataset AISHELL-1. The results show that the fast-skip regularization can indeed help the transducer model learn the blank position alignments. Besides, the inference with fast-skip can be speeded up nearly 4 times with only a little performance degradation.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Anna Ollerenshaw|AUTHOR Anna Ollerenshaw]], [[Md. Asif Jalal|AUTHOR Md. Asif Jalal]], [[Thomas Hain|AUTHOR Thomas Hain]]
</p><p class="cpabstractcardaffiliationlist">University of Sheffield, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4079–4083
</span></p></div>
<div class="cpabstractcardabstract"><p>End-to-end automatic speech recognition (ASR) models aim to learn a generalised speech representation. However, there are limited tools available to understand the internal functions and the effect of hierarchical dependencies within the model architecture. It is crucial to understand the correlations between the layer-wise representations, to derive insights on the relationship between neural representations and performance. Previous investigations of network similarities using correlation analysis techniques have not been explored for End-to-End ASR models. This paper analyses and explores the internal dynamics between layers during training with CNN, LSTM and Transformer based approaches using Canonical correlation analysis (CCA) and centered kernel alignment (CKA) for the experiments. It was found that neural representations within CNN layers exhibit hierarchical correlation dependencies as layer depth increases but this is mostly limited to cases where neural representation correlates more closely. This behaviour is not observed in LSTM architecture, however there is a bottom-up pattern observed across the training process, while Transformer encoder layers exhibit irregular coefficiency correlation as neural depth increases. Altogether, these results provide new insights into the role that neural architectures have upon speech recognition performance. More specifically, these techniques can be used as indicators to build better performing speech recognition models.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Amber Afshan|AUTHOR Amber Afshan]]^^1^^
, [[Kshitiz Kumar|AUTHOR Kshitiz Kumar]]^^2^^
, [[Jian Wu|AUTHOR Jian Wu]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of California at Los Angeles, USA; ^^2^^Microsoft, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4084–4088
</span></p></div>
<div class="cpabstractcardabstract"><p>Scores from traditional confidence classifiers (CCs) in automatic speech recognition (ASR) systems lack universal interpretation and vary with updates to the underlying confidence or acoustic models (AMs). In this work, we build interpretable confidence scores with an objective to closely align with ASR accuracy. We propose a new sequence-level CC with a richer context providing CC scores highly correlated with ASR accuracy and scores stable across CC updates. Hence, expanding CC applications. Recently, AM customization has gained traction with the widespread use of unified models. Conventional adaptation strategies that customize AM expect well-matched data for the target domain with gold-standard transcriptions. We propose a cost-effective method of using CC scores to select an optimal adaptation data set, where we maximize ASR gains from minimal data. We study data in various confidence ranges and optimally choose data for AM adaptation with KL-Divergence regularization. On the Microsoft voice search task, data selection for supervised adaptation using the sequence-level confidence scores achieves word error rate reduction (WERR) of 8.5% for row-convolution LSTM (RC-LSTM) and 5.2% for latency-controlled bidirectional LSTM (LC-BLSTM). In the semi-supervised case, with ASR hypotheses as labels, our method provides WERR of 5.9% and 2.8% for RC-LSTM and LC-BLSTM, respectively.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Anton Mitrofanov|AUTHOR Anton Mitrofanov]]^^1^^
, [[Mariya Korenevskaya|AUTHOR Mariya Korenevskaya]]^^2^^
, [[Ivan Podluzhny|AUTHOR Ivan Podluzhny]]^^1^^
, [[Yuri Khokhlov|AUTHOR Yuri Khokhlov]]^^2^^
, [[Aleksandr Laptev|AUTHOR Aleksandr Laptev]]^^1^^
, [[Andrei Andrusenko|AUTHOR Andrei Andrusenko]]^^1^^
, [[Aleksei Ilin|AUTHOR Aleksei Ilin]]^^2^^
, [[Maxim Korenevsky|AUTHOR Maxim Korenevsky]]^^2^^
, [[Ivan Medennikov|AUTHOR Ivan Medennikov]]^^1^^
, [[Aleksei Romanenko|AUTHOR Aleksei Romanenko]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^ITMO University, Russia; ^^2^^STC-innovations, Russia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4039–4043
</span></p></div>
<div class="cpabstractcardabstract"><p>Neural network-based language models are commonly used in rescoring approaches to improve the quality of modern automatic speech recognition (ASR) systems. Most of the existing methods are computationally expensive since they use autoregressive language models. We propose a novel rescoring approach, which processes the entire lattice in a single call to the model. The key feature of our rescoring policy is a novel non-autoregressive Lattice Transformer Language Model (LT-LM). This model takes the whole lattice as an input and predicts a new language score for each arc. Additionally, we propose the artificial lattices generation approach to incorporate a large amount of text data in the LT-LM training process. Our single-shot rescoring performs orders of magnitude faster than other rescoring methods in our experiments. It is more than 300 times faster than pruned RNNLM lattice rescoring and N-best rescoring while slightly inferior in terms of WER.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Cyril Allauzen|AUTHOR Cyril Allauzen]], [[Ehsan Variani|AUTHOR Ehsan Variani]], [[Michael Riley|AUTHOR Michael Riley]], [[David Rybach|AUTHOR David Rybach]], [[Hao Zhang|AUTHOR Hao Zhang]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4044–4048
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper proposes and evaluates alternative speech recognition design strategies using the hybrid autoregressive transducer (HAT) model. The different strategies are designed with special attention to the choice of modeling units and to the integration of different types of external language models during first-pass beam-search or second-pass re-scoring. These approaches are compared on a large-scale voice search task and the recognition quality over the head and tail of speech data is analyzed. Our experiments show decent improvements in WER over common speech phrases and significant gains on uncommon ones compared to the state-of-the-art approaches.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hirofumi Inaguma|AUTHOR Hirofumi Inaguma]], [[Tatsuya Kawahara|AUTHOR Tatsuya Kawahara]]
</p><p class="cpabstractcardaffiliationlist">Kyoto University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4049–4053
</span></p></div>
<div class="cpabstractcardabstract"><p>In this work, we propose novel decoding algorithms to enable streaming automatic speech recognition (ASR) on unsegmented long-form recordings without voice activity detection (VAD), based on monotonic chunkwise attention (MoChA) with an auxiliary connectionist temporal classification (CTC) objective. We propose a //block-synchronous// beam search decoding to take advantage of efficient batched output-synchronous and low-latency input-synchronous searches. We also propose a VAD-free inference algorithm that leverages CTC probabilities to determine a suitable timing to reset the model states to tackle the vulnerability to long-form data. Experimental evaluations demonstrate that the block-synchronous decoding achieves comparable accuracy to the label-synchronous one. Moreover, the VAD-free inference can recognize long-form speech robustly for up to a few hours.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zhuoyuan Yao|AUTHOR Zhuoyuan Yao]]^^1^^
, [[Di Wu|AUTHOR Di Wu]]^^2^^
, [[Xiong Wang|AUTHOR Xiong Wang]]^^1^^
, [[Binbin Zhang|AUTHOR Binbin Zhang]]^^2^^
, [[Fan Yu|AUTHOR Fan Yu]]^^1^^
, [[Chao Yang|AUTHOR Chao Yang]]^^2^^
, [[Zhendong Peng|AUTHOR Zhendong Peng]]^^2^^
, [[Xiaoyu Chen|AUTHOR Xiaoyu Chen]]^^2^^
, [[Lei Xie|AUTHOR Lei Xie]]^^1^^
, [[Xin Lei|AUTHOR Xin Lei]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Northwestern Polytechnical University, China; ^^2^^Mobvoi, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4054–4058
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we propose an open source speech recognition toolkit called WeNet, in which a new two-pass approach named U2 is implemented to unify streaming and non-streaming end-to-end (E2E) speech recognition in a single model. The main motivation of WeNet is to close the gap between the research and deployment of E2E speech recognition models. WeNet provides an efficient way to ship automatic speech recognition (ASR) applications in real-world scenarios, which is the main difference and advantage to other open source E2E speech recognition toolkits. We develop a hybrid connectionist temporal classification (CTC)/attention architecture with transformer or conformer as encoder and an attention decoder to rescore the CTC hypotheses. To achieve streaming and non-streaming in a unified model, we use a dynamic chunk-based attention strategy which allows the self-attention to focus on the right context with random length. Our experiments on the AISHELL-1 dataset show that our model achieves 5.03% relative character error rate (CER) reduction in non-streaming ASR compared to a standard non-streaming transformer. After model quantification, our model achieves reasonable RTF and latency at runtime. The toolkit is publicly available.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tomohiro Tanaka|AUTHOR Tomohiro Tanaka]], [[Ryo Masumura|AUTHOR Ryo Masumura]], [[Mana Ihori|AUTHOR Mana Ihori]], [[Akihiko Takashima|AUTHOR Akihiko Takashima]], [[Takafumi Moriya|AUTHOR Takafumi Moriya]], [[Takanori Ashihara|AUTHOR Takanori Ashihara]], [[Shota Orihashi|AUTHOR Shota Orihashi]], [[Naoki Makishima|AUTHOR Naoki Makishima]]
</p><p class="cpabstractcardaffiliationlist">NTT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4059–4063
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose a cross-modal transformer-based neural correction models that refines the output of an automatic speech recognition (ASR) system so as to exclude ASR errors. Generally, neural correction models are composed of encoder-decoder networks, which can directly model sequence-to-sequence mapping problems. The most successful method is to use both input speech and its ASR output text as the input contexts for the encoder-decoder networks. However, the conventional method cannot take into account the relationships between these two different modal inputs because the input contexts are separately encoded for each modal. To effectively leverage the correlated information between the two different modal inputs, our proposed models encode two different contexts jointly on the basis of cross-modal self-attention using a transformer. We expect that cross-modal self-attention can effectively capture the relationships between two different modals for refining ASR hypotheses. We also introduce a shallow fusion technique to efficiently integrate the first-pass ASR model and our proposed neural correction model. Experiments on Japanese natural language ASR tasks demonstrated that our proposed models achieve better ASR performance than conventional neural correction models.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mun-Hak Lee|AUTHOR Mun-Hak Lee]], [[Joon-Hyuk Chang|AUTHOR Joon-Hyuk Chang]]
</p><p class="cpabstractcardaffiliationlist">Hanyang University, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4064–4068
</span></p></div>
<div class="cpabstractcardabstract"><p>Cross-entropy loss, which is commonly used in deep-neural-network-based (DNN) classification model training, induces models to assign a high probability value to one class. Networks trained in this fashion tend to be overconfident, which causes a problem in the decoding process of the speech recognition system, as it uses the combined probability distribution of multiple independently trained networks. Overconfidence in neural networks can be quantified as a calibration error, which is the difference between the output probability of a model and the likelihood of obtaining an actual correct answer. We show that the deep-learning-based components of an end-to-end (E2E) speech recognition system with high classification accuracy contain calibration errors and quantify them using various calibration measures. In addition, it was experimentally shown that the calibration function, which was being trained to minimize calibration errors effectively mitigates those of the speech recognition system, and as a result, can improve the performance of beam-search during decoding.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Qiujia Li|AUTHOR Qiujia Li]]^^1^^
, [[Yu Zhang|AUTHOR Yu Zhang]]^^2^^
, [[Bo Li|AUTHOR Bo Li]]^^2^^
, [[Liangliang Cao|AUTHOR Liangliang Cao]]^^2^^
, [[Philip C. Woodland|AUTHOR Philip C. Woodland]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Cambridge, UK; ^^2^^Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4069–4073
</span></p></div>
<div class="cpabstractcardabstract"><p>End-to-end models with auto-regressive decoders have shown impressive results for automatic speech recognition (ASR). These models formulate the sequence-level probability as a product of the conditional probabilities of all individual tokens given their histories. However, the performance of locally normalised models can be sub-optimal because of factors such as exposure bias. Consequently, the model distribution differs from the underlying data distribution. In this paper, the residual energy-based model (R-EBM) is proposed to complement the auto-regressive ASR model to close the gap between the two distributions. Meanwhile, R-EBMs can also be regarded as utterance-level confidence estimators, which may benefit many downstream tasks. Experiments on a 100hr LibriSpeech dataset show that R-EBMs can reduce the word error rates (WERs) by 8.2%/6.7% while improving areas under precision-recall curves of confidence scores by 12.6%/28.4% on test-clean/test-other sets. Furthermore, on a state-of-the-art model using self-supervised learning (wav2vec 2.0), R-EBMs still significantly improves both the WER and confidence estimation performance.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[David Qiu|AUTHOR David Qiu]]^^1^^
, [[Yanzhang He|AUTHOR Yanzhang He]]^^1^^
, [[Qiujia Li|AUTHOR Qiujia Li]]^^2^^
, [[Yu Zhang|AUTHOR Yu Zhang]]^^1^^
, [[Liangliang Cao|AUTHOR Liangliang Cao]]^^1^^
, [[Ian McGraw|AUTHOR Ian McGraw]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Google, USA; ^^2^^University of Cambridge, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4074–4078
</span></p></div>
<div class="cpabstractcardabstract"><p>Confidence scores are very useful for downstream applications of automatic speech recognition (ASR) systems. Recent works have proposed using neural networks to learn word or utterance confidence scores for end-to-end ASR. In those studies, word confidence by itself does not model deletions, and utterance confidence does not take advantage of word-level training signals. This paper proposes to jointly learn word confidence, word deletion, and utterance confidence. Empirical results show that multi-task learning with all three objectives improves confidence metrics (NCE, AUC, RMSE) without the need for increasing the model size of the confidence estimation module. Using the utterance-level confidence for rescoring also decreases the word error rates on Google’s Voice Search and Long-tail Maps datasets by 3–5% relative, without needing a dedicated neural rescorer.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Andros Tjandra|AUTHOR Andros Tjandra]]^^1^^
, [[Ruoming Pang|AUTHOR Ruoming Pang]]^^2^^
, [[Yu Zhang|AUTHOR Yu Zhang]]^^2^^
, [[Shigeki Karita|AUTHOR Shigeki Karita]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^NAIST, Japan; ^^2^^Google, USA; ^^3^^Google, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4089–4093
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech is influenced by a number of underlying factors, which can be broadly categorized into linguistic contents and speaking styles. However, collecting the labeled data that annotates both content and style is an expensive and time-consuming task. Here, we present an approach for unsupervised learning of speech representation disentangling contents and styles. Our model consists of: (1) a local encoder that captures per-frame information; (2) a global encoder that captures per-utterance information; and (3) a conditional decoder that reconstructs speech given local and global latent variables. Our experiments show that (1) the local latent variables encode speech contents, as reconstructed speech can be recognized by ASR with low word error rates (WER), even with a different global encoding; (2) the global latent variables encode speaker style, as reconstructed speech shares speaker identity with the source utterance of the global encoding. Additionally, we demonstrate a useful application from our pre-trained model, where we can train a speaker recognition model from the global latent variables and achieve high accuracy by fine-tuning with as few data as one label per speaker.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Eunbi Choi|AUTHOR Eunbi Choi]]^^1^^
, [[Hwa-Yeon Kim|AUTHOR Hwa-Yeon Kim]]^^2^^
, [[Jong-Hwan Kim|AUTHOR Jong-Hwan Kim]]^^2^^
, [[Jae-Min Kim|AUTHOR Jae-Min Kim]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^KAIST, Korea; ^^2^^Naver, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4094–4098
</span></p></div>
<div class="cpabstractcardabstract"><p>Chinese grapheme-to-phoneme (G2P) conversion plays a significant role in text-to-speech systems by generating pronunciations corresponding to Chinese input characters. The main challenge in Chinese G2P conversion is polyphone disambiguation, which requires selecting the appropriate pronunciation among several candidates. In polyphone disambiguation, calculating probabilities for the entire pronunciations is unnecessary since each Chinese character has only a few (mostly two or three) candidate pronunciations. In this study, we introduce a label embedding approach that matches the character embedding with the closest label embedding among the possible candidates. Specifically, negative sampling and triplet loss were applied to maximize the difference between the correct embedding and the other candidate embeddings. Experimental results show that the label embedding approach improved the polyphone disambiguation accuracy by 4.50% and 1.74% on two datasets compared to the one-hot label classification approach. Moreover, the bidirectional long short-term memory model with the label embedding approach outperformed the previous most advanced model, BERT, demonstrating outstanding performance in polyphone disambiguation. Lastly, we discuss the effect of contextual information in character embeddings on the G2P conversion task.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Haiteng Zhang|AUTHOR Haiteng Zhang]]
</p><p class="cpabstractcardaffiliationlist">Databaker Technology, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4099–4103
</span></p></div>
<div class="cpabstractcardabstract"><p>Polyphone disambiguation is an essential procedure in the front-end module of the Chinese text-to-speech (TTS) system. It serves to predict the pronunciation of the input polyphonic character. In the Chinese TTS system, a well-designed pronunciation dictionary plays a crucial role in supplying pinyin to words. However, the conventional system is unable to fully utilize the pronunciation dictionary while modelling because of the unavoidable Chinese segment errors and model structure. In this paper, we proposed a system named PDF: ''P''olyphone ''D''isambiguation by using ''F''LAT. The proposed model encodes both the input character sequence and dictionary matched words of the sentence, enabling the model to both avoid segment errors and leverage the well-designed pronunciation dictionary in the model. Additionally, we also use the pre-trained language model (PLM) as an encoder to extract the contextual information of input sequence. The experimental results verified the effectiveness of the proposed PDF model. Our system obtains an improvement in accuracy by 0.98% compared to Bert on an open-source dataset. The experiential results demonstrate that leveraging pronunciation dictionary while modelling helps improve the performance of polyphone disambiguation system.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Junjie Li|AUTHOR Junjie Li]]^^1^^
, [[Zhiyu Zhang|AUTHOR Zhiyu Zhang]]^^2^^
, [[Minchuan Chen|AUTHOR Minchuan Chen]]^^1^^
, [[Jun Ma|AUTHOR Jun Ma]]^^1^^
, [[Shaojun Wang|AUTHOR Shaojun Wang]]^^1^^
, [[Jing Xiao|AUTHOR Jing Xiao]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Ping An Technology, China; ^^2^^National Tsing Hua University, Taiwan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4104–4108
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we propose a novel system based on word-level features and window-based attention for polyphone disambiguation, which is a fundamental task for Grapheme-to-phoneme (G2P) conversion of Mandarin Chinese. The framework aims to combine a pre-trained language model with explicit word-level information in order to get meaningful context extraction. Particularly, we employ a pre-trained bidirectional encoder from Transformers (BERT) model to extract character-level features, and an external Chinese word segmentation (CWS) tool is used to obtain the word units. We adopt a mixed pooling mechanism to convert character-level features into word-level features based on the segmentation results. A window-based attention module is utilized to incorporate contextual word-level features for the polyphonic characters. Experimental results show that our method achieves an accuracy of 99.06% on an open benchmark dataset for Mandarin Chinese polyphone disambiguation, which outperforms the baseline systems.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yi Shi|AUTHOR Yi Shi]], [[Congyi Wang|AUTHOR Congyi Wang]], [[Yu Chen|AUTHOR Yu Chen]], [[Bin Wang|AUTHOR Bin Wang]]
</p><p class="cpabstractcardaffiliationlist">Xmov, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4109–4113
</span></p></div>
<div class="cpabstractcardabstract"><p>The majority of Chinese characters are monophonic, while a special group of characters, called polyphonic characters, have multiple pronunciations. As a prerequisite of performing speech-related generative tasks, the correct pronunciation must be identified among several candidates. This process is called Polyphone Disambiguation. Although the problem has been well explored with both knowledge-based and learning-based approaches, it remains challenging due to the lack of publicly available labeled datasets and the irregular nature of polyphone in Mandarin Chinese. In this paper, we propose a novel semi-supervised learning (SSL) framework for Mandarin Chinese polyphone disambiguation that can potentially leverage unlimited unlabeled text data. We explore the effect of various proxy labeling strategies including entropy-thresholding and lexicon-based labeling. Qualitative and quantitative experiments demonstrate that our method achieves state-of-the-art performance. In addition, we publish a novel dataset specifically for the polyphone disambiguation task to promote further researches.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yue Chen|AUTHOR Yue Chen]], [[Zhen-Hua Ling|AUTHOR Zhen-Hua Ling]], [[Qing-Feng Liu|AUTHOR Qing-Feng Liu]]
</p><p class="cpabstractcardaffiliationlist">USTC, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4114–4118
</span></p></div>
<div class="cpabstractcardabstract"><p>Identifying speakers in novels aims at determining who says a quote in a given context by text analysis. This task is important for speech synthesis systems to assign appropriate voices to the quotes when producing audiobooks. However, existing approaches stick with manual features and traditional machine learning classifiers, which constrain the accuracy of speaker identification. In this paper, we propose a method to tackle this challenging problem with the help of deep learning. We formulate speaker identification as a scoring task and build a candidate scoring network (CSN) based on BERT. Candidate-specific segments are put forward to eliminate redundant context information. Moreover, a revision algorithm is designed utilizing the speaker alternation pattern in two-party dialogues. Experiments have been conducted using the dataset built on the Chinese novel //World of Plainness//. The results show that our proposed method reaches a new state-of-the-art performance with an identification accuracy of 82.5%, which outperforms the baseline using manual features by 12%.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xiao Zhou|AUTHOR Xiao Zhou]], [[Zhen-Hua Ling|AUTHOR Zhen-Hua Ling]], [[Li-Rong Dai|AUTHOR Li-Rong Dai]]
</p><p class="cpabstractcardaffiliationlist">USTC, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4119–4123
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents a hybrid speech synthesis method based on UnitNet, a unified sequence-to-sequence (Seq2Seq) acoustic model for both statistical parametric speech synthesis (SPSS) and concatenative speech synthesis (CSS). This method combines CSS and SPSS approaches to synthesize different segments in an utterance. Comparing with the Tacotron2 model for Seq2Seq speech synthesis, UnitNet utilizes the phone boundaries of training data and its decoder contains autoregressive structures at both phone and frame levels. This hierarchical architecture can not only extract embedding vectors for representing phone-sized units in the corpus but also measure the dependency among consecutive units, which makes UnitNet capable of guiding the selection of phone-sized units for CSS. Furthermore, hybrid synthesis can be achieved by integrating the units generated by SPSS into the framework of CSS for the target phones without appropriate candidates in the corpus. Experimental results show that UnitNet can achieve comparable naturalness with Tacotron2 for SPSS and outperform our previous Tacotron-based method for CSS. Besides, the naturalness and inference efficiency of SPSS can be further improved through hybrid synthesis.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sashi Novitasari|AUTHOR Sashi Novitasari]], [[Sakriani Sakti|AUTHOR Sakriani Sakti]], [[Satoshi Nakamura|AUTHOR Satoshi Nakamura]]
</p><p class="cpabstractcardaffiliationlist">NAIST, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4124–4128
</span></p></div>
<div class="cpabstractcardabstract"><p>Although machine speech chains were originally proposed to mimic a closed-loop human speech chain mechanism with auditory feedback, the existing machine speech chains are only utilized as a semi-supervised learning method that allows automatic speech recognition (ASR) and text-to-speech synthesis systems (TTS) to support each other given unpaired data. During inference, however, ASR and TTS are still performed separately. This paper focuses on machine speech chain inferences in a noisy environment. In human communication, speakers tend to talk more loudly in noisy environments, a phenomenon known as the Lombard effect. Simulating the Lombard effect, we implement a machine speech chain that enables TTS to speak louder in a noisy condition given auditory feedback. The auditory feedback includes speech-to-noise ratio prediction and ASR loss as a speech intelligibility measurement. To the best of our knowledge, this is the first deep learning framework that mimics human speech perception and production behaviors in a noisy environment.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Haozhe Zhang|AUTHOR Haozhe Zhang]]^^1^^
, [[Zhihua Huang|AUTHOR Zhihua Huang]]^^2^^
, [[Zengqiang Shang|AUTHOR Zengqiang Shang]]^^1^^
, [[Pengyuan Zhang|AUTHOR Pengyuan Zhang]]^^1^^
, [[Yonghong Yan|AUTHOR Yonghong Yan]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^CAS, China; ^^2^^UCAS, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4129–4133
</span></p></div>
<div class="cpabstractcardabstract"><p>Non-autoregressive text to speech models such as FastSpeech can synthesize speech significantly faster than previous autoregressive models with comparable quality. However, the memory and time complexity //O(N//^^2^^//)// of self-attention hinders FastSpeech from generating long sequences, where //N// is the length of mel-spectrograms. In this work, we propose LinearSpeech, an efficient parallel text-to-speech model with memory and computational complexity //O(N)//. Firstly, we replace standard attention modules in decoder of the model with linear attention modules to reduce the time and memory cost. Secondly, we add a novel positional encoding to standard and linear attention modules, which enable the model to learn the order of input sequence and synthesizing long mel-spectrograms. Furthermore, we use reversible residual layers instead of the standard residuals, which reduce the memory consumption in training stage. In our experiments, LinearSpeech can be trained with doubled batch size than FastSpeech with similar number of parameters. At inference, LinearSpeech achieves more than 2.0× inference speedup on CPU when synthesizing mel-spectrograms longer than 3,500. And our model can synthesize 5.5× longer mel-spectrograms than FastSpeech when running out of 12GB GPU memory. Our subjective listening test also shows that the speech quality of LinearSpeech is comparable to FastSpeech.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Noa Mansbach|AUTHOR Noa Mansbach]], [[Evgeny Hershkovitch Neiterman|AUTHOR Evgeny Hershkovitch Neiterman]], [[Amos Azaria|AUTHOR Amos Azaria]]
</p><p class="cpabstractcardaffiliationlist">Ariel University, Israel</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4134–4138
</span></p></div>
<div class="cpabstractcardabstract"><p>In this work we present the development of an autonomous agent capable of competing with humans in a deception-based game. The agent predicts whether a given statement is true or false based on vocal cues. To this end, we develop a game for collecting a large scale and high quality labeled sound data-set in a controlled environment in English and Hebrew. We develop a model that can detect deception based on vocal statements from the participants of the experiment, and show that the model is more accurate than humans.
We develop an agent that uses the developed deception model and interacts with humans within our deceptive environment. We show that our agent significantly outperforms a simple agent that does not use the deception model; that is, it wins significantly more games when played against human players. In addition, we use our model to detect whether a statement will be perceived as a lie or not by human subjects, based on its vocal cues.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shikha Baghel|AUTHOR Shikha Baghel]]^^1^^
, [[Mrinmoy Bhattacharjee|AUTHOR Mrinmoy Bhattacharjee]]^^1^^
, [[S.R. Mahadeva Prasanna|AUTHOR S.R. Mahadeva Prasanna]]^^2^^
, [[Prithwijit Guha|AUTHOR Prithwijit Guha]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^IIT Guwahati, India; ^^2^^IIT Dharwad, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4179–4183
</span></p></div>
<div class="cpabstractcardabstract"><p>Shouted speech detection is an essential pre-processing step in conventional speech processing systems such as speech and speaker recognition, speaker diarization, and others. Excitation source plays an important role in shouted speech production. This work explores feature computed from the Integrated Linear Prediction Residual (ILPR) signal for shouted speech detection in Indian news debates. The log spectrogram of ILPR signal provides time-frequency characteristics of excitation source signal. The proposed shouted speech detection system is deep network with CNN-based autoencoder and attention-based classifier sub-modules. The Autoencoder sub-network aids the classifier in learning discriminative deep embeddings for better classification. The proposed classifier is equipped with attention mechanism and Bidirectional Gated Recurrent Units. Classification results show that the proposed system with excitation feature performs better than baseline log spectrogram computed from the pre-emphasized speech signal. A score-level fusion of the classifiers trained on the source feature and the baseline feature provides the best performance. The performance of the proposed shouted speech detection is also evaluated at various speech segment durations.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yang Gao|AUTHOR Yang Gao]]^^1^^
, [[Tyler Vuong|AUTHOR Tyler Vuong]]^^1^^
, [[Mahsa Elyasi|AUTHOR Mahsa Elyasi]]^^2^^
, [[Gaurav Bharaj|AUTHOR Gaurav Bharaj]]^^2^^
, [[Rita Singh|AUTHOR Rita Singh]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Carnegie Mellon University, USA; ^^2^^AI Foundation, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4184–4188
</span></p></div>
<div class="cpabstractcardabstract"><p>State-of-the-art methods for audio generation suffer from fingerprint artifacts and repeated inconsistencies across temporal and spectral domains. Such artifacts could be well captured by the frequency domain analysis over the spectrogram. Thus, we propose a novel use of long-range spectro-temporal modulation feature — 2D DCT over log-Mel spectrogram for the audio deepfake detection. We show that this feature works better than log-Mel spectrogram, CQCC, MFCC, as a suitable candidate to capture such artifacts. We employ spectrum augmentation and feature normalization to decrease overfitting and bridge the gap between training and test dataset along with this novel feature introduction. We developed a CNN-based baseline that achieved a 0.0849 t-DCF and outperformed the previously top single systems reported in the ASVspoof 2019 challenge. Finally, by combining our baseline with our proposed 2D DCT spectro-temporal feature, we decrease the t-DCF score down by 14% to 0.0737, making it a state-of-the-art system for spoofing detection. Furthermore, we evaluate our model using two external datasets, showing the proposed feature’s generalization ability. We also provide analysis and ablation studies for our proposed feature and results.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Weiguang Chen|AUTHOR Weiguang Chen]]^^1^^
, [[Van Tung Pham|AUTHOR Van Tung Pham]]^^2^^
, [[Eng Siong Chng|AUTHOR Eng Siong Chng]]^^2^^
, [[Xionghu Zhong|AUTHOR Xionghu Zhong]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Hunan University, China; ^^2^^NTU, Singapore</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4189–4193
</span></p></div>
<div class="cpabstractcardabstract"><p>Overlapped speech is widely present in conversations and can cause significant performance degradation on speech processing such as diarization, enhancement, and recognition. Detection of overlapped speech, in particular when the speakers are in the far-field, is a challenging task as the overlapped part is usually short, and heavy reverberation and noise may present in the conversation scenario. Existing solutions overwhelmingly rely on spectral features extracted from single microphone signal to perform the detection. In this paper, we propose a novel detection approach which is able to use a microphone array and fuse the spatial and spectral features extracted from multi-channel array signal. Two categories of spatial features, directional statistics which are projected to spherical location grids and generalized cross-correlation function based on phase transform (GCC-PHAT), are considered to model the speaker’s spatial characteristic. Such spatial features are then fused with the spectral features to detect the overlapped speech by using a Gated Multimodal Unit (GMU). The performance of the proposed approach is studied under AMI and CHiME-6 corpora. Experimental results show that the proposed feature fusion approach achieves better performance than methods using spectral features only.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ahmed Fakhry|AUTHOR Ahmed Fakhry]]^^1^^
, [[Xinyi Jiang|AUTHOR Xinyi Jiang]]^^2^^
, [[Jaclyn Xiao|AUTHOR Jaclyn Xiao]]^^3^^
, [[Gunvant Chaudhari|AUTHOR Gunvant Chaudhari]]^^4^^
, [[Asriel Han|AUTHOR Asriel Han]]^^5^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Alexandria, Egypt; ^^2^^Independent Researcher, USA; ^^3^^Duke University, USA; ^^4^^University of California at San Francisco, USA; ^^5^^Stanford University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4139–4143
</span></p></div>
<div class="cpabstractcardabstract"><p>Fast and affordable solutions for COVID-19 testing are necessary to contain the spread of the global pandemic and help relieve the burden on medical facilities. Currently, limited testing locations and expensive equipment pose difficulties for individuals seeking testing, especially in low-resource settings. Researchers have successfully presented models for detecting COVID-19 infection status using audio samples recorded in clinical settings, suggesting that audio-based Artificial Intelligence models can be used to identify COVID-19. Such models have the potential to be deployed on smartphones for fast, widespread, and low-resource testing. However, while previous studies have trained models on cleaned audio samples collected mainly from clinical settings, audio samples collected from average smartphones may yield suboptimal quality data that is different from the clean data that models were trained on. This discrepancy may add a bias that affects COVID-19 status predictions. To tackle this issue, we propose a multi-branch deep learning network that is trained and tested on crowdsourced data where most of the data has not been manually processed and cleaned. Furthermore, the model achieves state-of-art results for the COUGHVID dataset. After breaking down results for each category, we have shown an AUC of 0.99 for audio samples with COVID-19 positive labels.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Youxuan Ma|AUTHOR Youxuan Ma]], [[Zongze Ren|AUTHOR Zongze Ren]], [[Shugong Xu|AUTHOR Shugong Xu]]
</p><p class="cpabstractcardaffiliationlist">Shanghai University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4144–4148
</span></p></div>
<div class="cpabstractcardabstract"><p>In recent years, synthetic speech generated by advanced text-to-speech (TTS) and voice conversion (VC) systems has caused great harms to automatic speaker verification (ASV) systems, urging us to design a synthetic speech detection system to protect ASV systems. In this paper, we propose a new speech anti-spoofing model named ResWavegram-Resnet (RW-Resnet). The model contains two parts, Conv1D Resblocks and backbone Resnet34. The Conv1D Resblock is based on the Conv1D block with a residual connection. For the first part, we use the raw waveform as input and feed it to the stacked Conv1D Resblocks to get the ResWavegram. Compared with traditional methods, ResWavegram keeps all the information from the audio signal and has a stronger ability in extracting features. For the second part, the extracted features are fed to the backbone Resnet34 for the spoofed or bonafide decision. The ASVspoof2019 logical access (LA) corpus is used to evaluate our proposed RW-Resnet. Experimental results show that the RW-Resnet achieves better performance than other state-of-the-art anti-spoofing models, which illustrates its effectiveness in detecting synthetic speech attacks.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hira Dhamyal|AUTHOR Hira Dhamyal]]^^1^^
, [[Ayesha Ali|AUTHOR Ayesha Ali]]^^2^^
, [[Ihsan Ayyub Qazi|AUTHOR Ihsan Ayyub Qazi]]^^2^^
, [[Agha Ali Raza|AUTHOR Agha Ali Raza]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^LUMS, Pakistan; ^^2^^LUMS, Pakistan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4149–4153
</span></p></div>
<div class="cpabstractcardabstract"><p>Fake audio generation has undergone remarkable improvement with the advancement in deep neural network models. This has made it increasingly important to develop lightweight yet robust mechanisms for detecting fake audios, especially for resource-constrained settings such as on edge devices and embedded controllers as well as with low-resource languages. In this paper, we analyze two //microfeatures//: Voicing Onset Time (VOT) and coarticulation, to classify bonafide and synthesized audios. Using the ASVSpoof2019 LA dataset, we find that on average, VOT is higher in synthesized speech compared to bonafide speech and exhibits higher variance for multiple occurrences of the same stop consonants. Further, we observe that vowels in CVC form in bonafide speech have greater F1/F2 movement compared to similarly constrained vowels in synthesized speech. We also analyse the predictive power of VOT and coarticulation for detecting bonafide and synthesized speech and achieve equal error rates of 25.2% using VOT, 39.3% using coarticulation, and 23.5% using a fusion of both models. This is the first study analysing VOT and coarticulation as features for fake audio detection. We suggest these microfeatures as standalone features for speaker-dependent forensics, voice-biometrics, and for rapid pre-screening of suspicious audios, and as additional features in bigger feature sets for computationally intensive classifiers.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tianhao Yan|AUTHOR Tianhao Yan]]^^1^^
, [[Hao Meng|AUTHOR Hao Meng]]^^1^^
, [[Emilia Parada-Cabaleiro|AUTHOR Emilia Parada-Cabaleiro]]^^2^^
, [[Shuo Liu|AUTHOR Shuo Liu]]^^3^^
, [[Meishu Song|AUTHOR Meishu Song]]^^3^^
, [[Björn W. Schuller|AUTHOR Björn W. Schuller]]^^4^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Harbin Engineering University, China; ^^2^^Johannes Kepler Universität Linz, Austria; ^^3^^Universität Augsburg, Germany; ^^4^^Universität Augsburg, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4154–4158
</span></p></div>
<div class="cpabstractcardabstract"><p>The rapid emergence of COVID-19 has become a major public health threat around the world. Although early detection is crucial to reduce its spread, the existing diagnostic methods are still insufficient in bringing the pandemic under control. Thus, more sophisticated systems, able to easily identify the infection from a larger variety of symptoms, such as cough, are urgently needed. Deep learning models can indeed convey numerous signal features relevant to fight against the disease; yet, the performance of state-of-the-art approaches is still severely restricted by the feature information loss typically due to the high number of layers. To mitigate this phenomenon, identifying the most relevant feature areas by drawing into attention mechanisms becomes essential. In this paper, we introduce Spatial Attentive ConvLSTM-RNN (SACRNN), a novel algorithm that is using Convolutional Long-Short Term Memory Recurrent Neural Networks with embedded attention that has the ability to identify the most valuable features. The promising results achieved by the fusion between the proposed model and a conventional Attentive Convolutional Recurrent Neural Network, on the automatic recognition of COVID-19 coughing (73.2% of Unweighted Average Recall) show the great potential of the presented approach in developing efficient solutions to defeat the pandemic.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Soumava Paul|AUTHOR Soumava Paul]], [[Gurunath Reddy M.|AUTHOR Gurunath Reddy M.]], [[K. Sreenivasa Rao|AUTHOR K. Sreenivasa Rao]], [[Partha Pratim Das|AUTHOR Partha Pratim Das]]
</p><p class="cpabstractcardaffiliationlist">IIT Kharagpur, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4159–4163
</span></p></div>
<div class="cpabstractcardabstract"><p>Singing Voice Detection (SVD) has been an active area of research in music information retrieval (MIR). Currently, two deep neural network-based methods, one based on CNN and the other on RNN, exist in literature that learn optimized features for the voice detection (VD) task and achieve state-of-the-art performance on common datasets. Both these models have a huge number of parameters (1.4M for CNN and 65.7K for RNN) and hence not suitable for deployment on devices like smartphones or embedded sensors with limited capacity in terms of memory and computation power. The most popular method to address this issue is known as knowledge distillation in deep learning literature (in addition to model compression) where a large pre-trained network known as the teacher is used to train a smaller student network. Given the wide applications of SVD in music information retrieval, to the best of our knowledge, model compression for practical deployment has not yet been explored. In this paper, efforts have been made to investigate this issue using both conventional as well as ensemble knowledge distillation techniques.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ryu Takeda|AUTHOR Ryu Takeda]], [[Kazunori Komatani|AUTHOR Kazunori Komatani]]
</p><p class="cpabstractcardaffiliationlist">Osaka University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4164–4168
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper describes an age estimation method from speech signals for heterogeneous datasets. Although previous studies in the speech field evaluate age prediction models with held-out testing data within the same dataset recorded in a consistent setting, such evaluation does not measure real performance. The difficulty of heterogeneous datasets is overfitting caused by the corpus-specific properties: transfer function of the recording environment and distributions of age and speaker. We propose a speech-age model and its integration with sequence neural networks (NNs). The speech-age model represents the ambiguity of age as a probability distribution, which also virtually extends the limited range of age distribution of each corpus. A Bayesian generative model successfully integrates the speech-age model and the NNs. We also applied mean normalization technique to cope with the transfer function problem. Experiments showed that our proposed method outperformed the baseline neural classifier for completely open test sets in the age distribution and recording setting.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kah Kuan Teh|AUTHOR Kah Kuan Teh]], [[Huy Dat Tran|AUTHOR Huy Dat Tran]]
</p><p class="cpabstractcardaffiliationlist">A*STAR, Singapore</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4169–4173
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we propose a novel method to address practical problems when deploying audio classification systems in operations that are the presence of unseen sound classes (open-set) and the limitation of training resources. To solve it, a novel method which embeds variational auto-encoder (VAE), data augmentation and detection-classification joint training into conventional GAN networks is proposed. The VAE input to GAN-generator helps to generate realistic outlier samples which are not too far from in-distribution class and hence improve the open-set discrimination capabilities of classifiers. Next, the augmentation enhanced GAN scheme developed in our previous work [4] for close-set audio classification, will help to address the limited training resources by in cooperating the physical data augmentation to work together with traditional GAN produced samples to prevent overfitting and improve the optimization convergences. The detection-classification joint training further steps on advantages of VAE and Augmentation GAN to further improving the performances of detection and classification tasks. The experiments carried out on Google Speech Command database show great improvements of open-set classification accuracy from 62.41% to 88.29% when using only 10% amount of training data.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Takahiro Fukumori|AUTHOR Takahiro Fukumori]]
</p><p class="cpabstractcardaffiliationlist">Ritsumeikan University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4174–4178
</span></p></div>
<div class="cpabstractcardabstract"><p>Discrimination between shouted and normal speech is crucial in audio surveillance and monitoring. Although deep neural networks are used in recent methods, traditional low-level speech features are applied, such as mel-frequency cepstral coefficients and the mel spectrum. This paper presents a deep spectral-cepstral fusion approach that learns descriptive features for target classification from high-dimensional spectrograms and cepstrograms. We compare the following three types of architectures as base networks: convolutional neural networks (CNNs), gated recurrent unit (GRU) networks, and their combination (CNN-GRU). Using a corpus comprising real shouts and speech, we present a comprehensive comparison with conventional methods to verify the effectiveness of the proposed feature learning method. The results of experiments conducted in various noisy environments demonstrate that the CNN-GRU based on our spectral-cepstral features achieves better classification performance than single feature-based networks. This finding suggests the effectiveness of using high-dimensional sources for speech-type recognition in sound event detection.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Badr M. Abdullah|AUTHOR Badr M. Abdullah]], [[Marius Mosbach|AUTHOR Marius Mosbach]], [[Iuliia Zaitova|AUTHOR Iuliia Zaitova]], [[Bernd Möbius|AUTHOR Bernd Möbius]], [[Dietrich Klakow|AUTHOR Dietrich Klakow]]
</p><p class="cpabstractcardaffiliationlist">Universität des Saarlandes, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4194–4198
</span></p></div>
<div class="cpabstractcardabstract"><p>Several variants of deep neural networks have been successfully employed for building parametric models that project variable-duration spoken word segments onto fixed-size vector representations, or acoustic word embeddings (AWEs). However, it remains unclear to what degree we can rely on the distance in the emerging AWE space as an estimate of word-form similarity. In this paper, we ask: does the distance in the acoustic embedding space correlate with phonological dissimilarity? To answer this question, we empirically investigate the performance of supervised approaches for AWEs with different neural architectures and learning objectives. We train AWE models in controlled settings for two languages (German and Czech) and evaluate the embeddings on two tasks: word discrimination and phonological similarity. Our experiments show that (1) the distance in the embedding space in the best cases only moderately correlates with phonological distance, and (2) improving the performance on the word discrimination task does not necessarily yield models that better reflect word phonological similarity. Our findings highlight the necessity to rethink the current intrinsic evaluations for AWEs.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yan Jia|AUTHOR Yan Jia]]^^1^^
, [[Xingming Wang|AUTHOR Xingming Wang]]^^1^^
, [[Xiaoyi Qin|AUTHOR Xiaoyi Qin]]^^1^^
, [[Yinping Zhang|AUTHOR Yinping Zhang]]^^2^^
, [[Xuyang Wang|AUTHOR Xuyang Wang]]^^2^^
, [[Junjie Wang|AUTHOR Junjie Wang]]^^2^^
, [[Dong Zhang|AUTHOR Dong Zhang]]^^3^^
, [[Ming Li|AUTHOR Ming Li]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Duke Kunshan University, China; ^^2^^Lenovo, China; ^^3^^Sun Yat-sen University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4239–4243
</span></p></div>
<div class="cpabstractcardabstract"><p>The 2020 Personalized Voice Trigger Challenge (PVTC2020) addresses two different research problems in a unified setup: joint wake-up word detection with speaker verification on close-talking single microphone data and far-field multi-channel microphone array data. Specially, the second task poses an additional cross-channel matching challenge on top of the far-field condition. To simulate the real-life application scenario, the enrollment utterances are recorded from close-talking cell-phone only, while the test utterances are recorded from both the close-talking cell-phone and the far-field microphone arrays. This paper introduces our challenge setup and the released database as well as the evaluation metrics. In addition, we present a sequential two stage end-to-end neural network baseline system trained with the proposed database for speaker-dependent wake-up word detection. Results show that state-of-the-art personalized voice trigger methods are still based on the two stage design, however, this benchmark database could also be used to evaluate multi-task joint learning methods. The official website, the open-source baseline system and results of submitted systems have been released.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jingsong Wang|AUTHOR Jingsong Wang]]^^1^^
, [[Yuxuan He|AUTHOR Yuxuan He]]^^1^^
, [[Chunyu Zhao|AUTHOR Chunyu Zhao]]^^1^^
, [[Qijie Shao|AUTHOR Qijie Shao]]^^2^^
, [[Wei-Wei Tu|AUTHOR Wei-Wei Tu]]^^1^^
, [[Tom Ko|AUTHOR Tom Ko]]^^3^^
, [[Hung-yi Lee|AUTHOR Hung-yi Lee]]^^4^^
, [[Lei Xie|AUTHOR Lei Xie]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^4Paradigm, China; ^^2^^Northwestern Polytechnical University, China; ^^3^^SUSTech, China; ^^4^^National Taiwan University, Taiwan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4244–4248
</span></p></div>
<div class="cpabstractcardabstract"><p>Auto-KWS 2021 challenge calls for automated machine learning (AutoML) solutions to automate the process of applying machine learning to a customized keyword spotting task. Compared with other keyword spotting tasks, Auto-KWS challenge has the following three characteristics: 1) The challenge focuses on the problem of customized keyword spotting, where the target device can only be awakened by an enrolled speaker with his/her specified keyword. The speaker can use any language and accent to define his keyword. 2) All data of the challenge is recorded in realistic environment to simulate different user scenarios. 3) Auto-KWS is a “code competition”, where participants need to submit AutoML solutions, then the platform automatically runs the enrollment and prediction steps with the submitted code. This challenge aims at promoting the development of a more personalized and flexible keyword spotting system. Two baseline systems are provided to all participants as references.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Axel Berg|AUTHOR Axel Berg]], [[Mark O’Connor|AUTHOR Mark O’Connor]], [[Miguel Tairum Cruz|AUTHOR Miguel Tairum Cruz]]
</p><p class="cpabstractcardaffiliationlist">Arm, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4249–4253
</span></p></div>
<div class="cpabstractcardabstract"><p>The Transformer architecture has been successful across many domains, including natural language processing, computer vision and speech recognition. In keyword spotting, self-attention has primarily been used on top of convolutional or recurrent encoders. We investigate a range of ways to adapt the Transformer architecture to keyword spotting and introduce the Keyword Transformer (KWT), a fully self-attentional architecture that exceeds state-of-the-art performance across multiple tasks without any pre-training or additional data. Surprisingly, this simple architecture outperforms more complex models that mix convolutional, recurrent and attentive layers. KWT can be used as a drop-in replacement for these models, setting two new benchmark records on the Google Speech Commands dataset with 98.6% and 97.7% accuracy on the 12 and 35-command tasks respectively.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Abhijeet Awasthi|AUTHOR Abhijeet Awasthi]], [[Kevin Kilgour|AUTHOR Kevin Kilgour]], [[Hassan Rom|AUTHOR Hassan Rom]]
</p><p class="cpabstractcardaffiliationlist">Google, Switzerland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4254–4258
</span></p></div>
<div class="cpabstractcardabstract"><p>Learning to recognize new keywords with just a few examples is essential for personalizing keyword spotting (KWS) models to a user’s choice of keywords. However, modern KWS models are typically trained on large datasets and restricted to a small vocabulary of keywords, limiting their transferability to a broad range of unseen keywords. Towards easily customizable KWS models, we present KeySEM (''Key''word ''S''peech ''EM''bedding), a speech embedding model pre-trained on the task of recognizing a large number of keywords. Speech representations offered by KeySEM are highly effective for learning new keywords from a limited number of examples. Comparisons with a diverse range of related work across several datasets show that our method achieves consistently superior performance with fewer training examples. Although KeySEM was pre-trained only on English utterances, the performance gains also extend to datasets from four other languages indicating that KeySEM learns useful representations well aligned with the task of keyword spotting. Finally, we demonstrate KeySEM’s ability to learn new keywords sequentially without requiring to re-train on previously learned keywords. Our experimental observations suggest that KeySEM is well suited to on-device environments where post-deployment learning and ease of customization are often desirable.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zheng Gao|AUTHOR Zheng Gao]], [[Radhika Arava|AUTHOR Radhika Arava]], [[Qian Hu|AUTHOR Qian Hu]], [[Xibin Gao|AUTHOR Xibin Gao]], [[Thahir Mohamed|AUTHOR Thahir Mohamed]], [[Wei Xiao|AUTHOR Wei Xiao]], [[Mohamed AbdelHady|AUTHOR Mohamed AbdelHady]]
</p><p class="cpabstractcardaffiliationlist">Amazon, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4199–4203
</span></p></div>
<div class="cpabstractcardabstract"><p>Spoken language understanding (SLU) smart assistants such as Amazon Alexa host hundreds of thousands of voice applications (skills) to delight end-users and fulfill their utterance requests. Sometimes utterances fail to be claimed by smart assistants due to system problems such as model incapability or routing errors. The failure may lead to customer frustration, dialog termination and eventually cause customer churn. To avoid this, we design a skill retrieval system as a downstream service to suggest fallback skills to unclaimed utterances. If the suggested skill satisfies customer intent, the conversation will be recovered with the assistant. For the sake of smooth customer experience, we only present the most relevant skill to customers, resulting in partial observation problem which constrains retrieval model training. To solve this problem, we propose a two-step approach to automatically align claimed utterance labels to unclaimed utterances. Extensive experiments on two real-world datasets demonstrate that our proposed model significantly outperforms a number of strong alternatives.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Rajeev Rikhye|AUTHOR Rajeev Rikhye]], [[Quan Wang|AUTHOR Quan Wang]], [[Qiao Liang|AUTHOR Qiao Liang]], [[Yanzhang He|AUTHOR Yanzhang He]], [[Ding Zhao|AUTHOR Ding Zhao]], [[Yiteng Huang|AUTHOR Yiteng Huang]], [[Arun Narayanan|AUTHOR Arun Narayanan]], [[Ian McGraw|AUTHOR Ian McGraw]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4204–4208
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we introduce a streaming keyphrase detection system that can be easily customized to accurately detect any phrase composed of words from a large vocabulary. The system is implemented with an end-to-end trained automatic speech recognition (ASR) model and a text-independent speaker verification model. To address the challenge of detecting these keyphrases under various noisy conditions, a speaker separation model is added to the feature frontend of the speaker verification model, and an adaptive noise cancellation (ANC) algorithm is included to exploit cross-microphone noise coherence. Our experiments show that the text-independent speaker verification model largely reduces the false triggering rate of the keyphrase detection, while the speaker separation model and adaptive noise cancellation largely reduce false rejections.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Vineet Garg|AUTHOR Vineet Garg]]^^1^^
, [[Wonil Chang|AUTHOR Wonil Chang]]^^1^^
, [[Siddharth Sigtia|AUTHOR Siddharth Sigtia]]^^2^^
, [[Saurabh Adya|AUTHOR Saurabh Adya]]^^1^^
, [[Pramod Simha|AUTHOR Pramod Simha]]^^1^^
, [[Pranay Dighe|AUTHOR Pranay Dighe]]^^1^^
, [[Chandra Dhir|AUTHOR Chandra Dhir]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Apple, USA; ^^2^^Apple, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4209–4213
</span></p></div>
<div class="cpabstractcardabstract"><p>We present a unified and hardware efficient architecture for two stage voice trigger detection (VTD) and false trigger mitigation (FTM) tasks. Two stage VTD systems of voice assistants can get falsely activated to audio segments acoustically similar to the trigger phrase of interest. FTM systems cancel such activations by using post trigger audio context. Traditional FTM systems rely on automatic speech recognition lattices which are computationally expensive to obtain on device. We propose a streaming transformer (TF) encoder architecture, which progressively processes incoming audio chunks and maintains audio context to perform both VTD and FTM tasks using only acoustic features. The proposed joint model yields an average 18% relative reduction in false reject rate (FRR) for the VTD task at a given false alarm rate. Moreover, our model suppresses 95% of the false triggers with an additional one second of post-trigger audio. Finally, on-device measurements show 32% reduction in runtime memory and 56% reduction in inference time compared to non-streaming version of the model.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mark Mazumder|AUTHOR Mark Mazumder]]^^1^^
, [[Colby Banbury|AUTHOR Colby Banbury]]^^1^^
, [[Josh Meyer|AUTHOR Josh Meyer]]^^2^^
, [[Pete Warden|AUTHOR Pete Warden]]^^3^^
, [[Vijay Janapa Reddi|AUTHOR Vijay Janapa Reddi]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Harvard University, USA; ^^2^^Coqui, Germany; ^^3^^Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4214–4218
</span></p></div>
<div class="cpabstractcardabstract"><p>We introduce a few-shot transfer learning method for keyword spotting in any language. Leveraging open speech corpora in nine languages, we automate the extraction of a large multilingual keyword bank and use it to train an embedding model. With just five training examples, we fine-tune the embedding model for keyword spotting and achieve an average F₁ score of 0.75 on keyword classification for 180 new keywords unseen by the embedding model in these nine languages. This embedding model also generalizes to new languages. We achieve an average F₁ score of 0.65 on 5-shot models for 260 keywords sampled across 13 new languages unseen by the embedding model. We investigate streaming accuracy for our 5-shot models in two contexts: keyword spotting and keyword search. Across 440 keywords in 22 languages, we achieve an average streaming keyword spotting accuracy of 87.4% with a false acceptance rate of 4.3%, and observe promising initial results on keyword search.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Li Wang|AUTHOR Li Wang]], [[Rongzhi Gu|AUTHOR Rongzhi Gu]], [[Nuo Chen|AUTHOR Nuo Chen]], [[Yuexian Zou|AUTHOR Yuexian Zou]]
</p><p class="cpabstractcardaffiliationlist">Peking University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4219–4223
</span></p></div>
<div class="cpabstractcardabstract"><p>Keyword Spotting (KWS) remains challenging to achieve the trade-off between small footprint and high accuracy. Recently proposed metric learning approaches improved the generalizability of models for the KWS task, and 1D-CNN based KWS models have achieved the state-of-the-arts (SOTA) in terms of model size. However, for metric learning, due to data limitations, the speech anchor is highly susceptible to the acoustic environment and speakers. Also, we note that the 1D-CNN models have limited capability to capture long-term temporal acoustic features. To address the above problems, we propose to utilize text anchors to improve the stability of anchors. Furthermore, a new type of model (LG-Net) is exquisitely designed to promote long-short term acoustic feature modeling based on 1D-CNN and self-attention. Experiments are conducted on Google Speech Commands Dataset version 1 (GSCDv1) and 2 (GSCDv2). The results demonstrate that the proposed text anchor based metric learning method shows consistent improvements over speech anchor on representative CNN-based models. Moreover, our LG-Net model achieves SOTA accuracy of 97.67% and 96.79% on two datasets, respectively. It is encouraged to see that our lighter LG-Net with only 74k parameters obtains 96.82% KWS accuracy on the GSCDv1 and 95.77% KWS accuracy on the GSCDv2.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yangbin Chen|AUTHOR Yangbin Chen]]^^1^^
, [[Tom Ko|AUTHOR Tom Ko]]^^2^^
, [[Jianping Wang|AUTHOR Jianping Wang]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^CUHK, China; ^^2^^SUSTech, China; ^^3^^CityU, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4224–4228
</span></p></div>
<div class="cpabstractcardabstract"><p>Recently we formulated a user-defined spoken term classification task as a few-shot learning task and tackled the task using Model-Agnostic Meta-Learning (MAML) algorithm. Our results show that the meta-learning approach performs much better than conventional supervised learning and transfer learning in the task, especially with limited training data. In this paper, we extend our work by addressing a more practical problem in the user-defined scenario where users can define any number of spoken terms and provide any number of enrollment audio examples for each spoken term. From the perspective of few-shot learning, this is an N-way, K-shot problem with varying N and K. In our work, we relax the values of N and K of each meta-task during training instead of assigning fixed values to them, which differs from what most meta-learning algorithms do. We adopt a metric-based meta-learning algorithm named Prototypical Networks (ProtoNet) as it avoids exhaustive fine-tuning when N varies. Furthermore, we use the Max-Mahalanobis Center (MMC) loss as an effective regularizer to address the problem of ProtoNet under the condition of varying K. Experiments on the Google Speech Commands dataset demonstrate that our proposed method outperforms the conventional N-way, K-shot setting in most testing tasks.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Dongyub Lee|AUTHOR Dongyub Lee]]^^1^^
, [[Byeongil Ko|AUTHOR Byeongil Ko]]^^1^^
, [[Myeong Cheol Shin|AUTHOR Myeong Cheol Shin]]^^1^^
, [[Taesun Whang|AUTHOR Taesun Whang]]^^2^^
, [[Daniel Lee|AUTHOR Daniel Lee]]^^1^^
, [[Eunhwa Kim|AUTHOR Eunhwa Kim]]^^1^^
, [[Eunggyun Kim|AUTHOR Eunggyun Kim]]^^1^^
, [[Jaechoon Jo|AUTHOR Jaechoon Jo]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Kakao, Korea; ^^2^^Wisenut, Korea; ^^3^^Hanshin University, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4229–4233
</span></p></div>
<div class="cpabstractcardabstract"><p>Detecting disfluencies in spontaneous speech is an important preprocessing step in natural language processing and speech recognition applications. Existing works for disfluency detection have focused on designing a single objective only for disfluency detection, while auxiliary objectives utilizing linguistic information of a word such as named entity or part-of-speech information can be effective. In this paper, we focus on detecting disfluencies on spoken transcripts and propose a method utilizing named entity recognition (NER) and part-of-speech (POS) as auxiliary sequence labeling (SL) tasks for disfluency detection. First, we investigate cases that utilizing linguistic information of a word can prevent mispredicting important words and can be helpful for the correct detection of disfluencies. Second, we show that training a disfluency detection model with auxiliary SL tasks can improve its F-score in disfluency detection. Then, we analyze which auxiliary SL tasks are influential depending on baseline models. Experimental results on the widely used English Switchboard dataset show that our method outperforms the previous state-of-the-art in disfluency detection.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hang Zhou|AUTHOR Hang Zhou]], [[Wenchao Hu|AUTHOR Wenchao Hu]], [[Yu Ting Yeung|AUTHOR Yu Ting Yeung]], [[Xiao Chen|AUTHOR Xiao Chen]]
</p><p class="cpabstractcardaffiliationlist">Huawei Technologies, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4234–4238
</span></p></div>
<div class="cpabstractcardabstract"><p>Wake-up keyword of a keyword spotting (KWS) system represents brand name of a smart device. Performance of KWS is also crucial for modern speech based human-device interaction. An on-device KWS with both high accuracy and low power consumption is desired. We propose a KWS with add-based convolution layers, namely Add TC-ResNet. Add-based convolution paves a new way to reduce power consumption of KWS system, as addition is more energy efficient than multiplication at hardware level. On Google Speech Commands dataset V2, Add TC-ResNet achieves an accuracy of 97.1%, with 99% of multiplication operations are replaced by addition operations. The result is competitive to a state-of-the-art fully multiplication-based TC-ResNet KWS. We also investigate knowledge distillation and a mixed addition-multiplication design for the proposed KWS, which leads to further performance improvement.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xin Wang|AUTHOR Xin Wang]], [[Junichi Yamagishi|AUTHOR Junichi Yamagishi]]
</p><p class="cpabstractcardaffiliationlist">NII, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4259–4263
</span></p></div>
<div class="cpabstractcardabstract"><p>A great deal of recent research effort on speech spoofing countermeasures has been invested into back-end neural networks and training criteria. We contribute to this effort with a comparative perspective in this study. Our comparison of countermeasure models on the ASVspoof 2019 logical access scenario takes into account common strategies to deal with input trials of varied length, recently proposed margin-based training criteria, and widely used front ends. We also measured intra-model differences through multiple training-evaluation rounds with random initialization. Our statistical analysis demonstrates that the performance of the same model may be statistically significantly different when just changing the random initial seed. We thus recommend similar statistical analysis or reporting results of multiple runs for further research on the database. Despite the intra-model differences, we observed a few promising techniques, including average pooling, to efficiently process varied-length inputs and a new hyper-parameter-free loss function. The two techniques led to the best single model in our experiment, which achieved an equal error rate of 1.92% and was significantly different in statistical sense from most of the other experimental models.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jesús Villalba|AUTHOR Jesús Villalba]], [[Sonal Joshi|AUTHOR Sonal Joshi]], [[Piotr Żelasko|AUTHOR Piotr Żelasko]], [[Najim Dehak|AUTHOR Najim Dehak]]
</p><p class="cpabstractcardaffiliationlist">Johns Hopkins University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4304–4308
</span></p></div>
<div class="cpabstractcardabstract"><p>Adversarial attacks have become a major threat for machine learning applications. There is a growing interest in studying these attacks in the audio domain, e.g, speech and speaker recognition; and find defenses against them. In this work, we focus on using representation learning to classify/detect attacks w.r.t. the attack algorithm, threat model or signal-to-adversarial-noise ratio. We found that common attacks in the literature can be classified with accuracies as high as 90%. Also, representations trained to classify attacks against speaker identification can be used also to classify attacks against speaker verification and speech recognition. We also tested an attack verification task, where we need to decide whether two speech utterances contain the same attack. We observed that our models did not generalize well to attack algorithms not included in the attack representation model training. Motivated by this, we evaluated an unknown attack detection task. We were able to detect unknown attacks with equal error rates of about 19%, which is promising.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[You Zhang|AUTHOR You Zhang]], [[Ge Zhu|AUTHOR Ge Zhu]], [[Fei Jiang|AUTHOR Fei Jiang]], [[Zhiyao Duan|AUTHOR Zhiyao Duan]]
</p><p class="cpabstractcardaffiliationlist">University of Rochester, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4309–4313
</span></p></div>
<div class="cpabstractcardabstract"><p>Spoofing countermeasure (CM) systems are critical in speaker verification; they aim to discern spoofing attacks from bona fide speech trials. In practice, however, acoustic condition variability in speech utterances may significantly degrade the performance of CM systems. In this paper, we conduct a cross-dataset study on several state-of-the-art CM systems and observe significant performance degradation compared with their single-dataset performance. Observing differences of average magnitude spectra of bona fide utterances across the datasets, we hypothesize that channel mismatch among these datasets is one important reason. We then verify it by demonstrating a similar degradation of CM systems trained on original but evaluated on channel-shifted data. Finally, we propose several channel robust strategies (data augmentation, multi-task learning, adversarial learning) for CM systems, and observe a significant performance improvement on cross-dataset experiments.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xu Li|AUTHOR Xu Li]]^^1^^
, [[Xixin Wu|AUTHOR Xixin Wu]]^^2^^
, [[Hui Lu|AUTHOR Hui Lu]]^^1^^
, [[Xunying Liu|AUTHOR Xunying Liu]]^^1^^
, [[Helen Meng|AUTHOR Helen Meng]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^CUHK, China; ^^2^^University of Cambridge, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4314–4318
</span></p></div>
<div class="cpabstractcardabstract"><p>Existing approaches for anti-spoofing in automatic speaker verification (ASV) still lack generalizability to unseen attacks. The Res2Net approach designs a residual-like connection between feature groups within one block, which increases the possible receptive fields and improves the system’s detection generalizability. However, such a residual-like connection is performed by a direct addition between feature groups without channel-wise priority. We argue that the information across channels may not contribute to spoofing cues equally, and the less relevant channels are expected to be suppressed before adding onto the next feature group, so that the system can generalize better to unseen attacks. This argument motivates the current work that presents a novel, channel-wise gated Res2Net (CG-Res2Net), which modifies Res2Net to enable a channel-wise gating mechanism in the connection between feature groups. This gating mechanism dynamically selects channel-wise features based on the input, to suppress the less relevant channels and enhance the detection generalizability. Three gating mechanisms with different structures are proposed and integrated into Res2Net. Experimental results conducted on ASVspoof 2019 logical access (LA) demonstrate that the proposed CG-Res2Net significantly outperforms Res2Net on both the overall LA evaluation set and individual difficult unseen attacks, which also outperforms other state-of-the-art single systems, depicting the effectiveness of our method.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Wanying Ge|AUTHOR Wanying Ge]], [[Michele Panariello|AUTHOR Michele Panariello]], [[Jose Patino|AUTHOR Jose Patino]], [[Massimiliano Todisco|AUTHOR Massimiliano Todisco]], [[Nicholas Evans|AUTHOR Nicholas Evans]]
</p><p class="cpabstractcardaffiliationlist">EURECOM, France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4319–4323
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper reports the first successful application of a differentiable architecture search (DARTS) approach to the deepfake and spoofing detection problems. An example of neural architecture search, DARTS operates upon a continuous, differentiable search space which enables both the architecture and parameters to be optimised via gradient descent. Solutions based on partially-connected DARTS use random channel masking in the search space to reduce GPU time and automatically learn and optimise complex neural architectures composed of convolutional operations and residual blocks. Despite being learned quickly with little human effort, the resulting networks are competitive with the best performing systems reported in the literature. Some are also far less complex, containing 85% fewer parameters than a Res2Net competitor.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Lin Zhang|AUTHOR Lin Zhang]]^^1^^
, [[Xin Wang|AUTHOR Xin Wang]]^^1^^
, [[Erica Cooper|AUTHOR Erica Cooper]]^^1^^
, [[Junichi Yamagishi|AUTHOR Junichi Yamagishi]]^^1^^
, [[Jose Patino|AUTHOR Jose Patino]]^^2^^
, [[Nicholas Evans|AUTHOR Nicholas Evans]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^NII, Japan; ^^2^^EURECOM, France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4264–4268
</span></p></div>
<div class="cpabstractcardabstract"><p>All existing databases of spoofed speech contain attack data that is spoofed in its entirety. In practice, it is entirely plausible that successful attacks can be mounted with utterances that are only partially spoofed. By definition, partially-spoofed utterances contain a mix of both spoofed and bona fide segments, which will likely degrade the performance of countermeasures trained with entirely spoofed utterances. This hypothesis raises the obvious question: //‘Can we detect partially-spoofed audio?’// This paper introduces a new database of partially-spoofed data, named PartialSpoof, to help address this question. This new database enables us to investigate and compare the performance of countermeasures on both utterance- and segmental- level labels. Experimental results using the utterance-level labels reveal that the reliability of countermeasures trained to detect fully-spoofed data is found to degrade substantially when tested with partially-spoofed data, whereas training on partially-spoofed data performs reliably in the case of both fully- and partially-spoofed utterances. Additional experiments using segmental-level labels show that spotting injected spoofed segments included in an utterance is a much more challenging task even if the latest countermeasure models are used.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yang Xie|AUTHOR Yang Xie]], [[Zhenchuan Zhang|AUTHOR Zhenchuan Zhang]], [[Yingchun Yang|AUTHOR Yingchun Yang]]
</p><p class="cpabstractcardaffiliationlist">Zhejiang University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4269–4273
</span></p></div>
<div class="cpabstractcardabstract"><p>Automatic speaker verification is vulnerable to spoofing attacks with synthesized or converted speech. Although high-performance anti-spoofing countermeasures can achieve high accuracy when the training and testing spoofing attack examples are similarly distributed, their performance degrades significantly when confronted with out-of-distribution spoofing speech, which is created by increasingly advanced unseen speech synthesis and voice conversion methods. Since it is unrealistic to collect enough labeled data from each new spoofing attack method, we argue that addressing the problem of out-of-distribution generalization for spoofing speech detection is essential. In this work, we propose a two-phase representation learning system based on a Siamese network for spoofing speech detection tasks. During the representation learning phase, an embedding Siamese neural network is trained with the wav2vec features to distinguish whether the speech samples in a pair belong to the same category. The proposed system decreases the equal error rate from the state-of-the-art result of 4.07% to 1.15% on the ASVspoof 2019 evaluation set.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xingliang Cheng|AUTHOR Xingliang Cheng]], [[Mingxing Xu|AUTHOR Mingxing Xu]], [[Thomas Fang Zheng|AUTHOR Thomas Fang Zheng]]
</p><p class="cpabstractcardaffiliationlist">Tsinghua University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4274–4278
</span></p></div>
<div class="cpabstractcardabstract"><p>The vulnerability of automatic speaker verification (ASV) systems against replay attacks becomes a severe problem. Although various methods have been proposed for replay detection, the generalization capability is still limited. For instance, a detection model trained on one database may fully fail when tested on another database. In this paper, we adopt the one-class learning technology to address the cross-database problem. Different from conventional two-class models that discriminate genuine speeches from replay attacks, the one-class model focuses on the within-class variance of genuine speeches, which is naturally robust to unseen attacks. In this study, we choose the Gaussian mixture model (GMM) as the one-class model and design two utterance-level features which reduce the uncertainties of genuine class while still be distinguishable from non-genuine class. Experiments conducted on three public replay datasets show that, compared to the state-of-the-art methods, the proposed method demonstrates promising generalization capability under cross-database scenarios.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yuxiang Zhang|AUTHOR Yuxiang Zhang]], [[Wenchao Wang|AUTHOR Wenchao Wang]], [[Pengyuan Zhang|AUTHOR Pengyuan Zhang]]
</p><p class="cpabstractcardaffiliationlist">CAS, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4279–4283
</span></p></div>
<div class="cpabstractcardabstract"><p>The current neural network based anti-spoofing systems have poor robustness. Their performance degrades further after voice activity detection (VAD) performed, making it difficult to be applied in practice. This work investigated the effect of silence at the beginning and end of speech, finding that silent differences are part of the basis for countermeasures’ judgements. The reason for the performance deterioration caused by VAD is also explored. The experimental results demonstrate that the neural network loses the information about silent segments after the VAD operation removes them. This can lead to more serious overfitting. In order to solve the overfitting problem, the work in this paper also analyzes the reasons for system overfitting from different frequency sub-bands. It is found that the high-frequency part of the feature is the main cause of system overfitting, while the low-frequency part is more robust but less accurate against known attacks. Therefore, we propose the dual-band fusion anti-spoofing algorithm, which requires only two sub-systems but outperforms all but one primary system submitted to the logical access condition of the ASVspoof 2019 challenge. Our system has an EER of 3.50% even after VAD operations performed, thus can be put into practical application.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zhiyuan Peng|AUTHOR Zhiyuan Peng]], [[Xu Li|AUTHOR Xu Li]], [[Tan Lee|AUTHOR Tan Lee]]
</p><p class="cpabstractcardaffiliationlist">CUHK, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4284–4288
</span></p></div>
<div class="cpabstractcardabstract"><p>Vulnerability of speaker verification (SV) systems under adversarial attack receives wide attention recently. Simple and effective countermeasures against such attack are yet to be developed. This paper formulates the task of adversarial defense as a problem of attack detection. The detection is made possible with the verification scores from a pair of purposely selected SV models. The twin-model design comprises a fragile model paired up with a relatively robust one. The two models show prominent score inconsistency under adversarial attack. To detect the score inconsistency, a simple one-class classifier is adopted. The classifier is trained with normal speech samples, which not only bypasses the need of crafting adversarial samples but also prevents itself from over-fitting to the crafted samples, and hence makes the detection robust to unseen attacks. Compared to single-model systems, the proposed system shows consistent and significant performance improvement against different attack strategies. The false acceptance rates (FARs) are reduced from over 63.54% to 2.26% under the strongest attack. Our approach has practical benefits, e.g., no need to modify a well-deployed SV model even it is well-known and can be fully accessed by the adversary. Moreover, it can be combined with existing single-model countermeasures for even stronger defenses.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hefei Ling|AUTHOR Hefei Ling]], [[Leichao Huang|AUTHOR Leichao Huang]], [[Junrui Huang|AUTHOR Junrui Huang]], [[Baiyan Zhang|AUTHOR Baiyan Zhang]], [[Ping Li|AUTHOR Ping Li]]
</p><p class="cpabstractcardaffiliationlist">HUST, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4289–4293
</span></p></div>
<div class="cpabstractcardabstract"><p>In recent years, automatic speaker verification (ASV) algorithms have undergone significant progress. They have been widely deployed in different applications, but the ASV systems are vulnerable to spoofing attacks, such as impersonation, replay, text-to-speech, voice conversion and the recently emerged adversarial attacks. To improve the robustness of the ASV system, researchers have designed anti-spoofing systems to resist spoofing attacks. While previously proposed systems have shown to be effective for spoof attacks detection, they are all ensemble methods based on different speech representations and architectures at the cost of increased model complexity, with similar performance not being achieved with single systems. This paper proposes an attention-based single convolutional neural network to learn discriminative feature embedding for spoof detection, achieving performance comparable to ensemble methods. The key idea is to decrease the information redundancy among channels and focus on the most informative sub-bands of speech representations. The experiments show that our proposed single system achieves an equal error rate of 1.87% on the evaluation set of ASVspoof 2019 Challenge, outperforming all single systems and comparable to the second-ranked system (EER 1.86%) among all known systems.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Haibin Wu|AUTHOR Haibin Wu]]^^1^^
, [[Yang Zhang|AUTHOR Yang Zhang]]^^1^^
, [[Zhiyong Wu|AUTHOR Zhiyong Wu]]^^1^^
, [[Dong Wang|AUTHOR Dong Wang]]^^1^^
, [[Hung-yi Lee|AUTHOR Hung-yi Lee]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Tsinghua University, China; ^^2^^National Taiwan University, Taiwan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4294–4298
</span></p></div>
<div class="cpabstractcardabstract"><p>Automatic speaker verification (ASV) is a well developed technology for biometric identification, and has been ubiquitous implemented in security-critic applications, such as banking and access control. However, previous works have shown that ASV is under the radar of adversarial attacks, which are very similar to their original counterparts from human’s perception, yet will manipulate the ASV render wrong prediction. Due to the very late emergence of adversarial attacks for ASV, effective countermeasures against them are limited. Given that the security of ASV is of high priority, in this work, we propose the idea of “voting for the right answer” to prevent risky decisions of ASV in blind spot areas, by employing random sampling and voting. Experimental results show that our proposed method improves the robustness against both the limited-knowledge attackers by pulling the adversarial samples out of the blind spots, and the sufficient-knowledge attackers by introducing randomness and increasing the attackers’ budgets.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tomi Kinnunen|AUTHOR Tomi Kinnunen]]^^1^^
, [[Andreas Nautsch|AUTHOR Andreas Nautsch]]^^2^^
, [[Md. Sahidullah|AUTHOR Md. Sahidullah]]^^3^^
, [[Nicholas Evans|AUTHOR Nicholas Evans]]^^2^^
, [[Xin Wang|AUTHOR Xin Wang]]^^4^^
, [[Massimiliano Todisco|AUTHOR Massimiliano Todisco]]^^2^^
, [[Héctor Delgado|AUTHOR Héctor Delgado]]^^5^^
, [[Junichi Yamagishi|AUTHOR Junichi Yamagishi]]^^4^^
, [[Kong Aik Lee|AUTHOR Kong Aik Lee]]^^6^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Eastern Finland, Finland; ^^2^^EURECOM, France; ^^3^^Inria, France; ^^4^^NII, Japan; ^^5^^Nuance Communications, Spain; ^^6^^A*STAR, Singapore</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 4299–4303
</span></p></div>
<div class="cpabstractcardabstract"><p>Whether it be for results summarization, or the analysis of classifier fusion, some means to compare different classifiers can often provide illuminating insight into their behaviour, (dis)similarity or complementarity. We propose a simple method to derive 2D representation from detection scores produced by an arbitrary set of binary classifiers in response to a common dataset. Based upon rank correlations, our method facilitates a visual comparison of classifiers with arbitrary scores and with close relation to receiver operating characteristic (ROC) and detection error trade-off (DET) analyses. While the approach is fully versatile and can be applied to any detection task, we demonstrate the method using scores produced by automatic speaker verification and voice anti-spoofing systems. The former are produced by a Gaussian mixture model system trained with VoxCeleb data whereas the latter stem from submissions to the ASVspoof 2019 challenge.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Alejandrina Cristia|AUTHOR Alejandrina Cristia]]
</p><p class="cpabstractcardaffiliationlist">LSCP (UMR 8554), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} ////
</span></p></div>
<div class="cpabstractcardabstract"><p>In recent years, the ease with which we can collect audio (and to a lesser extent visual information) with wearables has improved dramatically. These allow unprecedented access to the speech that children produce, and that which they year. Although many conclusions drawn from short observations seem to generalize to these naturalistic datasets, others appear questionable based on human annotations of data collected with wearables. Making the best of such recordings also requires unique tool development.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Vinicius Ribeiro|AUTHOR Vinicius Ribeiro]]^^1^^
, [[Karyna Isaieva|AUTHOR Karyna Isaieva]]^^2^^
, [[Justine Leclere|AUTHOR Justine Leclere]]^^2^^
, [[Pierre-André Vuissoz|AUTHOR Pierre-André Vuissoz]]^^2^^
, [[Yves Laprie|AUTHOR Yves Laprie]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Loria (UMR 7503), France; ^^2^^IADI (Inserm U1254), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3325–3329
<a href="./IS2021/MEDIA/0184" class="externallinkbutton" target="_blank">{{$:/causal/ZIP Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>In this work, we address the prediction of speech articulators’ temporal geometric position from the sequence of phonemes to be articulated. We start from a set of real-time MRI sequences uttered by a female French speaker. The contours of five articulators were tracked automatically in each of the frames in the MRI video. Then, we explore the capacity of a bidirectional GRU to correctly predict each articulator’s shape and position given the sequence of phonemes and their duration. We propose a 5-fold cross-validation experiment to evaluate the generalization capacity of the model. In a second experiment, we evaluate our model’s data efficiency by reducing training data. We evaluate the point-to-point Euclidean distance and the Pearson’s correlations along time between the predicted and the target shapes. We also evaluate produced shapes of the critical articulators of specific phonemes. We show that our model can achieve good results with minimal data, producing very realistic vocal tract shapes.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Rémi Blandin|AUTHOR Rémi Blandin]]^^1^^
, [[Marc Arnela|AUTHOR Marc Arnela]]^^2^^
, [[Simon Félix|AUTHOR Simon Félix]]^^3^^
, [[Jean-Baptiste Doc|AUTHOR Jean-Baptiste Doc]]^^4^^
, [[Peter Birkholz|AUTHOR Peter Birkholz]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Technische Universität Dresden, Germany; ^^2^^Universitat Ramon Llull, Spain; ^^3^^LAUM (UMR 6613), France; ^^4^^LMSSC (EA 3196), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3330–3334
<a href="./IS2021/MEDIA/0975" class="externallinkbutton" target="_blank">{{$:/causal/ZIP Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>The acoustic properties of vocal tract are usually characterized by its transfer function from the input acoustic volume flow at the glottis to the radiated acoustic pressure. These transfer functions can be computed with acoustic models. Three-dimensional acoustic simulation are used to take into account accurately the three-dimensional vocal tract shape and to generate valid results even at high frequency. Finite element models, finite difference methods, three-dimensional waveguide meshes, or the multimodal method have been used for this purpose. However, these methods require much more computation time than simple one-dimensional models. Among these methods, the multimodal method can achieve the shortest computation times. However, all the previous implementations had limitations regarding the geometrical shapes and the losses. In this work, we evaluate a new implementation that intends to overcome these limitations. Vowel transfer functions obtained with this new implementation are compared with a transmission-line model and a proven, robust and highly accurate method: the finite element method. While the finite element method remains the most reliable, the multimodal method generates similar transfer functions in much less time. The transmission line model gives valid results for the four first resonances.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Petra Wagner|AUTHOR Petra Wagner]], [[Sina Zarrieß|AUTHOR Sina Zarrieß]], [[Joana Cholin|AUTHOR Joana Cholin]]
</p><p class="cpabstractcardaffiliationlist">Universität Bielefeld, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3335–3339
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech variation is often explained by speakers’ balancing of production constraints (favoring phonetic reduction of high frequency, expected items) and listener orientation (favoring more canonical productions for low frequency, unexpected items). Less well understood are processes involving a structural reorganization of articulatory plans due to re-syllabification, e.g., resulting from processes involving massive reduction, epenthesis or metathesis. In this paper, we want to focus on two kinds of re-syllabifications: (1) within-system innovations, in which non-canonical forms occur, and (2) beyond-system inventions, which do not follow the phonotactic constraints of the language under consideration. We examine these processes in a corpus of spontaneous and read dyadic interactions of German, in which time pressure was controlled as an additional factor. Results show that spontaneity and time pressure will mostly lead to within-system innovations, favoring highly trained, unmarked articulatory routines, while minimizing information loss. However, occasionally speakers leave the beaten paths of highly trained articulatory routines, and invent novel phonotactic sequences which are at odds with the phonotactic grammar of German. Our results are discussed in the light of their implications for contemporary models of speech production.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Salvador Medina|AUTHOR Salvador Medina]]^^1^^
, [[Sarah Taylor|AUTHOR Sarah Taylor]]^^2^^
, [[Mark Tiede|AUTHOR Mark Tiede]]^^3^^
, [[Alexander Hauptmann|AUTHOR Alexander Hauptmann]]^^1^^
, [[Iain Matthews|AUTHOR Iain Matthews]]^^4^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Carnegie Mellon University, USA; ^^2^^University of East Anglia, UK; ^^3^^Haskins Laboratories, USA; ^^4^^Epic Games, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3340–3344
</span></p></div>
<div class="cpabstractcardabstract"><p>Our study examines the information obtained by adding two parasagittal sensors to the standard midsagittal configuration of an Electromagnetic Articulography (EMA) observation of lingual articulation. In this work, we present a large and phonetically balanced corpus obtained from an EMA recording session of a single English native speaker reading 1899 sentences from the Harvard and TIMIT corpora. According to a statistical analysis of the diphones produced during the recording session, the motion captured by the parasagittal sensors has a low correlation to the midsagittal sensors in the mediolateral direction. We perform a geometric analysis of the lateral tongue by the measure of its width and using a proxy of the tongue’s curvature that is computed using the Menger curvature. To provide a better understanding of the tongue sensor motion we present dynamic visualizations of all diphones. Finally, we present a summary of the velocity information computed from the tongue sensor information.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Marc-Antoine Georges|AUTHOR Marc-Antoine Georges]], [[Laurent Girin|AUTHOR Laurent Girin]], [[Jean-Luc Schwartz|AUTHOR Jean-Luc Schwartz]], [[Thomas Hueber|AUTHOR Thomas Hueber]]
</p><p class="cpabstractcardaffiliationlist">GIPSA-lab (UMR 5216), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3345–3349
</span></p></div>
<div class="cpabstractcardabstract"><p>It is increasingly considered that human speech perception and production both rely on articulatory representations. In this paper, we investigate whether this type of representation could improve the performances of a deep generative model (here a variational autoencoder) trained to encode and decode acoustic speech features. First we develop an articulatory model able to associate articulatory parameters describing the jaw, tongue, lips and velum configurations with vocal tract shapes and spectral features. Then we incorporate these articulatory parameters into a variational autoencoder applied on spectral features by using a regularization technique that constrains part of the latent space to represent articulatory trajectories. We show that this articulatory constraint improves model training by decreasing time to convergence and reconstruction loss at convergence, and yields better performance in a speech denoising task.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Heather Weston|AUTHOR Heather Weston]]^^1^^
, [[Laura L. Koenig|AUTHOR Laura L. Koenig]]^^2^^
, [[Susanne Fuchs|AUTHOR Susanne Fuchs]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^ZAS, Germany; ^^2^^Adelphi University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3350–3354
</span></p></div>
<div class="cpabstractcardabstract"><p>Engaging in everyday physical activities, like walking, initiates physiological processes that also affect parts of the body used for speech. However, it is currently unclear to what extent such activities affect phonatory processes, and in turn, the voice. The present exploratory study investigates how selected glottal source parameters are affected by light and moderate physical activity. Recordings of sustained vowel /a/ were obtained from 39 female speakers of German at rest, and during low-intensity and moderate-intensity cycling. Ten glottal source parameters thought to reflect different physiological states were investigated using VoiceSauce. Even during light activity, significant increases were found in f0, strength of excitation and H1, and a decrease in harmonics-to-noise ratio at higher frequencies. During moderate-intensity activity, significant effects were stronger and found for most parameters. However, considerable intra- and interspeaker variability was observed. These findings may be relevant for applications in automatic speaker-state recognition. They also underscore the importance of investigating individual-level responses to better understand stress–voice interactions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mohammad Hassan Vali|AUTHOR Mohammad Hassan Vali]], [[Tom Bäckström|AUTHOR Tom Bäckström]]
</p><p class="cpabstractcardaffiliationlist">Aalto University, Finland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3355–3359
</span></p></div>
<div class="cpabstractcardabstract"><p>Spectral envelope modeling is an instrumental part of speech and audio codecs, which can be used to enable efficient entropy coding of spectral components. Overall optimization of codecs, including envelope models, has however been difficult due to the complicated interactions between different modules of the codec. In this paper, we study an end-to-end optimization methodology to optimize all modules in a codec integrally with respect to each other while capturing all these complex interactions with a global loss function. For the quantization of the spectral envelope parameters with a fixed bitrate, we use multi-stage vector quantization which gives high quality, but yet has a computational complexity which can be realistically applied in embedded devices. The obtained results demonstrate benefits in terms of PESQ and PSNR in comparison to the 3GPP EVS, as well as our recently proposed PyAWNeS codecs.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Santhan Kumar Reddy Nareddula|AUTHOR Santhan Kumar Reddy Nareddula]], [[Subrahmanyam Gorthi|AUTHOR Subrahmanyam Gorthi]], [[Rama Krishna Sai S. Gorthi|AUTHOR Rama Krishna Sai S. Gorthi]]
</p><p class="cpabstractcardaffiliationlist">IIT Tirupati, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3360–3364
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper proposes a deep learning-based densely connected Y-Net as an effective network architecture for the fusion of time and frequency domain loss functions for speech enhancement. The proposed architecture performs speech enhancement in the time domain while fusing information from the frequency domain. Y-network consists of an encoder branch followed by two decoder branches, where the first and second decoder loss functions enforce speech enhancement in time and frequency domains respectively. Each layer of the proposed network is formed with densely connected blocks comprising dilated and causal convolutions for significant feature collection and error backpropagation. The proposed model is trained on a publicly available data set of 28 speakers with 40 different noise conditions. The evaluations are performed on an independent, unseen test set of 2 speakers and 20 different noise conditions. The results from the proposed method are compared with five state-of-the-art methods using various metrics. The proposed method has resulted in an overall perceptual evaluation of speech quality of 3.4. It has outperformed the existing methods by a significant margin in terms of all the evaluation metrics.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ĺuboš Marcinek|AUTHOR Ĺuboš Marcinek]], [[Michael Stone|AUTHOR Michael Stone]], [[Rebecca Millman|AUTHOR Rebecca Millman]], [[Patrick Gaydecki|AUTHOR Patrick Gaydecki]]
</p><p class="cpabstractcardaffiliationlist">University of Manchester, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3365–3369
</span></p></div>
<div class="cpabstractcardabstract"><p>The application of speech enhancement algorithms for hearing aids may not always be beneficial to increasing speech intelligibility. Therefore, a prior environment classification could be important. However, previous speech intelligibility models do not provide any additional information regarding the reason for a decrease in speech intelligibility. We propose a unique non-intrusive multi-task transfer learning-based speech intelligibility prediction model with scenery classification (N-MTTL SI model). The solution combines a Mel-spectrogram analysis of the degraded speech signal with transfer learning and multi-task learning to provide simultaneous speech intelligibility prediction (task 1) and scenery classification of ten real-world noise conditions (task 2). The model utilises a pre-trained ResNet architecture as an encoder for feature extraction. The prediction accuracy of the N-MTTL SI model for both tasks is high. Specifically, RMSE of speech intelligibility predictions for seen and unseen conditions is 3.76% and 4.06%. The classification accuracy is 98%. In addition, the proposed solution demonstrates the potential of using pre-trained deep learning models in the domain of speech intelligibility prediction.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ruchao Fan|AUTHOR Ruchao Fan]]^^1^^
, [[Wei Chu|AUTHOR Wei Chu]]^^2^^
, [[Peng Chang|AUTHOR Peng Chang]]^^2^^
, [[Jing Xiao|AUTHOR Jing Xiao]]^^2^^
, [[Abeer Alwan|AUTHOR Abeer Alwan]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of California at Los Angeles, USA; ^^2^^PAII, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3715–3719
</span></p></div>
<div class="cpabstractcardabstract"><p>Non-autoregressive mechanisms can significantly decrease inference time for speech transformers, especially when the single step variant is applied. Previous work on CTC alignment-based single step non-autoregressive transformer (CASS-NAT) has shown a large real time factor (RTF) improvement over autoregressive transformers (AT). In this work, we propose several methods to improve the accuracy of the end-to-end CASS-NAT, followed by performance analyses. First, convolution augmented self-attention blocks are applied to both the encoder and decoder modules. Second, we propose to expand the trigger mask (acoustic boundary) for each token to increase the robustness of CTC alignments. In addition, iterated loss functions are used to enhance the gradient update of low-layer parameters. Without using an external language model, the WERs of the improved CASS-NAT, when using the three methods, are 3.1%/7.2% on Librispeech test clean/other sets and the CER is 5.4% on the Aishell1 test set, achieving a 7%~21% relative WER/CER improvement. For the analyses, we plot attention weight distributions in the decoders to visualize the relationships between token-level acoustic embeddings. When the acoustic embeddings are visualized, we find that they have a similar behavior to word embeddings, which explains why the improved CASS-NAT performs similarly to AT.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Stanislav Beliaev|AUTHOR Stanislav Beliaev]], [[Boris Ginsburg|AUTHOR Boris Ginsburg]]
</p><p class="cpabstractcardaffiliationlist">NVIDIA, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3760–3764
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose TalkNet, a non-autoregressive convolutional neural model for speech synthesis with explicit pitch and duration prediction. The model consists of three feed-forward convolutional networks. The first network predicts grapheme durations. An input text is then expanded by repeating each symbol according to the predicted duration. The second network predicts pitch value for every mel frame. The third network generates a mel-spectrogram from the expanded text conditioned on predicted pitch. All networks are based on 1D depth-wise separable convolutional architecture. The explicit duration prediction eliminates word skipping and repeating. The quality of the generated speech nearly matches the best auto-regressive models — TalkNet trained on the LJSpeech dataset got a MOS of 4.08. The model has only 13.2M parameters, almost 2× less than the present state-of-the-art text-to-speech models. The non-autoregressive architecture allows for fast training and inference. The small model size and fast inference make TalkNet an attractive candidate for embedded speech synthesis.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Nanxin Chen|AUTHOR Nanxin Chen]]^^1^^
, [[Yu Zhang|AUTHOR Yu Zhang]]^^2^^
, [[Heiga Zen|AUTHOR Heiga Zen]]^^3^^
, [[Ron J. Weiss|AUTHOR Ron J. Weiss]]^^2^^
, [[Mohammad Norouzi|AUTHOR Mohammad Norouzi]]^^4^^
, [[Najim Dehak|AUTHOR Najim Dehak]]^^1^^
, [[William Chan|AUTHOR William Chan]]^^4^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Johns Hopkins University, USA; ^^2^^Google, USA; ^^3^^Google, Japan; ^^4^^Google, Canada</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3765–3769
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper introduces //WaveGrad 2//, a non-autoregressive generative model for text-to-speech synthesis. WaveGrad 2 is trained to estimate the gradient of the log conditional density of the waveform given a phoneme sequence. The model takes an input phoneme sequence, and through an iterative refinement process, generates an audio waveform. This contrasts to the original WaveGrad vocoder which conditions on mel-spectrogram features, generated by a separate model. The iterative refinement process starts from Gaussian noise, and through a series of refinement steps (e.g., 50 steps), progressively recovers the audio sequence. WaveGrad 2 offers a natural way to trade-off between inference speed and sample quality, through adjusting the number of refinement steps. Experiments show that the model can generate high fidelity audio, approaching the performance of a state-of-the-art neural TTS system. We also report various ablation studies over different model configurations. Audio samples are publicly available.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Nanxin Chen|AUTHOR Nanxin Chen]], [[Piotr Żelasko|AUTHOR Piotr Żelasko]], [[Laureano Moro-Velázquez|AUTHOR Laureano Moro-Velázquez]], [[Jesús Villalba|AUTHOR Jesús Villalba]], [[Najim Dehak|AUTHOR Najim Dehak]]
</p><p class="cpabstractcardaffiliationlist">Johns Hopkins University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3770–3774
</span></p></div>
<div class="cpabstractcardabstract"><p>Deep autoregressive models start to become comparable or superior to the conventional systems for automatic speech recognition. However, for the inference computation, they still suffer from inference speed issue due to their token-by-token decoding characteristic. Non-autoregressive models greatly improve decoding speed by supporting decoding within a constant number of iterations. For example, Align-Refine was proposed to improve the performance of the non-autoregressive system by refining the alignment iteratively. In this work, we propose a new perspective to connect Align-Refine and denoising autoencoder. We introduce a novel noisy distribution to sample the alignment directly instead of obtaining it from the decoder output. The experimental results reveal that the proposed Align-Denoise speeds up both training and inference with performance improvement up to 5% relatively using single-pass decoding.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hui Lu|AUTHOR Hui Lu]]^^1^^
, [[Zhiyong Wu|AUTHOR Zhiyong Wu]]^^1^^
, [[Xixin Wu|AUTHOR Xixin Wu]]^^2^^
, [[Xu Li|AUTHOR Xu Li]]^^1^^
, [[Shiyin Kang|AUTHOR Shiyin Kang]]^^3^^
, [[Xunying Liu|AUTHOR Xunying Liu]]^^1^^
, [[Helen Meng|AUTHOR Helen Meng]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^CUHK, China; ^^2^^University of Cambridge, UK; ^^3^^Huya, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3775–3779
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper describes a variational auto-encoder based non-autoregressive text-to-speech (VAENAR-TTS) model. The autoregressive TTS (AR-TTS) models based on the sequence-to-sequence architecture can generate high-quality speech, but their sequential decoding process can be time-consuming. Recently, non-autoregressive TTS (NAR-TTS) models have been shown to be more efficient with the parallel decoding process. However, these NAR-TTS models rely on phoneme-level durations to generate a hard alignment between the text and the spectrogram. Obtaining duration labels, either through forced alignment or knowledge distillation, is cumbersome. Furthermore, hard alignment based on phoneme expansion can degrade the naturalness of the synthesized speech. In contrast, the proposed model of VAENAR-TTS is an end-to-end approach that does not require phoneme-level durations. The VAENAR-TTS model does not contain recurrent structures and is completely non-autoregressive in both the training and inference phases. Based on the VAE architecture, the alignment information is encoded in the latent variable, and attention-based soft alignment between the text and the latent variable is used in the decoder to reconstruct the spectrogram. Experiments show that VAENAR-TTS achieves state-of-the-art synthesis quality, while the synthesis speed is comparable with other NAR-TTS models.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Pengcheng Guo|AUTHOR Pengcheng Guo]]^^1^^
, [[Xuankai Chang|AUTHOR Xuankai Chang]]^^2^^
, [[Shinji Watanabe|AUTHOR Shinji Watanabe]]^^2^^
, [[Lei Xie|AUTHOR Lei Xie]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Northwestern Polytechnical University, China; ^^2^^Carnegie Mellon University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3720–3724
</span></p></div>
<div class="cpabstractcardabstract"><p>Non-autoregressive (NAR) models have achieved a large inference computation reduction and comparable results with autoregressive (AR) models on various sequence to sequence tasks. However, there has been limited research aiming to explore the NAR approaches on sequence to multi-sequence problems, like multi-speaker automatic speech recognition (ASR). In this study, we extend our proposed conditional chain model to NAR multi-speaker ASR. Specifically, the output of each speaker is inferred one-by-one using both the input mixture speech and previously-estimated conditional speaker features. In each step, a NAR connectionist temporal classification (CTC) encoder is used to perform parallel computation. With this design, the total inference steps will be restricted to the number of mixed speakers. Besides, we also adopt the Conformer and incorporate an intermediate CTC loss to improve the performance. Experiments on WSJ0-Mix and LibriMix corpora show that our model outperforms other NAR models with only a slight increase of latency, achieving WERs of 22.3% and 24.9%, respectively. Moreover, by including the data of variable numbers of speakers, our model can even better than the PIT-Conformer AR model with only 1/7 latency, obtaining WERs of 19.9% and 34.3% on WSJ0-2mix and WSJ0-3mix sets. All of our codes are publicly available.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Edwin G. Ng|AUTHOR Edwin G. Ng]]^^1^^
, [[Chung-Cheng Chiu|AUTHOR Chung-Cheng Chiu]]^^1^^
, [[Yu Zhang|AUTHOR Yu Zhang]]^^1^^
, [[William Chan|AUTHOR William Chan]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Google, USA; ^^2^^Google, Canada</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3725–3729
</span></p></div>
<div class="cpabstractcardabstract"><p>We combine recent advancements in end-to-end speech recognition to non-autoregressive automatic speech recognition. We push the limits of non-autoregressive state-of-the-art results for multiple datasets: LibriSpeech, Fisher+Switchboard and Wall Street Journal. Key to our recipe, we leverage CTC on giant Conformer neural network architectures with SpecAugment and wav2vec2 pre-training. We achieve 1.8%/3.6% WER on LibriSpeech test/test-other sets, 5.1%/9.8% WER on Switchboard, and 3.4% on the Wall Street Journal, all without a language model.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Alexander H. Liu|AUTHOR Alexander H. Liu]], [[Yu-An Chung|AUTHOR Yu-An Chung]], [[James Glass|AUTHOR James Glass]]
</p><p class="cpabstractcardaffiliationlist">MIT, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3730–3734
</span></p></div>
<div class="cpabstractcardabstract"><p>Self-supervised speech representations have been shown to be effective in a variety of speech applications. However, existing representation learning methods generally rely on the autoregressive model and/or observed global dependencies while generating the representation. In this work, we propose Non-Autoregressive Predictive Coding (NPC), a self-supervised method, to learn a speech representation in a non-autoregressive manner by relying only on local dependencies of speech. NPC has a conceptually simple objective and can be implemented easily with the introduced Masked Convolution Blocks. NPC offers a significant speedup for inference since it is parallelizable in time and has a fixed inference time for each time step regardless of the input sequence length. We discuss and verify the effectiveness of NPC by theoretically and empirically comparing it with other methods. We show that the NPC representation is comparable to other methods in our experiments while being more efficient.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jumon Nozaki|AUTHOR Jumon Nozaki]], [[Tatsuya Komatsu|AUTHOR Tatsuya Komatsu]]
</p><p class="cpabstractcardaffiliationlist">LINE, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3735–3739
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper proposes a method to relax the conditional independence assumption of connectionist temporal classification (CTC)-based automatic speech recognition (ASR) models. We train a CTC-based ASR model with auxiliary CTC losses in intermediate layers in addition to the original CTC loss in the last layer. During both training and inference, each generated prediction in the intermediate layers is summed to the input of the next layer to condition the prediction of the last layer on those intermediate predictions. Our method is easy to implement and retains the merits of CTC-based ASR: a simple model architecture and fast decoding speed. We conduct experiments on three different ASR corpora. Our proposed method improves a standard CTC model significantly (e.g., more than 20% relative word error rate reduction on the WSJ corpus) with a little computational overhead. Moreover, for the TEDLIUM2 corpus and the AISHELL-1 corpus, it achieves a comparable performance to a strong autoregressive model with beam search, but the decoding speed is at least 30 times faster.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yuya Fujita|AUTHOR Yuya Fujita]]^^1^^
, [[Tianzi Wang|AUTHOR Tianzi Wang]]^^2^^
, [[Shinji Watanabe|AUTHOR Shinji Watanabe]]^^3^^
, [[Motoi Omachi|AUTHOR Motoi Omachi]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Yahoo, Japan; ^^2^^Johns Hopkins University, USA; ^^3^^Carnegie Mellon University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3740–3744
</span></p></div>
<div class="cpabstractcardabstract"><p>Neural end-to-end (E2E) models have become a promising technique to realize practical automatic speech recognition (ASR) systems. When realizing such a system, one important issue is the segmentation of audio to deal with streaming input or long recording. After audio segmentation, the ASR model with a small real-time factor (RTF) is preferable because the latency of the system can be faster. Recently, E2E ASR based on non-autoregressive models becomes a promising approach since it can decode an N-length token sequence with less than N iterations. We propose a system to concatenate audio segmentation and non-autoregressive ASR to realize high accuracy and low RTF ASR. As a non-autoregressive ASR, the insertion-based model is used. In addition, instead of concatenating separated models for segmentation and ASR, we introduce a new architecture that realizes audio segmentation and non-autoregressive ASR by a single neural network. Experimental results on Japanese and English dataset show that the method achieved a reasonable trade-off between accuracy and RTF compared with baseline autoregressive Transformer and connectionist temporal classification.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jaesong Lee|AUTHOR Jaesong Lee]]^^1^^
, [[Jingu Kang|AUTHOR Jingu Kang]]^^1^^
, [[Shinji Watanabe|AUTHOR Shinji Watanabe]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Naver, Korea; ^^2^^Carnegie Mellon University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3745–3749
</span></p></div>
<div class="cpabstractcardabstract"><p>Deploying an end-to-end automatic speech recognition (ASR) model on mobile/embedded devices is a challenging task, since the device computational power and energy consumption requirements are dynamically changed in practice. To overcome the issue, we present a training and pruning method for ASR based on the connectionist temporal classification (CTC) which allows reduction of model depth at run-time without any extra fine-tuning. To achieve the goal, we adopt two regularization methods, intermediate CTC and stochastic depth, to train a model whose performance does not degrade much after pruning. We present an in-depth analysis of layer behaviors using singular vector canonical correlation analysis (SVCCA), and efficient strategies for finding layers which are safe to prune. Using the proposed method, we show that a Transformer-CTC model can be pruned in various depth on demand, improving real-time factor from 0.005 to 0.002 on GPU, while each pruned sub-model maintains the accuracy of individually trained model of the same depth.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Song Li|AUTHOR Song Li]], [[Beibei Ouyang|AUTHOR Beibei Ouyang]], [[Fuchuan Tong|AUTHOR Fuchuan Tong]], [[Dexin Liao|AUTHOR Dexin Liao]], [[Lin Li|AUTHOR Lin Li]], [[Qingyang Hong|AUTHOR Qingyang Hong]]
</p><p class="cpabstractcardaffiliationlist">Xiamen University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3750–3754
</span></p></div>
<div class="cpabstractcardabstract"><p>The rising interest in single-channel multi-speaker speech separation has triggered the development of end-to-end multi-speaker automatic speech recognition (ASR). However, until now, most systems have adopted autoregressive mechanisms for decoding, resulting in slow decoding speed, which is not conducive to the application of multi-speaker speech recognition in real-world environments. In this paper, we first comprehensively investigate and compare the mainstream end-to-end multi-speaker speech recognition systems. Secondly, we improve the recently proposed non-autoregressive end-to-end speech recognition model Mask-CTC, and introduce it to multi-speaker speech recognition to achieve real-time decoding. Our experiments on the LibriMix data set show that under the premise of the same amount of parameters, the non-autoregressive model achieves performance close to that of the autoregressive model while having a faster decoding speed.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tianzi Wang|AUTHOR Tianzi Wang]]^^1^^
, [[Yuya Fujita|AUTHOR Yuya Fujita]]^^2^^
, [[Xuankai Chang|AUTHOR Xuankai Chang]]^^1^^
, [[Shinji Watanabe|AUTHOR Shinji Watanabe]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Johns Hopkins University, USA; ^^2^^Yahoo, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3755–3759
</span></p></div>
<div class="cpabstractcardabstract"><p>Non-autoregressive (NAR) modeling has gained more and more attention in speech processing. With recent state-of-the-art attention-based automatic speech recognition (ASR) structure, NAR can realize promising real-time factor (RTF) improvement with only small degradation of accuracy compared to the autoregressive (AR) models. However, the recognition inference needs to wait for the completion of a full speech utterance, which limits their applications on low latency scenarios. To address this issue, we propose a novel end-to-end streaming NAR speech recognition system by combining blockwise-attention and connectionist temporal classification with mask-predict (Mask-CTC) NAR. During inference, the input audio is separated into small blocks and then processed in a blockwise streaming way. To address the insertion and deletion error at the edge of the output of each block, we apply an overlapping decoding strategy with a dynamic mapping trick that can produce more coherent sentences. Experimental results show that the proposed method improves online ASR recognition in low latency conditions compared to vanilla Mask-CTC. Moreover, it can achieve a much faster inference speed compared to the AR attention-based models. All of our codes will be publicly available.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Saturnino Luz|AUTHOR Saturnino Luz]]^^1^^
, [[Fasih Haider|AUTHOR Fasih Haider]]^^1^^
, [[Sofia de la Fuente|AUTHOR Sofia de la Fuente]]^^1^^
, [[Davida Fromm|AUTHOR Davida Fromm]]^^2^^
, [[Brian MacWhinney|AUTHOR Brian MacWhinney]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Edinburgh, UK; ^^2^^Carnegie Mellon University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3780–3784
</span></p></div>
<div class="cpabstractcardabstract"><p>Building on the success of the ADReSS Challenge at Interspeech 2020, which attracted the participation of 34 teams from across the world, the ADReSSo Challenge targets three difficult automatic prediction problems of societal and medical relevance, namely: detection of Alzheimer’s Dementia, inference of cognitive testing scores, and prediction of cognitive decline. This paper presents these prediction tasks in detail, describes the datasets used, and reports the results of the baseline classification and regression models we developed for each task. A combination of acoustic and linguistic features extracted directly from audio recordings, without human intervention, yielded a baseline accuracy of 78.87% for the AD classification task, a root mean squared error (RMSE) of 5.28 for prediction of cognitive scores , and 68.75% accuracy (F₁ = 66.67) for the cognitive decline prediction task.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Raghavendra Pappagari|AUTHOR Raghavendra Pappagari]], [[Jaejin Cho|AUTHOR Jaejin Cho]], [[Sonal Joshi|AUTHOR Sonal Joshi]], [[Laureano Moro-Velázquez|AUTHOR Laureano Moro-Velázquez]], [[Piotr Żelasko|AUTHOR Piotr Żelasko]], [[Jesús Villalba|AUTHOR Jesús Villalba]], [[Najim Dehak|AUTHOR Najim Dehak]]
</p><p class="cpabstractcardaffiliationlist">Johns Hopkins University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3825–3829
</span></p></div>
<div class="cpabstractcardabstract"><p>In this study, we analyze the use of speech and speaker recognition technologies and natural language processing to detect Alzheimer disease (AD) and estimate mini-mental status evaluation (MMSE) scores. We used speech recordings from Interspeech 2021 ADReSSo challenge dataset. Our work focuses on adapting state-of-the-art speaker recognition and language models individually and later collectively to examine their complementary behavior for the tasks. We used speech embedding techniques such as x-vectors and prosody features to characterize the speech signals. We also employed automatic speech recognition (ASR) with interpolated language models to obtain transcriptions used to fine-tune the BERT models that classify and assess the speakers. Our results indicate that the fusion of scores obtained from the multiple acoustic and linguistic models provides the best detection results, suggesting that they contain complementary information. A separate analysis of the models indicates that linguistic models outperform acoustic models in detection and prediction tasks. However, acoustic models can provide better results than linguistic models under certain circumstances due to the errors in ASR transcriptions, which indicates that the performance of linguistic models relies on the performance of ASRs. Our best models provide 84.51% accuracy in automatic detection of AD and 3.85 RMSE in MMSE prediction.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jun Chen|AUTHOR Jun Chen]]^^1^^
, [[Jieping Ye|AUTHOR Jieping Ye]]^^1^^
, [[Fengyi Tang|AUTHOR Fengyi Tang]]^^2^^
, [[Jiayu Zhou|AUTHOR Jiayu Zhou]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Michigan, USA; ^^2^^Michigan State University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3830–3834
</span></p></div>
<div class="cpabstractcardabstract"><p>Alzheimer’s disease (AD) is a neurodegenerative syndrome which affects tens of millions of elders worldwide. Although there is no treatment currently available, early recognition can improve the lives of people with AD and their caretakers and families. To find a cost-effective and easy-to-use method for dementia detection and address the dementia classification task of InterSpeech 2021 ADReSSo (Alzheimer’s Dementia Recognition through Spontaneous Speech only) challenge, we conduct a systematic comparison of approaches to detection of cognitive impairment based on spontaneous speech. We investigated the characteristics of acoustic modality and linguistic modality directly based on the audio recordings of narrative speech, and explored a variety of modality fusion strategies. With an ensemble over top-10 classifiers on the training set, we achieved an accuracy of 81.69% compared to the baseline of 78.87% on the test set. The results suggest that although transcription errors will be introduced through automatic speech recognition, integrating textual information generally improves classification performance. Besides, ensemble methods can boost both the accuracy and the robustness of models.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ning Wang|AUTHOR Ning Wang]]^^1^^
, [[Yupeng Cao|AUTHOR Yupeng Cao]]^^1^^
, [[Shuai Hao|AUTHOR Shuai Hao]]^^1^^
, [[Zongru Shao|AUTHOR Zongru Shao]]^^2^^
, [[K.P. Subbalakshmi|AUTHOR K.P. Subbalakshmi]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Stevens Institute of Technology, USA; ^^2^^CASUS, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3835–3839
</span></p></div>
<div class="cpabstractcardabstract"><p>In this work, we propose a modular multi-modal architecture to automatically detect Alzheimer’s disease using the dataset provided in the ADReSSo challenge. Both acoustic and text-based features are used in this architecture. Since the dataset provides only audio samples of controls and patients, we use Google cloud-based speech-to-text API to automatically transcribe the audio files to extract text-based features. Several kinds of audio features are extracted using standard packages. The proposed approach consists of 4 networks: C-attention-acoustic network (for acoustic features only), C-Attention-FT network (for linguistic features only), C-Attention-Embedding network (for language embeddings and acoustic embeddings), and a unified network (uses all of those features). The architecture combines attention networks and a convolutional neural network (C-Attention network) in order to process these features. Experimental results show that the C-Attention-Unified network with Linguistic features and X-Vector embeddings achieves the best accuracy of 80.28% and F1 score of 0.825 on the test dataset.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[P.A. Pérez-Toro|AUTHOR P.A. Pérez-Toro]]^^1^^
, [[S.P. Bayerl|AUTHOR S.P. Bayerl]]^^2^^
, [[T. Arias-Vergara|AUTHOR T. Arias-Vergara]]^^1^^
, [[J.C. Vásquez-Correa|AUTHOR J.C. Vásquez-Correa]]^^1^^
, [[P. Klumpp|AUTHOR P. Klumpp]]^^1^^
, [[M. Schuster|AUTHOR M. Schuster]]^^3^^
, [[Elmar Nöth|AUTHOR Elmar Nöth]]^^1^^
, [[J.R. Orozco-Arroyave|AUTHOR J.R. Orozco-Arroyave]]^^1^^
, [[K. Riedhammer|AUTHOR K. Riedhammer]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^FAU Erlangen-Nürnberg, Germany; ^^2^^TH Nürnberg, Germany; ^^3^^LMU München, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3785–3789
</span></p></div>
<div class="cpabstractcardabstract"><p>Alzheimer’s Disease (AD) results from the progressive loss of neurons in the hippocampus, which affects the capability to produce coherent language. It affects lexical, grammatical, and semantic processes as well as speech fluency. This paper considers the analyses of speech and language for the assessment of AD in the context of the Alzheimer’s Dementia Recognition through Spontaneous Speech (ADReSSo) 2021 challenge. We propose to extract acoustic features such as X-vectors, prosody, and emotional embeddings as well as linguistic features such as perplexity, and word-embeddings. The data consist of speech recordings from AD patients and healthy controls. The transcriptions are obtained using a commercial automatic speech recognition system. We outperform baseline results on the test set, both for the classification and the Mini-Mental State Examination (MMSE) prediction. We achieved a classification accuracy of 80% and an RMSE of 4.56 in the regression. Additionally, we found strong evidence for the influence of the interviewer on classification results. In cross-validation on the training set, we get classification results of 85% accuracy using the combined speech of the interviewer and the participant. Using interviewer speech only we still get an accuracy of 78%. Thus, we provide strong evidence for interviewer influence on classification results.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Youxiang Zhu|AUTHOR Youxiang Zhu]]^^1^^
, [[Abdelrahman Obyat|AUTHOR Abdelrahman Obyat]]^^1^^
, [[Xiaohui Liang|AUTHOR Xiaohui Liang]]^^1^^
, [[John A. Batsis|AUTHOR John A. Batsis]]^^2^^
, [[Robert M. Roth|AUTHOR Robert M. Roth]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^UMass Boston, USA; ^^2^^University of North Carolina, USA; ^^3^^Geisel School of Medicine at Dartmouth, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3790–3794
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we exploit semantic and non-semantic information from patient’s speech data using Wav2vec and Bidirectional Encoder Representations from Transformers (BERT) for dementia detection. We first propose a basic WavBERT model by extracting semantic information from speech data using Wav2vec, and analyzing the semantic information using BERT for dementia detection. While the basic model discards the non-semantic information, we propose extended WavBERT models that convert the output of Wav2vec to the input to BERT for preserving the non-semantic information in dementia detection. Specifically, we determine the locations and lengths of inter-word pauses using the number of blank tokens from Wav2vec where the threshold for setting the pauses is automatically generated via BERT. We further design a pre-trained embedding conversion network that converts the output embedding of Wav2vec to the input embedding of BERT, enabling the fine-tuning of WavBERT with non-semantic information. Our evaluation results using the ADReSSo dataset showed that the WavBERT models achieved the highest accuracy of 83.1% in the classification task, the lowest Root-Mean-Square Error (RMSE) score of 4.44 in the regression task, and a mean F1 of 70.91% in the progression task. We confirmed the effectiveness of WavBERT models exploiting both semantic and non-semantic speech.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Lara Gauder|AUTHOR Lara Gauder]], [[Leonardo Pepino|AUTHOR Leonardo Pepino]], [[Luciana Ferrer|AUTHOR Luciana Ferrer]], [[Pablo Riera|AUTHOR Pablo Riera]]
</p><p class="cpabstractcardaffiliationlist">UBA-CONICET ICC, Argentina</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3795–3799
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper describes our submission to the ADreSSo Challenge, which focuses on the problem of automatic recognition of Alzheimer’s Disease (AD) from speech. The audio samples contain speech from the subjects describing a picture with the guidance of an experimenter. Our approach to the problem is based on the use of embeddings extracted from different pre-trained models — trill, allosaurus, and wav2vec 2.0 — which were trained to solve different speech tasks. These features are modeled with a neural network that takes short segments of speech as input, generating an AD score per segment. The final score for an audio file is given by the average over all segments in the file. We include ablation results to show the performance of different feature types individually and in combination, a study of the effect of the segment size, and an analysis of statistical significance. Our results on the test data for the challenge reach an accuracy of 78.9%, outperforming both the acoustic and linguistic baselines provided by the organizers.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Aparna Balagopalan|AUTHOR Aparna Balagopalan]], [[Jekaterina Novikova|AUTHOR Jekaterina Novikova]]
</p><p class="cpabstractcardaffiliationlist">Winterlight Labs, Canada</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3800–3804
</span></p></div>
<div class="cpabstractcardabstract"><p>Robust strategies for Alzheimer’s disease (AD) detection is important, given the high prevalence of AD. In this paper, we study the performance and generalizability of three approaches for AD detection from speech on the recent ADReSSo challenge dataset:1) using conventional acoustic features 2) using novel pre-trained acoustic embeddings 3) combining acoustic features and embeddings. We find that while feature-based approaches have a higher precision, classification approaches relying on the combination of embeddings and features prove to have a higher, and more balanced performance across multiple metrics of performance. Our best model, using such a combined approach, outperforms the acoustic baseline in the challenge by 2.8%.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yu Qiao|AUTHOR Yu Qiao]]^^1^^
, [[Xuefeng Yin|AUTHOR Xuefeng Yin]]^^1^^
, [[Daniel Wiechmann|AUTHOR Daniel Wiechmann]]^^2^^
, [[Elma Kerz|AUTHOR Elma Kerz]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^RWTH Aachen University, Germany; ^^2^^Universiteit van Amsterdam, The Netherlands</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3805–3809
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we combined linguistic complexity and (dis)fluency features with pretrained language models for the task of Alzheimer’s disease detection of the 2021 ADReSSo (Alzheimer’s Dementia Recognition through Spontaneous Speech) challenge. An accuracy of 83.1% was achieved on the test set, which amounts to an improvement of 4.23% over the baseline model. Our best-performing model that integrated component models using a stacking ensemble technique performed equally well on cross-validation and test data, indicating that it is robust against overfitting.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yilin Pan|AUTHOR Yilin Pan]]^^1^^
, [[Bahman Mirheidari|AUTHOR Bahman Mirheidari]]^^1^^
, [[Jennifer M. Harris|AUTHOR Jennifer M. Harris]]^^2^^
, [[Jennifer C. Thompson|AUTHOR Jennifer C. Thompson]]^^2^^
, [[Matthew Jones|AUTHOR Matthew Jones]]^^2^^
, [[Julie S. Snowden|AUTHOR Julie S. Snowden]]^^2^^
, [[Daniel Blackburn|AUTHOR Daniel Blackburn]]^^1^^
, [[Heidi Christensen|AUTHOR Heidi Christensen]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Sheffield, UK; ^^2^^University of Manchester, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3810–3814
</span></p></div>
<div class="cpabstractcardabstract"><p>Exploring acoustic and linguistic information embedded in spontaneous speech recordings has proven to be efficient for automatic Alzheimer’s dementia detection. Acoustic features can be extracted directly from the audio recordings, however, linguistic features, in fully automatic systems, need to be extracted from transcripts generated by an automatic speech recognition (ASR) system. We explore two state-of-the-art ASR paradigms, Wav2vec2.0 (for transcription and feature extraction) and time delay neural networks (TDNN) on the ADReSSo dataset containing recordings of people describing the Cookie Theft (CT) picture. As no manual transcripts are provided, we train an ASR system using our in-house CT data. We further investigate the use of confidence scores and multiple ASR hypotheses to guide and augment the input for the BERT-based classification. In total, five models are proposed for exploring how to use the audio recordings only for acoustic and linguistic information extraction. The test results on best acoustic-only and best linguistic-only are 74.65% and 84.51% respectively (representing a 15% and 9% relative increase to published baseline results).</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zafi Sherhan Syed|AUTHOR Zafi Sherhan Syed]]^^1^^
, [[Muhammad Shehram Shah Syed|AUTHOR Muhammad Shehram Shah Syed]]^^2^^
, [[Margaret Lech|AUTHOR Margaret Lech]]^^2^^
, [[Elena Pirogova|AUTHOR Elena Pirogova]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^MUET, Pakistan; ^^2^^RMIT University, Australia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3815–3819
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper addresses the Interspeech Alzheimer’s Dementia Recognition through Spontaneous Speech only (ADReSSo) challenge 2021. The objective of our study is to propose the approach to a three task automated screening that will aid in distinguishing between healthy individuals and subjects with dementia. The first task is to differentiate between speech recordings from individuals with dementia. The second task requires participants to estimate the Mini-Mental State Examination (MMSE) score based on an individual’s speech. The third task requires participants to leverage speech recordings to identify whether individuals have suffered from cognitive decline. Here, we propose a system based on functionals of deep textual embeddings with special preprocessing steps integrating the effect of silence segments. We report that the developed system outperforms the challenge baseline for all three tasks. For Task 1, we achieve an accuracy of 84.51% compared to the baseline of 77.46%, for Task 2, we achieve a root-mean-square-error (RMSE) of 4.35 compared to the baseline of 5.28, and for Task 3, we achieve an average-f1score of 73.80% compared to the baseline of 66.67%. These results are a testament of the effectiveness of our proposed system.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Morteza Rohanian|AUTHOR Morteza Rohanian]], [[Julian Hough|AUTHOR Julian Hough]], [[Matthew Purver|AUTHOR Matthew Purver]]
</p><p class="cpabstractcardaffiliationlist">Queen Mary University of London, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3820–3824
</span></p></div>
<div class="cpabstractcardabstract"><p>We present two multimodal fusion-based deep learning models that consume ASR transcribed speech and acoustic data simultaneously to classify whether a speaker in a structured diagnostic task has Alzheimer’s Disease and to what degree, evaluating the ADReSSo challenge 2021 data. Our best model, a BiLSTM with highway layers using words, word probabilities, disfluency features, pause information, and a variety of acoustic features, achieves an accuracy of 84% and RSME error prediction of 4.26 on MMSE cognitive scores. While predicting cognitive decline is more challenging, our models show improvement using the multimodal approach and word probabilities, disfluency, and pause information over word-only models. We show considerable gains for AD classification using multimodal fusion and gating, which can effectively deal with noisy inputs from acoustic features and ASR hypotheses.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yangyang Xia|AUTHOR Yangyang Xia]], [[Li-Wei Chen|AUTHOR Li-Wei Chen]], [[Alexander Rudnicky|AUTHOR Alexander Rudnicky]], [[Richard M. Stern|AUTHOR Richard M. Stern]]
</p><p class="cpabstractcardaffiliationlist">Carnegie Mellon University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3370–3374
</span></p></div>
<div class="cpabstractcardabstract"><p>We investigate the importance of temporal context for speech emotion recognition (SER). Two SER systems trained on traditional and learned features, respectively, are developed to predict categorical labels of emotion. For traditional acoustical features, we study the combination of filterbank features and prosodic features and the impact on SER when the temporal context of these features is expanded by learnable spectro-temporal receptive fields (STRFs). Experiments show that the system trained on learnable STRFs outperforms other reported systems evaluated with a similar setup. We also demonstrate that the wav2vec features, pretrained with long temporal context, are superior to traditional features. We then introduce a novel segment-based learning objective to constrain our classifier to extract local emotion features from the large temporal context. Combined with the learning objective and fine-tuning strategy, our top-line system using wav2vec features reaches state-of-the-art performance on the IEMOCAP dataset.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Aaron Keesing|AUTHOR Aaron Keesing]], [[Yun Sing Koh|AUTHOR Yun Sing Koh]], [[Michael Witbrock|AUTHOR Michael Witbrock]]
</p><p class="cpabstractcardaffiliationlist">University of Auckland, New Zealand</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3415–3419
</span></p></div>
<div class="cpabstractcardabstract"><p>Many features have been proposed for use in speech emotion recognition, from signal processing features to bag-of-audio-words (BoAW) models to abstract neural representations. Some of these feature types have not been directly compared across a large number of speech corpora to determine performance differences. We propose a full factorial design and to compare speech processing features, BoAW and neural representations on 17 emotional speech datasets. We measure the performance of features in a categorical emotion classification problem for each dataset, using speaker-independent cross-validation with diverse classifiers. Results show statistically significant differences between features and between classifiers, with large effect sizes between features. In particular, standard acoustic feature sets still perform competitively to neural representations, while neural representations have a larger range of performance, and BoAW features lie in the middle. The best and worst neural representations were wav2veq and VGGish, respectively, with wav2vec performing best out of all tested features. These results indicate that standard acoustic feature sets are still very useful baselines for emotional classification, but high quality neural speech representations can be better.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Suwon Shon|AUTHOR Suwon Shon]]^^1^^
, [[Pablo Brusco|AUTHOR Pablo Brusco]]^^1^^
, [[Jing Pan|AUTHOR Jing Pan]]^^1^^
, [[Kyu J. Han|AUTHOR Kyu J. Han]]^^1^^
, [[Shinji Watanabe|AUTHOR Shinji Watanabe]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^ASAPP, USA; ^^2^^Carnegie Mellon University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3420–3424
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we explore the use of pre-trained language models to learn sentiment information of written texts for speech sentiment analysis. First, we investigate how useful a pre-trained language model would be in a 2-step pipeline approach employing Automatic Speech Recognition (ASR) and transcripts-based sentiment analysis separately. Second, we propose a pseudo label-based semi-supervised training strategy using a language model on an end-to-end speech sentiment approach to take advantage of a large, but unlabeled speech dataset for training. Although spoken and written texts have different linguistic characteristics, they can complement each other in understanding sentiment. Therefore, the proposed system can not only model acoustic characteristics to bear sentiment-specific information in speech signals, but learn latent information to carry sentiments in the text representation. In these experiments, we demonstrate the proposed approaches improve F1 scores consistently compared to systems without a language model. Moreover, we also show that the proposed framework can reduce 65% of human supervision by leveraging a large amount of data without human sentiment annotation and boost performance in a low-resource condition where the human sentiment annotation is not available enough.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hang Li|AUTHOR Hang Li]], [[Wenbiao Ding|AUTHOR Wenbiao Ding]], [[Zhongqin Wu|AUTHOR Zhongqin Wu]], [[Zitao Liu|AUTHOR Zitao Liu]]
</p><p class="cpabstractcardaffiliationlist">TAL, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3375–3379
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech emotion recognition is a challenging task because the emotion expression is complex, multimodal and fine-grained. In this paper, we propose a novel multimodal deep learning approach to perform fine-grained emotion recognition from real-life speeches. We design a temporal alignment mean-max pooling mechanism to capture the subtle and fine-grained emotions implied in every utterance. In addition, we propose a cross modality excitement module to conduct sample-specific adjustment on cross modality embeddings and adaptively recalibrate the corresponding values by its aligned latent features from the other modality. Our proposed model is evaluated on two well-known real-world speech emotion recognition datasets. The results demonstrate that our approach is superior on the prediction tasks for multimodal speech utterances, and it outperforms a wide range of baselines in terms of prediction accuracy. Furthermore, we conduct detailed ablation studies to show that our temporal alignment mean-max pooling mechanism and cross modality excitement significantly contribute to the promising results. In order to encourage the research reproducibility, we make the code publicly available.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Einari Vaaras|AUTHOR Einari Vaaras]]^^1^^
, [[Sari Ahlqvist-Björkroth|AUTHOR Sari Ahlqvist-Björkroth]]^^2^^
, [[Konstantinos Drossos|AUTHOR Konstantinos Drossos]]^^1^^
, [[Okko Räsänen|AUTHOR Okko Räsänen]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Tampere University, Finland; ^^2^^University of Turku, Finland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3380–3384
</span></p></div>
<div class="cpabstractcardabstract"><p>Researchers have recently started to study how the emotional speech heard by young infants can affect their developmental outcomes. As a part of this research, hundreds of hours of daylong recordings from preterm infants’ audio environments were collected from two hospitals in Finland and Estonia in the context of so-called APPLE study. In order to analyze the emotional content of speech in such a massive dataset, an automatic speech emotion recognition (SER) system is required. However, there are no emotion labels or existing in-domain SER systems to be used for this purpose. In this paper, we introduce this initially unannotated large-scale real-world audio dataset and describe the development of a functional SER system for the Finnish subset of the data. We explore the effectiveness of alternative state-of-the-art techniques to deploy a SER system to a new domain, comparing cross-corpus generalization, WGAN-based domain adaptation, and active learning in the task. As a result, we show that the best-performing models are able to achieve a classification performance of 73.4% unweighted average recall (UAR) and 73.2% UAR for a binary classification for valence and arousal, respectively. The results also show that active learning achieves the most consistent performance compared to the two alternatives.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Fan Qian|AUTHOR Fan Qian]], [[Jiqing Han|AUTHOR Jiqing Han]]
</p><p class="cpabstractcardaffiliationlist">Harbin Institute of Technology, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3385–3389
</span></p></div>
<div class="cpabstractcardabstract"><p>Multimodal sentiment analysis is an important research that involves integrating information from multiple modalities to identify a speaker underlying attitude. The core challenge is to model cross-modal interactions which span across both the different modalities and time. Although great progress has been made, the existing methods are still not sufficient for modeling cross-modal interactions. Inspired by previous research in cognitive neuroscience that humans perceive intentions through focusing on different modalities over time, in this paper we propose a novel attention mechanism called Temporal Modality Attention (TMA) to simulate this process. Cross-modal interactions are modeled using this human-like TMA mechanism which focuses on specific modalities dynamically as recurrent modeling proceed. To verify the effectiveness of TMA, we conduct comprehensive experiments on multiple benchmark datasets for multimodal sentiment analysis. The results show a consistently significant improvement compared to the baseline models.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mani Kumar T.|AUTHOR Mani Kumar T.]]^^1^^
, [[Enrique Sanchez|AUTHOR Enrique Sanchez]]^^1^^
, [[Georgios Tzimiropoulos|AUTHOR Georgios Tzimiropoulos]]^^2^^
, [[Timo Giesbrecht|AUTHOR Timo Giesbrecht]]^^3^^
, [[Michel Valstar|AUTHOR Michel Valstar]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Nottingham, UK; ^^2^^Queen Mary University of London, UK; ^^3^^Unilever, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3390–3394
</span></p></div>
<div class="cpabstractcardabstract"><p>In this work, we pose continuous apparent emotion recognition from speech as a problem of learning distributions of functions, and do so using Stochastic Processes Regression. We presume that the relation between speech signals and their corresponding emotion labels is governed by some underlying stochastic process, in contrast to existing speech emotion recognition methods that are mostly based on deterministic regression models (static or recurrent). We treat each training sequence as an instance of the underlying stochastic process which we aim to discover using a neural latent variable model, which approximates the distribution of functions with a stochastic latent variable using an encoder-decoder composition: the encoder infers the distribution over the latent variable, which the decoder uses to predict the distribution of output emotion labels. To this end, we build on the previously proposed Neural Processes theory by using (a). noisy label predictions of a backbone instead of ground truth labels for latent variable inference and (b). recurrent encoder-decoder models to alleviate the effect of commonly encountered temporal misalignment between audio features and emotion labels due to annotator reaction lag. We validated our method on AVEC’19 cross-cultural emotion recognition dataset, achieving state-of-the-art results.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Haoqi Li|AUTHOR Haoqi Li]]^^1^^
, [[Yelin Kim|AUTHOR Yelin Kim]]^^1^^
, [[Cheng-Hao Kuo|AUTHOR Cheng-Hao Kuo]]^^1^^
, [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Amazon, USA; ^^2^^University of Southern California, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3395–3399
</span></p></div>
<div class="cpabstractcardabstract"><p>Key challenges in developing generalized automatic emotion recognition systems include scarcity of labeled data and lack of gold-standard references. Even for the cues that are labeled as the same emotion category, the variability of associated expressions can be high depending on the elicitation context e.g., emotion elicited during improvised conversations vs. acted sessions with predefined scripts. In this work, we regard the emotion elicitation approach as domain knowledge, and explore domain transfer learning techniques on emotional utterances collected under different emotion elicitation approaches, particularly with limited labeled target samples. Our emotion recognition model combines the gradient reversal technique with an entropy loss function as well as the softlabel loss, and the experiment results show that domain transfer learning methods can be employed to alleviate the domain mismatch between different elicitation approaches. Our work provides new insights into emotion data collection, particularly the impact of its elicitation strategies, and the importance of domain adaptation in emotion recognition aiming for generalized systems.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Leonardo Pepino|AUTHOR Leonardo Pepino]], [[Pablo Riera|AUTHOR Pablo Riera]], [[Luciana Ferrer|AUTHOR Luciana Ferrer]]
</p><p class="cpabstractcardaffiliationlist">UBA-CONICET ICC, Argentina</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3400–3404
</span></p></div>
<div class="cpabstractcardabstract"><p>Emotion recognition datasets are relatively small, making the use of deep learning techniques challenging. In this work, we propose a transfer learning method for speech emotion recognition (SER) where features extracted from pre-trained wav2vec 2.0 models are used as input to shallow neural networks to recognize emotions from speech. We propose a way to combine the output of several layers from the pre-trained model, producing richer speech representations than the model’s output alone. We evaluate the proposed approaches on two standard emotion databases, IEMOCAP and RAVDESS, and compare different feature extraction techniques using two wav2vec 2.0 models: a generic one, and one finetuned for speech recognition. We also experiment with different shallow architectures for our speech emotion recognition model, and report baseline results using traditional features. Finally, we show that our best performing models have better average recall than previous approaches that use deep neural networks trained on spectrograms and waveforms or shallow neural networks trained on features extracted from wav2vec 1.0.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jiawang Liu|AUTHOR Jiawang Liu]], [[Haoxiang Wang|AUTHOR Haoxiang Wang]]
</p><p class="cpabstractcardaffiliationlist">SCUT, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3405–3409
</span></p></div>
<div class="cpabstractcardabstract"><p>Previous deep learning approaches such as Convolutional Neural Network (CNN) and Long Short-Term Memory (LSTM) have been broadly used in speech emotion recognition (SER). In these approaches, speech signals are generally modeled in the Euclidean space. In this paper, a novel SER model (LSTM-GIN) is proposed, which applies Graph Isomorphism Network (GIN) on LSTM outputs for global emotion modeling in the non-Euclidean space. In our LSTM-GIN model, speech signals are represented as graph-structured data so that we can better extract global feature representation. The deep frame-level features generated from the bidirectional LSTM are converted into an undirected graph with nodes represented by frame-level features and connections defined according to temporal relations between speech frames. GIN is adopted to classify the graph representations of utterances, as it is proved of excellent discriminative power in comparative experiments. We conduct experiments on the IEMOCAP dataset, and the results show that our proposed LSTM-GIN model surpasses other recent graph-based models and deep learning models by achieving 64.65% of weighted accuracy (WA) and 65.53% of unweighted accuracy (UA).</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Pooja Kumawat|AUTHOR Pooja Kumawat]], [[Aurobinda Routray|AUTHOR Aurobinda Routray]]
</p><p class="cpabstractcardaffiliationlist">IIT Kharagpur, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3410–3414
</span></p></div>
<div class="cpabstractcardabstract"><p>We have analyzed the Time Delay Neural Network (TDNN) based architectures for speech emotion classification. TDNN models efficiently capture the temporal information and provide an utterance level prediction. Emotions are dynamic in nature and require temporal context for reliable prediction. In our work, we have applied the TDNN based x-vector and emphasized channel attention, propagation & aggregation based TDNN (ECAPA-TDNN) architectures for speech emotion identification with RAVDESS, Emo-DB, and IEMOCAP databases. The results show that the TDNN architectures are very efficient for predicting emotion classes and ECAPA-TDNN outperforms the TDNN based x-vector architecture. Next, we investigated the performance of ECAPA-TDNN with various training chunk durations and test utterance durations. We have identified that in spite of very promising emotion recognition performance the TDNN models have a strong training chunk duration-based bias. Earlier research work revealed that individual emotion class accuracy depends largely on the test utterance duration. Most of these studies were based on frame level emotions predictions. However, utterance level based emotion recognition is relatively less explored. The results show that even with the TDNN models, the accuracy of the different emotion classes is dependent on the utterance duration.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Wenxin Hou|AUTHOR Wenxin Hou]]^^1^^
, [[Jindong Wang|AUTHOR Jindong Wang]]^^2^^
, [[Xu Tan|AUTHOR Xu Tan]]^^2^^
, [[Tao Qin|AUTHOR Tao Qin]]^^2^^
, [[Takahiro Shinozaki|AUTHOR Takahiro Shinozaki]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Tokyo Tech, Japan; ^^2^^Microsoft, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3425–3429
</span></p></div>
<div class="cpabstractcardabstract"><p>End-to-end automatic speech recognition (ASR) can achieve promising performance with large-scale training data. However, it is known that domain mismatch between training and testing data often leads to a degradation of recognition accuracy. In this work, we focus on the unsupervised domain adaptation for ASR and propose CMatch, a __C__haracter-level distribution matching method to perform fine-grained adaptation between each character in two domains. First, to obtain labels for the features belonging to each character, we achieve frame-level label assignment using the Connectionist Temporal Classification (CTC) pseudo labels. Then, we match the character-level distributions using Maximum Mean Discrepancy. We train our algorithm using the self-training technique. Experiments on the Libri-Adapt dataset show that our proposed approach achieves 14.39% and 16.50% relative Word Error Rate (WER) reduction on both cross-device and cross-environment ASR. We also comprehensively analyze the different strategies for frame-level label assignment and Transformer adaptations.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Eric Sun|AUTHOR Eric Sun]]^^1^^
, [[Jinyu Li|AUTHOR Jinyu Li]]^^1^^
, [[Zhong Meng|AUTHOR Zhong Meng]]^^1^^
, [[Yu Wu|AUTHOR Yu Wu]]^^2^^
, [[Jian Xue|AUTHOR Jian Xue]]^^1^^
, [[Shujie Liu|AUTHOR Shujie Liu]]^^2^^
, [[Yifan Gong|AUTHOR Yifan Gong]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Microsoft, USA; ^^2^^Microsoft, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3470–3474
</span></p></div>
<div class="cpabstractcardabstract"><p>In end-to-end multilingual speech recognition, the hypotheses in one language could include word tokens from other languages. Language confusions happen even more frequently when language identifier (LID) is not present during inference. In this paper, we explore to reduce language confusions without using LID in model inference by creating models with multiple output heads and use sequence probability to select the correct head for output hypotheses. We propose head grouping to merge several language outputs into one head to save runtime cost. Head groups are decided by the distances among language clusters learned through language embedding vectors to separate confusable languages apart. We further propose prediction network sharing for languages from the same family. By jointly applying head grouping and prediction network sharing, training data from the same family languages is better shared while confusable languages are divided into different heads to reduce language confusions. Our experiments demonstrate that our multilingual transformer transducer models based on multi-head outputs achieve on average 7.8% and 10.9% relative word error rate reductions without LID being used in inference from one-head baseline model with affordably increased runtime cost on 10 European languages.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ahmed Ali|AUTHOR Ahmed Ali]]^^1^^
, [[Shammur Absar Chowdhury|AUTHOR Shammur Absar Chowdhury]]^^1^^
, [[Amir Hussein|AUTHOR Amir Hussein]]^^1^^
, [[Yasser Hifny|AUTHOR Yasser Hifny]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^HBKU, Qatar; ^^2^^Helwan University, Egypt</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3475–3479
</span></p></div>
<div class="cpabstractcardabstract"><p>Code-switching in automatic speech recognition (ASR) is an important challenge due to globalization. Recent research in multilingual ASR shows potential improvement over monolingual systems. We study key issues related to multilingual modeling for ASR through a series of large-scale ASR experiments. Our innovative framework deploys a multi-graph approach in the weighted finite state transducers (WFST) framework. We compare our WFST decoding strategies with a transformer sequence to sequence system trained on the same data. Given a code-switching scenario between Arabic and English languages, our results show that the WFST decoding approaches were more suitable for the intersentential code-switching datasets. In addition, the transformer system performed better for intrasentential code-switching task. With this study, we release an artificially generated development and test sets, along with ecological code-switching test set, to benchmark the ASR performance.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Naoyuki Kanda|AUTHOR Naoyuki Kanda]]^^1^^
, [[Guoli Ye|AUTHOR Guoli Ye]]^^1^^
, [[Yu Wu|AUTHOR Yu Wu]]^^2^^
, [[Yashesh Gaur|AUTHOR Yashesh Gaur]]^^1^^
, [[Xiaofei Wang|AUTHOR Xiaofei Wang]]^^1^^
, [[Zhong Meng|AUTHOR Zhong Meng]]^^1^^
, [[Zhuo Chen|AUTHOR Zhuo Chen]]^^1^^
, [[Takuya Yoshioka|AUTHOR Takuya Yoshioka]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Microsoft, USA; ^^2^^Microsoft, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3430–3434
</span></p></div>
<div class="cpabstractcardabstract"><p>Transcribing meetings containing overlapped speech with only a single distant microphone (SDM) has been one of the most challenging problems for automatic speech recognition (ASR). While various approaches have been proposed, all previous studies on the monaural overlapped speech recognition problem were based on either simulation data or small-scale real data. In this paper, we extensively investigate a two-step approach where we first pre-train a serialized output training (SOT)-based multi-talker ASR by using large-scale simulation data and then fine-tune the model with a small amount of real meeting data. Experiments are conducted by utilizing 75 thousand (K) hours of our internal single-talker recording to simulate a total of 900K hours of multi-talker audio segments for supervised pre-training. With fine-tuning on the 70 hours of the AMI-SDM training data, our SOT ASR model achieves a word error rate (WER) of 21.2% for the AMI-SDM evaluation set while automatically counting speakers in each test segment. This result is not only significantly better than the previous state-of-the-art WER of 36.4% with oracle utterance boundary information but also better than a result by a similarly fine-tuned single-talker ASR model applied to beamformed audio.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Liang Lu|AUTHOR Liang Lu]], [[Zhong Meng|AUTHOR Zhong Meng]], [[Naoyuki Kanda|AUTHOR Naoyuki Kanda]], [[Jinyu Li|AUTHOR Jinyu Li]], [[Yifan Gong|AUTHOR Yifan Gong]]
</p><p class="cpabstractcardaffiliationlist">Microsoft, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3435–3439
</span></p></div>
<div class="cpabstractcardabstract"><p>Hybrid Autoregressive Transducer (HAT) is a recently proposed end-to-end acoustic model that extends the standard Recurrent Neural Network Transducer (RNN-T) for the purpose of the external language model (LM) fusion. In HAT, the blank probability and the label probability are estimated using two separate probability distributions, which provides a more accurate solution for internal LM score estimation, and thus works better when combining with an external LM. Previous work mainly focuses on HAT model training with the negative log-likelihood loss, while in this paper, we study the minimum word error rate (MWER) training of HAT — a criterion that is closer to the evaluation metric for speech recognition, and has been successfully applied to other types of end-to-end models such as sequence-to-sequence (S2S) and RNN-T models. From experiments with around 30,000 hours of training data, we show that MWER training can improve the accuracy of HAT models, while at the same time, improving the robustness of the model against the decoding hyper-parameters such as length normalization and decoding beam during inference.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jaeyoung Kim|AUTHOR Jaeyoung Kim]], [[Han Lu|AUTHOR Han Lu]], [[Anshuman Tripathi|AUTHOR Anshuman Tripathi]], [[Qian Zhang|AUTHOR Qian Zhang]], [[Hasim Sak|AUTHOR Hasim Sak]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3440–3444
</span></p></div>
<div class="cpabstractcardabstract"><p>Reducing prediction delay for streaming end-to-end ASR models with minimal performance regression is a challenging problem. Constrained alignment is a well-known existing approach that penalizes predicted word boundaries using external low-latency acoustic models. On the contrary, recently proposed FastEmit is a sequence-level delay regularization scheme encouraging vocabulary tokens over blanks without any reference alignments. Although all these schemes are successful in reducing delay, ASR word error rate (WER) often severely degrades after applying these delay constraining schemes. In this paper, we propose a novel delay constraining method, named self alignment. Self alignment does not require external alignment models. Instead, it utilizes Viterbi forced-alignments from the trained model to find the lower latency alignment direction. From LibriSpeech evaluation, self alignment outperformed existing schemes: 25% and 56% less delay compared to FastEmit and constrained alignment at the similar word error rate. For Voice Search evaluation, 12% and 25% delay reductions were achieved compared to FastEmit and constrained alignment with more than 2% WER improvements.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Anuj Diwan|AUTHOR Anuj Diwan]], [[Preethi Jyothi|AUTHOR Preethi Jyothi]]
</p><p class="cpabstractcardaffiliationlist">IIT Bombay, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3445–3449
</span></p></div>
<div class="cpabstractcardabstract"><p>This work presents a seemingly simple but effective technique to improve low-resource ASR systems for phonetic languages. By identifying sets of acoustically similar graphemes in these languages, we first reduce the output alphabet of the ASR system using linguistically meaningful reductions and then reconstruct the original alphabet using a standalone module. We demonstrate that this lessens the burden and improves the performance of low-resource end-to-end ASR systems (because only reduced-alphabet predictions are needed) and that it is possible to design a very simple but effective reconstruction module that recovers sequences in the original alphabet from sequences in the reduced alphabet. We present a finite state transducer-based reconstruction module that operates on the 1-best ASR hypothesis in the reduced alphabet. We demonstrate the efficacy of our proposed technique using ASR systems for two Indian languages, Gujarati and Telugu. With access to only 10 hrs of speech data, we obtain relative WER reductions of up to 7% compared to systems that do not use any reduction.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Takashi Fukuda|AUTHOR Takashi Fukuda]]^^1^^
, [[Samuel Thomas|AUTHOR Samuel Thomas]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^IBM, Japan; ^^2^^IBM, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3450–3454
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper we introduce a novel knowledge distillation based framework for training universal source models. In our proposed approach for automatic speech recognition (ASR), multilingual source models are first trained using multiple language-dependent resources before being used to initialize language specific target models in low resource settings. For the proposed source models to be effective in cross-lingual transfer to novel target languages, the training framework encourages the models to perform accurate universal phone classification while ignoring any language-dependent characteristics present in the training data set. These two goals are achieved by applying knowledge distillation to improve the models’ universal phone classification performance along with a shuffling mechanism that alleviates any language specific dependencies that might be learned. The benefits of this proposed technique are demonstrated in several practical settings, where either large amounts or only limited quantities of unbalanced multilingual data resources are available for source model creation. Compared to a conventional knowledge transfer learning method, the proposed approaches achieve a relative WER reduction of 8–10% in streaming ASR settings for various low resource target languages.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Swayambhu Nath Ray|AUTHOR Swayambhu Nath Ray]]^^1^^
, [[Minhua Wu|AUTHOR Minhua Wu]]^^2^^
, [[Anirudh Raju|AUTHOR Anirudh Raju]]^^2^^
, [[Pegah Ghahremani|AUTHOR Pegah Ghahremani]]^^2^^
, [[Raghavendra Bilgi|AUTHOR Raghavendra Bilgi]]^^1^^
, [[Milind Rao|AUTHOR Milind Rao]]^^2^^
, [[Harish Arsikere|AUTHOR Harish Arsikere]]^^1^^
, [[Ariya Rastrow|AUTHOR Ariya Rastrow]]^^2^^
, [[Andreas Stolcke|AUTHOR Andreas Stolcke]]^^2^^
, [[Jasha Droppo|AUTHOR Jasha Droppo]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Amazon, India; ^^2^^Amazon, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3455–3459
</span></p></div>
<div class="cpabstractcardabstract"><p>Comprehending the overall intent of an utterance helps a listener recognize the individual words spoken. Inspired by this fact, we perform a novel study of the impact of explicitly incorporating intent representations as additional information to improve a recurrent neural network-transducer (RNN-T) based automatic speech recognition (ASR) system. An audio-to-intent (A2I) model encodes the intent of the utterance in the form of embeddings or posteriors, and these are used as auxiliary inputs for RNN-T training and inference. Experimenting with a 50k-hour far-field English speech corpus, this study shows that when running the system in //non-streaming// mode, where intent representation is extracted from the entire utterance and then used to bias streaming RNN-T search from the start, it provides a 5.56% relative word error rate reduction (WERR). On the other hand, a //streaming// system using per-frame intent posteriors as extra inputs for the RNN-T ASR system yields a 3.33% relative WERR. A further detailed analysis of the streaming system indicates that our proposed method brings especially good gain on media-playing related intents (e.g. 9.12% relative WERR on PlayMusicIntent).</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zhiyun Lu|AUTHOR Zhiyun Lu]], [[Wei Han|AUTHOR Wei Han]], [[Yu Zhang|AUTHOR Yu Zhang]], [[Liangliang Cao|AUTHOR Liangliang Cao]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3460–3464
<a href="./IS2021/MEDIA/1668" class="externallinkbutton" target="_blank">{{$:/causal/ZIP Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>Although end-to-end automatic speech recognition (e2e ASR) models are widely deployed in many applications, there have been very few studies to understand models’ robustness against adversarial perturbations. In this paper, we explore whether a targeted universal perturbation vector exists for e2e ASR models. Our goal is to find perturbations that can mislead the models to predict the given targeted transcript such as “thank you” or empty string on any input utterance. We study two different attacks, namely additive and prepending perturbations, and their performances on the state-of-the-art LAS, CTC and RNN-T models. We find that LAS is the most vulnerable to perturbations among the three models. RNN-T is more robust against additive perturbations, especially on long utterances. And CTC is robust against both additive and prepending perturbations. To attack RNN-T, we find prepending perturbation is more effective than the additive perturbation, and can mislead the models to predict the same short target on utterances of arbitrary length.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Miguel Del Rio|AUTHOR Miguel Del Rio]]^^1^^
, [[Natalie Delworth|AUTHOR Natalie Delworth]]^^1^^
, [[Ryan Westerman|AUTHOR Ryan Westerman]]^^1^^
, [[Michelle Huang|AUTHOR Michelle Huang]]^^1^^
, [[Nishchal Bhandari|AUTHOR Nishchal Bhandari]]^^1^^
, [[Joseph Palakapilly|AUTHOR Joseph Palakapilly]]^^1^^
, [[Quinten McNamara|AUTHOR Quinten McNamara]]^^1^^
, [[Joshua Dong|AUTHOR Joshua Dong]]^^1^^
, [[Piotr Żelasko|AUTHOR Piotr Żelasko]]^^2^^
, [[Miguel Jetté|AUTHOR Miguel Jetté]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Rev.com, USA; ^^2^^Johns Hopkins University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3465–3469
</span></p></div>
<div class="cpabstractcardabstract"><p>Commonly used speech corpora inadequately challenge academic and commercial ASR systems. In particular, speech corpora lack metadata needed for detailed analysis and WER measurement. In response, we present //Earnings-21//, a 39-hour corpus of earnings calls containing entity-dense speech from nine different financial sectors. This corpus is intended to benchmark ASR systems in the wild with special attention towards named entity recognition. We benchmark four commercial ASR models, two internal models built with open-source tools, and an open-source LibriSpeech model and discuss their differences in performance on //Earnings-21//. Using our recently released //fstalign// tool, we provide a candid analysis of each model’s recognition capabilities under different partitions. Our analysis finds that ASR accuracy for certain NER categories is poor, presenting a significant impediment to transcript comprehension and usage. //Earnings-21// bridges academic and commercial ASR system evaluation and enables further research on entity modeling and WER on real world audio.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Aviad Eisenberg|AUTHOR Aviad Eisenberg]], [[Boaz Schwartz|AUTHOR Boaz Schwartz]], [[Sharon Gannot|AUTHOR Sharon Gannot]]
</p><p class="cpabstractcardaffiliationlist">Bar-Ilan University, Israel</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3480–3484
</span></p></div>
<div class="cpabstractcardabstract"><p>The challenging problem of online multi-microphone blind audio source separation (BASS) in noisy environment is addressed in this paper. We present a sequential, non-iterative, algorithm based on the recursive EM (REM) framework. In the proposed algorithm, the compete-data, which constitutes the separated sources and residual noise, is estimated in the E-step by applying a multichannel Wiener filter (MCWF); and the corresponding parameters, comprised of acoustic transfer functions (ATFs) relating the sources and the microphones and power spectral densities (PSDs) of the desired sources, are sequentially estimated in the M-step. The separated speech signals are further enhanced using matched-filter beamformers. The performance of the algorithm is demonstrated in terms of the separation capabilities, the resulting speech intelligibility and the ability to track the direction of arrival (DOA) of the moving sources.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tian Lan|AUTHOR Tian Lan]], [[Yuxin Qian|AUTHOR Yuxin Qian]], [[Yilan Lyu|AUTHOR Yilan Lyu]], [[Refuoe Mokhosi|AUTHOR Refuoe Mokhosi]], [[Wenxin Tai|AUTHOR Wenxin Tai]], [[Qiao Liu|AUTHOR Qiao Liu]]
</p><p class="cpabstractcardaffiliationlist">UESTC, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3525–3529
</span></p></div>
<div class="cpabstractcardabstract"><p>Most deep learning-based monaural speech separation models only use either spectrograms or time domain speech signal as the input feature. The recently proposed cross-domain network (CDNet) demonstrates that concatenated frequency domain and time domain features helps to reach better performance. Although concatenation is a widely used feature fusion method, it has been proved that using frequency domain and time domain features to reconstruct signal makes minor difference compared with only using time domain feature in CDNet. To make better use of frequency domain feature in decoder, we propose using selection weights to select and fuse features from different domains and unify the features used in separator and decoder. In this paper, we propose using trainable weights or the global information calculated from the different domain features to generate selection weights. Given that our proposed models use element-wise fusing in the encoder, only one deconvolution layer in the decoder is needed to reconstruct signals. Experiments show that proposed methods achieve encouraging results on the large and challenging Libri2Mix dataset with a small increasing in parameters, which proves the frequency domain information is beneficial for signal reconstruction. Furthermore, proposed method has shown good generalizability on the unmatched VCTK2Mix dataset.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Chengyun Deng|AUTHOR Chengyun Deng]]^^1^^
, [[Shiqian Ma|AUTHOR Shiqian Ma]]^^1^^
, [[Yongtao Sha|AUTHOR Yongtao Sha]]^^1^^
, [[Yi Zhang|AUTHOR Yi Zhang]]^^1^^
, [[Hui Zhang|AUTHOR Hui Zhang]]^^2^^
, [[Hui Song|AUTHOR Hui Song]]^^1^^
, [[Fei Wang|AUTHOR Fei Wang]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^DiDi Chuxing, China; ^^2^^Baidu, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3530–3534
</span></p></div>
<div class="cpabstractcardabstract"><p>Speaker extraction aims to extract target speech signal from a multi-talker environment with interference speakers and surrounding noise, given a reference speech from target speaker. Most speaker extraction systems achieve satisfactory performance in the closed condition. Such systems suffer from performance degradation given unseen target speakers and/or mismatched reference speech. In this paper we propose a novel strategy named Iterative Refined Adaptation (IRA) to improve the robustness and generalization capability of speaker extraction systems in the aforementioned scenarios. Given an initial speaker embedding encoded by an auxiliary network, the extraction network can obtain a latent representation of the target speaker as the feedback of the auxiliary network to refine the speaker embedding, which provides more accurate guidance for the extraction network. Experiments show that the network with IRA confirm the superior performance over comparison approaches in terms of SI-SDRi and PESQ on WSJ0-2mix-extr and WHAM! dataset.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Wupeng Wang|AUTHOR Wupeng Wang]], [[Chenglin Xu|AUTHOR Chenglin Xu]], [[Meng Ge|AUTHOR Meng Ge]], [[Haizhou Li|AUTHOR Haizhou Li]]
</p><p class="cpabstractcardaffiliationlist">NUS, Singapore</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3535–3539
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we propose a novel time-domain speaker-speech cross-attention network as a variant of SpEx [1] architecture, that features speaker-speech cross-attention. The speaker-speech cross-attention network consists of speech semantic layers that capture the high-level dependency of audio feature, and cross-attention layers that fuse speaker embedding and speech features to estimate the speaker mask. We implement cross-attention layers with both parallel and sequential concatenation techniques. Experiments show that the proposed models consistently outperform the state-of-the-art time-domain speaker extraction baseline on WSJ0-2mix dataset.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Rémi Rigal|AUTHOR Rémi Rigal]]^^1^^
, [[Jacques Chodorowski|AUTHOR Jacques Chodorowski]]^^1^^
, [[Benoît Zerr|AUTHOR Benoît Zerr]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Orange Labs, France; ^^2^^Lab-STICC (UMR 6285), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3540–3544
</span></p></div>
<div class="cpabstractcardabstract"><p>We present a deep neural network that relies on facial motion and time-domain audio for isolating speech signals from a mixture of speeches and background noises. Recent studies in deep learning-based audio-visual speech separation and speech enhancement have proven that leveraging visual information in addition to audio can yield substantial improvement to the prediction quality and robustness. We propose to use facial motion, inferred from optical flow techniques, as a visual feature input for our model. Combined with state-of-the-art audio-only speech separation approaches, we demonstrate that facial motion significantly improves the speech quality as well as the versatility of the model. Our proposed method offers a signal-to-distortion improvement of up to 4.2 dB on two-speaker mixtures when compared to other audio-visual approaches.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yi Luo|AUTHOR Yi Luo]], [[Cong Han|AUTHOR Cong Han]], [[Nima Mesgarani|AUTHOR Nima Mesgarani]]
</p><p class="cpabstractcardaffiliationlist">Columbia University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3485–3489
</span></p></div>
<div class="cpabstractcardabstract"><p>Although most existing speech separation networks are designed as a one-pass pipeline where the sources are directly estimated from the mixture, multi-pass or iterative pipelines have been shown to be effective by designing multiple rounds of separation and utilizing separation outputs from a previous iteration as additional inputs for the next iteration. Moreover, such iterative separation pipeline can also be extended to a more general framework where a training objective designed to minimize the discrepancy between the estimated and target sources is applied to different parts of the network. In this paper, we empirically investigate the effect of such generalized iterative separation pipeline by adjusting its configuration in multiple aspects in both training and inference phases. For the training phase, we compare the separation performance of both time-domain and frequency-domain networks with different numbers of iterations following the recent discussions on the model architecture organizations. We also evaluate the effect of parameter sharing across iterations and the necessity of additional training objectives. For the inference phase, we measure the separation performance of various numbers of iterations. Our results show that iterative speech separation is a promising direction and deserves more in-depth analysis and exploration.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Thilo von Neumann|AUTHOR Thilo von Neumann]]^^1^^
, [[Keisuke Kinoshita|AUTHOR Keisuke Kinoshita]]^^2^^
, [[Christoph Boeddeker|AUTHOR Christoph Boeddeker]]^^1^^
, [[Marc Delcroix|AUTHOR Marc Delcroix]]^^2^^
, [[Reinhold Haeb-Umbach|AUTHOR Reinhold Haeb-Umbach]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universität Paderborn, Germany; ^^2^^NTT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3490–3494
</span></p></div>
<div class="cpabstractcardabstract"><p>Automatic transcription of meetings requires handling of overlapped speech, which calls for continuous speech separation (CSS) systems. The uPIT criterion was proposed for utterance-level separation with neural networks and introduces the constraint that the total number of speakers must not exceed the number of output channels. When processing meeting-like data in a segment-wise manner, i.e., by separating overlapping segments independently and stitching adjacent segments to continuous output streams, this constraint has to be fulfilled for any segment. In this contribution, we show that this constraint can be significantly relaxed. We propose a novel graph-based PIT criterion, which casts the assignment of utterances to output channels in a graph coloring problem. It only requires that the number of concurrently active speakers must not exceed the number of output channels. As a consequence, the system can process an arbitrary number of speakers and arbitrarily long segments and thus can handle more diverse scenarios. Further, the stitching algorithm for obtaining a consistent output order in neighboring segments is of less importance and can even be eliminated completely, not the least reducing the computational effort. Experiments on meeting-style WSJ data show improvements in recognition performance over using the uPIT criterion.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jisi Zhang|AUTHOR Jisi Zhang]]^^1^^
, [[Cătălin Zorilă|AUTHOR Cătălin Zorilă]]^^2^^
, [[Rama Doddipatla|AUTHOR Rama Doddipatla]]^^2^^
, [[Jon Barker|AUTHOR Jon Barker]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Sheffield, UK; ^^2^^Toshiba, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3495–3499
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we introduce a novel semi-supervised learning framework for end-to-end speech separation. The proposed method first uses mixtures of unseparated sources and the mixture invariant training (MixIT) criterion to train a teacher model. The teacher model then estimates separated sources that are used to train a student model with standard permutation invariant training (PIT). The student model can be fine-tuned with supervised data, i.e., paired artificial mixtures and clean speech sources, and further improved via model distillation. Experiments with single and multi channel mixtures show that the teacher-student training resolves the over-separation problem observed in the original MixIT method. Further, the semi-supervised performance is comparable to a fully-supervised separation system trained using ten times the amount of supervised data.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Marc Delcroix|AUTHOR Marc Delcroix]], [[Jorge Bennasar Vázquez|AUTHOR Jorge Bennasar Vázquez]], [[Tsubasa Ochiai|AUTHOR Tsubasa Ochiai]], [[Keisuke Kinoshita|AUTHOR Keisuke Kinoshita]], [[Shoko Araki|AUTHOR Shoko Araki]]
</p><p class="cpabstractcardaffiliationlist">NTT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3500–3504
</span></p></div>
<div class="cpabstractcardabstract"><p>Target sound extraction consists of extracting the sound of a target acoustic event (AE) class from a mixture of AE sounds. It can be realized using a neural network that extracts the target sound conditioned on a 1-hot vector that represents the desired AE class. With this approach, embedding vectors associated with the AE classes are directly optimized for the extraction of sound classes seen during training. However, it is not easy to extend this framework to new AE classes, i.e. unseen during training. Recently, speech, music, or AE sound extraction based on enrollment audio of the desired sound offers the potential of extracting any target sound in a mixture given only a short audio signal of a similar sound. In this work, we propose combining 1-hot- and enrollment-based target sound extraction, allowing optimal performance for seen AE classes and simple extension to new classes. In experiments with synthesized sound mixtures generated with the Freesound Dataset (FSD) datasets, we demonstrate the benefit of the combined framework for both seen and new AE classes. Besides, we also propose adapting the embedding vectors obtained from a few enrollment audio samples (few-shot) to further improve performance on new classes.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Cong Han|AUTHOR Cong Han]], [[Yi Luo|AUTHOR Yi Luo]], [[Nima Mesgarani|AUTHOR Nima Mesgarani]]
</p><p class="cpabstractcardaffiliationlist">Columbia University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3505–3509
</span></p></div>
<div class="cpabstractcardabstract"><p>Binaural speech separation algorithms designed for augmented hearing technologies need to both improve the signal-to-noise ratio of individual speakers and preserve their perceived location in space. The majority of binaural speech separation methods assume nonmoving speakers. As a result, their application to real-world scenarios with freely moving speakers requires block-wise adaptation which relies on short-term contextual information and limits their performance. In this study, we propose an alternative approach for utterance-level source separation with moving speakers and in reverberant conditions. Our model makes use of spectral and spatial features of speakers in a larger context compared to the block-wise adaption methods. The model can implicitly track speakers within the utterance without the need for explicit tracking modules. Experimental results on simulated moving multitalker speech show that the proposed method can significantly outperform block-wise adaptation methods in both separation performance and preserving the interaural cues across multiple conditions, which makes it suitable for real-world augmented hearing applications.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shell Xu Hu|AUTHOR Shell Xu Hu]], [[Md. Rifat Arefin|AUTHOR Md. Rifat Arefin]], [[Viet-Nhat Nguyen|AUTHOR Viet-Nhat Nguyen]], [[Alish Dipani|AUTHOR Alish Dipani]], [[Xaq Pitkow|AUTHOR Xaq Pitkow]], [[Andreas Savas Tolias|AUTHOR Andreas Savas Tolias]]
</p><p class="cpabstractcardaffiliationlist">Upload AI, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3510–3514
<a href="./IS2021/MEDIA/1378" class="externallinkbutton" target="_blank">{{$:/causal/ZIP Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>To extract the voice of a target speaker when mixed with a variety of other sounds, such as white and ambient noises or the voices of interfering speakers, we extend the Transformer network [1] to attend the most relevant information with respect to the target speaker given the characteristics of his or her voices as a form of contextual information. The idea has a natural interpretation in terms of the //selective attention theory// [2]. Specifically, we propose two models to incorporate the voice characteristics in Transformer based on different insights of where the feature selection should take place. Both models yield excellent performance, on par or better than published state-of-the-art models on the //speaker extraction// task, including separating speech of novel speakers not seen during training.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Saurjya Sarkar|AUTHOR Saurjya Sarkar]], [[Emmanouil Benetos|AUTHOR Emmanouil Benetos]], [[Mark Sandler|AUTHOR Mark Sandler]]
</p><p class="cpabstractcardaffiliationlist">Queen Mary University of London, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3515–3519
</span></p></div>
<div class="cpabstractcardabstract"><p>Polyphonic vocal recordings are an inherently challenging source separation task due to the melodic structure of the vocal parts and unique timbre of its constituents. In this work we utilise a time-domain neural network architecture re-purposed from speech separation research and modify it to separate //a capella// mixtures at a high sampling rate. We use four-part (soprano, alto, tenor and bass) //a capella// recordings of Bach Chorales and Barbershop Quartets for our experiments. Unlike current deep learning based choral separation models where the training objective is to separate constituent sources based on their class, we train our model using a permutation invariant objective. Using this we achieve state-of-the-art results for choral music separation. We introduce a novel method to estimate harmonic overlap between sung musical notes as a measure of task complexity. We also present an analysis of the impact of randomised mixing, input lengths and filterbank lengths for our task. Our results show a moderate negative correlation between the harmonic overlap of the target sources and source separation performance. We report that training our models with randomly mixed musically-incoherent mixtures drastically reduces the performance of vocal harmony separation as it decreases the average harmonic overlap presented during training.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Matthew Maciejewski|AUTHOR Matthew Maciejewski]], [[Shinji Watanabe|AUTHOR Shinji Watanabe]], [[Sanjeev Khudanpur|AUTHOR Sanjeev Khudanpur]]
</p><p class="cpabstractcardaffiliationlist">Johns Hopkins University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3520–3524
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech enhancement techniques typically focus on intrinsic metrics of signal quality. The overwhelming majority of deep learning-based single-channel speech separation studies, for instance, have relied on a single class of metrics to evaluate the systems by. These metrics, usually variants of Signal-to-Distortion Ratio (SDR), measure fidelity to the “ground truth” waveform. This can be problematic, not only for lack of diversity in evaluation metrics, but also in cases where a perfect ground truth waveform may be unavailable. In this work, we explore the value of speaker verification as an extrinsic metric of separation quality, with additional utility as evidence of the benefits of separation as pre-processing for downstream tasks.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Prachi Singh|AUTHOR Prachi Singh]], [[Rajat Varma|AUTHOR Rajat Varma]], [[Venkat Krishnamohan|AUTHOR Venkat Krishnamohan]], [[Srikanth Raj Chetupalli|AUTHOR Srikanth Raj Chetupalli]], [[Sriram Ganapathy|AUTHOR Sriram Ganapathy]]
</p><p class="cpabstractcardaffiliationlist">Indian Institute of Science, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3545–3549
</span></p></div>
<div class="cpabstractcardabstract"><p>The LEAP submission for DIHARD-III challenge is described in this paper. The proposed system is composed of a speech bandwidth classifier, and diarization systems fine-tuned for narrowband and wideband speech separately. We use an end-to-end speaker diarization system for the narrowband conversational telephone speech recordings. For the wideband multi-speaker recordings, we use a neural embedding based clustering approach, similar to the baseline system. The embeddings are extracted from a time-delay neural network (called x-vectors) followed by the graph based path integral clustering (PIC) approach. The LEAP system showed 24% and 18% relative improvements for Track-1 and Track-2 respectively over the baseline system provided by the organizers. This paper describes the challenge submission, the post-evaluation analysis and improvements observed on the DIHARD-III dataset.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shiliang Zhang|AUTHOR Shiliang Zhang]]^^1^^
, [[Siqi Zheng|AUTHOR Siqi Zheng]]^^1^^
, [[Weilong Huang|AUTHOR Weilong Huang]]^^1^^
, [[Ming Lei|AUTHOR Ming Lei]]^^1^^
, [[Hongbin Suo|AUTHOR Hongbin Suo]]^^1^^
, [[Jinwei Feng|AUTHOR Jinwei Feng]]^^2^^
, [[Zhijie Yan|AUTHOR Zhijie Yan]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Alibaba, China; ^^2^^Alibaba, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3550–3554
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we propose an overlapping speech detection (OSD) system for real multiparty meetings. Different from previous works on single-channel recordings or simulated data, we conduct research on real multi-channel data recorded by an 8-microphone array. We investigate how spatial information provided by multi-channel beamforming can benefit OSD. Specifically, we propose a two-stream DFSMN to jointly model acoustic and spatial features. Instead of performing frame-level OSD, we try to perform segment-level OSD. We come up with an attention pooling layer to model speech segments with variable length. Experimental results show that two-stream DFSMN with attention pooling can effectively model acoustic-spatial feature and significantly boost the performance of OSD, result in 3.5% (from 85.57% to 89.12%) absolute detection accuracy improvement compared to the baseline system.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Maokui He|AUTHOR Maokui He]]^^1^^
, [[Desh Raj|AUTHOR Desh Raj]]^^2^^
, [[Zili Huang|AUTHOR Zili Huang]]^^2^^
, [[Jun Du|AUTHOR Jun Du]]^^1^^
, [[Zhuo Chen|AUTHOR Zhuo Chen]]^^3^^
, [[Shinji Watanabe|AUTHOR Shinji Watanabe]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^USTC, China; ^^2^^Johns Hopkins University, USA; ^^3^^Microsoft, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3555–3559
</span></p></div>
<div class="cpabstractcardabstract"><p>Target-speaker voice activity detection (TS-VAD) has recently shown promising results for speaker diarization on highly overlapped speech. However, the original model requires a fixed (and known) number of speakers, which limits its application to real conversations. In this paper, we extend TS-VAD to speaker diarization with unknown numbers of speakers. This is achieved by two steps: first, an initial diarization system is applied for speaker number estimation, followed by TS-VAD network output masking according to this estimate. We further investigate different diarization methods, including clustering-based and region proposal networks, for estimating the initial i-vectors. Since these systems have complementary strengths, we propose a fusion-based method to combine frame-level decisions from the systems for an improved initialization. We demonstrate through experiments on variants of the LibriCSS meeting corpus that our proposed approach can improve the DER by up to 50% relative across varying numbers of speakers. This improvement also results in better downstream ASR performance approaching that using oracle segments.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Nauman Dawalatabad|AUTHOR Nauman Dawalatabad]]^^1^^
, [[Mirco Ravanelli|AUTHOR Mirco Ravanelli]]^^2^^
, [[François Grondin|AUTHOR François Grondin]]^^3^^
, [[Jenthe Thienpondt|AUTHOR Jenthe Thienpondt]]^^4^^
, [[Brecht Desplanques|AUTHOR Brecht Desplanques]]^^4^^
, [[Hwidong Na|AUTHOR Hwidong Na]]^^5^^
</p><p class="cpabstractcardaffiliationlist">^^1^^IIT Madras, India; ^^2^^Mila, Canada; ^^3^^Université de Sherbrooke, Canada; ^^4^^Ghent University, Belgium; ^^5^^Samsung, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3560–3564
</span></p></div>
<div class="cpabstractcardabstract"><p>Learning robust speaker embeddings is a crucial step in speaker diarization. Deep neural networks can accurately capture speaker discriminative characteristics and popular deep embeddings such as x-vectors are nowadays a fundamental component of modern diarization systems. Recently, some improvements over the standard TDNN architecture used for x-vectors have been proposed. The ECAPA-TDNN model, for instance, has shown impressive performance in the speaker verification domain, thanks to a carefully designed neural model.
In this work, we extend, for the first time, the use of the ECAPA-TDNN model to speaker diarization. Moreover, we improved its robustness with a powerful augmentation scheme that concatenates several contaminated versions of the same signal within the same training batch. The ECAPA-TDNN model turned out to provide robust speaker embeddings under both close-talking and distant-talking conditions. Our results on the popular AMI meeting corpus show that our system significantly outperforms recently proposed approaches.</p></div>
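The augmentation scheme, concatenating several contaminated versions of the same signal within one training batch, can be pictured as below; augmenters is a hypothetical list of waveform-level corruptions (noise, reverberation, etc.), not the authors' exact pipeline.

```python
import torch

def contaminated_batch(wavs, speaker_ids, augmenters):
    """Stack the clean waveforms with several corrupted copies in the same batch.
    wavs: (batch, samples); each augmenter maps a waveform batch to the same shape."""
    views = [wavs] + [aug(wavs) for aug in augmenters]
    labels = speaker_ids.repeat(len(views))   # every contaminated view keeps its speaker label
    return torch.cat(views, dim=0), labels
```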
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Keisuke Kinoshita|AUTHOR Keisuke Kinoshita]], [[Marc Delcroix|AUTHOR Marc Delcroix]], [[Naohiro Tawara|AUTHOR Naohiro Tawara]]
</p><p class="cpabstractcardaffiliationlist">NTT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3565–3569
</span></p></div>
<div class="cpabstractcardabstract"><p>Recently, we proposed a novel speaker diarization method called End-to-End-Neural-Diarization-vector clustering (EEND-vector clustering) that integrates clustering-based and end-to-end neural network-based diarization approaches into one framework. The proposed method combines advantages of both frameworks, i.e. high diarization performance and handling of overlapped speech based on EEND, and robust handling of long recordings with an arbitrary number of speakers based on clustering-based approaches. However, the method was only evaluated so far on simulated 2-speaker meeting-like data. This paper is to (1) report recent advances we made to this framework, including newly introduced robust constrained clustering algorithms, and (2) experimentally show that the method can now outperform competitive diarization methods such as Encoder-Decoder Attractor (EDA)-EEND, on CALLHOME data which comprises real conversational speech data including overlapped speech and an arbitrary number of speakers. By further analyzing the experimental results, this paper also discusses pros and cons of the proposed method and reveals potential for further improvement.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Neville Ryant|AUTHOR Neville Ryant]]^^1^^
, [[Prachi Singh|AUTHOR Prachi Singh]]^^2^^
, [[Venkat Krishnamohan|AUTHOR Venkat Krishnamohan]]^^2^^
, [[Rajat Varma|AUTHOR Rajat Varma]]^^2^^
, [[Kenneth Church|AUTHOR Kenneth Church]]^^3^^
, [[Christopher Cieri|AUTHOR Christopher Cieri]]^^1^^
, [[Jun Du|AUTHOR Jun Du]]^^4^^
, [[Sriram Ganapathy|AUTHOR Sriram Ganapathy]]^^2^^
, [[Mark Liberman|AUTHOR Mark Liberman]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Pennsylvania, USA; ^^2^^Indian Institute of Science, India; ^^3^^Baidu, USA; ^^4^^USTC, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3570–3574
</span></p></div>
<div class="cpabstractcardabstract"><p>DIHARD III was the third in a series of speaker diarization challenges intended to improve the robustness of diarization systems to variability in recording equipment, noise conditions, and conversational domain. Speaker diarization was evaluated under two speech activity conditions (diarization from a reference speech activity vs. diarization from scratch) and 11 diverse domains. The domains span a range of recording conditions and interaction types, including read audio-books, meeting speech, clinical interviews, web videos, and, for the first time, conversational telephone speech. A total of 30 organizations (forming 21 teams) from industry and academia submitted 499 valid system outputs. The evaluation results indicate that speaker diarization has improved markedly since DIHARD I, particularly for two-party interactions, but that for many domains (e.g., web video) the problem remains far from solved.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tsun-Yat Leung|AUTHOR Tsun-Yat Leung]], [[Lahiru Samarakoon|AUTHOR Lahiru Samarakoon]]
</p><p class="cpabstractcardaffiliationlist">Fano Labs, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3575–3579
</span></p></div>
<div class="cpabstractcardabstract"><p>Traditionally, a speaker diarization system has multiple components to extract and cluster speaker embeddings. However, end-to-end diarization is more desirable as it facilitates optimizing one model in contrast to multiple components in a traditional set up. Moreover, end-to-end diarization systems are capable of handling overlapped speech. Recently proposed self-attentive end-to-end diarization model with encoder-decoder based attractors (EEND-EDA) is capable of processing speech from an unknown number of speakers, and has reported comparable performances to traditional systems. In this work, we aim to improve the EEND-EDA model. First, we increase the robustness of the model by incorporating an additive margin penalty for minimizing the intra-class variance. Second, we propose to replace the Transformer encoders with Conformer encoders to capture local information. Third, we propose to use convolutional subsampling and upsampling instead of manual subsampling only. Our proposed improvements report 21.6% relative reduction in DER on the evaluation full set of the track 2 of the DIHARD III challenge.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Benjamin O’Brien|AUTHOR Benjamin O’Brien]]^^1^^
, [[Natalia Tomashenko|AUTHOR Natalia Tomashenko]]^^2^^
, [[Anaïs Chanclu|AUTHOR Anaïs Chanclu]]^^2^^
, [[Jean-François Bonastre|AUTHOR Jean-François Bonastre]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^LPL (UMR 7309), France; ^^2^^LIA (EA 4128), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3580–3584
</span></p></div>
<div class="cpabstractcardabstract"><p>Our study examined the performance of evaluators tasked to group natural and anonymised speech recordings into clusters based on their perceived similarities. Speech stimuli were selected from the VCTK corpus; two systems developed for the VoicePrivacy 2020 Challenge were used for anonymisation. The Baseline-1 (B1) system was developed by using x-vectors and neural waveform models, while the Baseline-2 (B2) system relied on digital-signal-processing techniques. 74 evaluators completed three trials composed of 16 recordings with either natural or anonymised speech generated from a single system. F-measure and cluster purity metrics were used to assess evaluator accuracy. Probabilistic linear discriminant analysis (PLDA) scores from an automatic speaker verification system were generated to quantify similarity between recordings and used to correlate subjective results. Our findings showed that non-native English speaking evaluators significantly lowered their F-measure means when presented anonymised recordings. We observed no significance for cluster purity. Pearson correlation procedures revealed that PLDA scores generated from natural and B2-anonymised speech recordings correlated positively to F-measure and cluster purity metrics. These findings show evaluators were able to use the interface to cluster natural and anonymised speech recordings and suggest anonymisation systems modelled like B1 are more effective at suppressing identifiable speech characteristics.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kiran Karra|AUTHOR Kiran Karra]], [[Alan McCree|AUTHOR Alan McCree]]
</p><p class="cpabstractcardaffiliationlist">Johns Hopkins University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3585–3589
</span></p></div>
<div class="cpabstractcardabstract"><p>Many modern systems for speaker diarization, such as the recently-developed VBx approach, rely on clustering of DNN speaker embeddings followed by resegmentation. Two problems with this approach are that the DNN is not directly optimized for this task, and the parameters need significant retuning for different applications. We have recently presented progress in this direction with a Leave-One-Out Gaussian PLDA (LGP) clustering algorithm and an approach to training the DNN such that embeddings directly optimize performance of this scoring method. This paper presents a new two-pass version of this system, where the second pass uses finer time resolution to significantly improve overall performance. For the Callhome corpus, we achieve the first published error rate below 4% without any task-dependent parameter tuning. We also show significant progress towards a robust single solution for multiple diarization tasks.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zhenhou Hong|AUTHOR Zhenhou Hong]], [[Jianzong Wang|AUTHOR Jianzong Wang]], [[Xiaoyang Qu|AUTHOR Xiaoyang Qu]], [[Jie Liu|AUTHOR Jie Liu]], [[Chendong Zhao|AUTHOR Chendong Zhao]], [[Jing Xiao|AUTHOR Jing Xiao]]
</p><p class="cpabstractcardaffiliationlist">Ping An Technology, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3590–3594
</span></p></div>
<div class="cpabstractcardabstract"><p>Text to speech (TTS) is a crucial task for user interaction, but TTS model training relies on a sizable set of high-quality original datasets. Due to privacy and security issues, the original datasets are usually unavailable directly. Recently, federated learning proposes a popular distributed machine learning paradigm with an enhanced privacy protection mechanism. It offers a practical and secure framework for data owners to collaborate with others, thus obtaining a better global model trained on the larger dataset. However, due to the high complexity of transformer models, the convergence process becomes slow and unstable in the federated learning setting. Besides, the transformer model trained in federated learning is costly communication and limited computational speed on clients, impeding its popularity. To deal with these challenges, we propose the federated dynamic transformer. On the one hand, the performance is greatly improved comparing with the federated transformer, approaching centralize-trained Transformer-TTS when increasing clients number. On the other hand, it achieves faster and more stable convergence in the training phase and significantly reduces communication time. Experiments on the LJSpeech dataset also strongly prove our method’s advantage.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hyunseung Chung|AUTHOR Hyunseung Chung]], [[Sang-Hoon Lee|AUTHOR Sang-Hoon Lee]], [[Seong-Whan Lee|AUTHOR Seong-Whan Lee]]
</p><p class="cpabstractcardaffiliationlist">Korea University, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3635–3639
</span></p></div>
<div class="cpabstractcardabstract"><p>Text-to-speech (TTS) synthesis is the process of producing synthesized speech from text or phoneme input. Traditional TTS models contain multiple processing steps and require external aligners, which provide attention alignments of phoneme-to-frame sequences. As the complexity increases and efficiency decreases with every additional step, there is expanding demand in modern synthesis pipelines for end-to-end TTS with efficient internal aligners. In this work, we propose an end-to-end text-to-waveform network with a novel reinforcement learning based duration search method. Our proposed generator is feed-forward and the aligner trains the agent to make optimal duration predictions by receiving active feedback from actions taken to maximize cumulative reward. We demonstrate accurate alignments of phoneme-to-frame sequence generated from trained agents enhance fidelity and naturalness of synthesized audio. Experimental results also show the superiority of our proposed model compared to other state-of-the-art TTS models with internal and external aligners.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shilun Lin|AUTHOR Shilun Lin]], [[Fenglong Xie|AUTHOR Fenglong Xie]], [[Li Meng|AUTHOR Li Meng]], [[Xinhui Li|AUTHOR Xinhui Li]], [[Li Lu|AUTHOR Li Lu]]
</p><p class="cpabstractcardaffiliationlist">Tencent, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3640–3644
</span></p></div>
<div class="cpabstractcardabstract"><p>In this work, a robust and efficient text-to-speech (TTS) synthesis system named Triple M is proposed for large-scale online application. The key components of Triple M are: 1) A sequence-to-sequence model adopts a novel multi-guidance attention to transfer complementary advantages from guiding attention mechanisms to the basic attention mechanism without in-domain performance loss and online service modification. Compared with single attention mechanism, multi-guidance attention not only brings better naturalness to long sentence synthesis, but also reduces the word error rate by 26.8%. 2) A new efficient multi-band multi-time vocoder framework, which reduces the computational complexity from 2.8 to 1.0 GFLOP and speeds up LPCNet by 2.75× on a single CPU.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Edresson Casanova|AUTHOR Edresson Casanova]]^^1^^
, [[Christopher Shulby|AUTHOR Christopher Shulby]]^^2^^
, [[Eren Gölge|AUTHOR Eren Gölge]]^^3^^
, [[Nicolas Michael Müller|AUTHOR Nicolas Michael Müller]]^^4^^
, [[Frederico Santos de Oliveira|AUTHOR Frederico Santos de Oliveira]]^^5^^
, [[Arnaldo Candido Jr.|AUTHOR Arnaldo Candido Jr.]]^^6^^
, [[Anderson da Silva Soares|AUTHOR Anderson da Silva Soares]]^^5^^
, [[Sandra Maria Aluisio|AUTHOR Sandra Maria Aluisio]]^^1^^
, [[Moacir Antonelli Ponti|AUTHOR Moacir Antonelli Ponti]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universidade de São Paulo, Brazil; ^^2^^DefinedCrowd, USA; ^^3^^Coqui, Germany; ^^4^^Fraunhofer AISEC, Germany; ^^5^^Universidade Federal de Goiás, Brazil; ^^6^^Universidade Tecnológica Federal do Paraná, Brazil</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3645–3649
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we propose SC-GlowTTS: an efficient zero-shot multi-speaker text-to-speech model that improves similarity for speakers unseen during training. We propose a speaker-conditional architecture that explores a flow-based decoder that works in a zero-shot scenario. As text encoders, we explore a dilated residual convolutional-based encoder, gated convolutional-based encoder, and transformer-based encoder. Additionally, we have shown that adjusting a GAN-based vocoder for the spectrograms predicted by the TTS model on the training dataset can significantly improve the similarity and speech quality for new speakers. Our model converges using only 11 speakers, reaching state-of-the-art results for similarity with new speakers, as well as high speech quality.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Huu-Kim Nguyen|AUTHOR Huu-Kim Nguyen]]^^1^^
, [[Kihyuk Jeong|AUTHOR Kihyuk Jeong]]^^1^^
, [[Seyun Um|AUTHOR Seyun Um]]^^1^^
, [[Min-Jae Hwang|AUTHOR Min-Jae Hwang]]^^2^^
, [[Eunwoo Song|AUTHOR Eunwoo Song]]^^3^^
, [[Hong-Goo Kang|AUTHOR Hong-Goo Kang]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Yonsei University, Korea; ^^2^^Search Solutions, Korea; ^^3^^Naver, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3595–3599
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we propose a lightweight end-to-end text-to-speech model that can generate high-quality speech at breakneck speed. In our proposed model, a feature prediction module and a waveform generation module are combined within a single framework. The feature prediction module, which consists of two independent sub-modules, estimates latent space embeddings for input text and prosodic information, and the waveform generation module generates speech waveforms by conditioning on the estimated latent space embeddings. Unlike conventional approaches that estimate prosodic information using a pre-trained model, our model jointly trains the prosodic embedding network with the speech waveform generation task using an effective domain transfer technique. Experimental results show that our proposed model can generate samples 7 times faster than real-time, and about 1.6 times faster than FastSpeech 2, as we use only 13.4 million parameters. We confirm that the generated speech quality is still of a high standard as evaluated by mean opinion scores.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Chuanxin Tang|AUTHOR Chuanxin Tang]]^^1^^
, [[Chong Luo|AUTHOR Chong Luo]]^^1^^
, [[Zhiyuan Zhao|AUTHOR Zhiyuan Zhao]]^^1^^
, [[Dacheng Yin|AUTHOR Dacheng Yin]]^^2^^
, [[Yucheng Zhao|AUTHOR Yucheng Zhao]]^^2^^
, [[Wenjun Zeng|AUTHOR Wenjun Zeng]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Microsoft, China; ^^2^^USTC, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3600–3604
</span></p></div>
<div class="cpabstractcardabstract"><p>Given a piece of speech and its transcript text, text-based speech editing aims to generate speech that can be seamlessly inserted into the given speech by editing the transcript. Existing methods adopt a two-stage approach: synthesize the input text using a generic text-to-speech (TTS) engine and then transform the voice to the desired voice using voice conversion (VC). A major problem of this framework is that VC is a challenging problem which usually needs a moderate amount of parallel training data to work satisfactorily. In this paper, we propose a one-stage context-aware framework to generate natural and coherent target speech without any training data of the target speaker. In particular, we manage to perform accurate zero-shot duration prediction for the inserted text. The predicted duration is used to regulate both text embedding and speech embedding. Then, based on the aligned cross-modality input, we directly generate the mel-spectrogram of the edited speech with a transformer-based decoder. Subjective listening tests show that despite the lack of training data for the speaker, our method has achieved satisfactory results. It outperforms a recent zero-shot TTS engine by a large margin.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Myeonghun Jeong|AUTHOR Myeonghun Jeong]]^^1^^
, [[Hyeongju Kim|AUTHOR Hyeongju Kim]]^^2^^
, [[Sung Jun Cheon|AUTHOR Sung Jun Cheon]]^^1^^
, [[Byoung Jin Choi|AUTHOR Byoung Jin Choi]]^^1^^
, [[Nam Soo Kim|AUTHOR Nam Soo Kim]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Seoul National University, Korea; ^^2^^Neosapience, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3605–3609
</span></p></div>
<div class="cpabstractcardabstract"><p>Although neural text-to-speech (TTS) models have attracted a lot of attention and succeeded in generating human-like speech, there is still room for improvements to its naturalness and architectural efficiency. In this work, we propose a novel non-autoregressive TTS model, namely Diff-TTS, which achieves highly natural and efficient speech synthesis. Given the text, Diff-TTS exploits a denoising diffusion framework to transform the noise signal into a mel-spectrogram via diffusion time steps. In order to learn the mel-spectrogram distribution conditioned on the text, we present a likelihood-based optimization method for TTS. Furthermore, to boost up the inference speed, we leverage the accelerated sampling method that allows Diff-TTS to generate raw waveforms much faster without significantly degrading perceptual quality. Through experiments, we verified that Diff-TTS generates 28 times faster than the real-time with a single NVIDIA 2080Ti GPU.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jae-Sung Bae|AUTHOR Jae-Sung Bae]], [[Taejun Bak|AUTHOR Taejun Bak]], [[Young-Sun Joo|AUTHOR Young-Sun Joo]], [[Hoon-Young Cho|AUTHOR Hoon-Young Cho]]
</p><p class="cpabstractcardaffiliationlist">NCSOFT, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3610–3614
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we propose methods for improving the modeling performance of a Transformer-based non-autoregressive text-to-speech (TNA-TTS) model. Although the text encoder and audio decoder handle different types and lengths of data (i.e., text and audio), the TNA-TTS models are not designed considering these variations. Therefore, to improve the modeling performance of the TNA-TTS model we propose a hierarchical Transformer structure-based text encoder and audio decoder that are designed to accommodate the characteristics of each module. For the text encoder, we constrain each self-attention layer so the encoder focuses on a text sequence from the local to the global scope. Conversely, the audio decoder constrains its self-attention layers to focus in the reverse direction, i.e., from global to local scope. Additionally, we further improve the pitch modeling accuracy of the audio decoder by providing sentence and word-level pitch as conditions. Various objective and subjective evaluations verified that the proposed method outperformed the baseline TNA-TTS.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Adam Polyak|AUTHOR Adam Polyak]]^^1^^
, [[Yossi Adi|AUTHOR Yossi Adi]]^^1^^
, [[Jade Copet|AUTHOR Jade Copet]]^^2^^
, [[Eugene Kharitonov|AUTHOR Eugene Kharitonov]]^^2^^
, [[Kushal Lakhotia|AUTHOR Kushal Lakhotia]]^^3^^
, [[Wei-Ning Hsu|AUTHOR Wei-Ning Hsu]]^^3^^
, [[Abdelrahman Mohamed|AUTHOR Abdelrahman Mohamed]]^^3^^
, [[Emmanuel Dupoux|AUTHOR Emmanuel Dupoux]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Facebook, Israel; ^^2^^Facebook, France; ^^3^^Facebook, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3615–3619
<a href="./IS2021/MEDIA/0475" class="externallinkbutton" target="_blank">{{$:/causal/ZIP Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose using self-supervised discrete representations for the task of speech resynthesis. To generate disentangled representation, we separately extract low-bitrate representations for speech content, prosodic information, and speaker identity. This allows to synthesize speech in a controllable manner. We analyze various state-of-the-art, self-supervised representation learning methods and shed light on the advantages of each method while considering reconstruction quality and disentanglement properties. Specifically, we evaluate the F0 reconstruction, speaker identification performance (for both resynthesis and voice conversion), recordings’ intelligibility, and overall quality using subjective human evaluation. Lastly, we demonstrate how these representations can be used for an ultra-lightweight speech codec. Using the obtained representations, we can get to a rate of 365 bits per second while providing better speech quality than the baseline methods. Audio samples are publicly available.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Penny Karanasou|AUTHOR Penny Karanasou]], [[Sri Karlapati|AUTHOR Sri Karlapati]], [[Alexis Moinet|AUTHOR Alexis Moinet]], [[Arnaud Joly|AUTHOR Arnaud Joly]], [[Ammar Abbas|AUTHOR Ammar Abbas]], [[Simon Slangen|AUTHOR Simon Slangen]], [[Jaime Lorenzo-Trueba|AUTHOR Jaime Lorenzo-Trueba]], [[Thomas Drugman|AUTHOR Thomas Drugman]]
</p><p class="cpabstractcardaffiliationlist">Amazon, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3620–3624
</span></p></div>
<div class="cpabstractcardabstract"><p>Many factors influence speech yielding different renditions of a given sentence. Generative models, such as variational autoencoders (VAEs), capture this variability and allow multiple renditions of the same sentence via sampling. The degree of prosodic variability depends heavily on the prior that is used when sampling. In this paper, we propose a novel method to compute an informative prior for the VAE latent space of a neural text-to-speech (TTS) system. By doing so, we aim to sample with more prosodic variability, while gaining controllability over the latent space’s structure.
By using as prior the posterior distribution of a secondary VAE, which we condition on a speaker vector, we can sample from the primary VAE while explicitly taking the conditioning into account, resulting in samples from a specific region of the latent space for each condition (i.e. speaker). A formal preference test demonstrates a significant preference for the proposed approach over a standard Conditional VAE. We also provide visualisations of the latent space, where well-separated condition-specific clusters appear, as well as ablation studies to better understand the behaviour of the system.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Dipjyoti Paul|AUTHOR Dipjyoti Paul]]^^1^^
, [[Sankar Mukherjee|AUTHOR Sankar Mukherjee]]^^2^^
, [[Yannis Pantazis|AUTHOR Yannis Pantazis]]^^3^^
, [[Yannis Stylianou|AUTHOR Yannis Stylianou]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Crete, Greece; ^^2^^IIT, Italy; ^^3^^FORTH, Greece</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3625–3629
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we present a universal multi-speaker, multi-style Text-to-Speech (TTS) synthesis system which is able to generate speech from text with speaker characteristics and speaking style similar to a given reference signal. Training is conducted on non-parallel data and generates voices in an unsupervised manner, i.e., neither style annotation nor speaker label are required. To avoid leaking content information into the style embeddings (referred to as “content leakage”) and leaking speaker information into style embeddings (referred to as “style leakage”) we suggest a novel ''R''ényi ''D''ivergence based ''D''isentangled ''R''epresentation framework through adversarial learning. Similar to mutual information minimization, the proposed approach explicitly estimates via a variational formula and then minimizes the Rényi divergence between the joint distribution and the product of marginals for the content-style and style-speaker pairs. By doing so, content, style and speaker spaces become representative and (ideally) independent of each other. Our proposed system greatly reduces content leakage by improving the word error rate by approximately 17–19% relative to the baseline system. In MOS-speech-quality, the proposed algorithm achieves an improvement of about 16–20% whereas MOS-style-similarly boost up 15% relative performance.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yi-Chiao Wu|AUTHOR Yi-Chiao Wu]]^^1^^
, [[Cheng-Hung Hu|AUTHOR Cheng-Hung Hu]]^^2^^
, [[Hung-Shin Lee|AUTHOR Hung-Shin Lee]]^^2^^
, [[Yu-Huai Peng|AUTHOR Yu-Huai Peng]]^^2^^
, [[Wen-Chin Huang|AUTHOR Wen-Chin Huang]]^^1^^
, [[Yu Tsao|AUTHOR Yu Tsao]]^^2^^
, [[Hsin-Min Wang|AUTHOR Hsin-Min Wang]]^^2^^
, [[Tomoki Toda|AUTHOR Tomoki Toda]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Nagoya University, Japan; ^^2^^Academia Sinica, Taiwan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3630–3634
</span></p></div>
<div class="cpabstractcardabstract"><p>Nowadays, neural vocoders can generate very high-fidelity speech when a bunch of training data is available. Although a speaker-dependent (SD) vocoder usually outperforms a speaker-independent (SI) vocoder, it is impractical to collect a large amount of data of a specific target speaker for most real-world applications. To tackle the problem of limited target data, a data augmentation method based on speaker representation and similarity measurement of speaker verification is proposed in this paper. The proposed method selects utterances that have similar speaker identity to the target speaker from an external corpus, and then combines the selected utterances with the limited target data for SD vocoder adaptation. The evaluation results show that, compared with the vocoder adapted using only limited target data, the vocoder adapted using augmented data improves both the quality and similarity of synthesized speech.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ian Palmer|AUTHOR Ian Palmer]], [[Andrew Rouditchenko|AUTHOR Andrew Rouditchenko]], [[Andrei Barbu|AUTHOR Andrei Barbu]], [[Boris Katz|AUTHOR Boris Katz]], [[James Glass|AUTHOR James Glass]]
</p><p class="cpabstractcardaffiliationlist">MIT, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3650–3654
</span></p></div>
<div class="cpabstractcardabstract"><p>Visually-grounded spoken language datasets can enable models to learn cross-modal correspondences with very weak supervision. However, modern audio-visual datasets contain biases that undermine the real-world performance of models trained on that data. We introduce Spoken ObjectNet, which is designed to remove some of these biases and provide a way to better evaluate how effectively models will perform in real-world scenarios. This dataset expands upon ObjectNet, which is a bias-controlled image dataset that features similar image classes to those present in ImageNet.
We detail our data collection pipeline, which features several methods to improve caption quality, including automated language model checks. Lastly, we show baseline results on image retrieval and audio retrieval tasks. These results show that models trained on other datasets and then evaluated on Spoken ObjectNet tend to perform poorly due to biases in other datasets that the models have learned. We also show evidence that the performance decrease is due to the dataset controls, and not the transfer setting.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Gonçal V. Garcés Díaz-Munío|AUTHOR Gonçal V. Garcés Díaz-Munío]], [[Joan-Albert Silvestre-Cerdà|AUTHOR Joan-Albert Silvestre-Cerdà]], [[Javier Jorge|AUTHOR Javier Jorge]], [[Adrià Giménez Pastor|AUTHOR Adrià Giménez Pastor]], [[Javier Iranzo-Sánchez|AUTHOR Javier Iranzo-Sánchez]], [[Pau Baquero-Arnal|AUTHOR Pau Baquero-Arnal]], [[Nahuel Roselló|AUTHOR Nahuel Roselló]], [[Alejandro Pérez-González-de-Martos|AUTHOR Alejandro Pérez-González-de-Martos]], [[Jorge Civera|AUTHOR Jorge Civera]], [[Albert Sanchis|AUTHOR Albert Sanchis]], [[Alfons Juan|AUTHOR Alfons Juan]]
</p><p class="cpabstractcardaffiliationlist">Universitat Politècnica de València, Spain</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3695–3699
</span></p></div>
<div class="cpabstractcardabstract"><p>We introduce Europarl-ASR, a large speech and text corpus of parliamentary debates including 1 300 hours of transcribed speeches and 70 million tokens of text in English extracted from European Parliament sessions. The training set is labelled with the Parliament’s non-fully-verbatim official transcripts, time-aligned. As verbatimness is critical for acoustic model training, we also provide automatically noise-filtered and automatically verbatimized transcripts of all speeches based on speech data filtering and verbatimization techniques. Additionally, 18 hours of transcribed speeches were manually verbatimized to build reliable speaker-dependent and speaker-independent development/test sets for streaming ASR benchmarking. The availability of manual non-verbatim and verbatim transcripts for dev/test speeches makes this corpus useful for the assessment of automatic filtering and verbatimization techniques. This paper describes the corpus and its creation, and provides off-line and streaming ASR baselines for both the speaker-dependent and speaker-independent tasks using the three training transcription sets. The corpus is publicly released under an open licence.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Parul Kapoor|AUTHOR Parul Kapoor]]^^1^^
, [[Rudrabha Mukhopadhyay|AUTHOR Rudrabha Mukhopadhyay]]^^2^^
, [[Sindhu B. Hegde|AUTHOR Sindhu B. Hegde]]^^2^^
, [[Vinay Namboodiri|AUTHOR Vinay Namboodiri]]^^1^^
, [[C.V. Jawahar|AUTHOR C.V. Jawahar]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^IIT Kanpur, India; ^^2^^IIIT Hyderabad, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3700–3704
<a href="./IS2021/MEDIA/1094" class="externallinkbutton" target="_blank">{{$:/causal/ZIP Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>We aim to solve the highly challenging task of generating continuous sign language videos solely from speech segments for the first time. Recent efforts in this space have focused on generating such videos from human-annotated text transcripts without considering other modalities. However, replacing speech with sign language proves to be a practical solution while communicating with people suffering from hearing loss. Therefore, we eliminate the need of using text as input and design techniques that work for more natural, continuous, freely uttered speech covering an extensive vocabulary. Since the current datasets are inadequate for generating sign language directly from speech, we collect and release the first Indian sign language dataset comprising speech-level annotations, text transcripts, and the corresponding sign-language videos. Next, we propose a multi-tasking transformer network trained to generate signer’s poses from speech segments. With speech-to-text as an auxiliary task and an additional cross-modal discriminator, our model learns to generate continuous sign pose sequences in an end-to-end manner. Extensive experiments and comparisons with other baselines demonstrate the effectiveness of our approach. We also conduct additional ablation studies to analyze the effect of different modules of our network. A demo video containing several results is attached to the supplementary material.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Won Ik Cho|AUTHOR Won Ik Cho]]^^1^^
, [[Seok Min Kim|AUTHOR Seok Min Kim]]^^1^^
, [[Hyunchang Cho|AUTHOR Hyunchang Cho]]^^2^^
, [[Nam Soo Kim|AUTHOR Nam Soo Kim]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Seoul National University, Korea; ^^2^^Naver, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3705–3709
</span></p></div>
<div class="cpabstractcardabstract"><p>Most speech-to-text (S2T) translation studies use English speech as a source, which makes it difficult for non-English speakers to take advantage of the S2T technologies. For some languages, this problem was tackled through corpus construction, but the farther linguistically from English or the more under-resourced, this deficiency and underrepresentedness becomes more significant. In this paper, we introduce //kosp2e// (read as ‘kospi’), a corpus that allows Korean speech to be translated into English text in an end-to-end manner. We adopt open license speech recognition corpus, translation corpus, and spoken language corpora to make our dataset freely available to the public, and check the performance through the pipeline and training-based approaches. Using pipeline and various end-to-end schemes, we obtain the highest BLEU of 21.3 and 18.0 for each based on the English hypothesis, validating the feasibility of our data. We plan to supplement annotations for other target languages through community contributions in the future.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Junbo Zhang|AUTHOR Junbo Zhang]]^^1^^
, [[Zhiwen Zhang|AUTHOR Zhiwen Zhang]]^^2^^
, [[Yongqing Wang|AUTHOR Yongqing Wang]]^^1^^
, [[Zhiyong Yan|AUTHOR Zhiyong Yan]]^^1^^
, [[Qiong Song|AUTHOR Qiong Song]]^^2^^
, [[Yukai Huang|AUTHOR Yukai Huang]]^^2^^
, [[Ke Li|AUTHOR Ke Li]]^^2^^
, [[Daniel Povey|AUTHOR Daniel Povey]]^^1^^
, [[Yujun Wang|AUTHOR Yujun Wang]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Xiaomi, China; ^^2^^SpeechOcean, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3710–3714
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper introduces a new open-source speech corpus named “speechocean762” designed for pronunciation assessment use, consisting of 5000 English utterances from 250 non-native speakers, where half of the speakers are children. Five experts annotated each of the utterances at sentence-level, word-level and phoneme-level. A baseline system is released in open source to illustrate the phoneme-level pronunciation assessment workflow on this corpus. This corpus is allowed to be used freely for commercial and non-commercial purposes. It is available for free download from OpenSLR, and the corresponding baseline system is published in the Kaldi speech recognition toolkit.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Elizabeth Salesky|AUTHOR Elizabeth Salesky]]^^1^^
, [[Matthew Wiesner|AUTHOR Matthew Wiesner]]^^1^^
, [[Jacob Bremerman|AUTHOR Jacob Bremerman]]^^2^^
, [[Roldano Cattoni|AUTHOR Roldano Cattoni]]^^3^^
, [[Matteo Negri|AUTHOR Matteo Negri]]^^3^^
, [[Marco Turchi|AUTHOR Marco Turchi]]^^3^^
, [[Douglas W. Oard|AUTHOR Douglas W. Oard]]^^2^^
, [[Matt Post|AUTHOR Matt Post]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Johns Hopkins University, USA; ^^2^^University of Maryland, USA; ^^3^^FBK, Italy</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3655–3659
</span></p></div>
<div class="cpabstractcardabstract"><p>We present the Multilingual TEDx corpus, built to support speech recognition (ASR) and speech translation (ST) research across many non-English source languages. The corpus is a collection of audio recordings from TEDx talks in 8 source languages. We segment transcripts into sentences and align them to the source-language audio and target-language translations. The corpus is released along with open-sourced code enabling extension to new talks and languages as they become available. Our corpus creation methodology can be applied to more languages than previous work, and creates multi-way parallel evaluation sets. We provide baselines in multiple ASR and ST settings, including multilingual models to improve translation performance for low-resource language pairs.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[David R. Mortensen|AUTHOR David R. Mortensen]]^^1^^
, [[Jordan Picone|AUTHOR Jordan Picone]]^^2^^
, [[Xinjian Li|AUTHOR Xinjian Li]]^^1^^
, [[Kathleen Siminyu|AUTHOR Kathleen Siminyu]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Carnegie Mellon University, USA; ^^2^^University of Pittsburgh, USA; ^^3^^Georgia Tech, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3660–3664
</span></p></div>
<div class="cpabstractcardabstract"><p>There is growing interest in ASR systems that can recognize phones in a language-independent fashion [1, 2, 3]. There is additionally interest in building language technologies for low-resource and endangered languages. However, there is a paucity of realistic data that can be used to test such systems and technologies. This paper presents a publicly available, phonetically transcribed corpus of 2255 utterances (words and short phrases) in the endangered Tangkhulic language East Tusom (no ISO 639-3 code), a Tibeto-Burman language variety spoken mostly in India. Because the dataset is transcribed in terms of phones, rather than phonemes, it is a better match for universal phone recognition systems than many larger (phonemically transcribed) datasets. This paper describes the dataset and the methodology used to produce it. It further presents basic benchmarks of state-of-the-art universal phone recognition systems on the dataset as baselines for future experiments.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yihui Fu|AUTHOR Yihui Fu]]^^1^^
, [[Luyao Cheng|AUTHOR Luyao Cheng]]^^1^^
, [[Shubo Lv|AUTHOR Shubo Lv]]^^1^^
, [[Yukai Jv|AUTHOR Yukai Jv]]^^1^^
, [[Yuxiang Kong|AUTHOR Yuxiang Kong]]^^1^^
, [[Zhuo Chen|AUTHOR Zhuo Chen]]^^2^^
, [[Yanxin Hu|AUTHOR Yanxin Hu]]^^1^^
, [[Lei Xie|AUTHOR Lei Xie]]^^1^^
, [[Jian Wu|AUTHOR Jian Wu]]^^3^^
, [[Hui Bu|AUTHOR Hui Bu]]^^4^^
, [[Xin Xu|AUTHOR Xin Xu]]^^4^^
, [[Jun Du|AUTHOR Jun Du]]^^5^^
, [[Jingdong Chen|AUTHOR Jingdong Chen]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Northwestern Polytechnical University, China; ^^2^^Microsoft, USA; ^^3^^Microsoft, China; ^^4^^Beijing Shell Shell Technology, China; ^^5^^USTC, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3665–3669
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we present AISHELL-4, a sizable real-recorded Mandarin speech dataset collected by 8-channel circular microphone array for speech processing in conference scenario. The dataset consists of 211 recorded meeting sessions, each containing 4 to 8 speakers, with a total length of 120 hours. This dataset aims to bridge the advanced research on multi-speaker processing and the practical application scenario in three aspects. With real recorded meetings, AISHELL-4 provides realistic acoustics and rich natural speech characteristics in conversation such as short pause, speech overlap, quick speaker turn, noise, etc. Meanwhile, accurate transcription and speaker voice activity are provided for each meeting in AISHELL-4. This allows the researchers to explore different aspects in meeting processing, ranging from individual tasks such as speech front-end processing, speech recognition and speaker diarization, to multi-modality modeling and joint optimization of relevant tasks. Given most open source dataset for multi-speaker tasks are in English, AISHELL-4 is the only Mandarin dataset for conversation speech, providing additional value for data diversity in speech community. We also release a PyTorch-based training and evaluation framework as baseline system to promote reproducible research in this field.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Guoguo Chen|AUTHOR Guoguo Chen]]^^1^^
, [[Shuzhou Chai|AUTHOR Shuzhou Chai]]^^1^^
, [[Guan-Bo Wang|AUTHOR Guan-Bo Wang]]^^1^^
, [[Jiayu Du|AUTHOR Jiayu Du]]^^1^^
, [[Wei-Qiang Zhang|AUTHOR Wei-Qiang Zhang]]^^1^^
, [[Chao Weng|AUTHOR Chao Weng]]^^2^^
, [[Dan Su|AUTHOR Dan Su]]^^2^^
, [[Daniel Povey|AUTHOR Daniel Povey]]^^3^^
, [[Jan Trmal|AUTHOR Jan Trmal]]^^4^^
, [[Junbo Zhang|AUTHOR Junbo Zhang]]^^3^^
, [[Mingjie Jin|AUTHOR Mingjie Jin]]^^2^^
, [[Sanjeev Khudanpur|AUTHOR Sanjeev Khudanpur]]^^4^^
, [[Shinji Watanabe|AUTHOR Shinji Watanabe]]^^4^^
, [[Shuaijiang Zhao|AUTHOR Shuaijiang Zhao]]^^5^^
, [[Wei Zou|AUTHOR Wei Zou]]^^5^^
, [[Xiangang Li|AUTHOR Xiangang Li]]^^5^^
, [[Xuchen Yao|AUTHOR Xuchen Yao]]^^6^^
, [[Yongqing Wang|AUTHOR Yongqing Wang]]^^3^^
, [[Zhao You|AUTHOR Zhao You]]^^2^^
, [[Zhiyong Yan|AUTHOR Zhiyong Yan]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^SpeechColab, China; ^^2^^Tencent, China; ^^3^^Xiaomi, China; ^^4^^Johns Hopkins University, USA; ^^5^^KE, China; ^^6^^Seasalt AI, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3670–3674
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper introduces GigaSpeech, an evolving, multi-domain English speech recognition corpus with 10,000 hours of high quality labeled audio suitable for supervised training, and 33,000 hours of total audio suitable for semi-supervised and unsupervised training. Around 33,000 hours of transcribed audio is first collected from audiobooks, podcasts and YouTube, covering both read and spontaneous speaking styles, and a variety of topics, such as arts, science, sports, etc. A new forced alignment and segmentation pipeline is proposed to create sentence segments suitable for speech recognition training, and to filter out segments with low-quality transcription. For system training, GigaSpeech provides five subsets of different sizes, 10h, 250h, 1000h, 2500h, and 10000h. For our 10,000-hour //XL// training subset, we cap the word error rate at 4% during the filtering/ validation stage, and for all our other smaller training subsets, we cap it at 0%. The //DEV// and //TEST// evaluation sets, on the other hand, are re-processed by professional human transcribers to ensure high transcription quality. Baseline systems are provided for popular speech recognition toolkits, namely Athena, ESPnet, Kaldi and Pika.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[You Jin Kim|AUTHOR You Jin Kim]], [[Hee-Soo Heo|AUTHOR Hee-Soo Heo]], [[Soyeon Choe|AUTHOR Soyeon Choe]], [[Soo-Whan Chung|AUTHOR Soo-Whan Chung]], [[Yoohwan Kwon|AUTHOR Yoohwan Kwon]], [[Bong-Jin Lee|AUTHOR Bong-Jin Lee]], [[Youngki Kwon|AUTHOR Youngki Kwon]], [[Joon Son Chung|AUTHOR Joon Son Chung]]
</p><p class="cpabstractcardaffiliationlist">Naver, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3675–3679
</span></p></div>
<div class="cpabstractcardabstract"><p>In this work, we present a novel audio-visual dataset for active speaker detection in the wild. A speaker is considered ''active'' when his or her face is visible and the voice is audible simultaneously. Although active speaker detection is a crucial pre-processing step for many audio-visual tasks, there is no existing active speaker detection dataset to evaluate the performance using natural human speech. We therefore curate the //Active Speakers in the Wild// (ASW) dataset which contains videos and co-occurring speech segments with dense speech activity labels. Videos and timestamps of audible segments are parsed and adopted from VoxConverse, an existing speaker diarisation dataset that consists of videos in the wild. Face tracks are extracted from the videos and active segments are annotated based on the timestamps of VoxConverse in a semi-automatic way. Two reference systems, one is self-supervised and the other is supervised system, are evaluated on the dataset to provide the baseline performances of ASW. Cross-domain evaluation and case study are conducted, in order to show the negative effect of the dubbed videos that are excluded in ASW.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Beena Ahmed|AUTHOR Beena Ahmed]]^^1^^
, [[Kirrie J. Ballard|AUTHOR Kirrie J. Ballard]]^^2^^
, [[Denis Burnham|AUTHOR Denis Burnham]]^^3^^
, [[Tharmakulasingam Sirojan|AUTHOR Tharmakulasingam Sirojan]]^^1^^
, [[Hadi Mehmood|AUTHOR Hadi Mehmood]]^^1^^
, [[Dominique Estival|AUTHOR Dominique Estival]]^^3^^
, [[Elise Baker|AUTHOR Elise Baker]]^^3^^
, [[Felicity Cox|AUTHOR Felicity Cox]]^^4^^
, [[Joanne Arciuli|AUTHOR Joanne Arciuli]]^^5^^
, [[Titia Benders|AUTHOR Titia Benders]]^^4^^
, [[Katherine Demuth|AUTHOR Katherine Demuth]]^^4^^
, [[Barbara Kelly|AUTHOR Barbara Kelly]]^^6^^
, [[Chloé Diskin-Holdaway|AUTHOR Chloé Diskin-Holdaway]]^^7^^
, [[Mostafa Shahin|AUTHOR Mostafa Shahin]]^^1^^
, [[Vidhyasaharan Sethu|AUTHOR Vidhyasaharan Sethu]]^^1^^
, [[Julien Epps|AUTHOR Julien Epps]]^^1^^
, [[Chwee Beng Lee|AUTHOR Chwee Beng Lee]]^^3^^
, [[Eliathamby Ambikairajah|AUTHOR Eliathamby Ambikairajah]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^UNSW Sydney, Australia; ^^2^^University of Sydney, Australia; ^^3^^Western Sydney University, Australia; ^^4^^Macquarie University, Australia; ^^5^^Flinders University, Australia; ^^6^^University of Melbourne, Australia; ^^7^^University of Mel bourne, Australia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3680–3684
</span></p></div>
<div class="cpabstractcardabstract"><p>Here we present AusKidTalk [1], an audio-visual (AV) corpus of Australian children’s speech collected to facilitate the development of speech based technological solutions for children. It builds upon the technology and expertise developed through the collection of an earlier corpus of Australian adult speech, AusTalk [2,3]. This multi-site initiative was established to remedy the dire shortage of children’s speech corpora in Australia and around the world that are sufficiently sized to train accurate automated speech processing tools for children. We are collecting ~600 hours of speech from children aged 3–12 years that includes single word and sentence productions as well as narrative and emotional speech. In this paper, we discuss the key requirements for AusKidTalk and how we designed the recording setup and protocol to meet them. We also discuss key findings from our feasibility study of the recording protocol, recording tools, and user interface.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Per Fallgren|AUTHOR Per Fallgren]], [[Jens Edlund|AUTHOR Jens Edlund]]
</p><p class="cpabstractcardaffiliationlist">KTH, Sweden</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3685–3689
</span></p></div>
<div class="cpabstractcardabstract"><p>Edyson is a human-in-the-loop (HITL) tool for browsing and annotating large amounts of audio data quickly. It builds on temporally disassembled audio and massively multi-component audio environments to overcome the cumbersome time constraints that come with linear exploration of large audio data. This study adds the following contributions to Edyson: 1) We add the new use case of HITL binary classification by sample; 2) We explore the new domain oceanic hydrophone recordings with whale song, along with speech activity detection in noisy audio; 3) We propose a repeatable method of analysing the efficiency of HITL in Edyson for binary classification, specifically designed to measure the return on human time spent in a given domain. We exemplify this method on two domains, and show that for a manageable initial cost in terms of HITL, it does differentiate between suitable and unsuitable domains for our new use case — a valuable insight when working with large collections of audio.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Elena Ryumina|AUTHOR Elena Ryumina]], [[Oxana Verkholyak|AUTHOR Oxana Verkholyak]], [[Alexey Karpov|AUTHOR Alexey Karpov]]
</p><p class="cpabstractcardaffiliationlist">RAS, Russia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3690–3694
</span></p></div>
<div class="cpabstractcardabstract"><p>Commonly adapted design of emotional corpora includes multiple annotations for the same instance from several annotators. Most of the previous studies assume the ground truth to be an average between all labels or the most frequently used label. Current study shows that this approach may not be optimal for training. By filtering training data according to the level of annotation agreement, it is possible to increase the performance of the system even on unreliable test samples. However, increasing the annotation confidence inevitably leads to a loss of data. Therefore, balancing the trade-off between annotation quality and sample size requires careful investigation. This study presents experimental findings of audio-visual emotion classification on a recently introduced RAMAS dataset, which contains rich categorical partially-continuous annotation for 6 basic emotions, and reveals important conclusions about optimal formulation of ground truth. By applying the proposed approach, it is possible to achieve classification accuracy of UAR=70.51% on the speech utterances with more than 60% agreement, which surpasses previously reported values on this corpus in the literature.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mounya Elhilali|AUTHOR Mounya Elhilali]]
</p><p class="cpabstractcardaffiliationlist">Johns Hopkins University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} ////
</span></p></div>
<div class="cpabstractcardabstract"><p>As we navigate our everyday life, we are continuously parsing through a cacophony of sounds that are constantly impinging on our senses. This ability to sieve through everyday sounds and pick-out signals of interest may seem intuitive and effortless, but it is a real feat that involves complex brain networks that balance the sensory signal with our goals, expectations, attentional state and prior knowledge (what we hear, what we want to hear, what we expect to hear, what we know). A similar challenge faces computer systems that need to adapt to dynamic inputs, evolving objectives and novel surrounds. A growing body of work in neuroscience has been amending our views of processing in the brain; replacing the conventional view of ‘static’ processing with a more ‘active’ and malleable mapping that rapidly adapts to the task at hand and listening conditions. After all, humans and most animals are not specialists, but generalists whose perception is shaped by experience, context and changing behavioral demands. The talk will discuss theoretical formulations of these adaptive processes and lessons to leverage attentional feedback in algorithms for detecting and separating sounds of interest (e.g. speech, music) amidst competing distractors.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Nicolae-Cătălin Ristea|AUTHOR Nicolae-Cătălin Ristea]]^^1^^
, [[Radu Tudor Ionescu|AUTHOR Radu Tudor Ionescu]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^UPB, Romania; ^^2^^University of Bucharest, Romania</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2836–2840
</span></p></div>
<div class="cpabstractcardabstract"><p>Combining multiple machine learning models into an ensemble is known to provide superior performance levels compared to the individual components forming the ensemble. This is because models can complement each other in taking better decisions. Instead of just combining the models, we propose a self-paced ensemble learning scheme in which models learn from each other over several iterations. During the self-paced learning process based on pseudo-labeling, in addition to improving the individual models, our ensemble also gains knowledge about the target domain. To demonstrate the generality of our self-paced ensemble learning (SPEL) scheme, we conduct experiments on three audio tasks. Our empirical results indicate that SPEL significantly outperforms the baseline ensemble models. We also show that applying self-paced learning on individual models is less effective, illustrating the idea that models in the ensemble actually learn from each other.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Atsushi Kojima|AUTHOR Atsushi Kojima]]
</p><p class="cpabstractcardaffiliationlist">Advanced Media, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2841–2845
</span></p></div>
<div class="cpabstractcardabstract"><p>We explore knowledge distillation methods from nonstreaming to streaming Transformer–Transducer (T–T) models. Streaming T–T truncates future context. It leads to recognition quality degradation compared with the original T–T. In this work, we explore knowledge distillation, which minimizes internal representations in all Transformer layers between nonstreaming and streaming T–T models. In the experiment, we compared two different methods: the minimization of the L2 distance of hidden vectors and the minimization of the L2 distance of heads. All experiments were conducted using the public LibriSpeech corpus. Results of the experiment showed that hidden vector similarity-based knowledge distillation is better than multi-head similarity-based knowledge distillation. We observed 3.5% and 2.1% relative reductions in word error rate compared with the original streaming T–T in test-clean set and test-other set, respectively.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Timo Lohrenz|AUTHOR Timo Lohrenz]], [[Zhengyang Li|AUTHOR Zhengyang Li]], [[Tim Fingscheidt|AUTHOR Tim Fingscheidt]]
</p><p class="cpabstractcardaffiliationlist">Technische Universität Braunschweig, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2846–2850
</span></p></div>
<div class="cpabstractcardabstract"><p>Stream fusion, also known as system combination, is a common technique in automatic speech recognition for traditional hybrid hidden Markov model approaches, yet mostly unexplored for modern deep neural network end-to-end model architectures. Here, we investigate various fusion techniques for the all-attention-based encoder-decoder architecture known as the transformer, striving to achieve optimal fusion by investigating different fusion levels in an example single-microphone setting with fusion of standard magnitude and phase features. We introduce a novel multi-encoder learning method that performs a weighted combination of two encoder-decoder multi-head attention outputs //only// during training. Employing then only the magnitude feature encoder in inference, we are able to show consistent improvement on Wall Street Journal (WSJ) with language model and on Librispeech, without increase in runtime or parameters. Combining two such multi-encoder trained models by a simple late fusion in inference, we achieve state-of-the-art performance for transformer-based models on WSJ with a significant WER reduction of 19% relative compared to the current benchmark approach.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Salah Zaiem|AUTHOR Salah Zaiem]]^^1^^
, [[Titouan Parcollet|AUTHOR Titouan Parcollet]]^^2^^
, [[Slim Essid|AUTHOR Slim Essid]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^LTCI (UMR 5141), France; ^^2^^LIA (EA 4128), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2851–2855
</span></p></div>
<div class="cpabstractcardabstract"><p>Through solving pretext tasks, self-supervised learning (SSL) leverages unlabeled data to extract useful latent representations replacing traditional input features in the downstream task. A common pretext task consists in pretraining a SSL model on pseudo-labels derived from the original signal. This technique is particularly relevant for speech data where various meaningful signal processing features may serve as pseudo-labels. However, the process of selecting pseudo-labels, for speech or other types of data, remains mostly unexplored and currently relies on observing the results on the final downstream task. Nevertheless, this methodology is not sustainable at scale due to substantial computational (hence carbon) costs. Thus, this paper introduces a practical and theoretical framework to select relevant pseudo-labels with respect to a given downstream task. More precisely, we propose a functional estimator of the pseudo-label utility grounded in the conditional independence theory, which does not require any training. The experiments conducted on speaker recognition and automatic speech recognition validate our estimator, showing a significant correlation between the performance observed on the downstream task and the utility estimates obtained with our approach, facilitating the prospection of relevant pseudo-labels for self-supervised speech representation learning.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mohammad Zeineldeen|AUTHOR Mohammad Zeineldeen]], [[Aleksandr Glushko|AUTHOR Aleksandr Glushko]], [[Wilfried Michel|AUTHOR Wilfried Michel]], [[Albert Zeyer|AUTHOR Albert Zeyer]], [[Ralf Schlüter|AUTHOR Ralf Schlüter]], [[Hermann Ney|AUTHOR Hermann Ney]]
</p><p class="cpabstractcardaffiliationlist">RWTH Aachen University, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2856–2860
</span></p></div>
<div class="cpabstractcardabstract"><p>Attention-based encoder-decoder (AED) models learn an implicit internal language model (ILM) from the training transcriptions. The integration with an external LM trained on much more unpaired text usually leads to better performance. A Bayesian interpretation as in the hybrid autoregressive transducer (HAT) suggests dividing by the prior of the discriminative acoustic model, which corresponds to this implicit LM, similarly as in the hybrid hidden Markov model approach. The implicit LM cannot be calculated efficiently in general and it is yet unclear what are the best methods to estimate it. In this work, we compare different approaches from the literature and propose several novel methods to estimate the ILM directly from the AED model. Our proposed methods outperform all previous approaches. We also investigate other methods to suppress the ILM mainly by decreasing the capacity of the AED model, limiting the label context, and also by training the AED model together with a pre-existing LM.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Apoorv Vyas|AUTHOR Apoorv Vyas]], [[Srikanth Madikeri|AUTHOR Srikanth Madikeri]], [[Hervé Bourlard|AUTHOR Hervé Bourlard]]
</p><p class="cpabstractcardaffiliationlist">Idiap Research Institute, Switzerland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2861–2865
</span></p></div>
<div class="cpabstractcardabstract"><p>In this work, we investigate if the wav2vec 2.0 self-supervised pretraining helps mitigate the overfitting issues with connectionist temporal classification (CTC) training to reduce its performance gap with flat-start lattice-free MMI (E2E-LFMMI) for automatic speech recognition with limited training data. Towards that objective, we use the pretrained wav2vec 2.0 BASE model and fine-tune it on three different datasets including out-of-domain (Switchboard) and cross-lingual (Babel) scenarios. Our results show that for supervised adaptation of the wav2vec 2.0 model, both E2E-LFMMI and CTC achieve similar results; significantly outperforming the baselines trained only with supervised data. Fine-tuning the wav2vec 2.0 model with E2E-LFMMI and CTC we obtain the following relative WER improvements over the supervised baseline trained with E2E-LFMMI. We get relative improvements of 40% and 44% on the clean-set and 64% and 58% on the test set of Librispeech (100h) respectively. On Switchboard (300h) we obtain relative improvements of 33% and 35% respectively. Finally, for Babel languages, we obtain relative improvements of 26% and 23% on Swahili (38h) and 18% and 17% on Tagalog (84h) respectively.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Clément Le Moine|AUTHOR Clément Le Moine]], [[Nicolas Obin|AUTHOR Nicolas Obin]], [[Axel Roebel|AUTHOR Axel Roebel]]
</p><p class="cpabstractcardaffiliationlist">STMS (UMR 9912), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2866–2870
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech Emotion Recognition (SER) task has known significant improvements over the last years with the advent of Deep Neural Networks (DNNs). However, even the most successful methods are still rather failing when adaptation to specific speakers and scenarios is needed, inevitably leading to poorer performances when compared to humans. In this paper, we present novel work based on the idea of teaching the emotion recognition network about speaker identity. Our system is a combination of two ACRNN classifiers respectively dedicated to speaker and emotion recognition. The first informs the latter through a Self Speaker Attention (SSA) mechanism that is shown to considerably help to focus on emotional information of the speech signal. Speaker-dependant experiments on social attitudes database Att-HACK and IEMOCAP corpus demonstrate the effectiveness of the proposed method and achieve the state-of-the-art performance in terms of unweighted average recall.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Seong-Gyun Leem|AUTHOR Seong-Gyun Leem]]^^1^^
, [[Daniel Fulford|AUTHOR Daniel Fulford]]^^2^^
, [[Jukka-Pekka Onnela|AUTHOR Jukka-Pekka Onnela]]^^3^^
, [[David Gard|AUTHOR David Gard]]^^4^^
, [[Carlos Busso|AUTHOR Carlos Busso]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Texas at Dallas, USA; ^^2^^Boston University, USA; ^^3^^Harvard University, USA; ^^4^^San Francisco State University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2871–2875
</span></p></div>
<div class="cpabstractcardabstract"><p>When //speech emotion recognition// (SER) is applied in an actual application, the system should be able to cope with audio acquired in a noisy, unconstrained environment. Most studies on noise-robust SER require a parallel dataset with emotion labels, which is impractical to collect, or use speech with artificially added noise, which does not resemble practical conditions. This study builds upon the ladder network formulation, which can effectively compensate the environmental differences between a clean speech corpus and real-life recordings. This study proposes a decoupled ladder network, which increases the robustness of the SER system against the influences of non-stationary background noise by decoupling the last hidden layer embedding into emotion and reconstruction embeddings. This novel implementation allows the emotion embedding to focus exclusively on building a discriminative representation, without worrying about the reconstruction task. We introduce a noisy version of the MSP-Podcast database, which contains audio segments collected with a smartphone that simultaneously records sentences from the corpus and non-stationary noise at different //signal-to-noise ratios// (SNRs). We test the effectiveness of our proposed model with this corpus, showing that the decoupled ladder network can increase the performance of the regular ladder network when dealing with noisy recordings.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Efthymios Georgiou|AUTHOR Efthymios Georgiou]], [[Georgios Paraskevopoulos|AUTHOR Georgios Paraskevopoulos]], [[Alexandros Potamianos|AUTHOR Alexandros Potamianos]]
</p><p class="cpabstractcardaffiliationlist">NTUA, Greece</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2876–2880
</span></p></div>
<div class="cpabstractcardabstract"><p>A common issue when training multimodal architectures is that not all modalities contribute equally to the model’s prediction and the network tends to over-rely on the strongest modality. In this work, we present M³, a training procedure based on modality masking for deep multimodal architectures. During network training, we randomly select one modality and mask its features, forcing the model to make its prediction in the absence of this modality. This structured regularization allows the network to better exploit complementary information in input modalities. We implement M³ as a generic layer that can be integrated with any multimodal architecture. Our experiments show that M³ outperforms other masking schemes and improves performance for our strong baseline. We evaluate M³ for multimodal sentiment analysis on CMU-MOSEI, achieving results comparable to the state-of-the-art.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ondřej Klejch|AUTHOR Ondřej Klejch]], [[Electra Wallington|AUTHOR Electra Wallington]], [[Peter Bell|AUTHOR Peter Bell]]
</p><p class="cpabstractcardaffiliationlist">University of Edinburgh, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2881–2885
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper describes the CSTR submission to the Multilingual and Code-Switching ASR Challenges at Interspeech 2021. For the multilingual track of the challenge, we trained a multilingual CNN-TDNN acoustic model for Gujarati, Hindi, Marathi, Odia, Tamil and Telugu and subsequently fine-tuned the model on monolingual training data. A language model built on a mixture of training and CommonCrawl data was used for decoding. We also demonstrate that crawled data from YouTube can be successfully used to improve the performance of the acoustic model with semi-supervised training. These models together with confidence based language identification achieve the average WER of 18.1%, a 41% relative improvement compared to the provided multilingual baseline model. For the code-switching track of the challenge we again train a multilingual model on Bengali and Hindi technical lectures and we employ a language model trained on CommonCrawl Bengali and Hindi data mixed with in-domain English data, using a novel transliteration method to generate pronunciations for the English terms. The final model improves by 18% and 34% relative compared to our multilingual baseline. Both our systems were among the top-ranked entries to the challenge.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Wei Zhou|AUTHOR Wei Zhou]], [[Mohammad Zeineldeen|AUTHOR Mohammad Zeineldeen]], [[Zuoyun Zheng|AUTHOR Zuoyun Zheng]], [[Ralf Schlüter|AUTHOR Ralf Schlüter]], [[Hermann Ney|AUTHOR Hermann Ney]]
</p><p class="cpabstractcardaffiliationlist">RWTH Aachen University, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2886–2890
</span></p></div>
<div class="cpabstractcardabstract"><p>Subword units are commonly used for end-to-end automatic speech recognition (ASR), while a fully acoustic-oriented subword modeling approach is somewhat missing. We propose an acoustic data-driven subword modeling (ADSM) approach that adapts the advantages of several text-based and acoustic-based subword methods into one pipeline. With a fully acoustic-oriented label design and learning process, ADSM produces acoustic-structured subword units and acoustic-matched target sequence for further ASR training. The obtained ADSM labels are evaluated with different end-to-end ASR approaches including CTC, RNN-Transducer and attention models. Experiments on the LibriSpeech corpus show that ADSM clearly outperforms both byte pair encoding (BPE) and pronunciation-assisted subword modeling (PASM) in all cases. Detailed analysis shows that ADSM achieves acoustically more logical word segmentation and more balanced sequence length, and thus, is suitable for both time-synchronous and label-synchronous models. We also briefly describe how to apply acoustic-based subword regularization and unseen text segmentation using ADSM.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Wei Zhou|AUTHOR Wei Zhou]], [[Albert Zeyer|AUTHOR Albert Zeyer]], [[André Merboldt|AUTHOR André Merboldt]], [[Ralf Schlüter|AUTHOR Ralf Schlüter]], [[Hermann Ney|AUTHOR Hermann Ney]]
</p><p class="cpabstractcardaffiliationlist">RWTH Aachen University, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2891–2895
</span></p></div>
<div class="cpabstractcardabstract"><p>With the advent of direct models in automatic speech recognition (ASR), the formerly prevalent frame-wise acoustic modeling based on hidden Markov models (HMM) diversified into a number of modeling architectures like encoder-decoder attention models, transducer models and segmental models (direct HMM). While transducer models stay with a frame-level model definition, segmental models are defined on the level of label segments directly. While (soft-)attention-based models avoid explicit alignment, transducer and segmental approach internally do model alignment, either by segment hypotheses or, more implicitly, by emitting so-called blank symbols. In this work, we prove that the widely used class of RNN-Transducer models and segmental models (direct HMM) are equivalent and therefore show equal modeling power. It is shown that blank probabilities translate into segment length probabilities and vice versa. In addition, we provide initial experiments investigating decoding and beam-pruning, comparing time-synchronous and label-/segment-synchronous search strategies and their properties using the same underlying model.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Abbas Khosravani|AUTHOR Abbas Khosravani]]^^1^^
, [[Philip N. Garner|AUTHOR Philip N. Garner]]^^1^^
, [[Alexandros Lazaridis|AUTHOR Alexandros Lazaridis]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Idiap Research Institute, Switzerland; ^^2^^Swisscom, Switzerland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2896–2900
</span></p></div>
<div class="cpabstractcardabstract"><p>We describe a speech recognition system for Swiss German, a dialectal spoken language in German-speaking Switzerland. Swiss German has no standard orthography, with a significant variation in its written form. To alleviate the uncertainty associated with this variability, we automatically generate a lexicon from which multiple written forms of a given word in any dialect can be generated. The lexicon is built from a small (incomplete) handcrafted lexicon designed by linguistic experts and contains forms of common words in various Swiss German dialects. We exploit the powerful speech representation of self-supervised acoustic pre-training (wav2vec) to address the low-resource nature of the spoken dialects. The proposed approach results in an overall relative improvement of 9% word error rate compared to one based on an expert-generated lexicon for our TV Box voice assistant application.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ekaterina Egorova|AUTHOR Ekaterina Egorova]], [[Hari Krishna Vydana|AUTHOR Hari Krishna Vydana]], [[Lukáš Burget|AUTHOR Lukáš Burget]], [[Jan Černocký|AUTHOR Jan Černocký]]
</p><p class="cpabstractcardaffiliationlist">Brno University of Technology, Czechia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2901–2905
</span></p></div>
<div class="cpabstractcardabstract"><p>This work explores the effectiveness of detecting positions of out-of-vocabulary words (OOVs) in a decoded utterance using attention weights and CTC per-frame outputs of an end-to-end system predicting word sequences. We show that the end-to-end approach can be effective for the task of OOV detection. CTC alignments are shown to provide better temporal information about the positions of OOV words than attention, and therefore are more suitable for the task. The detected positions of OOV occurrences are utilized for the recurrent OOV recovery task in which probabilistic representations of the pronunciations of the detected OOVs are clustered in order to find repeating words. Improved detection results are shown to correlate with better performance of the recovery of recurrent OOVs.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Matthew Wiesner|AUTHOR Matthew Wiesner]]^^1^^
, [[Mousmita Sarma|AUTHOR Mousmita Sarma]]^^2^^
, [[Ashish Arora|AUTHOR Ashish Arora]]^^1^^
, [[Desh Raj|AUTHOR Desh Raj]]^^1^^
, [[Dongji Gao|AUTHOR Dongji Gao]]^^1^^
, [[Ruizhe Huang|AUTHOR Ruizhe Huang]]^^1^^
, [[Supreet Preet|AUTHOR Supreet Preet]]^^2^^
, [[Moris Johnson|AUTHOR Moris Johnson]]^^2^^
, [[Zikra Iqbal|AUTHOR Zikra Iqbal]]^^2^^
, [[Nagendra Goel|AUTHOR Nagendra Goel]]^^2^^
, [[Jan Trmal|AUTHOR Jan Trmal]]^^1^^
, [[Leibny Paola García Perera|AUTHOR Leibny Paola García Perera]]^^1^^
, [[Sanjeev Khudanpur|AUTHOR Sanjeev Khudanpur]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Johns Hopkins University, USA; ^^2^^GoVivace, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2906–2910
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we describe the JHU-GoVivace submission for subtask 2 (code-switching task) of the Multilingual and Code-switching ASR challenges for low resource Indian languages. We built a hybrid HMM-DNN system with several improvements over the provided baseline in terms of lexical, language, and acoustic modeling. For lexical modeling, we investigate using unified pronunciations and phonesets derived from the baseline lexicon and publicly available Wikipron lexicons in Bengali and Hindi to expand the pronunciation lexicons. We explore several neural network architectures, along with supervised pretraining and multilingual training for acoustic modeling. We also describe how we used large externally crawled web text for language modeling. Since the challenge data contain artefacts such as misalignments, various data cleanup methods are explored, including acoustic-driven pronunciation learning to help discover Indian-accented pronunciations for English words as well as transcribed punctuation. As a result of these efforts, our best systems achieve transliterated WERs of 19.5% and 23.2% on the non-duplicated development sets for Hindi-English and Bengali-English, respectively.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Benjamin Milde|AUTHOR Benjamin Milde]]^^1^^
, [[Tim Fischer|AUTHOR Tim Fischer]]^^2^^
, [[Steffen Remus|AUTHOR Steffen Remus]]^^2^^
, [[Chris Biemann|AUTHOR Chris Biemann]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universität Hamburg, Germany; ^^2^^Universität Hamburg, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3311–3312
</span></p></div>
<div class="cpabstractcardabstract"><p>We present MoM (Minutes of Meeting) bot, an automatic meeting transcription system with real-time recognition, summarization and visualization capabilities. MoM works without any cloud processing and does not require a network connection. Every processing step is local, even its speech recognition component, to address privacy concerns of meetings. MoM can be used to assisted writing a (summarized) protocol of a meeting, but may also help the hearing-impaired to follow a discussion. We address meeting-related issues, e.g. local vocabulary of an organization or company with active learning of G2P models and custom vocabulary extensions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Alexander Wilbrandt|AUTHOR Alexander Wilbrandt]], [[Simon Stone|AUTHOR Simon Stone]], [[Peter Birkholz|AUTHOR Peter Birkholz]]
</p><p class="cpabstractcardaffiliationlist">Technische Universität Dresden, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3313–3314
</span></p></div>
<div class="cpabstractcardabstract"><p>Articulatory data can be collected using numerous modalities, such as video, ultrasound, electromagnetic articulography, or palatographic techniques. Every measurement technique requires software to visualize the incoming data and export the data for further analysis. This has led to an increase of available recording software over the past decades, including properly maintained software in regular use but also many abandoned and dead projects. In this paper, we present a new framework for real-time, simultaneous recording of acoustic and articulatory data. With the release of the Articulatory Data Recorder, our aim is to provide the experimental phonetics and articulatory research community with a common framework that is simple to use and easy to extend. It is specifically designed to cover the most common use cases in experimental phonetics: Elicit speech utterances using text prompts and record simultaneous audio and articulatory data. By following the FURPS+-system, we offer a combination of high performance and a low barrier of entrance for enrollment of any new articulatory measurement technique. The current version already supports various palatographic measurement techniques in use at our institute and future work will incorporate feedback and feature requests from the community.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Joan Codina-Filbà|AUTHOR Joan Codina-Filbà]]^^1^^
, [[Guillermo Cámbara|AUTHOR Guillermo Cámbara]]^^1^^
, [[Alex Peiró-Lilja|AUTHOR Alex Peiró-Lilja]]^^1^^
, [[Jens Grivolla|AUTHOR Jens Grivolla]]^^1^^
, [[Roberto Carlini|AUTHOR Roberto Carlini]]^^1^^
, [[Mireia Farrús|AUTHOR Mireia Farrús]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universitat Pompeu Fabra, Spain; ^^2^^Universitat de Barcelona, Spain</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3315–3316
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents the integration of a speech-to-speech translation service into a Telegram bot as a part of the EU funded INGENIOUS project. The bot is thought as a multilingual communication channel where First Responders talk in their own language and receive other’s messages in English. The Speech-to-Speech translation system is currently being adapted to the emergency domains, so it will correctly deal with emergency codes and geographical data.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Joanna Rownicka|AUTHOR Joanna Rownicka]], [[Kilian Sprenkamp|AUTHOR Kilian Sprenkamp]], [[Antonio Tripiana|AUTHOR Antonio Tripiana]], [[Volodymyr Gromoglasov|AUTHOR Volodymyr Gromoglasov]], [[Timo P. Kunz|AUTHOR Timo P. Kunz]]
</p><p class="cpabstractcardaffiliationlist">Aflorithmic Labs, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3317–3318
</span></p></div>
<div class="cpabstractcardabstract"><p>We describe our approach to create and deliver a custom voice for a conversational AI use-case. More specifically, we provide a voice for a Digital Einstein character, to enable human-computer interaction within the digital conversation experience. To create the voice which fits the context well, we first design a voice character and we produce the recordings which correspond to the desired speech attributes. We then model the voice. Our solution utilizes Fastspeech 2 for log-scaled mel-spectrogram prediction from phonemes and Parallel WaveGAN to generate the waveforms. The system supports a character input and gives a speech waveform at the output. We use a custom dictionary for selected words to ensure their proper pronunciation. Our proposed cloud architecture enables for fast voice delivery, making it possible to talk to the digital version of Albert Einstein in real-time.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Robert Geislinger|AUTHOR Robert Geislinger]]^^1^^
, [[Benjamin Milde|AUTHOR Benjamin Milde]]^^1^^
, [[Timo Baumann|AUTHOR Timo Baumann]]^^2^^
, [[Chris Biemann|AUTHOR Chris Biemann]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^HITeC, Germany; ^^2^^Universität Hamburg, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3319–3320
</span></p></div>
<div class="cpabstractcardabstract"><p>We present an open source plugin for live subtitling in the popular open source video conferencing software BigBlueButton. Our plugin decodes each speaker’s audio stream separately and in parallel, thereby obliviating the need for speaker diarization and seamlessly handling overlapped talk. Any Kaldi-compatible nnet3 model can be used with our plugin and we demonstrate it using freely available TDNN-HMM-based ASR models for English and German. Our subtitles can be used as they are (e.g., in loud environments) or can form the basis for further NLP processes. Our tool can also simplify the collection of remotely recorded multi-party dialogue corpora.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Dāvis Nicmanis|AUTHOR Dāvis Nicmanis]], [[Askars Salimbajevs|AUTHOR Askars Salimbajevs]]
</p><p class="cpabstractcardaffiliationlist">Tilde, Latvia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3321–3322
</span></p></div>
<div class="cpabstractcardabstract"><p>To fully enable spoken human-computer interaction, the text-to-speech (TTS) component of such a system must produce natural human-like speech and adjust the prosody according to the dialog context.
While the current publicly available TTS services can produce natural-sounding speech, they usually lack emotional expressiveness.
In this paper, we present an expressive speech synthesis prototype for the Latvian language. The prototype is integrated into our chatbot management system and enables bot designers to specify the stylistic information for each bot response, thus making the interaction with the chatbot more natural.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Pramod H. Kachare|AUTHOR Pramod H. Kachare]]^^1^^
, [[Prem C. Pandey|AUTHOR Prem C. Pandey]]^^1^^
, [[Vishal Mane|AUTHOR Vishal Mane]]^^2^^
, [[Hirak Dasgupta|AUTHOR Hirak Dasgupta]]^^1^^
, [[K.S. Nataraj|AUTHOR K.S. Nataraj]]^^1^^
, [[Akshada Rathod|AUTHOR Akshada Rathod]]^^2^^
, [[Sheetal K. Pathak|AUTHOR Sheetal K. Pathak]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^IIT Bombay, India; ^^2^^Digital India, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3323–3324
</span></p></div>
<div class="cpabstractcardabstract"><p>An app is presented as a speech-training aid for providing visual feedback of articulatory efforts using information obtained from the utterances’ audiovisual recording. It has two panels to enable comparison between the articulatory efforts of the learner and the teacher or a pre-recorded reference speaker. The visual feedback consists of a slow-motion animation of lateral vocal tract shape, level, and pitch, and time-aligned display of the frontal view of the speaker’s face along with playback of the time-scaled speech signal. The app comprises a graphical user interface and modules for signal acquisition, analysis, and animation. It is developed using Python as a Windows-based app and may be accessed remotely through a web browser.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jing Li|AUTHOR Jing Li]]^^1^^
, [[Binling Wang|AUTHOR Binling Wang]]^^1^^
, [[Yiming Zhi|AUTHOR Yiming Zhi]]^^1^^
, [[Zheng Li|AUTHOR Zheng Li]]^^1^^
, [[Lin Li|AUTHOR Lin Li]]^^1^^
, [[Qingyang Hong|AUTHOR Qingyang Hong]]^^1^^
, [[Dong Wang|AUTHOR Dong Wang]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Xiamen University, China; ^^2^^Tsinghua University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3251–3255
</span></p></div>
<div class="cpabstractcardabstract"><p>The fifth Oriental Language Recognition (OLR) Challenge focuses on language recognition in a variety of complex environments to promote its development. The OLR 2020 Challenge includes three tasks: (1) cross-channel language identification, (2) dialect identification, and (3) noisy language identification. We choose //C//,,avg,, as the principle evaluation metric, and the Equal Error Rate (EER) as the secondary metric. There were 58 teams participating in this challenge and one third of the teams submitted valid results. Compared with the best baseline, the //C//,,avg,, values of Top 1 system for the three tasks were relatively reduced by 82%, 62% and 48%, respectively. This paper describes the three tasks, the database profile, and the final results. We also outline the novel approaches that improve the performance of language recognition systems most significantly, such as the utilization of auxiliary information.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Raphaël Duroselle|AUTHOR Raphaël Duroselle]], [[Md. Sahidullah|AUTHOR Md. Sahidullah]], [[Denis Jouvet|AUTHOR Denis Jouvet]], [[Irina Illina|AUTHOR Irina Illina]]
</p><p class="cpabstractcardaffiliationlist">Loria (UMR 7503), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3256–3260
</span></p></div>
<div class="cpabstractcardabstract"><p>We describe the LORIA-Inria-MULTISPEECH system submitted to the Oriental Language Recognition AP20-OLR Challenge. This system has been specifically designed to be robust to unknown conditions: channel mismatch (task 1) and noisy conditions (task 3). Three sets of studies have been carried out for elaborating the system: design of multilingual bottleneck features, selection of robust features by evaluating language recognition performance on an unobserved channel, and design of the final models with different loss functions which exploit channel diversity within the training set. Key factors for robustness to unknown conditions are data augmentation techniques, stochastic weight averaging, and regularization of TDNNs with domain robustness loss functions. The final system is the combination of four TDNNs using bottleneck features and one GMM using SDC-MFCC features. Within the AP20-OLR Challenge, it achieves the top performance for tasks 1 and 3 with a //C//,,avg,, of respectively 0.0239 and 0.0374. This validates the approach for generalization to unknown conditions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tianlong Kong|AUTHOR Tianlong Kong]]^^1^^
, [[Shouyi Yin|AUTHOR Shouyi Yin]]^^1^^
, [[Dawei Zhang|AUTHOR Dawei Zhang]]^^2^^
, [[Wang Geng|AUTHOR Wang Geng]]^^2^^
, [[Xin Wang|AUTHOR Xin Wang]]^^2^^
, [[Dandan Song|AUTHOR Dandan Song]]^^1^^
, [[Jinwen Huang|AUTHOR Jinwen Huang]]^^2^^
, [[Huiyu Shi|AUTHOR Huiyu Shi]]^^1^^
, [[Xiaorui Wang|AUTHOR Xiaorui Wang]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Tsinghua University, China; ^^2^^Kwai, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3261–3265
</span></p></div>
<div class="cpabstractcardabstract"><p>Time Delay Neural Networks (TDNN)-based methods are widely used in dialect identification. However, in previous work with TDNN application, subtle variant is being neglected in different feature scales. To address this issue, we propose a new architecture, named dynamic multi-scale convolution, which consists of dynamic kernel convolution, local multi-scale learning, and global multi-scale pooling. Dynamic kernel convolution captures features between short-term and long-term context adaptively. Local multi-scale learning, which represents multi-scale features at a granular level, is able to increase the range of receptive fields for convolution operation. Besides, global multi-scale pooling is applied to aggregate features from different bottleneck layers in order to collect information from multiple aspects. The proposed architecture significantly outperforms state-of-the-art system on the AP20-OLR-dialect-task of oriental language recognition (OLR) challenge 2020, with the best average cost performance (//C//,,avg,,) of 0.067 and the best equal error rate (EER) of 6.52%. Compared with the known best results, our method achieves 9% of //C//,,avg,, and 45% of EER relative improvement, respectively. Furthermore, the parameters of proposed model are 91% fewer than the best known model.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ding Wang|AUTHOR Ding Wang]]^^1^^
, [[Shuaishuai Ye|AUTHOR Shuaishuai Ye]]^^1^^
, [[Xinhui Hu|AUTHOR Xinhui Hu]]^^1^^
, [[Sheng Li|AUTHOR Sheng Li]]^^2^^
, [[Xinkang Xu|AUTHOR Xinkang Xu]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^HiThink RoyalFlush, China; ^^2^^NICT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3266–3270
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we propose an end-to-end (E2E) dialect identification system trained using transfer learning from a multilingual automatic speech recognition (ASR) model. This is also an extension of our submitted system to the Oriental Language Recognition Challenge 2020 (AP20-OLR). We verified its applicability using the dialect identification (DID) task of the AP20-OLR. First, we trained a robust conformer-based joint connectionist temporal classification (CTC) /attention multilingual E2E ASR model using the training corpora of eight languages, independent of the target dialects. Second, we initialized the E2E-based classifier with the ASR model’s shared encoder using a transfer learning approach. Finally, we trained the classifier on the target dialect corpus. We obtained the final classifier by selecting the best model from the following: (1) the averaged model in term of the loss values; and (2) the averaged model in term of classification accuracy.
Our experiments on the DID test set of the AP20-OLR demonstrated that significant identification improvements were achieved for three Chinese dialects. Our system outperforms that of the winning team of the AP20-OLR, with the largest relative reductions of 19.5% in //C//,,avg,, and 25.2% in EER.</p></div>
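The two mechanical ingredients mentioned above, initializing the classifier from the ASR model's shared encoder and averaging checkpoints before selection, can be sketched as follows. The `encoder.` parameter prefix, file paths, and selection criterion are assumptions for illustration only.

```python
import torch

def init_from_asr_encoder(classifier, asr_ckpt_path):
    """Copy shared-encoder weights from a multilingual ASR checkpoint into the classifier.

    Assumes both models keep their encoder parameters under an 'encoder.' prefix;
    the naming convention and path are illustrative.
    """
    asr_state = torch.load(asr_ckpt_path, map_location="cpu")
    enc_state = {k: v for k, v in asr_state.items() if k.startswith("encoder.")}
    classifier.load_state_dict(enc_state, strict=False)   # remaining layers keep their initialization
    return classifier

def average_checkpoints(paths):
    """Average the parameters of several saved checkpoints (e.g. the best ones by dev loss)."""
    avg = torch.load(paths[0], map_location="cpu")
    for p in paths[1:]:
        state = torch.load(p, map_location="cpu")
        for k in avg:
            avg[k] = avg[k] + state[k]
    # Integer buffers (e.g. BatchNorm counters) would need special handling in practice.
    return {k: v / len(paths) for k, v in avg.items()}
```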
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Haibin Yu|AUTHOR Haibin Yu]]^^1^^
, [[Jing Zhao|AUTHOR Jing Zhao]]^^1^^
, [[Song Yang|AUTHOR Song Yang]]^^2^^
, [[Zhongqin Wu|AUTHOR Zhongqin Wu]]^^2^^
, [[Yuting Nie|AUTHOR Yuting Nie]]^^1^^
, [[Wei-Qiang Zhang|AUTHOR Wei-Qiang Zhang]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Tsinghua University, China; ^^2^^TAL, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3271–3275
</span></p></div>
<div class="cpabstractcardabstract"><p>Unsupervised pretrained models have been proven to rival or even outperform supervised systems in various speech recognition tasks. However, their performance for language recognition is still left to be explored. In this paper, we construct several language recognition systems based on existing unsupervised pretraining approaches, and explore their credibility and performance to learn high-level generalization of language. We discover that unsupervised pretrained models capture expressive and highly linear-separable features. With these representations, language recognition can perform well even when the classifiers are relatively simple or only a small amount of labeled data is available. Although linear classifiers are usable, neural nets with RNN structures improve the results. Meanwhile, unsupervised pretrained models are able to gain refined representations on audio frame level that are strongly coupled with the acoustic features of the input sequence. Therefore these features contain redundant information of speakers and channels with few relations to the identity of the language. This nature of unsupervised pretrained models causes a performance degradation in language recognition tasks on cross-channel tests.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zheng Li|AUTHOR Zheng Li]], [[Yan Liu|AUTHOR Yan Liu]], [[Lin Li|AUTHOR Lin Li]], [[Qingyang Hong|AUTHOR Qingyang Hong]]
</p><p class="cpabstractcardaffiliationlist">Xiamen University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3276–3280
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper proposes an additive phoneme-aware margin softmax (APM-Softmax) loss to train the multi-task learning network with phonetic information for language recognition. In additive margin softmax (AM-Softmax) loss, the margin is set as a constant during the entire training for all training samples, and that is a suboptimal method since the recognition difficulty varies in training samples. In additive angular margin softmax (AAM-Softmax) loss, the additional angular margin is set as a constant as well. In this paper, we propose an APM-Softmax loss for language recognition with phoneitc multi-task learning, in which the additive phoneme-aware margin is automatically tuned for different training samples. More specifically, the margin of language recognition is adjusted according to the results of phoneme recognition. Experiments are reported on Oriental Language Recognition (OLR) datasets, and the proposed method improves AM-Softmax loss and AAM-Softmax loss in different language recognition testing conditions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Nataly Jahchan|AUTHOR Nataly Jahchan]]^^1^^
, [[Florentin Barbier|AUTHOR Florentin Barbier]]^^2^^
, [[Ariyanidevi Dharma Gita|AUTHOR Ariyanidevi Dharma Gita]]^^1^^
, [[Khaled Khelif|AUTHOR Khaled Khelif]]^^2^^
, [[Estelle Delpech|AUTHOR Estelle Delpech]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^APSYS, France; ^^2^^Airbus, France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3281–3285
</span></p></div>
<div class="cpabstractcardabstract"><p>Air Traffic Control (ATC) communications are a typical example where Automatic Speech Recognition could face various challenges: audio data are quite noisy due to the characteristics of capturing mechanisms. All speakers involved use a specific English-based phraseology and a significant number of pilots and controllers are non-native English speakers. The aim of this work is to enhance pilot-ATC communications by adding a Speech to Text (STT) capability that will transcribe ATC speech into text on the cockpit interfaces to help the pilot understand ATC speech in a more optimal manner (be able to verify what he/she heard on the radio by looking at the text transcription, be able to decipher non-native English accents from controllers, not lose time asking the ATC to repeat the message several times). In this paper, we first describe an accent analysis study which was carried out both on a theoretical level but also with the help of feedback from several hundred airline pilots. Then, we present the dataset that was set up for this work. Finally, we describe the experiments we have implemented and the impact of the speaker accent on the performance of a speech to text engine.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Igor Szöke|AUTHOR Igor Szöke]]^^1^^
, [[Santosh Kesiraju|AUTHOR Santosh Kesiraju]]^^2^^
, [[Ondřej Novotný|AUTHOR Ondřej Novotný]]^^2^^
, [[Martin Kocour|AUTHOR Martin Kocour]]^^2^^
, [[Karel Veselý|AUTHOR Karel Veselý]]^^2^^
, [[Jan Černocký|AUTHOR Jan Černocký]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Brno University of Technology, Czechia; ^^2^^Brno University of Technology, Czechia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3286–3290
</span></p></div>
<div class="cpabstractcardabstract"><p>Developing in-cockpit voice enabled applications require a real-world dataset with labels and annotations. We launched a community platform for collecting the Air-Traffic Control (ATC) speech, world-wide in the ATCO² project. Filtering out non-English speech is one of the main components in the data processing pipeline. The proposed English Language Detection (ELD) system is based on the embeddings from Bayesian subspace multinomial model. It is trained on the word confusion network from an ASR system. It is robust, easy to train, and light weighted. We achieved 0.0439 equal-error-rate (EER), a 50% relative reduction as compared to the state-of-the-art acoustic ELD system based on x-vectors, in the in-domain scenario. Further, we achieved an EER of 0.1352, a 33% relative reduction as compared to the acoustic ELD, in the unseen language (out-of-domain) condition. We plan to publish the evaluation dataset from the ATCO² project.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Oliver Ohneiser|AUTHOR Oliver Ohneiser]]^^1^^
, [[Seyyed Saeed Sarfjoo|AUTHOR Seyyed Saeed Sarfjoo]]^^2^^
, [[Hartmut Helmke|AUTHOR Hartmut Helmke]]^^1^^
, [[Shruthi Shetty|AUTHOR Shruthi Shetty]]^^1^^
, [[Petr Motlicek|AUTHOR Petr Motlicek]]^^2^^
, [[Matthias Kleinert|AUTHOR Matthias Kleinert]]^^1^^
, [[Heiko Ehr|AUTHOR Heiko Ehr]]^^1^^
, [[Šarūnas Murauskas|AUTHOR Šarūnas Murauskas]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^DLR, Germany; ^^2^^Idiap Research Institute, Switzerland; ^^3^^Oro navigacija, Lithuania</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3291–3295
</span></p></div>
<div class="cpabstractcardabstract"><p>The maturity of automatic speech recognition (ASR) systems at controller working positions is currently a highly relevant technological topic in air traffic control (ATC). However, ATC service providers are less interested in pure word error rate (WER). They want to see benefits of ASR applications for ATC. Such applications transform recognized word sequences into semantic meanings, i.e., a number of related concepts such as callsign, type, value, unit, etc., which are combined to form commands. Digitized concepts or recognized commands can enter ATC systems based on an ontology for utterance annotation agreed between European ATC stakeholders. Command recognition (CR) has already been performed in approach control. However, spoken utterances of tower controllers are longer, include more free speech, and contain other command types than in approach. An automatic CR rate of 95.8% is achievable on perfect word recognition, i.e., manually transcribed audio recordings (gold transcriptions), taken from Lithuanian controllers in a multiple remote tower environment. This paper presents CR results for various speech-to-text models with different WERs on tower utterances. Although WERs were around 9%, we achieve CR rates of 85%. CR rates only slightly decrease with higher WERs, which enables to bring ASR applications closer to operational ATC environment.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Juan Zuluaga-Gomez|AUTHOR Juan Zuluaga-Gomez]]^^1^^
, [[Iuliia Nigmatulina|AUTHOR Iuliia Nigmatulina]]^^1^^
, [[Amrutha Prasad|AUTHOR Amrutha Prasad]]^^1^^
, [[Petr Motlicek|AUTHOR Petr Motlicek]]^^1^^
, [[Karel Veselý|AUTHOR Karel Veselý]]^^2^^
, [[Martin Kocour|AUTHOR Martin Kocour]]^^2^^
, [[Igor Szöke|AUTHOR Igor Szöke]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Idiap Research Institute, Switzerland; ^^2^^Brno University of Technology, Czechia; ^^3^^ReplayWell, Czechia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3296–3300
</span></p></div>
<div class="cpabstractcardabstract"><p>Air traffic management and specifically air-traffic control (ATC) rely mostly on voice communications between Air Traffic Controllers (ATCos) and pilots. In most cases, these voice communications follow a well-defined grammar that could be leveraged in Automatic Speech Recognition (ASR) technologies. The callsign used to address an airplane is an essential part of all ATCo-pilot communications. We propose a two-step approach to add contextual knowledge during semi-supervised training to reduce the ASR system error rates at recognizing the part of the utterance that contains the callsign. Initially, we represent in a WFST the contextual knowledge (i.e. air-surveillance data) of an ATCo-pilot communication. Then, during Semi-Supervised Learning (SSL) the contextual knowledge is added by second-pass decoding (i.e. lattice re-scoring). Results show that ‘unseen domains’ (e.g. data from airports not present in the supervised training data) are further aided by contextual SSL when compared to standalone SSL. For this task, we introduce the Callsign Word Error Rate (CA-WER) as an evaluation metric, which only assesses ASR performance of the spoken callsign in an utterance. We obtained a 32.1% CA-WER relative improvement applying SSL with an additional 17.5% CA-WER improvement by adding contextual knowledge during SSL on a challenging ATC-based test set gathered from LiveATC.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Martin Kocour|AUTHOR Martin Kocour]]^^1^^
, [[Karel Veselý|AUTHOR Karel Veselý]]^^1^^
, [[Alexander Blatt|AUTHOR Alexander Blatt]]^^2^^
, [[Juan Zuluaga Gomez|AUTHOR Juan Zuluaga Gomez]]^^3^^
, [[Igor Szöke|AUTHOR Igor Szöke]]^^1^^
, [[Jan Černocký|AUTHOR Jan Černocký]]^^1^^
, [[Dietrich Klakow|AUTHOR Dietrich Klakow]]^^2^^
, [[Petr Motlicek|AUTHOR Petr Motlicek]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Brno University of Technology, Czechia; ^^2^^Universität des Saarlandes, Germany; ^^3^^Idiap Research Institute, Switzerland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3301–3305
</span></p></div>
<div class="cpabstractcardabstract"><p>Contextual adaptation of ASR can be very beneficial for multi-accent and often noisy Air-Traffic Control (ATC) speech. Our focus is call-sign recognition, which can be used to track conversations of ATC operators with individual airplanes. We developed a two-stage boosting strategy, consisting of HCLG boosting and Lattice boosting. Both are implemented as WFST compositions and the contextual information is specific to each utterance. In HCLG boosting we give score discounts to individual words, while in Lattice boosting the score discounts are given to word sequences. The context data have origin in surveillance database of OpenSky Network. From this, we obtain lists of call-signs that are made more likely to appear in the best hypothesis of ASR. This also improves the accuracy of the NLU module that recognizes the call-signs from the best hypothesis of ASR.
As part of the ATCO² project, we collected the liveatc test set2. The boosting of call-signs leads to a 4.7% absolute WER improvement and a 27.1% absolute increase in Call-Sign recognition Accuracy (CSA). Our best result of 82.9% CSA is quite good, given that the data is noisy and the WER of 28.4% is relatively high. We believe there is still room for improvement.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Benjamin Elie|AUTHOR Benjamin Elie]]^^1^^
, [[Jodie Gauvain|AUTHOR Jodie Gauvain]]^^2^^
, [[Jean-Luc Gauvain|AUTHOR Jean-Luc Gauvain]]^^1^^
, [[Lori Lamel|AUTHOR Lori Lamel]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^LISN (UMR 9015), France; ^^2^^Vocapia Research, France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3306–3310
</span></p></div>
<div class="cpabstractcardabstract"><p>Wearing an oxygen mask changes the speech production of speakers. It indeed modifies the vocal apparatus and perturbs the articulatory movements of the speaker. This paper studies the impact of the oxygen mask of military aircraft pilots on formant trajectories, both dynamically (variations of the formants at a utterance level) and globally (mean value at the utterance level) for 12 speakers.
A comparative analysis of speech collected with and without an oxygen mask shows that the mask has a significant impact on the formant trajectories, both on the mean values and on the formant variations at the utterance level. This impact is strongly dependent on the speaker and also on the mask model. These observations suggest that the articulatory movements of the speaker are modified by the presence of the mask.
These observations are validated via a preliminary ASR experiment that uses a data augmentation technique based on articulatory perturbations that are driven by our experimental observations.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Wei Xue|AUTHOR Wei Xue]], [[Roeland van Hout|AUTHOR Roeland van Hout]], [[Fleur Boogmans|AUTHOR Fleur Boogmans]], [[Mario Ganzeboom|AUTHOR Mario Ganzeboom]], [[Catia Cucchiarini|AUTHOR Catia Cucchiarini]], [[Helmer Strik|AUTHOR Helmer Strik]]
</p><p class="cpabstractcardaffiliationlist">Radboud Universiteit, The Netherlands</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2911–2915
</span></p></div>
<div class="cpabstractcardabstract"><p>We investigated speech intelligibility in dysarthric and non-dysarthric speakers as measured by two commonly used metrics, ratings through the Visual Analogue Scale (VAS) and word accuracy (AcW) through orthographic transcriptions. To gain a better understanding of how acoustic-phonetic correlates could be employed to obtain more objective measures of speech intelligibility and a better classification of dysarthric and non-dysarthric speakers, we studied the relation between these measures of intelligibility and some important acoustic-phonetic correlates. We found that the two intelligibility measures are related, but distinct, and that they might refer to different components of the intelligibility construct. The acoustic-phonetic features showed no difference in the mean values between the two speaker types at the utterance level, but more than half of them played a role in classifying the two speaker types. We computed an acoustic-phonetic probability index (API) at the speaker level. API is moderately correlated to VAS ratings but not correlated to AcW. In addition, API and VAS complement each other in classifying dysarthric and non-dysarthric speakers. This suggests that the intelligibility measures assigned by human raters and acoustic-phonetic features relate to different constructs of intelligibility.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Disong Wang|AUTHOR Disong Wang]]^^1^^
, [[Liqun Deng|AUTHOR Liqun Deng]]^^2^^
, [[Yu Ting Yeung|AUTHOR Yu Ting Yeung]]^^2^^
, [[Xiao Chen|AUTHOR Xiao Chen]]^^2^^
, [[Xunying Liu|AUTHOR Xunying Liu]]^^1^^
, [[Helen Meng|AUTHOR Helen Meng]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^CUHK, China; ^^2^^Huawei Technologies, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2956–2960
</span></p></div>
<div class="cpabstractcardabstract"><p>Dysarthric speech detection (DSD) systems aim to detect characteristics of the neuromotor disorder from speech. Such systems are particularly susceptible to domain mismatch where the training and testing data come from the source and target domains respectively, but the two domains may differ in terms of speech stimuli, disease etiology, etc. It is hard to acquire labelled data in the target domain, due to high costs of annotating sizeable datasets. This paper makes a first attempt to formulate cross-domain DSD as an unsupervised domain adaptation (UDA) problem. We use labelled source-domain data and unlabelled target-domain data, and propose a multi-task learning strategy, including dysarthria presence classification (DPC), domain adversarial training (DAT) and mutual information minimization (MIM), which aim to learn dysarthria-discriminative and domain-invariant biomarker embeddings. Specifically, DPC helps biomarker embeddings capture critical indicators of dysarthria; DAT forces biomarker embeddings to be indistinguishable in source and target domains; and MIM further reduces the correlation between biomarker embeddings and domain-related cues. By treating the UASPEECH and TORGO corpora respectively as the source and target domains, experiments show that the incorporation of UDA attains absolute increases of 22.2% and 20.0% respectively in utterance-level weighted average recall and speaker-level accuracy.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tanuka Bhattacharjee|AUTHOR Tanuka Bhattacharjee]]^^1^^
, [[Jhansi Mallela|AUTHOR Jhansi Mallela]]^^1^^
, [[Yamini Belur|AUTHOR Yamini Belur]]^^2^^
, [[Nalini Atchayaram|AUTHOR Nalini Atchayaram]]^^2^^
, [[Ravi Yadav|AUTHOR Ravi Yadav]]^^2^^
, [[Pradeep Reddy|AUTHOR Pradeep Reddy]]^^2^^
, [[Dipanjan Gope|AUTHOR Dipanjan Gope]]^^1^^
, [[Prasanta Kumar Ghosh|AUTHOR Prasanta Kumar Ghosh]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Indian Institute of Science, India; ^^2^^NIMHANS, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2961–2965
</span></p></div>
<div class="cpabstractcardabstract"><p>Parkinson’s disease (PD) affects both source and vocal tract components of speech. Various speech cues explored in literature for automatic classification of individuals with PD and healthy controls (HC) implicitly carry information about both these components. This work explicitly analyzes the contribution of source and vocal tract attributes toward automatic PD vs. HC classification, which has not been done earlier to the best of our knowledge. Here fundamental frequency (f,,o,,) is used to capture source information. For quantifying vocal tract information, speech waveforms are converted to unvoiced forms and mel-frequency cepstral coefficients (MFCC), denoted by voicing-removed MFCC, are obtained from them. Experimental results suggest that (1) the relative merit of source and vocal tract cues in classifying PD vs. HC largely depends on the speech task being considered, (2) both cues complement each other across all tasks, (3) while MFCC encodes both source and vocal tract features, source information captured by f,,o,, is different and further complements MFCC when the classifiers are trained and tested under clean or matched noise conditions, thereby enabling the feature-level fusion of f,,o,, and MFCC to achieve the best classification accuracy, (4) under unseen noise conditions, f,,o,, alone proves to be a highly noise-robust feature.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[R’mani Haulcy|AUTHOR R’mani Haulcy]], [[James Glass|AUTHOR James Glass]]
</p><p class="cpabstractcardaffiliationlist">MIT, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2966–2970
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper introduces the Crowdsourced Language Assessment Corpus (CLAC), a speech corpus consisting of audio recordings and automatically-generated transcripts for several speech and language tasks, as well as metadata for each of the speakers. The CLAC was created to provide the community with a collection of audio samples from various speakers that could be used to learn a general representation for speech from healthy subjects, as well as complement other health-related speech datasets, which tend to be limited. In this paper, we describe the data collection protocol and summarize the contents of the dataset. We also extract timing metrics from the recordings of each task to explore what those metrics look like for a large, English-speaking population. Lastly, we provide an example of how the dataset can be used by comparing the metrics to those extracted from a small sample of Frontotemporal Dementia subjects. We hope that this dataset will help advance the state of the art in the health and speech domain.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Young-Kyung Kim|AUTHOR Young-Kyung Kim]]^^1^^
, [[Rimita Lahiri|AUTHOR Rimita Lahiri]]^^1^^
, [[Md. Nasir|AUTHOR Md. Nasir]]^^2^^
, [[So Hyun Kim|AUTHOR So Hyun Kim]]^^3^^
, [[Somer Bishop|AUTHOR Somer Bishop]]^^4^^
, [[Catherine Lord|AUTHOR Catherine Lord]]^^5^^
, [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Southern California, USA; ^^2^^Microsoft, USA; ^^3^^Cornell University, USA; ^^4^^University of California at San Francisco, USA; ^^5^^University of California at Los Angeles, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2916–2920
</span></p></div>
<div class="cpabstractcardabstract"><p>Computational methodologies have shown promise in advancing diagnostic and intervention research in the domain of //Autism Spectrum Disorder (ASD)//. Prior works have investigated speech features to assess disorder severity and also to differentiate between children with and without an ASD diagnosis. In this work, we explore short term dynamic functionals of speech features both within and across speakers to understand if local changes in speech provide information toward phenotyping of ASD.We compare the contributions of static and dynamic functionals representing conversational speech toward the clinical diagnosis state. Our results show that predictions obtained from a combination of dynamic and static functionals have comparable or superior performance to the predictions obtained from just static speech functionals. We also analyze the relationship between speech production and ASD diagnosis through correlation analyses between speech functionals and manually-derived behavioral codes related to autism severity. The experimental results support the notion that dynamic speech functionals capture complementary information which can facilitate enriched analysis of clinically-meaningful behavioral inference tasks.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Waldemar Jęśko|AUTHOR Waldemar Jęśko]]
</p><p class="cpabstractcardaffiliationlist">PSNC, Poland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2921–2925
</span></p></div>
<div class="cpabstractcardabstract"><p>We investigate vocalization recognition for people with Profound Intellectual and Multiple Disabilities using various machine learning algorithms. The amount of training data available for people with PIMD is typically significantly limited. Due to this fact, data augmentation process was used. Various types of Machine Learning algorithms were tested: k-NN, NB, DT, RDF, MLP and LSTM. During research we also tested various regularization techniques to improve recognition performance. The best results were obtained in case of MLP network with dropout and batch normalization: 90%.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Barbara Gili Fivela|AUTHOR Barbara Gili Fivela]]^^1^^
, [[Vincenzo Sallustio|AUTHOR Vincenzo Sallustio]]^^2^^
, [[Silvia Pede|AUTHOR Silvia Pede]]^^2^^
, [[Danilo Patrocinio|AUTHOR Danilo Patrocinio]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Università del Salento, Italy; ^^2^^ASL Lecce, Italy; ^^3^^Università Cattolica del Sacro Cuore, Italy</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2926–2930
</span></p></div>
<div class="cpabstractcardabstract"><p>Intelligibility is the degree to which the speech of a person may be understood by a listener, and is related to functional limitation and disability. In protocols for the clinical assessment of dysarthria, intelligibility checks are included, as well as evaluations of speech accuracy, which is more directly related to the disease severity. However, both evaluations are usually based on subjective ratings.
The aim of this work is to check the correlation between intelligibility judgements, subjectively assigned as may be the case in clinical procedures, and acoustic measures related to linguistically contrasting units. Two novelties characterize this work: a) the acoustic measurements considered in the paper relate to both segments (vowels and consonants) and prosodic-intonational phonological events (e.g., pitch accents), that is, linguistically relevant speech units; b) contexts of increasing phonetic-phonological complexity are considered, so that the phonetic characteristics challenge production accuracy, possibly affecting the realization of phonological features and intelligibility. Increasing complexity is indeed expected to challenge intelligibility and to have an impact on the correlation between intelligibility rates and acoustic measures. Results are preliminary, but confirm both 1) the correlation between acoustic measures of linguistically relevant events and speech intelligibility, at both the segmental and the prosodic-intonational level, and 2) the role of increasing phonetic-phonological complexity in enhancing the above-mentioned correlation.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Si-Ioi Ng|AUTHOR Si-Ioi Ng]], [[Cymie Wing-Yee Ng|AUTHOR Cymie Wing-Yee Ng]], [[Jingyu Li|AUTHOR Jingyu Li]], [[Tan Lee|AUTHOR Tan Lee]]
</p><p class="cpabstractcardaffiliationlist">CUHK, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2931–2935
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech sound disorder (SSD) refers to a type of developmental disorder in young children who encounter persistent difficulties in producing certain speech sounds at the expected age. Consonant errors are the major indicator of SSD in clinical assessment. Previous studies on automatic assessment of SSD revealed that detection of speech errors concerning short and transitory consonants is less satisfactory. This paper investigates a neural network based approach to detecting consonant errors in disordered speech using consonant-vowel (CV) diphone segment in comparison to using consonant monophone segment. The underlying assumption is that the vowel part of a CV segment carries important information of co-articulation from the consonant. Speech embeddings are extracted from CV segments by a recurrent neural network model. The similarity scores between the embeddings of the test segment and the reference segments are computed to determine if the test segment is the expected consonant or not. Experimental results show that using CV segments achieves improved performance on detecting speech errors concerning those “difficult” consonants reported in the previous studies.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Adam Hair|AUTHOR Adam Hair]]^^1^^
, [[Guanlong Zhao|AUTHOR Guanlong Zhao]]^^1^^
, [[Beena Ahmed|AUTHOR Beena Ahmed]]^^2^^
, [[Kirrie J. Ballard|AUTHOR Kirrie J. Ballard]]^^3^^
, [[Ricardo Gutierrez-Osuna|AUTHOR Ricardo Gutierrez-Osuna]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Texas A&M University, USA; ^^2^^UNSW Sydney, Australia; ^^3^^University of Sydney, Australia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2936–2940
</span></p></div>
<div class="cpabstractcardabstract"><p>A critical component of child speech therapy is home practice with a caregiver, who can provide feedback. However, caregivers oftentimes struggle with accurately rating speech and with perceiving pronunciation errors. One potential solution for this issue is to embed automatic mispronunciation-detection (MPD) algorithms within digital speech therapy applications. To address the need for MPD within child speech therapy, we investigated posterior-based mispronunciation detection using a custom corpus of disordered speech from children that had been manually annotated by an expert clinician. Namely, we trained a family of phoneme-specific logistic regression classifiers (LRC) and support vector machines (SVM) on log posterior probability and log posterior ratio features. Our results show that these classifiers outperformed baseline Goodness of Pronunciation scoring by 11% and 10%, respectively. Even more importantly, in an offline test, the LRC and SVM classifiers outperformed student clinicians at identifying mispronunciations by 18% and 16%, respectively. These results suggest that posterior-based mispronunciation detection may be suitable to provide at-home therapy feedback for children.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Bahman Mirheidari|AUTHOR Bahman Mirheidari]], [[Yilin Pan|AUTHOR Yilin Pan]], [[Daniel Blackburn|AUTHOR Daniel Blackburn]], [[Ronan O’Malley|AUTHOR Ronan O’Malley]], [[Heidi Christensen|AUTHOR Heidi Christensen]]
</p><p class="cpabstractcardaffiliationlist">University of Sheffield, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2941–2945
</span></p></div>
<div class="cpabstractcardabstract"><p>The widely used word vectors can be extended at the sentence level to perform a wide range of natural language processing (NLP) tasks. Recently the Bidirectional Encoder Representations from Transformers (BERT) language representation achieved state-of-the-art performance for these applications. The model is trained with punctuated and well-formed (writ-ten) text, however, the performance of the model drops significantly when the input text is the — erroneous and unpunctuated — output of automatic speech recognition (ASR). We use a sliding window and averaging approach for pre-processing text for BERT to extract features for classifying three diagnostic categories relating to cognitive impairment: neurodegenerative dis-order (ND), mild cognitive impairment (MCI), and healthy controls (HC). The in-house dataset contains the audio recordings of an intelligent virtual agent (IVA) who asks the participants several conversational questions prompts in addition to giving a picture description prompt. For the three-way classification, we achieve a 73.88% F-score (accuracy: 76.53%) using the pre-trained, uncased base BERT and for the two-way classifier (HC vs. ND) we achieve 89.80% (accuracy: 90%). We further improve these by using a prompt selection technique, reaching the F-scores of 79.98% (accuracy: 81.63%) and 93.56% (accuracy: 93.75%) respectively.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zhengjun Yue|AUTHOR Zhengjun Yue]]^^1^^
, [[Jon Barker|AUTHOR Jon Barker]]^^1^^
, [[Heidi Christensen|AUTHOR Heidi Christensen]]^^1^^
, [[Cristina McKean|AUTHOR Cristina McKean]]^^2^^
, [[Elaine Ashton|AUTHOR Elaine Ashton]]^^2^^
, [[Yvonne Wren|AUTHOR Yvonne Wren]]^^3^^
, [[Swapnil Gadgil|AUTHOR Swapnil Gadgil]]^^4^^
, [[Rebecca Bright|AUTHOR Rebecca Bright]]^^4^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Sheffield, UK; ^^2^^Newcastle University, UK; ^^3^^North Bristol NHS Trust, UK; ^^4^^Therapy Box, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2946–2950
</span></p></div>
<div class="cpabstractcardabstract"><p>A novel crowdsourcing project to gather children’s storytelling based language samples using a mobile app was undertaken across the United Kingdom. Parents’ scaffolding of children’s narratives was observed in many of the samples. This study was designed to examine the relationship of scaffolding and young children’s narrative language ability in a story retell context which is analysed at the macro-structural (total macro-structure score), the micro-structural (mean length of utterances in morphemes) and verbal productivity (total number of utterances) levels. Young children with and without scaffolding were statistically compared. The interaction between the level of scaffolding support, the grammar complexity and the narrative structure was explored. A bidirectional relationship was observed between scaffolding and young children’s narrative language ability. Young children with better performance were observed to receive less scaffolding from parents. Scaffolding was shown to support early narrative development of young children and was more able to benefit those with low-level grammatical complexity skills. It is crucial to encourage parental scaffolding to be well-attuned to the child’s narrative ability.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tong Xia|AUTHOR Tong Xia]], [[Jing Han|AUTHOR Jing Han]], [[Lorena Qendro|AUTHOR Lorena Qendro]], [[Ting Dang|AUTHOR Ting Dang]], [[Cecilia Mascolo|AUTHOR Cecilia Mascolo]]
</p><p class="cpabstractcardaffiliationlist">University of Cambridge, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2951–2955
</span></p></div>
<div class="cpabstractcardabstract"><p>Recently, sound-based COVID-19 detection studies have shown great promise to achieve scalable and prompt digital pre-screening. However, there are still two unsolved issues hindering the practice. First, collected datasets for model training are often imbalanced, with a considerably smaller proportion of users tested positive, making it harder to learn representative and robust features. Second, deep learning models are generally overconfident in their predictions. Clinically, false predictions aggravate healthcare costs. Estimation of the uncertainty of screening would aid this. To handle these issues, we propose an ensemble framework where multiple deep learning models for sound-based COVID-19 detection are developed from different but balanced subsets from original data. As such, data are utilized more effectively compared to traditional up-sampling and down-sampling approaches: an AUC of 0.74 with a sensitivity of 0.68 and a specificity of 0.69 is achieved. Simultaneously, we estimate uncertainty from the disagreement across multiple models. It is shown that false predictions often yield higher uncertainty, enabling us to suggest the users with certainty higher than a threshold to repeat the audio test on their phones or to take clinical tests if digital diagnosis still fails. This study paves the way for a more robust sound-based COVID-19 automated screening system.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Leanne Nortje|AUTHOR Leanne Nortje]], [[Herman Kamper|AUTHOR Herman Kamper]]
</p><p class="cpabstractcardaffiliationlist">Stellenbosch University, South Africa</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2971–2975
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose direct multimodal few-shot models that learn a shared embedding space of spoken words and images from only a few paired examples. Imagine an agent is shown an image along with a spoken word describing the object in the picture, e.g. //pen, book// and //eraser//. After observing a few paired examples of each class, the model is asked to identify the “book” in a set of unseen pictures. Previous work used a two-step indirect approach relying on speech-speech and image-image comparisons across the support set of given speech-image pairs. Instead, we propose two direct models which learn a single multimodal space where inputs from different modalities are directly comparable: a multimodal triplet network (MTriplet) and a multimodal correspondence autoencoder (MCAE). To train these direct models, we //mine// speech-image pairs by using the support set to pair up unlabelled in-domain speech and images. In a speech-to-image digit matching task, direct models outperform indirect models, with the MTriplet achieving the best multimodal five-shot accuracy. We show that the improvements are due to the combination of unsupervised and transfer learning in the direct models, and the absence of two-step compounding errors.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Richard Rose|AUTHOR Richard Rose]], [[Olivier Siohan|AUTHOR Olivier Siohan]], [[Anshuman Tripathi|AUTHOR Anshuman Tripathi]], [[Otavio Braga|AUTHOR Otavio Braga]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3016–3020
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper investigates an end-to-end audio-visual (A/V) modeling approach for transcribing utterances in scenarios where there are overlapping speech utterances from multiple talkers. It assumes that overlapping audio signals and video signals in the form of mouth-tracks aligned with speech are available for overlapping talkers. The approach builds on previous work in audio-only multi-talker ASR. In that work, a conventional recurrent neural network transducer (RNN-T) architecture was extended to include a masking model for separation of encoded audio features and multiple label encoders to encode transcripts from overlapping speakers. It is shown here that incorporating an attention weighted combination of visual features in A/V multi-talker RNN-T models significantly improves speaker disambiguation in ASR on overlapping speech relative to audio-only performance. The A/V multi-talker ASR systems described here are trained and evaluated on a two speaker A/V overlapping speech dataset created from YouTube videos. A 17% reduction in WER was observed for A/V multi-talker models relative to audio-only multi-talker models.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yifei Wu|AUTHOR Yifei Wu]]^^1^^
, [[Chenda Li|AUTHOR Chenda Li]]^^1^^
, [[Song Yang|AUTHOR Song Yang]]^^2^^
, [[Zhongqin Wu|AUTHOR Zhongqin Wu]]^^2^^
, [[Yanmin Qian|AUTHOR Yanmin Qian]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^SJTU, China; ^^2^^TAL, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3021–3025
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech from microphones is vulnerable in a complex acoustic environment due to noise and reverberation, while the cameras are not. Thus, utilizing the visual modality in the “cocktail party” scenario with multi-talkers has become a promising and popular approach. In this paper, we have explored the incorporating of visual modality into the end-to-end multi-talker speech recognition task. We propose two methods based on the modality fusion position, which are encoder-based fusion and decoder-based fusion. And for each method, advanced audio-visual fusion techniques including attention mechanism and dual decoder have been explored to find the best usage of the visual modality. With the proposed methods, our best audio-visual multi-talker automatic speech recognition (ASR) model gets almost ~50.0% word error rate (WER) reduction compared to the audio-only multi-talker ASR system.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ramon Sanabria|AUTHOR Ramon Sanabria]]^^1^^
, [[Austin Waters|AUTHOR Austin Waters]]^^2^^
, [[Jason Baldridge|AUTHOR Jason Baldridge]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Edinburgh, UK; ^^2^^Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2976–2980
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech-based image retrieval has been studied as a proxy for joint representation learning, usually without emphasis on retrieval itself. As such, it is unclear how well speech-based retrieval can work in practice — both in an absolute sense and versus alternative strategies that combine automatic speech recognition (ASR) with strong text encoders. In this work, we extensively study and expand choices of encoder architectures, training methodology (including unimodal and multimodal pretraining), and other factors. Our experiments cover different types of speech in three datasets: Flickr Audio, Places Audio, and Localized Narratives. Our best model configuration achieves large gains over state of the art, e.g., pushing recall-at-one from 21.8% to 33.2% for Flickr Audio and 27.6% to 53.4% for Places Audio. We also show our best speech-based models can match or exceed cascaded ASR-to-text encoding when speech is spontaneous, accented, or otherwise hard to automatically transcribe.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Huan Zhao|AUTHOR Huan Zhao]], [[Kaili Ma|AUTHOR Kaili Ma]]
</p><p class="cpabstractcardaffiliationlist">Hunan University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2981–2985
</span></p></div>
<div class="cpabstractcardabstract"><p>Recently, some cross-modal hashing methods are proposed to search data for different modality effectively. Hashing has received wide attention because of its low storage and high efficiency. Hashing-based methods project the data instances from different modalities into a Hamming space to learn hash codes for retrieval between different modality. Although obtaining promising performance, hashing-based methods have still several common limitations. First, they learn the hash codes by constructing semantic similarity matrices, resulting in the loss of information. Second, most existing methods simultaneously learn the hash codes and the hash functions, which bring a high computational complexity. Third, they utilize the relaxation-based optimization strategy to generate the hash codes which leads to the large quantization error of the hash codes. To solve the above problems, we propose a novel fast supervised hashing method, termed Fast Discrete Two-Step Learning Hashing (FDTLH) for scalable cross-modal retrieval, which learns the discriminative hash codes by adopting a effective two-step learning scheme. Extensive experiments show that the FDTLH outperforms several state-of-the-art hashing methods in terms of retrieval performance and learning efficiency.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jianrong Wang|AUTHOR Jianrong Wang]]^^1^^
, [[Ziyue Tang|AUTHOR Ziyue Tang]]^^1^^
, [[Xuewei Li|AUTHOR Xuewei Li]]^^1^^
, [[Mei Yu|AUTHOR Mei Yu]]^^1^^
, [[Qiang Fang|AUTHOR Qiang Fang]]^^2^^
, [[Li Liu|AUTHOR Li Liu]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Tianjin University, China; ^^2^^CASS, China; ^^3^^CUHK, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2986–2990
</span></p></div>
<div class="cpabstractcardabstract"><p>Cued Speech (CS) is a visual communication system for the deaf or hearing impaired people. It combines lip movements with hand cues to obtain a complete phonetic repertoire. Current deep learning based methods on automatic CS recognition suffer from a common problem, which is the data scarcity. Until now, there are only two public single speaker datasets for French (238 sentences) and British English (97 sentences). In this work, we propose a cross-modal knowledge distillation method with teacher-student structure, which transfers audio speech information to CS to overcome the limited data problem. Firstly, we pretrain a teacher model for CS recognition with a large amount of open source audio speech data, and simultaneously pretrain the feature extractors for lips and hands using CS data. Then, we distill the knowledge from teacher model to the student model with frame-level and sequence-level distillation strategies. Importantly, for frame-level, we exploit multi-task learning to weigh losses automatically, to obtain the balance coefficient. Besides, we establish a five-speaker British English CS dataset for the first time. The proposed method is evaluated on French and British English CS datasets, showing superior CS recognition performance to the state-of-the-art (SOTA) by a large margin.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kayode Olaleye|AUTHOR Kayode Olaleye]], [[Herman Kamper|AUTHOR Herman Kamper]]
</p><p class="cpabstractcardaffiliationlist">Stellenbosch University, South Africa</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2991–2995
</span></p></div>
<div class="cpabstractcardabstract"><p>Visually grounded speech models learn from images paired with spoken captions. By tagging images with soft text labels using a trained visual classifier with a fixed vocabulary, previous work has shown that it is possible to train a model that can //detect// whether a particular text keyword occurs in speech utterances or not. Here we investigate whether visually grounded speech models can also do keyword //localisation//: predicting where, within an utterance, a given textual keyword occurs without any explicit text-based or alignment supervision. We specifically consider whether incorporating attention into a convolutional model is beneficial for localisation. Although absolute localisation performance with visually supervised models is still modest (compared to using unordered bag-of-word text labels for supervision), we show that attention provides a large gain in performance over previous visually grounded models. As in many other speech-image studies, we find that many of the incorrect localisations are due to semantic confusions, e.g. locating the word ‘backstroke’ for the query keyword ‘swimming’.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Khazar Khorrami|AUTHOR Khazar Khorrami]], [[Okko Räsänen|AUTHOR Okko Räsänen]]
</p><p class="cpabstractcardaffiliationlist">Tampere University, Finland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2996–3000
</span></p></div>
<div class="cpabstractcardabstract"><p>Systems that can find correspondences between multiple modalities, such as between speech and images, have great potential to solve different recognition and data analysis tasks in an unsupervised manner. This work studies multimodal learning in the context of visually grounded speech (VGS) models, and focuses on their recently demonstrated capability to extract spatiotemporal alignments between spoken words and the corresponding visual objects without ever been explicitly trained for object localization or word recognition. As the main contributions, we formalize the alignment problem in terms of an audio-visual alignment tensor that is based on earlier VGS work, introduce systematic metrics for evaluating model performance in aligning visual objects and spoken words, and propose a new VGS model variant for the alignment task utilizing cross-modal attention layer. We test our model and a previously proposed model in the alignment task using SPEECH-COCO captions coupled with MSCOCO images. We compare the alignment performance using our proposed evaluation metrics to the semantic retrieval task commonly used to evaluate VGS models. We show that cross-modal attention layer not only helps the model to achieve higher semantic cross-modal retrieval performance, but also leads to substantial improvements in the alignment performance between image object and spoken words.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hang Chen|AUTHOR Hang Chen]]^^1^^
, [[Jun Du|AUTHOR Jun Du]]^^1^^
, [[Yu Hu|AUTHOR Yu Hu]]^^1^^
, [[Li-Rong Dai|AUTHOR Li-Rong Dai]]^^1^^
, [[Bao-Cai Yin|AUTHOR Bao-Cai Yin]]^^2^^
, [[Chin-Hui Lee|AUTHOR Chin-Hui Lee]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^USTC, China; ^^2^^iFLYTEK, China; ^^3^^Georgia Tech, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3001–3005
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we propose a novel deep learning architecture for improving word-level lip-reading. We first incorporate multi-scale processing into spatial feature extraction for lip-reading using hierarchical pyramidal convolution (HPConv) and self-attention. Specifically, HPConv is proposed to replace the conventional convolution features, leading to an improvement over the model’s ability to discover fine-grained lip movements. Next to deal with fixed-length image sequences representing words in a given database, a self-attention mechanism is proposed to integrate local information in all lip frames without assuming known word boundaries, so that our deep models automatically utilize key feature in relevant frames of a given word. Experiments on the Lip Reading in the Wild corpus show that our proposed architecture achieves an accuracy of 86.83%, yielding a relative error rate reduction of about 10% from that obtained with a state-of-the-art scheme of averaging frame scores for information fusion. A detailed analysis of the experimental results also confirms that weights learned from self-attention tend to be zero at both sides of an image sequence and focus non-zero weights in the middle part of a given word.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Andrew Rouditchenko|AUTHOR Andrew Rouditchenko]]^^1^^
, [[Angie Boggust|AUTHOR Angie Boggust]]^^1^^
, [[David Harwath|AUTHOR David Harwath]]^^2^^
, [[Samuel Thomas|AUTHOR Samuel Thomas]]^^3^^
, [[Hilde Kuehne|AUTHOR Hilde Kuehne]]^^3^^
, [[Brian Chen|AUTHOR Brian Chen]]^^4^^
, [[Rameswar Panda|AUTHOR Rameswar Panda]]^^3^^
, [[Rogerio Feris|AUTHOR Rogerio Feris]]^^3^^
, [[Brian Kingsbury|AUTHOR Brian Kingsbury]]^^3^^
, [[Michael Picheny|AUTHOR Michael Picheny]]^^5^^
, [[James Glass|AUTHOR James Glass]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^MIT, USA; ^^2^^University of Texas at Austin, USA; ^^3^^IBM, USA; ^^4^^Columbia University, USA; ^^5^^NYU, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3006–3010
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we explore self-supervised audio-visual models that learn from instructional videos. Prior work has shown that these models can relate spoken words and sounds to visual content after training on a large-scale dataset of videos, but they were only trained and evaluated on videos in English. To learn multilingual audio-visual representations, we propose a cascaded approach that leverages a model trained on English videos and applies it to audio-visual data in other languages, such as Japanese videos. With our cascaded approach, we show an improvement in retrieval performance of nearly 10× compared to training on the Japanese videos solely. We also apply the model trained on English videos to Japanese and Hindi spoken captions of images, achieving state-of-the-art performance.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Pingchuan Ma|AUTHOR Pingchuan Ma]]^^1^^
, [[Rodrigo Mira|AUTHOR Rodrigo Mira]]^^1^^
, [[Stavros Petridis|AUTHOR Stavros Petridis]]^^2^^
, [[Björn W. Schuller|AUTHOR Björn W. Schuller]]^^1^^
, [[Maja Pantic|AUTHOR Maja Pantic]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Imperial College London, UK; ^^2^^Facebook, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3011–3015
</span></p></div>
<div class="cpabstractcardabstract"><p>The large amount of audiovisual content being shared online today has drawn substantial attention to the prospect of audio-visual self-supervised learning. Recent works have focused on each of these modalities separately, while others have attempted to model both simultaneously in a cross-modal fashion. However, comparatively little attention has been given to leveraging one modality as a training objective to learn from the other. In this work, we propose Learning visual speech Representations from Audio via self-supervision (LiRA). Specifically, we train a ResNet+Conformer model to predict acoustic features from unlabelled visual speech. We find that this pre-trained model can be leveraged towards word-level and sentence-level lip-reading through feature extraction and fine-tuning experiments. We show that our approach significantly outperforms other self-supervised methods on the Lip Reading in the Wild (LRW) dataset and achieves state-of-the-art performance on Lip Reading Sentences 2 (LRS2) using only a fraction of the total labelled data.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sanyuan Chen|AUTHOR Sanyuan Chen]]^^1^^
, [[Yu Wu|AUTHOR Yu Wu]]^^2^^
, [[Zhuo Chen|AUTHOR Zhuo Chen]]^^3^^
, [[Jian Wu|AUTHOR Jian Wu]]^^2^^
, [[Takuya Yoshioka|AUTHOR Takuya Yoshioka]]^^3^^
, [[Shujie Liu|AUTHOR Shujie Liu]]^^2^^
, [[Jinyu Li|AUTHOR Jinyu Li]]^^3^^
, [[Xiangzhan Yu|AUTHOR Xiangzhan Yu]]^^4^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Harbin Institute of Technology, China; ^^2^^Microsoft, China; ^^3^^Microsoft, USA; ^^4^^Harbin Institute of Technology, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3026–3030
</span></p></div>
<div class="cpabstractcardabstract"><p>Transformer has been successfully applied to speech separation recently with its strong long-dependency modeling capacity using a self-attention mechanism. However, Transformer tends to have heavy run-time costs due to the deep encoder layers, which hinders its deployment on edge devices. A small Transformer model with fewer encoder layers is preferred for computational efficiency, but it is prone to performance degradation. In this paper, an ultra fast speech separation Transformer model is proposed to achieve both better performance and efficiency with teacher student learning (T-S learning). We introduce layer-wise T-S learning and objective shifting mechanisms to guide the small student model to learn intermediate representations from the large teacher model. Compared with the small Transformer model trained from scratch, the proposed T-S learning method reduces the word error rate (WER) by more than 5% for both multi-channel and single-channel speech separation on LibriCSS dataset. Utilizing more unlabeled speech data, our ultra fast speech separation models achieve more than 10% relative WER reduction.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yi Luo|AUTHOR Yi Luo]], [[Nima Mesgarani|AUTHOR Nima Mesgarani]]
</p><p class="cpabstractcardaffiliationlist">Columbia University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3071–3075
</span></p></div>
<div class="cpabstractcardabstract"><p>Various neural network architectures have been proposed in recent years for the task of multi-channel speech separation. Among them, the filter-and-sum network (FaSNet) performs end-to-end time-domain filter-and-sum beamforming and has shown effective in both ad-hoc and fixed microphone array geometries. However, whether such explicit beamforming operation is a necessary and valid formulation remains unclear. In this paper, we investigate the beamforming operation and show that it is not necessary. To further improve the performance, we change the explicit waveform-level filter-and-sum operation into an implicit feature-level filter-and-sum operation around a context of features. A feature-level normalized cross correlation (fNCC) feature is also proposed to better match the implicit operation for an improved performance. Experiment results on a simulated ad-hoc microphone array dataset show that the proposed modification to the FaSNet, which we refer to as the implicit filter-and-sum network (iFaSNet), achieve better performance than the explicit FaSNet with a similar model size and a faster training and inference speed.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yong Xu|AUTHOR Yong Xu]]^^1^^
, [[Zhuohuang Zhang|AUTHOR Zhuohuang Zhang]]^^2^^
, [[Meng Yu|AUTHOR Meng Yu]]^^1^^
, [[Shi-Xiong Zhang|AUTHOR Shi-Xiong Zhang]]^^1^^
, [[Dong Yu|AUTHOR Dong Yu]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Tencent, USA; ^^2^^Indiana University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3076–3080
</span></p></div>
<div class="cpabstractcardabstract"><p>Although the conventional mask-based minimum variance distortionless response (MVDR) could reduce the non-linear distortion, the residual noise level of the MVDR separated speech is still high. In this paper, we propose a spatio-temporal recurrent neural network based beamformer (RNN-BF) for target speech separation. This new beamforming framework directly learns the beamforming weights from the estimated speech and noise spatial covariance matrices. Leveraging on the temporal modeling capability of RNNs, the RNN-BF could automatically accumulate the statistics of the speech and noise covariance matrices to learn the frame-level beamforming weights in a recursive way. An RNN-based generalized eigenvalue (RNN-GEV) beamformer and a more generalized RNN beamformer (GRNN-BF) are proposed. We further improve the RNN-GEV and the GRNN-BF by using layer normalization to replace the commonly used mask normalization on the covariance matrices. The proposed GRNN-BF obtains better performance against prior arts in terms of speech quality (PESQ), speech-to-noise ratio (SNR) and word error rate (WER).</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Murtiza Ali|AUTHOR Murtiza Ali]], [[Ashwani Koul|AUTHOR Ashwani Koul]], [[Karan Nathwani|AUTHOR Karan Nathwani]]
</p><p class="cpabstractcardaffiliationlist">IIT Jammu, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3031–3035
</span></p></div>
<div class="cpabstractcardabstract"><p>Sparse Recovery (SR) algorithms have been used widely for direction-of-arrival (DOA) estimation in spatially contiguous plane wave for their robust performance. But these algorithms have proven to be computationally costly. With a few sensors and at low SNRs, the noise dominates the data singular vectors and the sparse estimation of contiguous sources is incorrect. The magnitude spectrum-based re-weighted sparse recovery (RWSR) algorithms improve the robustness by re-weighting the sparse estimates. However, their efficiency degrades with decreasing the number of sensors at low SNRs. Therefore, this paper exhibits the significance of the phase spectrum, in the form of group-delay, for sparse and robust source estimation using RWSR algorithms for spatially contiguous sources. Further, an optimal re-weighted methodology based on simultaneously minimizing average-root-mean-square-error and maximizing the probability of separation is also proposed. The simulation results are carried out for Gaussian noise to demonstrate the excellent performance of the proposed algorithms.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Cong Han|AUTHOR Cong Han]]^^1^^
, [[Yi Luo|AUTHOR Yi Luo]]^^1^^
, [[Chenda Li|AUTHOR Chenda Li]]^^2^^
, [[Tianyan Zhou|AUTHOR Tianyan Zhou]]^^3^^
, [[Keisuke Kinoshita|AUTHOR Keisuke Kinoshita]]^^4^^
, [[Shinji Watanabe|AUTHOR Shinji Watanabe]]^^5^^
, [[Marc Delcroix|AUTHOR Marc Delcroix]]^^4^^
, [[Hakan Erdogan|AUTHOR Hakan Erdogan]]^^6^^
, [[John R. Hershey|AUTHOR John R. Hershey]]^^6^^
, [[Nima Mesgarani|AUTHOR Nima Mesgarani]]^^1^^
, [[Zhuo Chen|AUTHOR Zhuo Chen]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Columbia University, USA; ^^2^^SJTU, China; ^^3^^Microsoft, USA; ^^4^^NTT, Japan; ^^5^^Johns Hopkins University, USA; ^^6^^Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3036–3040
</span></p></div>
<div class="cpabstractcardabstract"><p>Leveraging additional speaker information to facilitate speech separation has received increasing attention in recent years. Recent research includes extracting target speech by using the target speaker’s voice snippet and jointly separating all participating speakers by using a pool of additional speaker signals, which is known as speech separation using speaker inventory (SSUSI). However, all these systems ideally assume that the pre-enrolled speaker signals are available and are only evaluated on simple data configurations. In realistic multi-talker conversations, the speech signal contains a large proportion of non-overlapped regions, where we can derive robust speaker embedding of individual talkers. In this work, we adopt the SSUSI model in long recordings and propose a self-informed, clustering-based inventory forming scheme for long recording, where the speaker inventory is fully built from the input signal without the need for external speaker signals. Experiment results on simulated noisy reverberant long recording datasets show that the proposed method can significantly improve the separation performance across various conditions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Weitao Yuan|AUTHOR Weitao Yuan]]^^1^^
, [[Shengbei Wang|AUTHOR Shengbei Wang]]^^1^^
, [[Xiangrui Li|AUTHOR Xiangrui Li]]^^1^^
, [[Masashi Unoki|AUTHOR Masashi Unoki]]^^2^^
, [[Wenwu Wang|AUTHOR Wenwu Wang]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Tiangong University, China; ^^2^^JAIST, Japan; ^^3^^University of Surrey, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3041–3045
</span></p></div>
<div class="cpabstractcardabstract"><p>Generative adversarial networks (GANs) and Conditional GANs (cGANs) have recently been applied for singing voice extraction (SVE), since they can accurately model the vocal distributions and effectively utilize a large amount of unlabelled datasets. However, current GANs/cGANs based SVE frameworks have no explicit mechanism to eliminate the mutual interferences between different sources. In this work, we introduce a novel ‘crossfire’ criterion into GANs to complement its standard adversarial training, which forms a dual-objective GANs, namely Crossfire GANs (Cr-GANs). In addition, we design a Generalized Projection Method (GPM) for cGANs based frameworks to extract more effective conditional information for SVE. Using the proposed GPM, we extend our Cr-GANs to conditional version, i.e., Crossfire Conditional GANs (Cr-cGANs). The proposed methods were evaluated on the DSD100 and CCMixter datasets. The numerical results have shown that the ‘crossfire’ criterion and GPM are beneficial to each other and considerably improve the separation performance of existing GANs/cGANs based SVE methods.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kai Wang|AUTHOR Kai Wang]]^^1^^
, [[Hao Huang|AUTHOR Hao Huang]]^^1^^
, [[Ying Hu|AUTHOR Ying Hu]]^^1^^
, [[Zhihua Huang|AUTHOR Zhihua Huang]]^^2^^
, [[Sheng Li|AUTHOR Sheng Li]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Xinjiang University, China; ^^2^^Xinjiang University, China; ^^3^^NICT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3046–3050
</span></p></div>
<div class="cpabstractcardabstract"><p>Traditional single channel speech separation in the time-frequency (T-F) domain often faces the problem of phase reconstruction. Due to the fact that the real-valued network is not suitable for dealing with complex-valued representation, the performance of the T-F domain speech separation method is often constrained from reaching the state-of-the-art. In this paper, we propose improved speech separation methods in both complex and real T-F domain using orthogonal representation. For the complex-valued case, we combine the deep complex network (DCN) and Conv-TasNet to design an end-to-end complex-valued model. Specifically, we incorporate short-time Fourier transform (STFT) and learnable complex layers to build a hybrid encoder-decoder structure, and use a DCN based separator. Then we present the importance of weights orthogonality in the T-F domain transformation and propose a multi-segment orthogonality (MSO) architecture for further improvements. For the real-valued case, we performed separation in real T-F domain by introducing the short-time DCT (STDCT) with orthogonal representation as well. Experimental results show that the proposed complex model outperforms the baseline Conv-TasNet with a comparable parameter size by 1.8 dB, and the STDCT-based real-valued T-F model by 1.2 dB, showing the advantages of speech separation in the T-F domain.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yu Nakagome|AUTHOR Yu Nakagome]]^^1^^
, [[Masahito Togami|AUTHOR Masahito Togami]]^^2^^
, [[Tetsuji Ogawa|AUTHOR Tetsuji Ogawa]]^^1^^
, [[Tetsunori Kobayashi|AUTHOR Tetsunori Kobayashi]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Waseda University, Japan; ^^2^^LINE, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3051–3055
</span></p></div>
<div class="cpabstractcardabstract"><p>This study presents a framework to enable efficient and stable adversarial learning of unsupervised multichannel source separation models. When the paired data, i.e., the mixture and the corresponding clean speech, are not available for training, it is promising to exploit generative adversarial networks (GANs), where a source separation system is treated as a generator and trained to bring the distribution of the separated (fake) speech closer to that of the clean (real) speech. The separated speech, however, contains many errors, especially when the system is trained unsupervised and can be easily distinguished from the clean speech. A real/fake binary discriminator therefore will stop the adversarial learning process unreasonably early. This study aims to balance the convergence of the generator and discriminator to achieve efficient and stable learning. For that purpose, the autoencoder-based discriminator and more stable adversarial loss, which are designed in boundary equilibrium GAN (BEGAN), are introduced. In addition, generator-specific distortions are added to real examples so that the models can be trained to focus only on source separation. Experimental comparisons demonstrated that the present stabilizing learning techniques improved the performance of multiple unsupervised source separation systems.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sung-Feng Huang|AUTHOR Sung-Feng Huang]]^^1^^
, [[Shun-Po Chuang|AUTHOR Shun-Po Chuang]]^^1^^
, [[Da-Rong Liu|AUTHOR Da-Rong Liu]]^^1^^
, [[Yi-Chen Chen|AUTHOR Yi-Chen Chen]]^^1^^
, [[Gene-Ping Yang|AUTHOR Gene-Ping Yang]]^^2^^
, [[Hung-yi Lee|AUTHOR Hung-yi Lee]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^National Taiwan University, Taiwan; ^^2^^University of Edinburgh, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3056–3060
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech separation has been well developed, with the very successful permutation invariant training (PIT) approach, although the frequent label assignment switching happening during PIT training remains to be a problem when better convergence speed and achievable performance are desired. In this paper, we propose to perform self-supervised pre-training to stabilize the label assignment in training the speech separation model. Experiments over several types of self-supervised approaches, several typical speech separation models and two different datasets showed that very good improvements are achievable if a proper self-supervised approach is chosen.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Fan-Lin Wang|AUTHOR Fan-Lin Wang]], [[Yu-Huai Peng|AUTHOR Yu-Huai Peng]], [[Hung-Shin Lee|AUTHOR Hung-Shin Lee]], [[Hsin-Min Wang|AUTHOR Hsin-Min Wang]]
</p><p class="cpabstractcardaffiliationlist">Academia Sinica, Taiwan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3061–3065
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech separation has been extensively studied to deal with the cocktail party problem in recent years. All related approaches can be divided into two categories: time-frequency domain methods and time domain methods. In addition, some methods try to generate speaker vectors to support source separation. In this study, we propose a new model called dual-path filter network (DPFN). Our model focuses on the post-processing of speech separation to improve speech separation performance. DPFN is composed of two parts: the speaker module and the separation module. First, the speaker module infers the identities of the speakers. Then, the separation module uses the speakers’ information to extract the voices of individual speakers from the mixture. DPFN constructed based on DPRNN-TasNet is not only superior to DPRNN-TasNet, but also avoids the problem of permutation-invariant training (PIT).</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jian Wu|AUTHOR Jian Wu]]^^1^^
, [[Zhuo Chen|AUTHOR Zhuo Chen]]^^2^^
, [[Sanyuan Chen|AUTHOR Sanyuan Chen]]^^1^^
, [[Yu Wu|AUTHOR Yu Wu]]^^1^^
, [[Takuya Yoshioka|AUTHOR Takuya Yoshioka]]^^2^^
, [[Naoyuki Kanda|AUTHOR Naoyuki Kanda]]^^2^^
, [[Shujie Liu|AUTHOR Shujie Liu]]^^1^^
, [[Jinyu Li|AUTHOR Jinyu Li]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Microsoft, China; ^^2^^Microsoft, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3066–3070
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech separation has been successfully applied as a front-end processing module of conversation transcription systems thanks to its ability to handle overlapped speech and its flexibility to combine with downstream tasks such as automatic speech recognition (ASR). However, a speech separation model often introduces target speech distortion, resulting in a sub-optimum word error rate (WER). In this paper, we describe our efforts to improve the performance of a single channel speech separation system. Specifically, we investigate a two-stage training scheme that firstly applies a feature level optimization criterion for pre-training, followed by an ASR-oriented optimization criterion using an end-to-end (E2E) speech recognition model. Meanwhile, to keep the model light-weight, we introduce a modified teacher-student learning technique for model compression. By combining those approaches, we achieve a absolute average WER improvement of 2.70% and 0.77% using models with less than 10M parameters compared with the previous state-of-the-art results on the LibriCSS dataset for utterance-wise evaluation and continuous evaluation, respectively.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yi Chieh Liu|AUTHOR Yi Chieh Liu]]^^1^^
, [[Eunjung Han|AUTHOR Eunjung Han]]^^2^^
, [[Chul Lee|AUTHOR Chul Lee]]^^2^^
, [[Andreas Stolcke|AUTHOR Andreas Stolcke]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Georgia Tech, USA; ^^2^^Amazon, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3081–3085
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose a new end-to-end neural diarization (EEND) system that is based on Conformer, a recently proposed neural architecture that combines convolutional mappings and Transformer to model both local and global dependencies in speech. We first show that data augmentation and convolutional subsampling layers enhance the original self-attentive EEND in the Transformer-based EEND, and then Conformer gives an additional gain over the Transformer-based EEND. However, we notice that the Conformer-based EEND does not generalize as well from simulated to real conversation data as the Transformer-based model. This leads us to quantify the mismatch between simulated data and real speaker behavior in terms of temporal statistics reflecting turn-taking between speakers, and investigate its correlation with diarization error. By mixing simulated and real data in EEND training, we mitigate the mismatch further, with Conformer-based EEND achieving 24% error reduction over the baseline SA-EEND system, and 10% improvement over the best augmented Transformer-based system, on two-speaker CALLHOME data.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jee-weon Jung|AUTHOR Jee-weon Jung]], [[Hee-Soo Heo|AUTHOR Hee-Soo Heo]], [[Youngki Kwon|AUTHOR Youngki Kwon]], [[Joon Son Chung|AUTHOR Joon Son Chung]], [[Bong-Jin Lee|AUTHOR Bong-Jin Lee]]
</p><p class="cpabstractcardaffiliationlist">Naver, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3086–3090
</span></p></div>
<div class="cpabstractcardabstract"><p>In this work, we propose an overlapped speech detection system trained as a three-class classifier. Unlike conventional systems that perform binary classification as to whether or not a frame contains overlapped speech, the proposed approach classifies into three classes: non-speech, single speaker speech, and overlapped speech. By training a network with the more detailed label definition, the model can learn a better notion on deciding the number of speakers included in a given frame. A convolutional recurrent neural network architecture is explored to benefit from both convolutional layer’s capability to model local patterns and recurrent layer’s ability to model sequential information. The proposed overlapped speech detection model establishes a state-of-the-art performance with a precision of 0.6648 and a recall of 0.3222 on the DIHARD II evaluation set, showing a 20% increase in recall along with higher precision. In addition, we also introduce a simple approach to utilize the proposed overlapped speech detection model for speaker diarization which ranked third place in the Track 1 of the DIHARD III challenge.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xucheng Wan|AUTHOR Xucheng Wan]], [[Kai Liu|AUTHOR Kai Liu]], [[Huan Zhou|AUTHOR Huan Zhou]]
</p><p class="cpabstractcardaffiliationlist">Huawei Technologies, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3091–3095
</span></p></div>
<div class="cpabstractcardabstract"><p>Despite considerable efforts, online speaker diarization remains an ongoing challenge. In this study, we propose to tackle the challenge from two perspectives, to endow diarization model with discriminability and to rectify less-reliable online inference with guidance. Specifically, based on the current prior art, UIS-RNN, two enhancement approaches are proposed to concretize our motivations. The effectiveness of our proposals is experimentally validated by results on the AMI evaluation set. With substantial relative improvement of 48.7%, our online speaker diarization system significantly outperformed its baseline. More impressively, its performance in terms of diarization error rate is better than most state-of-the-art offline systems.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yuki Takashima|AUTHOR Yuki Takashima]]^^1^^
, [[Yusuke Fujita|AUTHOR Yusuke Fujita]]^^1^^
, [[Shota Horiguchi|AUTHOR Shota Horiguchi]]^^1^^
, [[Shinji Watanabe|AUTHOR Shinji Watanabe]]^^2^^
, [[Leibny Paola García Perera|AUTHOR Leibny Paola García Perera]]^^3^^
, [[Kenji Nagamatsu|AUTHOR Kenji Nagamatsu]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Hitachi, Japan; ^^2^^Carnegie Mellon University, USA; ^^3^^Johns Hopkins University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3096–3100
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we present a semi-supervised training technique using pseudo-labeling for end-to-end neural diarization (EEND). The EEND system has shown promising performance compared with traditional clustering-based methods, especially in the case of overlapping speech. However, to get a well-tuned model, EEND requires labeled data for all the joint speech activities of every speaker at each time frame in a recording. In this paper, we explore a pseudo-labeling approach that employs unlabeled data. First, we propose an iterative pseudo-label method for EEND, which trains the model using unlabeled data of a target condition. Then, we also propose a committee-based training method to improve the performance of EEND. To evaluate our proposed method, we conduct the experiments of model adaptation using labeled and unlabeled data. Experimental results on the CALLHOME dataset show that our proposed pseudo-label achieved a 37.4% relative diarization error rate reduction compared to a seed model. Moreover, we analyzed the results of semi-supervised adaptation with pseudo-labeling. We also show the effectiveness of our approach on the third DIHARD dataset.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Youngki Kwon|AUTHOR Youngki Kwon]], [[Jee-weon Jung|AUTHOR Jee-weon Jung]], [[Hee-Soo Heo|AUTHOR Hee-Soo Heo]], [[You Jin Kim|AUTHOR You Jin Kim]], [[Bong-Jin Lee|AUTHOR Bong-Jin Lee]], [[Joon Son Chung|AUTHOR Joon Son Chung]]
</p><p class="cpabstractcardaffiliationlist">Naver, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3101–3105
</span></p></div>
<div class="cpabstractcardabstract"><p>The goal of this paper is to adapt speaker embeddings for solving the problem of speaker diarisation. The quality of speaker embeddings is paramount to the performance of speaker diarisation systems. Despite this, prior works in the field have directly used embeddings designed only to be effective on the speaker verification task. In this paper, we propose three techniques that can be used to better adapt the speaker embeddings for diarisation: dimensionality reduction, attention-based embedding aggregation, and non-speech clustering. A wide range of experiments is performed on various challenging datasets. The results demonstrate that all three techniques contribute positively to the performance of the diarisation system achieving an average relative improvement of 25.07% in terms of diarisation error rate over the baseline.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yu-Xuan Wang|AUTHOR Yu-Xuan Wang]]^^1^^
, [[Jun Du|AUTHOR Jun Du]]^^1^^
, [[Maokui He|AUTHOR Maokui He]]^^1^^
, [[Shu-Tong Niu|AUTHOR Shu-Tong Niu]]^^1^^
, [[Lei Sun|AUTHOR Lei Sun]]^^2^^
, [[Chin-Hui Lee|AUTHOR Chin-Hui Lee]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^USTC, China; ^^2^^iFLYTEK, China; ^^3^^Georgia Tech, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3106–3110
</span></p></div>
<div class="cpabstractcardabstract"><p>In this study, we propose a scenario-dependent speaker diarization approach to handling the diversified scenarios of 11 domains encountered in DIHARD-III challenge with a divide-and-conquer strategy. First, using a ResNet-based audio domain classifier, all domains in DIHARD-III challenge could be divided into several scenarios by different impact factors, such as background noise level, speaker number, and speaker overlap ratio. In each scenario, different combinations of techniques are designed, aiming at achieving the best performance in terms of both diarization error rate (DER) and run-time efficiency. For low signal-to-noise-ration (SNR) scenarios, speech enhancement based on a progressive learning network with multiple intermediate SNR targets is adopted for pre-processing. Conventional clustering-based speaker diarization is utilized to mainly handle speech segments with non-overlapping speakers, while separation-based or neural speaker diarization is used to cope with the overlapping speech regions, which is combined with an iterative fine-tuning strategy to boost the generalization ability. We also explore post-processing to perform system fusion and selection. For DIHARD-III challenge, our scenario-dependent system won the first place among all submitted systems, and significantly outperforms the state-of-the-art clustering-based speaker diarization system, yielding relative DER reductions of 32.17% and 28.34% on development set and evaluation set on Track 1, respectively.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hervé Bredin|AUTHOR Hervé Bredin]]^^1^^
, [[Antoine Laurent|AUTHOR Antoine Laurent]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^IRIT (UMR 5505), France; ^^2^^LIUM (EA 4023), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3111–3115
</span></p></div>
<div class="cpabstractcardabstract"><p>Speaker segmentation consists in partitioning a conversation between one or more speakers into speaker turns. Usually addressed as the late combination of three sub-tasks (voice activity detection, speaker change detection, and overlapped speech detection), we propose to train an end-to-end segmentation model that does it directly. Inspired by the original end-to-end neural speaker diarization approach (EEND), the task is modeled as a multi-label classification problem using permutation-invariant training. The main difference is that our model operates on short audio chunks (5 seconds) but at a much higher temporal resolution (every 16ms). Experiments on multiple speaker diarization datasets conclude that our model can be used with great success on both voice activity detection and overlapped speech detection. Our proposed model can also be used as a post-processing step, to detect and correctly assign overlapped speech regions. Relative diarization error rate improvement over the best considered baseline (VBx) reaches 17% on AMI, 13% on DIHARD 3, and 13% on VoxConverse.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yawen Xue|AUTHOR Yawen Xue]]^^1^^
, [[Shota Horiguchi|AUTHOR Shota Horiguchi]]^^1^^
, [[Yusuke Fujita|AUTHOR Yusuke Fujita]]^^1^^
, [[Yuki Takashima|AUTHOR Yuki Takashima]]^^1^^
, [[Shinji Watanabe|AUTHOR Shinji Watanabe]]^^2^^
, [[Leibny Paola García Perera|AUTHOR Leibny Paola García Perera]]^^3^^
, [[Kenji Nagamatsu|AUTHOR Kenji Nagamatsu]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Hitachi, Japan; ^^2^^Carnegie Mellon University, USA; ^^3^^Johns Hopkins University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3116–3120
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose a streaming diarization method based on an end-to-end neural diarization (EEND) model, which handles flexible numbers of speakers and overlapping speech. In our previous study, the speaker-tracing buffer (STB) mechanism was proposed to achieve a chunk-wise streaming diarization using a pre-trained EEND model. STB traces the speaker information in previous chunks to map the speakers in a new chunk. However, it only worked with two-speaker recordings. In this paper, we propose an extended STB for flexible numbers of speakers, FLEX-STB. The proposed method uses a zero-padding followed by speaker-tracing, which alleviates the difference in the number of speakers between a buffer and a current chunk. We also examine buffer update strategies to select important frames for tracing multiple speakers. Experiments on CALLHOME and DIHARD II datasets show that the proposed method achieves comparable performance to the offline EEND method with 1-second latency. The results also show that our proposed method outperforms recently proposed chunk-wise diarization methods based on EEND (BW-EDA-EEND).</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Or Haim Anidjar|AUTHOR Or Haim Anidjar]]^^1^^
, [[Itshak Lapidot|AUTHOR Itshak Lapidot]]^^2^^
, [[Chen Hajaj|AUTHOR Chen Hajaj]]^^1^^
, [[Amit Dvir|AUTHOR Amit Dvir]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Ariel University, Israel; ^^2^^Afeka College, Israel</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3121–3125
</span></p></div>
<div class="cpabstractcardabstract"><p>Speaker Change Detection (SCD) is the task of segmenting an input audio-recording according to speaker interchanges. This task is essential for many applications, such as automatic voice transcription or Speaker Diarization (SD). This paper focuses on the essential task of audio segmentation and suggests a word-embedding-based solution for the SCD problem. Moreover, we show how to use our approach in order to outperform voice-based solutions for the SD problem. We empirically show that our method can accurately identify the speaker-turns in an audio-recording with 82.12% and 89.02% success in the Recall and F1-score measures.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kosuke Futamata|AUTHOR Kosuke Futamata]], [[Byeongseon Park|AUTHOR Byeongseon Park]], [[Ryuichi Yamamoto|AUTHOR Ryuichi Yamamoto]], [[Kentaro Tachibana|AUTHOR Kentaro Tachibana]]
</p><p class="cpabstractcardaffiliationlist">LINE, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3126–3130
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose a novel phrase break prediction method that combines implicit features extracted from a pre-trained large language model, a.k.a BERT, and explicit features extracted from BiLSTM with linguistic features. In conventional BiLSTM-based methods, word representations and/or sentence representations are used as independent components. The proposed method takes account of both representations to extract the latent semantics, which cannot be captured by previous methods. The objective evaluation results show that the proposed method obtains an absolute improvement of 3.2 points for the F1 score compared with BiLSTM-based conventional methods using linguistic features. Moreover, the perceptual listening test results verify that a TTS system that applied our proposed method achieved a mean opinion score of 4.39 in prosody naturalness, which is highly competitive with the score of 4.37 for synthesized speech with ground-truth phrase breaks.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Iván Vallés-Pérez|AUTHOR Iván Vallés-Pérez]]^^1^^
, [[Julian Roth|AUTHOR Julian Roth]]^^1^^
, [[Grzegorz Beringer|AUTHOR Grzegorz Beringer]]^^2^^
, [[Roberto Barra-Chicote|AUTHOR Roberto Barra-Chicote]]^^1^^
, [[Jasha Droppo|AUTHOR Jasha Droppo]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Amazon, UK; ^^2^^Amazon, Poland; ^^3^^Amazon, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3131–3135
</span></p></div>
<div class="cpabstractcardabstract"><p>Text-to-speech systems recently achieved almost indistinguishable quality from human speech. However, the prosody of those systems is generally flatter than natural speech, producing samples with low expressiveness. Disentanglement of speaker id and prosody is crucial in text-to-speech systems to improve on naturalness and produce more variable syntheses. This paper proposes a new neural text-to-speech model that approaches the disentanglement problem by conditioning a //Tacotron2//-like architecture on flow-normalized speaker embeddings, and by substituting the reference encoder with a new learned latent distribution responsible for modeling the intra-sentence variability due to the prosody. By removing the reference encoder dependency, the speaker-leakage problem typically happening in this kind of systems disappears, producing more distinctive syntheses at inference time. The new model achieves significantly higher prosody variance than the baseline in a set of quantitative prosody features, as well as higher speaker distinctiveness, without decreasing the speaker intelligibility. Finally, we observe that the normalized speaker embeddings enable much richer speaker interpolations, substantially improving the distinctiveness of the new interpolated speakers.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Chenpeng Du|AUTHOR Chenpeng Du]], [[Kai Yu|AUTHOR Kai Yu]]
</p><p class="cpabstractcardaffiliationlist">SJTU, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3136–3140
</span></p></div>
<div class="cpabstractcardabstract"><p>Generating natural speech with a diverse and smooth prosody pattern is a challenging task. Although random sampling with phone-level prosody distribution has been investigated to generate different prosody patterns, the diversity of the generated speech is still very limited and far from what can be achieved by humans. This is largely due to the use of uni-modal distribution, such as single Gaussian, in the prior works of phone-level prosody modelling. In this work, we propose a novel approach that models phone-level prosodies with GMM based mixture density network (GMM-MDN). Experiments on the LJSpeech dataset demonstrate that phone-level prosodies can precisely control the synthetic speech and GMM-MDN can generate a more natural and smooth prosody pattern than a single Gaussian. Subjective evaluations further show that the proposed approach not only achieves better naturalness, but also significantly improves the prosody diversity in synthetic speech without the need of manual control.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kenichi Fujita|AUTHOR Kenichi Fujita]], [[Atsushi Ando|AUTHOR Atsushi Ando]], [[Yusuke Ijima|AUTHOR Yusuke Ijima]]
</p><p class="cpabstractcardaffiliationlist">NTT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3141–3145
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper proposes a novel speech-rhythm-based method for speaker embeddings. Conventionally spectral feature-based speaker embedding vectors such as the x-vector are used as auxiliary information for multi-speaker speech synthesis. However, speech synthesis with conventional embeddings has difficulty reproducing the target speaker’s speech rhythm, one of the important factors among speaker characteristics, because spectral features do not explicitly include speech rhythm. In this paper, speaker embeddings that take speech rhythm information into account are introduced to achieve phoneme duration modeling using a few utterances by the target speaker. A novel point of the proposed method is that rhythm-based embeddings are extracted with phonemes and their durations. They are extracted with a speaker identification model similar to the conventional spectral feature-based one. We conducted two experiments: speaker embeddings generation and speech synthesis with generated embeddings. We show that the proposed model has an EER of 10.3% in speaker identification even with only speech rhythm. Visualizing the embeddings shows that utterances with similar rhythms are also similar in their speaker embeddings. The results of an objective and subjective evaluation on speech synthesis demonstrate that the proposed method can synthesize speech with speech rhythm closer to the target speaker.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yuxiang Zou|AUTHOR Yuxiang Zou]], [[Shichao Liu|AUTHOR Shichao Liu]], [[Xiang Yin|AUTHOR Xiang Yin]], [[Haopeng Lin|AUTHOR Haopeng Lin]], [[Chunfeng Wang|AUTHOR Chunfeng Wang]], [[Haoyu Zhang|AUTHOR Haoyu Zhang]], [[Zejun Ma|AUTHOR Zejun Ma]]
</p><p class="cpabstractcardaffiliationlist">ByteDance, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3146–3150
</span></p></div>
<div class="cpabstractcardabstract"><p>Benefiting from the great development of deep learning, modern neural text-to-speech (TTS) models can generate speech indistinguishable from natural speech. However, The generated utterances often keep an average prosodic style of the database instead of having rich prosodic variation. For pitch-stressed languages, such as English, accurate intonation and stress are important for conveying semantic information. In this work, we propose a fine-grained prosody modeling method in neural speech synthesis with ToBI (Tones and Break Indices) representation. The proposed system consists of a text frontend for ToBI prediction and a Tacotron-based TTS module for prosody modeling. By introducing the ToBI representation, we can control the system to synthesize speech with accurate intonation and stress at syllable level. Compared with the two baselines (Tacotron and unsupervised method), experiments show that our model can generate more natural speech with more accurate prosody, as well as effectively control the stress, intonation, and pause of the speech.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mayank Sharma|AUTHOR Mayank Sharma]]^^1^^
, [[Yogesh Virkar|AUTHOR Yogesh Virkar]]^^2^^
, [[Marcello Federico|AUTHOR Marcello Federico]]^^2^^
, [[Roberto Barra-Chicote|AUTHOR Roberto Barra-Chicote]]^^3^^
, [[Robert Enyedi|AUTHOR Robert Enyedi]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Amazon, India; ^^2^^Amazon, USA; ^^3^^Amazon, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3151–3155
</span></p></div>
<div class="cpabstractcardabstract"><p>Automatically dubbed speech of a video involves: (i) segmenting the target sentences into phrases to reflect the speech-pause arrangement used by the original speaker, and (ii) adjusting the speaking rate of the synthetic voice at the phrase-level to match the exact timing of each corresponding source phrase. In this work, we investigate a post-segmentation approach to control the speaking rate of neural Text-to-Speech (TTS) at the phrase-level after generating the entire sentence. Our post-segmentation method relies on the attention matrix generated by the context generation step to perform a force-alignment over pause markers inserted in the input text. We show that: (i) our approach can be more accurate than applying an off-the-shelf forced aligner, and (ii) post-segmentation method permits generation more fluent speech than pre-segmentation approach described in [1].</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Guangyan Zhang|AUTHOR Guangyan Zhang]]^^1^^
, [[Ying Qin|AUTHOR Ying Qin]]^^2^^
, [[Daxin Tan|AUTHOR Daxin Tan]]^^1^^
, [[Tan Lee|AUTHOR Tan Lee]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^CUHK, China; ^^2^^Beijing Jiaotong University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3156–3160
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper describes a novel design of a neural network-based speech generation model for learning prosodic representation. The problem of representation learning is formulated according to the information bottleneck (IB) principle. A modified VQ-VAE quantized layer is incorporated in the speech generation model to control the IB capacity and adjust the balance between reconstruction power and disentangle capability of the learned representation. The proposed model is able to learn word-level prosodic representations from speech data. With an optimized IB capacity, the learned representations not only are adequate to reconstruct the original speech but also can be used to transfer the prosody onto different textual content. Extensive results of the objective and subjective evaluation are presented to demonstrate the effect of IB capacity control, the effectiveness, and potential usage of the learned prosodic representation in controllable neural speech generation.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Alice Baird|AUTHOR Alice Baird]], [[Silvan Mertes|AUTHOR Silvan Mertes]], [[Manuel Milling|AUTHOR Manuel Milling]], [[Lukas Stappen|AUTHOR Lukas Stappen]], [[Thomas Wiest|AUTHOR Thomas Wiest]], [[Elisabeth André|AUTHOR Elisabeth André]], [[Björn W. Schuller|AUTHOR Björn W. Schuller]]
</p><p class="cpabstractcardaffiliationlist">Universität Augsburg, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3161–3165
</span></p></div>
<div class="cpabstractcardabstract"><p>The collection of emotional speech data is a time-consuming and costly endeavour. Generative networks can be applied to augment the limited audio data artificially. However, it is challenging to evaluate generated audio for its similarity to source data, as current quantitative metrics are not necessarily suited to the audio domain. We explore the use of a prototypical network to evaluate four classes of generated emotional audio with this in mind. We first extract spectrogram images from WAVEGAN generated audio and other audio augmentation approaches, comparing similarity to the class prototype and diversity within the embedding space. Furthermore, we augment the source training set with each augmentation type and perform a classification to explore the generated audio plausibility. Results suggest that quality and diversity can be quantitatively observed with this approach. In the chosen context, we see that WAVEGAN generated data is recognisable as a source data class (F₁-score 43.6%), and the samples add similar diversity as unseen source data. This result leads to more plausible data for augmentation of the source training set — achieving up to 63.9% F₁ which is a 3.5% improvement over the source data baseline.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tsukasa Yoshinaga|AUTHOR Tsukasa Yoshinaga]]^^1^^
, [[Kohei Tada|AUTHOR Kohei Tada]]^^1^^
, [[Kazunori Nozaki|AUTHOR Kazunori Nozaki]]^^2^^
, [[Akiyoshi Iida|AUTHOR Akiyoshi Iida]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Toyohashi Tech, Japan; ^^2^^Osaka University Dental Hospital, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3166–3170
</span></p></div>
<div class="cpabstractcardabstract"><p>To examine the effects of inclined incisors on the phonation of [s], a simplified vocal tract model is proposed, and the acoustic characteristics with different maxillary incisor angles are predicted by the model. As a control model, a realistic vocal tract replica of [s] was constructed from medical images, and the angle of the maxillary incisor was changed from the original position up to 30°. The simplified model was constructed with a rectangular flow channel using the average dimensions of the vocal tracts for five Japanese subjects. Both geometries were set in an anechoic chamber, and sounds generated from the geometries were recorded with a microphone. The results showed that amplitudes of the sound generated by the realistic geometry were decreased by increasing the incisor angle, and this tendency agreed well with the simplified model. Moreover, the slope value of the decrease in overall pressure levels estimated by the model was consistent with that of the realistic geometry, indicating the capability of estimating the effects of inclined incisors with dental prostheses on the production of [s] by using the simplified model.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Takayuki Arai|AUTHOR Takayuki Arai]]
</p><p class="cpabstractcardaffiliationlist">Sophia University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3171–3175
<a href="./IS2021/MEDIA/0449" class="externallinkbutton" target="_blank">{{$:/causal/ZIP Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>Due to the COVID-19 pandemic, visualizing the airstream of human breath during speech production has become extremely important from the viewpoint of preventing infection. In addition, visualizing droplets and the larger drops expelled when we speak consonantal sounds may help for the same reason. One visualization technique is to pass a laser sheet through the droplet cloud produced by a human speaker. However, the laser poses certain health risks for human beings. Therefore, we developed an alternative method to passing a laser against a human body in which we utilize physical models of the human vocal tract. First, we tested a head-shaped model with a lung model from our previous study to visualize the exhaled breath during vowel production (with and without a mask). Then, we implemented an extended version of the anatomical-type vocal-tract model introduced in our previous study. With this newly developed model, lips are made of the same flexible material that was used to form the tongue part in the previous model. We also attached these lips to another previous model for producing sounds including /b/. Finally, the lip models were tested to visualize the droplet cloud including expelled drops present while producing a bilabial plosive sound.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ryo Tanji|AUTHOR Ryo Tanji]], [[Hidefumi Ohmura|AUTHOR Hidefumi Ohmura]], [[Kouichi Katsurada|AUTHOR Kouichi Katsurada]]
</p><p class="cpabstractcardaffiliationlist">Tokyo University of Science, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3176–3180
</span></p></div>
<div class="cpabstractcardabstract"><p>We herein propose a deep neural network-based model for articulatory-to-acoustic conversion from real-time MRI data. Although rtMRI, which can record entire articulatory organs with a high resolution, has an advantage in articulatory-to-acoustic conversion, it has a relatively low sampling rate. To address this, we incorporated the super-resolution technique in the temporal dimension with a transposed convolution. With the use of transposed convolution, the resolution can be increased by applying the inversion process of resolution reduction of a standard CNN. To evaluate the performance on the datasets with different temporal resolutions, we conducted experiments using two datasets: USC-TIMIT and Japanese rtMRI dataset. Results of the experiments performed using mel-cepstrum distortion and PESQ showed that transposed convolution is effective for generating accurate acoustic features. We also confirmed that increasing the magnification of the super-resolution leads to an improvement in the PESQ score.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Rafia Inaam|AUTHOR Rafia Inaam]]^^1^^
, [[Tsukasa Yoshinaga|AUTHOR Tsukasa Yoshinaga]]^^1^^
, [[Takayuki Arai|AUTHOR Takayuki Arai]]^^2^^
, [[Hiroshi Yokoyama|AUTHOR Hiroshi Yokoyama]]^^1^^
, [[Akiyoshi Iida|AUTHOR Akiyoshi Iida]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Toyohashi Tech, Japan; ^^2^^Sophia University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3181–3185
</span></p></div>
<div class="cpabstractcardabstract"><p>The sound generated by a reed-type artificial vocal fold was predicted by a one-mass modeling and numerical flow simulation to examine the sound generation mechanisms of the artificial vocal fold. For the one-mass modeling, the reed oscillation was modeled with an equivalent spring constant, and the flow rate was estimated by Bernoulli’s equation. For the flow simulation, the flow and acoustic fields were predicted with compressible Navier-Stokes Equations, while the reed oscillation was calculated by a one-dimensional beam equation. The experimentation was conducted by measuring the sound of an artificial vocal fold in an anechoic chamber. The results of the acoustic measurement showed that the sound amplitudes in the flow simulation agreed well with the experiment, while the one-mass model underestimated the amplitudes in a higher frequency range. Reed displacement and flow rate comparisons indicated that the flow retention in the reed retainer caused the asymmetry in the flow rate waveform, hence producing larger amplitudes for the flow simulation in the higher frequency range. The flow simulation enabled to predict this flow retention which cannot be modeled in the one-dimensional one-mass model, and it is anticipated to apply the flow simulation to develop a better artificial vocal fold.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Raphael Werner|AUTHOR Raphael Werner]]^^1^^
, [[Susanne Fuchs|AUTHOR Susanne Fuchs]]^^2^^
, [[Jürgen Trouvain|AUTHOR Jürgen Trouvain]]^^1^^
, [[Bernd Möbius|AUTHOR Bernd Möbius]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universität des Saarlandes, Germany; ^^2^^ZAS, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3186–3190
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper examines the acoustic properties of breath noises in speech pauses in relation to similar speech segments and with regard to their inhalation speed. We measured intensity, center of gravity, and formants, as well as kinematic data (via Respiratory Inductance Plethysmography) for inhalations, aspirations of stops, glottal fricatives, and schwa vowels. We find that inhalations within speech are louder than those initiating speech, share spectral properties (center of gravity) with the aspiration phase of /k/-realizations, and generally involve a more open vocal tract (higher F1) than schwa-realizations. Intensity, center of gravity, and F1 are found to be positively correlated to inhalation speed. Overall, we conclude that jaw openness and inhalation speed are major contributors to inhalation noises in speech pauses.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Anqi Xu|AUTHOR Anqi Xu]]^^1^^
, [[Daniel van Niekerk|AUTHOR Daniel van Niekerk]]^^1^^
, [[Branislav Gerazov|AUTHOR Branislav Gerazov]]^^2^^
, [[Paul Konstantin Krug|AUTHOR Paul Konstantin Krug]]^^3^^
, [[Santitham Prom-on|AUTHOR Santitham Prom-on]]^^4^^
, [[Peter Birkholz|AUTHOR Peter Birkholz]]^^3^^
, [[Yi Xu|AUTHOR Yi Xu]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University College London, UK; ^^2^^UKiM, Macedonia; ^^3^^Technische Universität Dresden, Germany; ^^4^^KMUTT, Thailand</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3191–3195
</span></p></div>
<div class="cpabstractcardabstract"><p>While the acoustic vowel space has been extensively studied in previous research, little is known about the high-dimensional articulatory space of vowels. The articulatory imaging techniques are limited to tracking only a few key articulators, leaving the rest of the articulators unmonitored. In the present study, we attempted to develop a detailed articulatory space obtained by training a 3D articulatory synthesizer to learn eleven British English vowels. An analysis-by-synthesis strategy was used to acoustically optimize vocal tract parameters that represent twenty articulatory dimensions. The results show that tongue height and retraction, larynx location and lip roundness are the most perceptually distinctive articulatory dimensions. Yet, even for these dimensions, there is a fair amount of articulatory overlap between vowels, unlike the fine-grained acoustic space. This method opens up the possibility of using modelling to investigate the link between speech production and perception.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mikey Elmers|AUTHOR Mikey Elmers]], [[Raphael Werner|AUTHOR Raphael Werner]], [[Beeke Muhlack|AUTHOR Beeke Muhlack]], [[Bernd Möbius|AUTHOR Bernd Möbius]], [[Jürgen Trouvain|AUTHOR Jürgen Trouvain]]
</p><p class="cpabstractcardaffiliationlist">Universität des Saarlandes, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3196–3200
</span></p></div>
<div class="cpabstractcardabstract"><p>This study revisits Whalen et al. (1995, JASA) by evaluating English speaking participants in a perception experiment to determine if their recollection is affected by including breath noises in sentences generated by a speech synthesis system. Whalen found an improvement in recollection for sentences that were preceded by a breath noise compared to sentences without one. While Whalen and colleagues used formant synthesis to render the English sentences, we use a modern concatenative synthesis system. The present study uses inhalations of three different lengths: 0 ms (no breath noise), 300 ms (short breath noise), and 600 ms (long breath noise). Our results are consistent with Whalen and colleagues for the 600 ms condition, but not for the 300 ms condition, indicating that not all inhalations improved recollection. The present study also found a significant effect for sentence length, illustrating that shorter sentences have higher accuracy for recollection than longer sentences. Overall, the present study indicates that respiratory sounds are important to the recollection of synthesized speech and that researchers should focus on longer and more complex types of speech, such as paragraphs or dialogues, for future studies.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Taijing Chen|AUTHOR Taijing Chen]]^^1^^
, [[Adam Lammert|AUTHOR Adam Lammert]]^^2^^
, [[Benjamin Parrell|AUTHOR Benjamin Parrell]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^UW–Madison, USA; ^^2^^Worcester Polytechnic Institute, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3201–3205
</span></p></div>
<div class="cpabstractcardabstract"><p>When speakers are exposed to auditory feedback perturbations of a particular vowel, they not only adapt their productions of that vowel but also transfer this change to other, untrained, vowels. However, current models of speech sensorimotor adaptation, which rely on changes in the feedforward control of specific speech units, are unable to account for this type of generalization. Here, we developed a neural-network based model to simulate speech sensorimotor adaptation, and assess whether updates to internal control models can account for observed patterns of generalization. Based on a dataset generated from the Maeda plant, we trained two independent neural networks: 1) an inverse model, which generates motor commands for desired acoustic outcomes and 2) a forward model, which maps motor commands to acoustic outcomes (prediction). When vowel formant perturbations were given, both forward and inverse models were updated when there was a mismatch between predicted and perceived output. Our results replicate behavioral experiments: the model altered its production to counteract the perturbation, and showed gradient transfer of this learning dependent on acoustic distance between training and test vowels. These results suggest that updating paired forward and inverse models provides a plausible account for sensorimotor adaptation in speech.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hideki Kawahara|AUTHOR Hideki Kawahara]]^^1^^
, [[Toshie Matsui|AUTHOR Toshie Matsui]]^^2^^
, [[Kohei Yatabe|AUTHOR Kohei Yatabe]]^^3^^
, [[Ken-Ichi Sakakibara|AUTHOR Ken-Ichi Sakakibara]]^^4^^
, [[Minoru Tsuzaki|AUTHOR Minoru Tsuzaki]]^^5^^
, [[Masanori Morise|AUTHOR Masanori Morise]]^^6^^
, [[Toshio Irino|AUTHOR Toshio Irino]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Wakayama University, Japan; ^^2^^Toyohashi Tech, Japan; ^^3^^Waseda University, Japan; ^^4^^HSUH, Japan; ^^5^^KCUA, Japan; ^^6^^Meiji University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3206–3210
<a href="./IS2021/MEDIA/2073" class="externallinkbutton" target="_blank">{{$:/causal/ZIP Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>Auditory feedback plays an essential role in the regulation of the fundamental frequency of voiced sounds. The fundamental frequency also responds to auditory stimulation other than the speaker’s voice. We propose to use this response of the fundamental frequency of sustained vowels to frequency-modulated test signals for investigating involuntary control of voice pitch. This involuntary response is difficult to identify and isolate by the conventional paradigm, which uses step-shaped pitch perturbation. We recently developed a versatile measurement method using a mixture of orthogonal sequences made from a set of extended time-stretched pulses (TSP). In this article, we extended our approach and designed a set of test signals using the mixture to modulate the fundamental frequency of artificial signals. For testing the response, the experimenter presents the modulated signal aurally while the subject is voicing sustained vowels. We developed a tool for conducting this test quickly and interactively. We make the tool available as an open-source and also provide executable GUI-based applications. Preliminary tests revealed that the proposed method consistently provides compensatory responses with about 100 ms latency, representing involuntary control. Finally, we discuss future applications of the proposed method for objective and non-invasive auditory response measurements.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Chenyu You|AUTHOR Chenyu You]]^^1^^
, [[Nuo Chen|AUTHOR Nuo Chen]]^^2^^
, [[Yuexian Zou|AUTHOR Yuexian Zou]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Yale University, USA; ^^2^^Peking University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3211–3215
</span></p></div>
<div class="cpabstractcardabstract"><p>Spoken conversational question answering (SCQA) requires machines to model the flow of multi-turn conversation given the speech utterances and text corpora. Different from traditional text question answering (QA) tasks, SCQA involves audio signal processing, passage comprehension, and contextual understanding. However, ASR systems introduce unexpected noisy signals to the transcriptions, which result in performance degradation on SCQA. To overcome the problem, we propose CADNet, a novel contextualized attention-based distillation approach, which applies both cross-attention and self-attention to obtain ASR-robust contextualized embedding representations of the passage and dialogue history for performance improvements. We also introduce the spoken conventional knowledge distillation framework to distill the ASR-robust knowledge from the estimated probabilities of the //teacher// model to the //student//. We conduct extensive experiments on the Spoken-CoQA dataset and demonstrate that our approach achieves remarkable performance in this task.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Wenying Duan|AUTHOR Wenying Duan]]^^1^^
, [[Xiaoxi He|AUTHOR Xiaoxi He]]^^2^^
, [[Zimu Zhou|AUTHOR Zimu Zhou]]^^3^^
, [[Hong Rao|AUTHOR Hong Rao]]^^1^^
, [[Lothar Thiele|AUTHOR Lothar Thiele]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Nanchang University, China; ^^2^^ETH Zürich, Switzerland; ^^3^^Singapore Management University, Singapore</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3216–3220
</span></p></div>
<div class="cpabstractcardabstract"><p>Pre-trained language models have been widely adopted as backbones in various natural language processing tasks. However, existing pre-trained language models ignore the descriptive meta-information in the text such as the distinction between the title and the mainbody, leading to over-weighted attention to insignificant text. In this paper, we propose a hypernetwork-based architecture to model the descriptive meta-information and integrate it into pre-trained language models. Evaluations on three natural language processing tasks show that our method notably improves the performance of pre-trained language models and achieves the state-of-the-art results on keyphrase extraction.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mahdin Rohmatillah|AUTHOR Mahdin Rohmatillah]], [[Jen-Tzung Chien|AUTHOR Jen-Tzung Chien]]
</p><p class="cpabstractcardaffiliationlist">NYCU, Taiwan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3221–3225
</span></p></div>
<div class="cpabstractcardabstract"><p>In the multi-domain dialogue system, dialog policy plays an important role since it determines the suitable actions based on the user’s goals. However, in many recent works, most of the dialogue optimizations, especially that use reinforcement learning (RL) methods, do not perform well. The main problem is that the initial step of optimization that involves the behavior cloning (BC) methods suffer from the causal confusion problem, which means that the agent misidentifies true cause of an expert action in current state. This paper proposes a novel method to improve the performance of BC method in dialogue system. Instead of only predicting correct action given a state from dataset, we introduce the auxiliary tasks to predict both of current belief state and recent user utterance in order to reduce causal confusion of the expert action in the dataset since those features are important in every dialog turn. Experiments on ConvLab-2 shows that, by using this method, all of RL based optimizations are improved. Furthermore, the agent based on the proximal policy optimization shows very significant improvement with the help of the proposed BC agent weights both in policy evaluation as well as in end-to-end system evaluation.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shinya Fujie|AUTHOR Shinya Fujie]]^^1^^
, [[Hayato Katayama|AUTHOR Hayato Katayama]]^^2^^
, [[Jin Sakuma|AUTHOR Jin Sakuma]]^^2^^
, [[Tetsunori Kobayashi|AUTHOR Tetsunori Kobayashi]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Chiba Institute of Technology, Japan; ^^2^^Waseda University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3226–3230
</span></p></div>
<div class="cpabstractcardabstract"><p>A brand new neural network based precise timing generation framework, named the Timing Generating Network (TGN), is proposed and applied to turn-taking timing decision problems. Although turn-taking problems have conventionally been formalized as users’ end-of-turn detection, this approach cannot estimate the precise timing at which a spoken dialogue system should take a turn to start its utterance. Since several conventional approaches estimate precise timings but the estimation executed only at/after the end of preceding user’s utterance, they highly depend on the accuracy of intermediate decision modules, such as voice activity detection, etc. The advantages of the TGN are that its parameters are tunable via error backpropagation as it is described in a differentiable form as a whole, and it is free from inter-module error propagation as it has no deterministic intermediate modules. The experimental results show that the proposed system is superior to a conventional turn-taking system that adopts the hard decisions on user’s voice activity detection and response time estimation.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kehan Chen|AUTHOR Kehan Chen]], [[Zezhong Li|AUTHOR Zezhong Li]], [[Suyang Dai|AUTHOR Suyang Dai]], [[Wei Zhou|AUTHOR Wei Zhou]], [[Haiqing Chen|AUTHOR Haiqing Chen]]
</p><p class="cpabstractcardaffiliationlist">Alibaba, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3231–3235
</span></p></div>
<div class="cpabstractcardabstract"><p>Conducting natural turn-taking behavior takes a crucial part in the user experience of modern spoken dialogue systems. One way to build such system is to learn those behaviors from real-world human-to-human dialogues, which have the most diverse and fine-grained turn-taking actions than any manual constructed sessions.
In this paper, we propose a Dataset — FTAD which could be used to learn turn-taking policies directly from human. First, we design an annotation mechanism to transform existing human-to-human dialogue session into structural data with most fine-grained turn-taking actions reserved. Then we explored a set of supervised learning tasks on it, showing the challenge and potential of learning complete fine-grained turn-taking policies based on such data.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mukuntha Narayanan Sundararaman|AUTHOR Mukuntha Narayanan Sundararaman]], [[Ayush Kumar|AUTHOR Ayush Kumar]], [[Jithendra Vepa|AUTHOR Jithendra Vepa]]
</p><p class="cpabstractcardaffiliationlist">Observe.AI, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3236–3240
</span></p></div>
<div class="cpabstractcardabstract"><p>Recent years have witnessed significant improvement in ASR systems to recognize spoken utterances. However, it is still a challenging task for noisy and out-of-domain data, where ASR errors are prevalent in the transcribed text. These errors significantly degrade the performance of downstream tasks such as intent and sentiment detection. In this work, we propose a BERT-style language model, referred to as ''PhonemeBERT'' that learns a joint language model with phoneme sequence and ASR transcript to learn phonetic-aware representations that are robust to ASR errors. We show that PhonemeBERT leverages phoneme sequences as additional features that outperform word-only models on downstream tasks. We evaluate our approach extensively by generating noisy data for three benchmark datasets — Stanford Sentiment Treebank, TREC and ATIS for sentiment, question and intent classification tasks respectively in addition to a real-life sentiment dataset. The results of the proposed approach beats the state-of-the-art baselines comprehensively on each dataset. Additionally, we show that PhonemeBERT can also be utilized as a pre-trained encoder in a low-resource setup where we only have ASR-transcripts for the downstream tasks.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hongyin Luo|AUTHOR Hongyin Luo]]^^1^^
, [[James Glass|AUTHOR James Glass]]^^1^^
, [[Garima Lalwani|AUTHOR Garima Lalwani]]^^2^^
, [[Yi Zhang|AUTHOR Yi Zhang]]^^2^^
, [[Shang-Wen Li|AUTHOR Shang-Wen Li]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^MIT, USA; ^^2^^Amazon, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3241–3245
</span></p></div>
<div class="cpabstractcardabstract"><p>Neural dialog response selection models infer by scoring each candidate response given the dialog context, and the cross-encoder method yields state-of-the-art (SOTA) results for the task. In the method, the candidate scores are computed by feeding the output embedding of the first token in the input sequence, which is a concatenation of response and context, to a linear layer for making prediction. However, the embeddings of the other tokens in the sequence are not modeled explicitly, and inferring the candidate scores only with the first token makes the result not interpretable. To address the challenge, we propose a Retrieval-EXtraction encoder (REX) for dialog response selection. We augment the existing first-token- or sequence- based retrieval approach with an extraction loss. The loss provides gradient signal from each token during training and allows the model to learn token-level evidence and to select response based on important keywords. We show that REX achieves the new SOTA in the dialog response selection task. Also, our qualitative analysis suggests that REX highlights evidence it infers selections from and makes the inference result interpretable.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ashish Shenoy|AUTHOR Ashish Shenoy]], [[Sravan Bodapati|AUTHOR Sravan Bodapati]], [[Monica Sunkara|AUTHOR Monica Sunkara]], [[Srikanth Ronanki|AUTHOR Srikanth Ronanki]], [[Katrin Kirchhoff|AUTHOR Katrin Kirchhoff]]
</p><p class="cpabstractcardaffiliationlist">Amazon, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 3246–3250
</span></p></div>
<div class="cpabstractcardabstract"><p>Neural Language Models (NLM), when trained and evaluated with context spanning multiple utterances, have been shown to consistently outperform both conventional n-gram language models and NLMs that use limited context. In this paper, we investigate various techniques to incorporate turn based context history into both recurrent (LSTM) and Transformer-XL based NLMs. For recurrent based NLMs, we explore context carry over mechanism and feature based augmentation, where we incorporate other forms of contextual information such as bot response and system dialogue acts as classified by a Natural Language Understanding (NLU) model. To mitigate the sharp nearby, fuzzy far away problem with contextual NLM, we propose the use of attention layer over lexical metadata to improve feature based augmentation. Additionally, we adapt our contextual NLM towards user provided on-the-fly speech patterns by leveraging encodings from a large pre-trained masked language model and performing fusion with a Transformer-XL based NLM. We test our proposed models using N-best rescoring of ASR hypotheses of task-oriented dialogues and also evaluate on downstream NLU tasks such as intent classification and slot labeling. The best performing model shows a relative WER between 1.6% and 9.1% and a slot labeling F1 score improvement of 4% over non-contextual baselines.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Karen Livescu|AUTHOR Karen Livescu]]
</p><p class="cpabstractcardaffiliationlist">TTIC, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} ////
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech is usually recorded as an acoustic signal, but it often appears in context with other signals. In addition to the acoustic signal, we may have available a corresponding visual scene, the video of the speaker, physiological signals such as the speaker’s movements or neural recordings, or other related signals. It is often possible to learn a better speech model or representation by considering the context provided by these additional signals, or to learn with less training data. Typical approaches to training from multi-modal data are based on the idea that models or representations of each modality should be in some sense predictive of the other modalities. Multi-modal approaches can also take advantage of the fact that the sources of noise or nuisance variables are different in different measurement modalities, so an additional (non-acoustic) modality can help learn a speech representation that suppresses such noise. This talk will survey several lines of work in this area, both older and newer. It will cover some basic techniques from machine learning and statistics, as well as specific models and applications for speech.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Chau Luu|AUTHOR Chau Luu]], [[Peter Bell|AUTHOR Peter Bell]], [[Steve Renals|AUTHOR Steve Renals]]
</p><p class="cpabstractcardaffiliationlist">University of Edinburgh, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 491–495
</span></p></div>
<div class="cpabstractcardabstract"><p>Deep speaker embeddings have become the leading method for encoding speaker identity in speaker recognition tasks. The embedding space should ideally capture the variations between all possible speakers, encoding the multiple acoustic aspects that make up a speaker’s identity, whilst being robust to non-speaker acoustic variation. Deep speaker embeddings are normally trained discriminatively, predicting speaker identity labels on the training data. We hypothesise that additionally predicting speaker-related auxiliary variables — such as age and nationality — may yield representations that are better able to generalise to unseen speakers. We propose a framework for making use of auxiliary label information, even when it is only available for speech corpora mismatched to the target application. On a test set of US Supreme Court recordings, we show that by leveraging two additional forms of speaker attribute information derived respectively from the matched training data, and VoxCeleb corpus, we improve the performance of our deep speaker embeddings for both verification and diarization tasks, achieving a relative improvement of 26.2% in DER and 6.7% in EER compared to baselines using speaker labels only. This improvement is obtained despite the auxiliary labels having been scraped from the web and being potentially noisy.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Magdalena Rybicka|AUTHOR Magdalena Rybicka]]^^1^^
, [[Jesús Villalba|AUTHOR Jesús Villalba]]^^2^^
, [[Piotr Żelasko|AUTHOR Piotr Żelasko]]^^2^^
, [[Najim Dehak|AUTHOR Najim Dehak]]^^2^^
, [[Konrad Kowalczyk|AUTHOR Konrad Kowalczyk]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^AGH UST, Poland; ^^2^^Johns Hopkins University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 496–500
</span></p></div>
<div class="cpabstractcardabstract"><p>Modeling speaker embeddings using deep neural networks is currently state-of-the-art in speaker recognition. Recently, ResNet-based structures have gained a broader interest, slowly becoming the baseline along with the deep-rooted Time Delay Neural Network based models. However, the scale-decreased design of the ResNet models may not preserve all of the speaker information. In this paper, we investigate the SpineNet structure with scale-permuted design to tackle this problem, in which feature size either increases or decreases depending on the processing stage in the network. Apart from the presented adjustments of the SpineNet model for the speaker recognition task, we also incorporate popular modules dedicated to the residual-like structures, namely the Res2Net and Squeeze-and-Excitation blocks, and modify them to work effectively in the presented neural network architectures. The final proposed model, i.e., the SpineNet architecture with Res2Net and Time-Squeeze-and-Excitation blocks, achieves remarkable Equal Error Rates of 0.99 and 0.92 for the Extended and Original trial lists of the well-known VoxCeleb1 dataset.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Themos Stafylakis|AUTHOR Themos Stafylakis]]^^1^^
, [[Johan Rohdin|AUTHOR Johan Rohdin]]^^1^^
, [[Lukáš Burget|AUTHOR Lukáš Burget]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Omilia, Greece; ^^2^^Brno University of Technology, Czechia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 501–505
</span></p></div>
<div class="cpabstractcardabstract"><p>Speaker embeddings extracted with deep 2D convolutional neural networks are typically modeled as projections of first and second order statistics of channel-frequency pairs onto a linear layer, using either average or attentive pooling along the time axis. In this paper we examine an alternative pooling method, where pairwise correlations between channels for given frequencies are used as statistics. The method is inspired by style-transfer methods in computer vision, where the style of an image, modeled by the matrix of channel-wise correlations, is transferred to another image, in order to produce a new image having the style of the first and the content of the second. By drawing analogies between image style and speaker characteristics, and between image content and phonetic sequence, we explore the use of such channel-wise correlations features to train a ResNet architecture in an end-to-end fashion. Our experiments on VoxCeleb demonstrate the effectiveness of the proposed pooling method in speaker recognition.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Weipeng He|AUTHOR Weipeng He]], [[Petr Motlicek|AUTHOR Petr Motlicek]], [[Jean-Marc Odobez|AUTHOR Jean-Marc Odobez]]
</p><p class="cpabstractcardaffiliationlist">Idiap Research Institute, Switzerland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 506–510
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper introduces a novel approach for extracting speaker embeddings from audio mixtures of multiple overlapping voices. This approach is based on a multi-task neural network. The network first extracts a latent feature for each direction. This feature is used for detecting sound sources as well as identifying speakers. In contrast to traditional approaches, the proposed method does not rely on explicit sound source separation. The neural network model learns from data to extract the most suitable features of the sounds at different directions. The experiments using audio recordings of overlapping sound sources show that the proposed approach outperforms a beamforming-based traditional method.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Junyi Peng|AUTHOR Junyi Peng]]^^1^^
, [[Xiaoyang Qu|AUTHOR Xiaoyang Qu]]^^1^^
, [[Jianzong Wang|AUTHOR Jianzong Wang]]^^1^^
, [[Rongzhi Gu|AUTHOR Rongzhi Gu]]^^2^^
, [[Jing Xiao|AUTHOR Jing Xiao]]^^1^^
, [[Lukáš Burget|AUTHOR Lukáš Burget]]^^3^^
, [[Jan Černocký|AUTHOR Jan Černocký]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Ping An Technology, China; ^^2^^Peking University, China; ^^3^^Brno University of Technology, Czechia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 511–515
</span></p></div>
<div class="cpabstractcardabstract"><p>Recently, extracting speaker embedding directly from raw waveform has drawn increasing attention in the field of speaker verification. Parametric real-valued filters in the first convolutional layer are learned to transform the waveform into time-frequency representations. However, these methods only focus on the magnitude spectrum and the poor interpretability of the learned filters limits the performance. In this paper, we propose a complex speaker embedding extractor, named ICSpk, with higher interpretability and fewer parameters. Specifically, at first, to quantify the speaker-related frequency response of waveform, we modify the original short-term Fourier transform filters into a family of complex exponential filters, named interpretable complex (IC) filters. Each IC filter is confined by a complex exponential filter parameterized by frequency. Then, a deep complex-valued speaker embedding extractor is designed to operate on the complex-valued output of IC filters. The proposed ICSpk is evaluated on VoxCeleb and CNCeleb databases. Experimental results demonstrate the IC filters-based system exhibits a significant improvement over the complex spectrogram based systems. Furthermore, the proposed ICSpk outperforms existing raw waveform based systems by a large margin.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xiao Xiao|AUTHOR Xiao Xiao]]^^1^^
, [[Nicolas Audibert|AUTHOR Nicolas Audibert]]^^1^^
, [[Grégoire Locqueville|AUTHOR Grégoire Locqueville]]^^2^^
, [[Christophe d’Alessandro|AUTHOR Christophe d’Alessandro]]^^2^^
, [[Barbara Kuhnert|AUTHOR Barbara Kuhnert]]^^1^^
, [[Claire Pillot-Loiseau|AUTHOR Claire Pillot-Loiseau]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^LPP (UMR 7018), France; ^^2^^∂’Alembert (UMR 7190), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 516–520
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper introduces an interface that enables the real-time gestural control of intonation in phrases produced by a vocal synthesizer. The melody and timing of a target phrase can be modified by tracing melodic contours on the touch-screen of a mobile tablet. Envisioning this interface as a means for non-native speakers to practice the intonation of a foreign language, we present a pilot study where native and non-native speakers imitated the pronunciation of French phrases using their voice and the interface, with a visual guide and without. Comparison of resulting F0 curves against the reference contour and a preliminary perceptual assessment of synthesized utterances suggest that for both non-native and native speakers, imitation with the help of a visual guide is comparable in accuracy to vocal imitation, and that timing control was a source of difficulty.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Aleese Block|AUTHOR Aleese Block]], [[Michelle Cohn|AUTHOR Michelle Cohn]], [[Georgia Zellou|AUTHOR Georgia Zellou]]
</p><p class="cpabstractcardaffiliationlist">University of California at Davis, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 521–525
</span></p></div>
<div class="cpabstractcardabstract"><p>The current study explores whether perception of coarticulatory vowel nasalization differs by speaker age (adult vs. child) and type of voice (naturally produced vs. synthetic speech). Listeners completed a 4IAX discrimination task between pairs containing acoustically identical (both nasal or oral) vowels and acoustically distinct (one oral, one nasal) vowels. Vowels occurred in either the same consonant contexts or different contexts across pairs. Listeners completed the experiment with either naturally produced speech or text-to-speech (TTS). For same-context trials, listeners were better at discriminating between oral and nasal vowels for child speech in the synthetic voices but adult speech in the natural voices. Meanwhile, in different-context trials, listeners were less able to discriminate, indicating more perceptual compensation for synthetic voices. There was no difference in different-context discrimination across talker ages, indicating that listeners did not compensate differently if the speaker was a child or adult. Findings are relevant for models of compensation, computer personification theories, and speaker-indexical perception accounts.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mohammad Jalilpour Monesi|AUTHOR Mohammad Jalilpour Monesi]], [[Bernd Accou|AUTHOR Bernd Accou]], [[Tom Francart|AUTHOR Tom Francart]], [[Hugo Van hamme|AUTHOR Hugo Van hamme]]
</p><p class="cpabstractcardaffiliationlist">KU Leuven, Belgium</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 526–530
</span></p></div>
<div class="cpabstractcardabstract"><p>Decoding the speech signal that a person is listening to from the human brain via electroencephalography (EEG) can help us understand how our auditory system works. Linear models have been used to reconstruct the EEG from speech or vice versa. Recently, Artificial Neural Networks (ANNs) such as Convolutional Neural Network (CNN) and Long Short-Term Memory (LSTM) based architectures have outperformed linear models in modeling the relation between EEG and speech. Before attempting to use these models in real-world applications such as hearing tests or (second) language comprehension assessment we need to know what level of speech information is being utilized by these models. In this study, we aim to analyze the performance of an LSTM-based model using different levels of speech features. The task of the model is to determine which of two given speech segments is matched with the recorded EEG. We used low- and high-level speech features including: envelope, mel spectrogram, voice activity, phoneme identity, and word embedding. Our results suggest that the model exploits information about silences, intensity, and broad phonetic classes from the EEG. Furthermore, the mel spectrogram, which contains all this information, yields the highest accuracy (84%) among all the features.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Louis ten Bosch|AUTHOR Louis ten Bosch]], [[Lou Boves|AUTHOR Lou Boves]]
</p><p class="cpabstractcardaffiliationlist">Radboud Universiteit, The Netherlands</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 531–535
</span></p></div>
<div class="cpabstractcardabstract"><p>We discuss the role of entropy of the set of unfolding word candidates in the context of DIANA, a computational model of human auditory speech comprehension. DIANA consists of three major interacting components: Activation, Decision and Execution. The Activation component computes activations of word candidates that change over time as a function of the unfolding audio input. The resulting set of word candidate activations can be associated with an entropy that is related to difficulty of the decision when one of these candidates must be selected at time T. The paper presents the close relation between entropy measures and the between-word competition during the unfolding of the auditory stimuli, and at the end of the stimuli if no decision could be made before stimulus offset. We present a way for computing the entropy that takes into account linguistic-phonetic constraints that play a role in speech comprehension and in lexical decision experiments. Using the BALDEY data set and linear mixed effects regression models for RT, we show that entropy measures explain differences between RTs of words with different morphological structure.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Louis ten Bosch|AUTHOR Louis ten Bosch]], [[Lou Boves|AUTHOR Lou Boves]]
</p><p class="cpabstractcardaffiliationlist">Radboud Universiteit, The Netherlands</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 536–540
</span></p></div>
<div class="cpabstractcardabstract"><p>We investigate reaction time (RT) sequences obtained from lexical decision experiments by applying Time-to-Event modelling (Survival Analysis). This is a branch of statistics for analyzing the expected duration until one or more events happen, associated with a set of potential ‘causes’ (in our case the decision for a ‘word’ judgment as a function of conventional predictors such as lexical frequency, stimulus duration, reduction, etc.). In this analysis, RTs are considered a by-product of an (unobservable) cumulative incidence function that results in a decision when it exceeds a certain threshold.
We show that Survival Analysis can be effectively used to narrow the gap between data-oriented models and process-oriented models for RT data from lexical decision experiments. Results of this analysis technique are presented for two different RT data sets. The analysis reveals time-varying patterns of predictors that reflect the differences in cognitive processes during the presentation of auditory stimuli.</p></div>
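To make the threshold idea above concrete, the following toy simulation treats the RT as the first time a noisy cumulative incidence process crosses a fixed threshold, with a faster accumulation rate standing in for a higher-frequency word. The rates, threshold, and noise level are invented for illustration and are not the fitted survival models reported in the paper.

```python
# Minimal sketch: RT as the first threshold crossing of a noisy cumulative incidence.
import numpy as np

def simulated_rt(rate, threshold=1.0, dt=0.001, max_t=3.0, noise=0.05, seed=0):
    """Accumulate evidence at `rate` per second (plus noise) until `threshold` is hit."""
    rng = np.random.default_rng(seed)
    incidence, t = 0.0, 0.0
    while incidence < threshold and t < max_t:
        incidence += rate * dt + noise * np.sqrt(dt) * rng.normal()
        t += dt
    return t                                     # seconds until the decision fires

# A higher-frequency word is given a faster accumulation rate in this toy setup.
print("high-frequency word RT:", round(simulated_rt(rate=2.0), 3))
print("low-frequency word RT: ", round(simulated_rt(rate=0.8), 3))
```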
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sophie Brand|AUTHOR Sophie Brand]]^^1^^
, [[Kimberley Mulder|AUTHOR Kimberley Mulder]]^^2^^
, [[Louis ten Bosch|AUTHOR Louis ten Bosch]]^^3^^
, [[Lou Boves|AUTHOR Lou Boves]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Zuyd Hogeschool, The Netherlands; ^^2^^Universiteit Utrecht, The Netherlands; ^^3^^Radboud Universiteit, The Netherlands</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 541–545
</span></p></div>
<div class="cpabstractcardabstract"><p>We investigate how the role of predictors in models of reaction times in auditory lexical decision experiments depends on the operational definition of RT: whether the time is measured from stimulus onset or from stimulus offset. In a large body of literature, RTs are measured from the onset of the stimulus to the start of the response (often a button press or an oral response). The rationale behind this choice is that information about the stimulus becomes available to the listener starting at onset. Alternatively, the RT from offset is less dependent on stimulus duration and is assumed to focus on those cognitive processes that play a role late(r) in the word and after word offset, when all information is available.
The paper presents RT-onset and RT-offset-based linear mixed effects models for three different lexical decision-based data sets and explains the significant differences between these models, showing to what extent both definitions of reaction time reveal different roles for predictors and how early and later contributions to the overall RT can be differentiated.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Gábor Kiss|AUTHOR Gábor Kiss]], [[Dávid Sztahó|AUTHOR Dávid Sztahó]], [[Miklós Gábriel Tulics|AUTHOR Miklós Gábriel Tulics]]
</p><p class="cpabstractcardaffiliationlist">BME, Hungary</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 956–957
</span></p></div>
<div class="cpabstractcardabstract"><p>In this Show&Tell presentation we demonstrate an application that is able to assess a voice sample according to three different voice disorders: depression, Parkinson’s disease and dysphonic speech. Affection probability of each disorder is analyzed along with their severity estimation. Although the acoustic models (support vector machine and regression models) are trained on Hungarian voice samples, English samples can also be utilized for assessment. The results are displayed by as pie chart for probabilities and separate severity scores. The input of the application is a read text with a fixed linguistic content. It is possible to load a pre-recorded voice sample or create a live recording. The developed system could evaluate a speaker’s voice sample, assisting medical staff.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Lenka Weingartová|AUTHOR Lenka Weingartová]], [[Veronika Volná|AUTHOR Veronika Volná]], [[Ewa Balejová|AUTHOR Ewa Balejová]]
</p><p class="cpabstractcardaffiliationlist">NEWTON Technologies, Czechia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 958–959
</span></p></div>
<div class="cpabstractcardabstract"><p>We present Beey, a newly developed web-based multimedia platform for producing Automatic Speech Recognition (ASR) and editing its output. In addition to ASR, Beey employs modules for speaker diarization and identification, text formatting, automatic punctuation insertion, subtitling, automatic translation, transcription of stream and more.
The platform and its development are focused on user experience and fast document creation. Our aim is to transfer research results in the field of speech recognition and signal processing into practice and enable Beey’s users to make their production processes faster and cheaper by minimizing human effort and costs.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Takayuki Arai|AUTHOR Takayuki Arai]]
</p><p class="cpabstractcardaffiliationlist">Sophia University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 960–961
</span></p></div>
<div class="cpabstractcardabstract"><p>Demonstrating vowel production with physical models of the human vocal tract is a part of intuitive education in speech science. The adult male vocal tract was most often used as a model in the past because of the limited availability of physical models, but discussions on different vocal tract sizes were ongoing. Therefore, we focused on downsizing the vocal-tract models in this study, especially the straight models. We reduced the cross-sectional area function for the sliding three-tube model (including the total length) to female adult and child sizes. Furthermore, we created fixed straight models of similar dimensions for the five Japanese vowels. We found that the intelligibility of each model was preserved as long as the ratios of the cross-sectional areas were maintained even if the cross-sections were less than the average human sizes. This indicates that we can reduce the cost of manufacturing the models, as cost is typically a barrier when the models are used for pedagogical purposes.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Maël Fabien|AUTHOR Maël Fabien]]^^1^^
, [[Shantipriya Parida|AUTHOR Shantipriya Parida]]^^1^^
, [[Petr Motlicek|AUTHOR Petr Motlicek]]^^1^^
, [[Dawei Zhu|AUTHOR Dawei Zhu]]^^2^^
, [[Aravind Krishnan|AUTHOR Aravind Krishnan]]^^2^^
, [[Hoang H. Nguyen|AUTHOR Hoang H. Nguyen]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Idiap Research Institute, Switzerland; ^^2^^Universität des Saarlandes, Germany; ^^3^^Leibniz Universität Hannover, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 962–964
</span></p></div>
<div class="cpabstractcardabstract"><p>Criminal investigations require manual intervention of several investigators and translators. However, the amount and the diversity of the data collected raises many challenges, and cross-border investigations against organized crime can quickly impossible to handle. We developed ROXANNE Research platform, an all-in-one platform which processes intercepted phone calls, runs state-of-the-art components such as speaker identification, automatic speech recognition or named entity detection, and builds a knowledge graph of the extracted information. Our aim for this work is to do a first step in the direction of an open research platform combining speech, text, and video processing algorithms with criminal network analysis for combating organized crime.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Alexandre Flucha|AUTHOR Alexandre Flucha]], [[Anthony Larcher|AUTHOR Anthony Larcher]], [[Ambuj Mehrish|AUTHOR Ambuj Mehrish]], [[Sylvain Meignier|AUTHOR Sylvain Meignier]], [[Florian Plaut|AUTHOR Florian Plaut]], [[Nicolas Poupon|AUTHOR Nicolas Poupon]], [[Yevhenii Prokopalo|AUTHOR Yevhenii Prokopalo]], [[Adrien Puertolas|AUTHOR Adrien Puertolas]], [[Meysam Shamsi|AUTHOR Meysam Shamsi]], [[Marie Tahon|AUTHOR Marie Tahon]]
</p><p class="cpabstractcardaffiliationlist">LIUM (EA 4023), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 965–966
</span></p></div>
<div class="cpabstractcardabstract"><p>We developed a human assisted speaker diarization platform that enables a human annotator to correct the output of any speaker diarization system by providing a graphical view of the diarization segmentation and clustering steps while guiding the human annotator to optimize the correction process and easily improve the resulting diarization.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yoo Rhee Oh|AUTHOR Yoo Rhee Oh]], [[Kiyoung Park|AUTHOR Kiyoung Park]]
</p><p class="cpabstractcardaffiliationlist">ETRI, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 967–968
</span></p></div>
<div class="cpabstractcardabstract"><p>This work is the first attempt to run streaming Transformer-based end-to-end speech recognition on embedded scale IoT systems. Recently there are many researches on online Transformer-based speech recognition such as a contextual block encoder [1] and a block-wise synchronous beam search [2]. Based on them we designed a novel fully-streaming end-to-end speech recognition method using Transformer. By efficiently utilizing a connectionist temporal classification network to detect symbol and sentence boundaries, we make decoder in streaming manner. Moreover, by using the optimized model structure, the proposed method could be deployed on a low-power edge device such as Raspberry Pi 4B with the high accuracy and the small latency. With the experiments with Librispeech corpus, the methods achieved word error rates of 3.76% and 9.25% respectively. Also the recognition speed is measured in two aspects; the real-time factor and the user perceived latency. The system is evaluated to have 0.84 xRT and the average latency of 0.75±0.62 seconds on Raspberry Pi 4B.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[J. Čmejla|AUTHOR J. Čmejla]], [[T. Kounovský|AUTHOR T. Kounovský]], [[J. Janský|AUTHOR J. Janský]], [[Jiri Malek|AUTHOR Jiri Malek]], [[M. Rozkovec|AUTHOR M. Rozkovec]], [[Z. Koldovský|AUTHOR Z. Koldovský]]
</p><p class="cpabstractcardaffiliationlist">Technical University of Liberec, Czechia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 969–970
</span></p></div>
<div class="cpabstractcardabstract"><p>We present a new device for speaker extraction and physical tracking and demonstrate its use in real conditions. The device is equipped with a dense planar array consisting of 64 microphones mounted on a rotating platform. State-of-the-art blind source extraction algorithms controlled by x-vector piloting are used to extract the desired speaker, which is being tracked by the rotating microphone array. The audience will experience the functionality of the device and the potential of the blind algorithms to extract the speaker from multi-source noisy recordings in a live situation.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Oubaïda Chouchane|AUTHOR Oubaïda Chouchane]], [[Baptiste Brossier|AUTHOR Baptiste Brossier]], [[Jorge Esteban Gamboa Gamboa|AUTHOR Jorge Esteban Gamboa Gamboa]], [[Thomas Lardy|AUTHOR Thomas Lardy]], [[Hemlata Tak|AUTHOR Hemlata Tak]], [[Orhan Ermis|AUTHOR Orhan Ermis]], [[Madhu R. Kamble|AUTHOR Madhu R. Kamble]], [[Jose Patino|AUTHOR Jose Patino]], [[Nicholas Evans|AUTHOR Nicholas Evans]], [[Melek Önen|AUTHOR Melek Önen]], [[Massimiliano Todisco|AUTHOR Massimiliano Todisco]]
</p><p class="cpabstractcardaffiliationlist">EURECOM, France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 856–860
</span></p></div>
<div class="cpabstractcardabstract"><p>In recent years the automatic speaker verification (ASV) community has grappled with vulnerabilities to spoofing attacks whereby fraudsters masquerade as enrolled subjects to provoke illegitimate accepts. Countermeasures have hence been developed to protect ASV systems from such attacks. Given that recordings of speech contain potentially sensitive information, any system operating upon them, including spoofing countermeasures, must have provisions for privacy preservation. While privacy enhancing technologies such as Homomorphic Encryption or Secure Multi-Party Computation (MPC) are effective in preserving privacy, these tend to impact upon computational capacity and computational precision, while no available spoofing countermeasures preserve privacy. This paper reports the first solution based upon the combination of shallow neural networks with secure MPC. Experiments performed using the ASVspoof 2019 logical access database show that the proposed solution is not only computationally efficient, but that it also improves upon the performance of the ASVspoof baseline countermeasure, all while preserving privacy.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ranya Aloufi|AUTHOR Ranya Aloufi]], [[Hamed Haddadi|AUTHOR Hamed Haddadi]], [[David Boyle|AUTHOR David Boyle]]
</p><p class="cpabstractcardaffiliationlist">Imperial College London, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 861–865
</span></p></div>
<div class="cpabstractcardabstract"><p>Voice assistive technologies have given rise to far-reaching privacy and security concerns. In this paper we investigate whether modular automatic speech recognition (ASR) can improve privacy in voice assistive systems by combining independently trained separation, recognition, and discretization modules to design configurable privacy-preserving ASR systems. We evaluate privacy concerns and the effects of applying various state-of-the-art techniques at each stage of the system, and report results using task-specific metrics (i.e., WER, ABX, and accuracy). We show that overlapping speech inputs to ASR systems present further privacy concerns, and how these may be mitigated using speech separation and optimization techniques. Our discretization module is shown to minimize paralinguistics privacy leakage from ASR acoustic models to levels commensurate with random guessing. We show that voice privacy can be //configurable//, and argue this presents new opportunities for privacy-preserving applications incorporating ASR.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Scott Novotney|AUTHOR Scott Novotney]], [[Yile Gu|AUTHOR Yile Gu]], [[Ivan Bulyko|AUTHOR Ivan Bulyko]]
</p><p class="cpabstractcardaffiliationlist">Amazon, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 866–870
</span></p></div>
<div class="cpabstractcardabstract"><p>To improve customer privacy, commercial speech applications are reducing human transcription of customer data. This has a negative impact on language model training due to a smaller amount of in-domain transcripts. Prior work demonstrated that training on automated transcripts alone provides modest gains due to reinforcement of recognition errors. We consider a new condition, where a model trained on historical human transcripts, but not the transcripts themselves, are available to us. To overcome temporal drift in vocabulary and topics, we propose a novel extension of knowledge distillation, //adjunct-emeritus distillation// where two imperfect teachers jointly train a student model. We conduct experiments on an English voice assistant domain and simulate a one year gap in human transcription. Unlike fine-tuning, our approach is architecture agnostic and achieves a 14% relative reduction in perplexity over the baseline approach of freezing model development and improves over the baseline of knowledge distillation.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jae Ro|AUTHOR Jae Ro]], [[Mingqing Chen|AUTHOR Mingqing Chen]], [[Rajiv Mathews|AUTHOR Rajiv Mathews]], [[Mehryar Mohri|AUTHOR Mehryar Mohri]], [[Ananda Theertha Suresh|AUTHOR Ananda Theertha Suresh]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 871–875
<a href="./IS2021/MEDIA/0153" class="externallinkbutton" target="_blank">{{$:/causal/ZIP Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>In distributed learning settings such as federated learning, the training algorithm can be potentially biased towards different clients. [1] proposed a domain-agnostic learning algorithm, where the model is optimized for any target distribution formed by a mixture of the client distributions in order to overcome this bias. They further proposed an algorithm for the cross-silo federated learning setting, where the number of clients is small. We consider this problem in the cross-device setting, where the number of clients is much larger. We propose a communication-efficient distributed algorithm called AGNOSTIC FEDERATED AVERAGING (or AGNOSTICFEDAVG) to minimize the domain-agnostic objective proposed in [1], which is amenable to other private mechanisms such as secure aggregation. We highlight two types of naturally occurring domains in federated learning and argue that AGNOSTICFEDAVG performs well on both. To demonstrate the practical effectiveness of AGNOSTICFEDAVG, we report positive results for large-scale language modeling tasks in both simulation and live experiments, where the latter involves training language models for Spanish virtual keyboard for millions of user devices.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Timm Koppelmann|AUTHOR Timm Koppelmann]], [[Alexandru Nelus|AUTHOR Alexandru Nelus]], [[Lea Schönherr|AUTHOR Lea Schönherr]], [[Dorothea Kolossa|AUTHOR Dorothea Kolossa]], [[Rainer Martin|AUTHOR Rainer Martin]]
</p><p class="cpabstractcardaffiliationlist">Ruhr-Universität Bochum, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 876–880
</span></p></div>
<div class="cpabstractcardabstract"><p>Wake word detection and verification systems often involve a local, on-device wake word detector and a cloud-based verification node. In such systems, the audio representation sent to the cloud-based server may exhibit sensitive information that might be intercepted by an eavesdropper. To improve privacy of cloud-based wake word verification (WWV) systems, we propose to use a privacy-preserving feature representation that minimizes the automatic speech recognition (ASR) capability of a potential attacker. The proposed approach employs an adversarial training schedule that aims to minimize an attacker’s word error rate (WER) while maintaining a high WWV performance. To this end, we apply an adaptive weighting factor in the combined loss function to control the balance between minimizing the WWV loss and maximizing the ASR loss. We show that the proposed training method significantly reduces possible privacy risks while maintaining a strong WWV performance.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Chao-Han Huck Yang|AUTHOR Chao-Han Huck Yang]], [[Sabato Marco Siniscalchi|AUTHOR Sabato Marco Siniscalchi]], [[Chin-Hui Lee|AUTHOR Chin-Hui Lee]]
</p><p class="cpabstractcardaffiliationlist">Georgia Tech, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 881–885
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose using an adversarial autoencoder (AAE) to replace generative adversarial network (GAN) in private aggregation of teacher ensembles (PATE), a solution for ensuring differential privacy in speech applications. The AAE architecture allows us to obtain good synthetic speech leveraging upon a discriminative training of latent vectors. Such synthetic speech is used to build a privacy-preserving classifier when non-sensitive data is not sufficiently available in the public domain. This classifier follows the PATE scheme that uses an ensemble of noisy outputs to label the synthetic samples and guarantee ε-differential privacy (DP) on its derived classifiers. Our proposed framework thus consists of an AAE-based generator and a PATE-based classifier (PATE-AAE). Evaluated on the Google Speech Commands Dataset Version II, the proposed PATE-AAE improves the average classification accuracy by +2.11% and +6.60%, respectively, when compared with alternative privacy-preserving solutions, namely PATE-GAN and DP-GAN, while maintaining a strong level of privacy target at ε=0.01 with a fixed δ=10^^-5^^.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Haoxin Ma|AUTHOR Haoxin Ma]]^^1^^
, [[Jiangyan Yi|AUTHOR Jiangyan Yi]]^^1^^
, [[Jianhua Tao|AUTHOR Jianhua Tao]]^^2^^
, [[Ye Bai|AUTHOR Ye Bai]]^^1^^
, [[Zhengkun Tian|AUTHOR Zhengkun Tian]]^^2^^
, [[Chenglong Wang|AUTHOR Chenglong Wang]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^CAS, China; ^^2^^CAS, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 886–890
</span></p></div>
<div class="cpabstractcardabstract"><p>Fake audio attack becomes a major threat to the speaker verification system. Although current detection approaches have achieved promising results on dataset-specific scenarios, they encounter difficulties on unseen spoofing data. Fine-tuning and retraining from scratch have been applied to incorporate new data. However, fine-tuning leads to performance degradation on previous data. Retraining takes a lot of time and computation resources. Besides, previous data are unavailable due to privacy in some situations. To solve the above problems, this paper proposes detecting fake without forgetting, a continual-learning-based method, to make the model learn new spoofing attacks incrementally. A knowledge distillation loss is introduced to loss function to preserve the memory of original model. Supposing the distribution of genuine voice is consistent among different scenarios, an extra embedding similarity loss is used as another constraint to further do a positive sample alignment. Experiments are conducted on the ASVspoof2019 dataset. The results show that our proposed method outperforms fine-tuning by the relative reduction of average equal error rate up to 81.62%.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Muhammad A. Shah|AUTHOR Muhammad A. Shah]], [[Joseph Szurley|AUTHOR Joseph Szurley]], [[Markus Mueller|AUTHOR Markus Mueller]], [[Athanasios Mouchtaris|AUTHOR Athanasios Mouchtaris]], [[Jasha Droppo|AUTHOR Jasha Droppo]]
</p><p class="cpabstractcardaffiliationlist">Amazon, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 891–895
</span></p></div>
<div class="cpabstractcardabstract"><p>Recent studies have shown that it may be possible to determine if a machine learning model was trained on a given data sample, using Membership Inference Attacks (MIA). In this paper we evaluate the vulnerability of state-of-the-art speech recognition models to MIA under black-box access. Using models trained with standard methods and public datasets, we demonstrate that without any knowledge of the target model’s parameters or training data a MIA can successfully infer membership with precision and recall more than 60%. Furthermore, for utterances from about 39% of the speakers the precision is more than 75%, indicating that training data membership can be inferred more precisely for some speakers than others. While strong regularization reduces the overall accuracy of MIA to almost 50%, the attacker can still infer membership for utterances from 25% of the speakers with high precision. These results indicate that (1) speaker-level MIA success should be reported, along with overall accuracy, to provide a holistic view of the model’s vulnerability and (2) conventional regularization is an inadequate defense against MIA.We believe that the insights gleaned from this study can direct future work towards more effective defenses.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Amin Fazel|AUTHOR Amin Fazel]]^^1^^
, [[Wei Yang|AUTHOR Wei Yang]]^^1^^
, [[Yulan Liu|AUTHOR Yulan Liu]]^^2^^
, [[Roberto Barra-Chicote|AUTHOR Roberto Barra-Chicote]]^^2^^
, [[Yixiong Meng|AUTHOR Yixiong Meng]]^^1^^
, [[Roland Maas|AUTHOR Roland Maas]]^^1^^
, [[Jasha Droppo|AUTHOR Jasha Droppo]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Amazon, USA; ^^2^^Amazon, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 896–900
</span></p></div>
<div class="cpabstractcardabstract"><p>End-to-end (E2E) automatic speech recognition (ASR) models have recently demonstrated superior performance over the traditional hybrid ASR models. Training an E2E ASR model requires a large amount of data which is not only expensive but may also raise dependency on production data. At the same time, synthetic speech generated by the state-of-the-art text-to-speech (TTS) engines has advanced to near-human naturalness. In this work, we propose to utilize synthetic speech for ASR training (SynthASR) in applications where data is sparse or hard to get for ASR model training. In addition, we apply continual learning with a novel multi-stage training strategy to address catastrophic forgetting, achieved by a mix of weighted multi-style training, data augmentation, encoder freezing, and parameter regularization. In our experiments conducted on in-house datasets for a new application of recognizing medication names, training ASR RNN-T models with synthetic audio via the proposed multi-stage training improved the recognition performance on new application by more than 65% relative, without degradation on existing general applications. Our observations show that SynthASR holds great promise in training the state-of-the-art large-scale E2E ASR models for new applications while reducing the costs and dependency on production data.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ananya Muguli|AUTHOR Ananya Muguli]]^^1^^
, [[Lancelot Pinto|AUTHOR Lancelot Pinto]]^^2^^
, [[Nirmala R.|AUTHOR Nirmala R.]]^^1^^
, [[Neeraj Sharma|AUTHOR Neeraj Sharma]]^^1^^
, [[Prashant Krishnan|AUTHOR Prashant Krishnan]]^^1^^
, [[Prasanta Kumar Ghosh|AUTHOR Prasanta Kumar Ghosh]]^^1^^
, [[Rohit Kumar|AUTHOR Rohit Kumar]]^^1^^
, [[Shrirama Bhat|AUTHOR Shrirama Bhat]]^^3^^
, [[Srikanth Raj Chetupalli|AUTHOR Srikanth Raj Chetupalli]]^^1^^
, [[Sriram Ganapathy|AUTHOR Sriram Ganapathy]]^^1^^
, [[Shreyas Ramoji|AUTHOR Shreyas Ramoji]]^^1^^
, [[Viral Nanda|AUTHOR Viral Nanda]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Indian Institute of Science, India; ^^2^^P.D. Hinduja Hospital, India; ^^3^^KMC Hospital, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 901–905
</span></p></div>
<div class="cpabstractcardabstract"><p>The DiCOVA challenge aims at accelerating research in diagnosing COVID-19 using acoustics (DiCOVA), a topic at the intersection of speech and audio processing, respiratory health diagnosis, and machine learning. This challenge is an open call for researchers to analyze a dataset of sound recordings, collected from COVID-19 infected and non-COVID-19 individuals, for a two-class classification. These recordings were collected via crowdsourcing from multiple countries, through a website application. The challenge features two tracks, one focusing on cough sounds, and the other on using a collection of breath, sustained vowel phonation, and number counting speech recordings. In this paper, we introduce the challenge and provide a detailed description of the task, and present a baseline system for the task.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Swapnil Bhosale|AUTHOR Swapnil Bhosale]], [[Upasana Tiwari|AUTHOR Upasana Tiwari]], [[Rupayan Chakraborty|AUTHOR Rupayan Chakraborty]], [[Sunil Kumar Kopparapu|AUTHOR Sunil Kumar Kopparapu]]
</p><p class="cpabstractcardaffiliationlist">TCS, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 946–950
</span></p></div>
<div class="cpabstractcardabstract"><p>Cough sounds as a descriptor have been used for detecting various respiratory ailments based on its intensity, duration of intermediate phase between two cough sounds, repetitions, dryness etc. However, COVID-19 diagnosis using only cough sounds is challenging because of cough being a common symptom among many non COVID-19 health diseases and inherent data imbalance within the available datasets. As one of the approach in this direction, we explore the robustness of multi-domain representation by performing the early fusion over a wide set of temporal, spectral and tempo-spectral handcrafted features, followed by training a Support Vector Machine (SVM) classifier. In our second approach, using a contrastive loss function we learn a latent space from Mel Filter Cepstral Coefficients (MFCCs) where representations belonging to samples having similar cough characteristics are closer. This helps learn representations for the highly varied COVID-negative class (healthy and symptomatic COVID-negative), by learning multiple smaller clusters. Using only the DiCOVA data, multi-domain features yields an absolute improvement of 0.74% and 1.07%, whereas our second approach shows an improvement of 2.09% and 3.98%, over the blind test and validation set, respectively, when compared with challenge baseline.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Flavio Avila|AUTHOR Flavio Avila]]^^1^^
, [[Amir H. Poorjam|AUTHOR Amir H. Poorjam]]^^1^^
, [[Deepak Mittal|AUTHOR Deepak Mittal]]^^1^^
, [[Charles Dognin|AUTHOR Charles Dognin]]^^1^^
, [[Ananya Muguli|AUTHOR Ananya Muguli]]^^2^^
, [[Rohit Kumar|AUTHOR Rohit Kumar]]^^2^^
, [[Srikanth Raj Chetupalli|AUTHOR Srikanth Raj Chetupalli]]^^2^^
, [[Sriram Ganapathy|AUTHOR Sriram Ganapathy]]^^2^^
, [[Maneesh Singh|AUTHOR Maneesh Singh]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Verisk Analytics, USA; ^^2^^Indian Institute of Science, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 951–955
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we propose an approach to automatically classify COVID-19 and non-COVID-19 cough samples based on the combination of both feature engineering and deep learning models. In the feature engineering approach, we develop a support vector machine classifier over high dimensional (6373D) space of acoustic features. In the deep learning-based approach, on the other hand, we apply a convolutional neural network trained on the log-mel spectrograms. These two methodologically diverse models are then combined by fusing the probability scores of the models. The proposed system, which ranked 9^^th^^ on the 2021 Diagnosing COVID-19 using Acoustics (DiCOVA) challenge leaderboard, obtained an area under the receiver operating characteristic curve (AUC) of 0.81 on the blind test data set, which is a 10.9% absolute improvement compared to the baseline. Moreover, we analyze the explainability of the deep learning-based model when detecting COVID-19 from cough signals.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Madhu R. Kamble|AUTHOR Madhu R. Kamble]]^^1^^
, [[Jose A. Gonzalez-Lopez|AUTHOR Jose A. Gonzalez-Lopez]]^^2^^
, [[Teresa Grau|AUTHOR Teresa Grau]]^^3^^
, [[Juan M. Espin|AUTHOR Juan M. Espin]]^^3^^
, [[Lorenzo Cascioli|AUTHOR Lorenzo Cascioli]]^^1^^
, [[Yiqing Huang|AUTHOR Yiqing Huang]]^^1^^
, [[Alejandro Gomez-Alanis|AUTHOR Alejandro Gomez-Alanis]]^^2^^
, [[Jose Patino|AUTHOR Jose Patino]]^^1^^
, [[Roberto Font|AUTHOR Roberto Font]]^^3^^
, [[Antonio M. Peinado|AUTHOR Antonio M. Peinado]]^^2^^
, [[Angel M. Gomez|AUTHOR Angel M. Gomez]]^^2^^
, [[Nicholas Evans|AUTHOR Nicholas Evans]]^^1^^
, [[Maria A. Zuluaga|AUTHOR Maria A. Zuluaga]]^^1^^
, [[Massimiliano Todisco|AUTHOR Massimiliano Todisco]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^EURECOM, France; ^^2^^Universidad de Granada, Spain; ^^3^^Biometric Vox, Spain</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 906–910
</span></p></div>
<div class="cpabstractcardabstract"><p>The COVID-19 pandemic has led to the saturation of public health services worldwide. In this scenario, the early diagnosis of SARS-Cov-2 infections can help to stop or slow the spread of the virus and to manage the demand upon health services. This is especially important when resources are also being stretched by heightened demand linked to other seasonal diseases, such as the flu. In this context, the organisers of the DiCOVA 2021 challenge have collected a database with the aim of diagnosing COVID-19 through the use of coughing audio samples. This work presents the details of the automatic system for COVID-19 detection from cough recordings presented by team PANACEA. This team consists of researchers from two European academic institutions and one company: EURECOM (France), University of Granada (Spain), and Biometric Vox S.L. (Spain). We developed several systems based on established signal processing and machine learning methods. Our best system employs a Teager energy operator cepstral coefficients (TECCs) based front-end and Light gradient boosting machine (LightGBM) back-end. The AUC obtained by this system on the test set is 76.31% which corresponds to a 10% improvement over the official baseline.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Vincent Karas|AUTHOR Vincent Karas]], [[Björn W. Schuller|AUTHOR Björn W. Schuller]]
</p><p class="cpabstractcardaffiliationlist">Universität Augsburg, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 911–915
</span></p></div>
<div class="cpabstractcardabstract"><p>As the Covid-19 pandemic continues, digital health solutions can provide valuable insights and assist in diagnosis and prevention. Since the disease affects the respiratory system, it is hypothesised that sound formation is changed, and thus, an infection can be automatically recognised through audio analysis. We present an ensemble learning approach used in our entry to Track 1 of the DiCOVA 2021 Challenge, which aims at binary classification of Covid-19 infection on a crowd-sourced dataset of 1 040 cough sounds. Our system is based on a combination of handcrafted features for paralinguistics with deep feature extraction from spectrograms using pre-trained CNNs. We extract features both at segment level and with a sliding window approach, and process them with SVMs and LSTMs, respectively. We then perform least-squares weighted late fusion of our classifiers. Our system surpasses the challenge baseline, with a ROC-AUC on the test set of 78.18%.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Isabella Södergren|AUTHOR Isabella Södergren]], [[Maryam Pahlavan Nodeh|AUTHOR Maryam Pahlavan Nodeh]], [[Prakash Chandra Chhipa|AUTHOR Prakash Chandra Chhipa]], [[Konstantina Nikolaidou|AUTHOR Konstantina Nikolaidou]], [[György Kovács|AUTHOR György Kovács]]
</p><p class="cpabstractcardaffiliationlist">Luleå University of Technology, Sweden</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 916–920
</span></p></div>
<div class="cpabstractcardabstract"><p>The detection of COVID-19 is and will remain in the foreseeable future a crucial challenge, making the development of tools for the task important. One possible approach, on the confines of speech and audio processing, is detecting potential COVID-19 cases based on cough sounds. We propose a simple, yet robust method based on the well-known ComParE 2016 feature set, and two classical machine learning models, namely Random Forests, and Support Vector Machines (SVMs). Furthermore, we combine the two methods, by calculating the weighted average of their predictions. Our results in the DiCOVA challenge show that this simple approach leads to a robust solution while producing competitive results. Based on the Area Under the Receiver Operating Characteristic Curve (AUC ROC) score, both classical machine learning methods we applied markedly outperform the baseline provided by the challenge organisers. Moreover, their combination attains an AUC ROC score of 85.21, positioning us at fourth place on the leaderboard (where the second team attained a similar, 85.43 score). Here, we would describe this system in more detail, and analyse the resulting models, drawing conclusions, and determining future work directions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Rohan Kumar Das|AUTHOR Rohan Kumar Das]], [[Maulik Madhavi|AUTHOR Maulik Madhavi]], [[Haizhou Li|AUTHOR Haizhou Li]]
</p><p class="cpabstractcardaffiliationlist">NUS, Singapore</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 921–925
</span></p></div>
<div class="cpabstractcardabstract"><p>COVID-19 can be pre-screened based on symptoms and confirmed using other laboratory tests. The cough or speech from patients are also studied in the recent time for detection of COVID-19 as they are indicators of change in anatomy and physiology of the respiratory system. Along this direction, the diagnosis of COVID-19 using acoustics (DiCOVA) challenge aims to promote such research by releasing publicly available cough/speech corpus. We participated in the Track-1 of the challenge, which deals with COVID-19 detection using cough sounds from individuals. In this challenge, we use a few novel auditory acoustic cues based on long-term transform, equivalent rectangular bandwidth spectrum and gammatone filterbank. We evaluate these representations using logistic regression, random forest and multilayer perceptron classifiers for detection of COVID-19. On the blind test set, we obtain an area under the ROC curve (AUC) of 83.49% for the best system submitted to the challenge. It is worth noting that the submitted system ranked among the top few systems on the leaderboard and outperformed the challenge baseline by a large margin.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[John Harvill|AUTHOR John Harvill]]^^1^^
, [[Yash R. Wani|AUTHOR Yash R. Wani]]^^2^^
, [[Mark Hasegawa-Johnson|AUTHOR Mark Hasegawa-Johnson]]^^1^^
, [[Narendra Ahuja|AUTHOR Narendra Ahuja]]^^1^^
, [[David Beiser|AUTHOR David Beiser]]^^2^^
, [[David Chestek|AUTHOR David Chestek]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Illinois at Urbana-Champaign, USA; ^^2^^University of Chicago, USA; ^^3^^University of Illinois at Chicago, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 926–930
</span></p></div>
<div class="cpabstractcardabstract"><p>Serum and saliva-based testing methods have been crucial to slowing the COVID-19 pandemic, yet have been limited by slow throughput and cost. A system able to determine COVID-19 status from cough sounds alone would provide a low cost, rapid, and remote alternative to current testing methods. We explore the applicability of recent techniques such as pre-training and spectral augmentation in improving the performance of a neural cough classification system. We use Autoregressive Predictive Coding (APC) to pre-train a unidirectional LSTM on the COUGHVID dataset. We then generate our final model by fine-tuning added BLSTM layers on the DiCOVA challenge dataset. We perform various ablation studies to see how each component impacts performance and improves generalization with a small dataset. Our final system achieves an AUC of 85.35 and places third out of 29 entries in the DiCOVA challenge.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Gauri Deshpande|AUTHOR Gauri Deshpande]]^^1^^
, [[Björn W. Schuller|AUTHOR Björn W. Schuller]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^TCS, India; ^^2^^Universität Augsburg, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 931–935
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents the automatic recognition of COVID-19 from coughing. In particular, it describes our contribution to the DiCOVA challenge — Track 1, which addresses such cough sound analysis for COVID-19 detection. Pathologically, the effects of a COVID-19 infection on the respiratory system and on breathing patterns are known. We demonstrate the use of breathing patterns of the cough audio signal in identifying the COVID-19 status. Breathing patterns of the cough audio signal are derived using a model trained with the subset of the UCL Speech Breath Monitoring (UCL-SBM) database. This database provides speech recordings of the participants while their breathing values are captured by a respiratory belt. We use an encoder-decoder architecture. The encoder encodes the audio signal into breathing patterns and the decoder decodes the COVID-19 status for the corresponding breathing patterns using an attention mechanism. The encoder uses a pre-trained model which predicts breathing patterns from the speech signal, and transfers the learned patterns to cough audio signals.
With this architecture, we achieve an AUC of 64.42% on the evaluation set of Track 1.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kotra Venkata Sai Ritwik|AUTHOR Kotra Venkata Sai Ritwik]], [[Shareef Babu Kalluri|AUTHOR Shareef Babu Kalluri]], [[Deepu Vijayasenan|AUTHOR Deepu Vijayasenan]]
</p><p class="cpabstractcardaffiliationlist">NITK Surathkal, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 936–940
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper we investigate the cues of COVID-19 on sustained phonation of Vowel-/i/, deep breathing and number counting data of the DiCOVA dataset. We use an ensemble of classifiers trained on different features, namely, super-vectors, formants, harmonics and MFCC features. We fit a two-class Weighted SVM classifier to separate the COVID-19 audio from Non-COVID-19 audio. Weighted penalties help mitigate the challenge of class imbalance in the dataset. The results are reported on the stationary (breathing, Vowel-/i/) and non-stationary (counting data) data using individual and combination of features on each type of utterance. We find that the Formant information plays a crucial role in classification. The proposed system resulted in an AUC score of 0.734 for cross validation, and 0.717 for evaluation dataset.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Adria Mallol-Ragolta|AUTHOR Adria Mallol-Ragolta]]^^1^^
, [[Helena Cuesta|AUTHOR Helena Cuesta]]^^2^^
, [[Emilia Gómez|AUTHOR Emilia Gómez]]^^2^^
, [[Björn W. Schuller|AUTHOR Björn W. Schuller]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universität Augsburg, Germany; ^^2^^Universitat Pompeu Fabra, Spain</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 941–945
</span></p></div>
<div class="cpabstractcardabstract"><p>The aim of this contribution is to automatically detect COVID-19 patients by analysing the acoustic information embedded in coughs. COVID-19 affects the respiratory system, and, consequently, respiratory-related signals have the potential to contain salient information for the task at hand. We focus on analysing the spectrogram representations of cough samples with the aim to investigate whether COVID-19 alters the frequency content of these signals. Furthermore, this work also assesses the impact of gender in the automatic detection of COVID-19. To extract deep-learnt representations of the spectrograms, we compare the performance of a cough-specific, and a Resnet18 pre-trained Convolutional Neural Network (CNN). Additionally, our approach explores the use of contextual attention, so the model can learn to highlight the most relevant deep-learnt features extracted by the CNN. We conduct our experiments on the dataset released for the Cough Sound Track of the DICOVA 2021 Challenge. The best performance on the test set is obtained using the Resnet18 pre-trained CNN with contextual attention, which scored an Area Under the Curve (AUC) of 70.91% at 80% sensitivity.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Gwantae Kim|AUTHOR Gwantae Kim]]^^1^^
, [[David K. Han|AUTHOR David K. Han]]^^2^^
, [[Hanseok Ko|AUTHOR Hanseok Ko]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Korea University, Korea; ^^2^^Drexel University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 546–550
</span></p></div>
<div class="cpabstractcardabstract"><p>A mixed sample data augmentation strategy is proposed to enhance the performance of models on audio scene classification, sound event classification, and speech enhancement tasks. While there have been several augmentation methods shown to be effective in improving image classification performance, their efficacy toward time-frequency domain features of audio is not assured. We propose a novel audio data augmentation approach named “Specmix” specifically designed for dealing with time-frequency domain features. The augmentation method consists of mixing two different data samples by applying time-frequency masks effective in preserving the spectral correlation of each audio sample. Our experiments on acoustic scene classification, sound event classification, and speech enhancement tasks show that the proposed Specmix improves the performance of various neural network architectures by a maximum of 2.7%.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Chiori Hori|AUTHOR Chiori Hori]], [[Takaaki Hori|AUTHOR Takaaki Hori]], [[Jonathan Le Roux|AUTHOR Jonathan Le Roux]]
</p><p class="cpabstractcardaffiliationlist">MERL, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 586–590
</span></p></div>
<div class="cpabstractcardabstract"><p>Video captioning is an essential technology to understand scenes and describe events in natural language. To apply it to real-time monitoring, a system needs not only to describe events accurately but also to produce the captions as soon as possible. Low-latency captioning is needed to realize such functionality, but this research area for online video captioning has not been pursued yet. This paper proposes a novel approach to optimize each caption’s output timing based on a trade-off between latency and caption quality. An audio-visual Transformer is trained to generate ground-truth captions using only a small portion of all video frames, and to mimic outputs of a pre-trained Transformer to which all the frames are given. A CNN-based timing detector is also trained to detect a proper output timing, where the captions generated by the two Transformers become sufficiently close to each other. With the jointly trained Transformer and timing detector, a caption can be generated in the early stages of an event-triggered video clip, as soon as an event happens or when it can be forecasted. Experiments with the ActivityNet Captions dataset show that our approach achieves 94% of the caption quality of the upper bound given by the pre-trained Transformer using the entire video clips, using only 28% of frames from the beginning.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shijing Si|AUTHOR Shijing Si]]^^1^^
, [[Jianzong Wang|AUTHOR Jianzong Wang]]^^1^^
, [[Huiming Sun|AUTHOR Huiming Sun]]^^1^^
, [[Jianhan Wu|AUTHOR Jianhan Wu]]^^2^^
, [[Chuanyao Zhang|AUTHOR Chuanyao Zhang]]^^2^^
, [[Xiaoyang Qu|AUTHOR Xiaoyang Qu]]^^1^^
, [[Ning Cheng|AUTHOR Ning Cheng]]^^1^^
, [[Lei Chen|AUTHOR Lei Chen]]^^3^^
, [[Jing Xiao|AUTHOR Jing Xiao]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Ping An Technology, China; ^^2^^USTC, China; ^^3^^HKUST, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 591–595
</span></p></div>
<div class="cpabstractcardabstract"><p>Large-scale deep neural networks (DNNs) such as convolutional neural networks (CNNs) have achieved impressive performance in audio classification for their powerful capacity and strong generalization ability. However, when training a DNN model on low-resource tasks, it is usually prone to overfitting the small data and learning too much redundant information. To address this issue, we propose to use variational information bottleneck (VIB) to mitigate overfitting and suppress irrelevant information. In this work, we conduct experiments on a 4-layer CNN. However, the VIB framework is ready-to-use and could be easily utilized with many other state-of-the-art network architectures. Evaluation on a few audio datasets shows that our approach significantly outperforms baseline methods, yielding ≥ 5.0% improvement in terms of classification accuracy in some low-source settings.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Soham Deshmukh|AUTHOR Soham Deshmukh]]^^1^^
, [[Bhiksha Raj|AUTHOR Bhiksha Raj]]^^2^^
, [[Rita Singh|AUTHOR Rita Singh]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Microsoft, USA; ^^2^^Carnegie Mellon University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 596–600
</span></p></div>
<div class="cpabstractcardabstract"><p>While multitask and transfer learning has shown to improve the performance of neural networks in limited data settings, they require pretraining of the model on large datasets beforehand. In this paper, we focus on improving the performance of weakly supervised sound event detection in low data and noisy settings simultaneously without requiring any pretraining task. To that extent, we propose a shared encoder architecture with sound event detection as a primary task and an additional secondary decoder for a self-supervised auxiliary task. We empirically evaluate the proposed framework for weakly supervised sound event detection on a remix dataset of the DCASE 2019 task 1 acoustic scene data with DCASE 2018 Task 2 sounds event data under 0, 10 and 20 dB SNR. To ensure we retain the localisation information of multiple sound events, we propose a two-step attention pooling mechanism that provides a time-frequency localisation of multiple audio events in the clip. The proposed framework with two-step attention outperforms existing benchmark models by 22.3%, 12.8%, 5.9% on 0, 10 and 20 dB SNR respectively. We carry out an ablation study to determine the contribution of the auxiliary task and two-step attention pooling to the SED performance improvement.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tatsuya Komatsu|AUTHOR Tatsuya Komatsu]]^^1^^
, [[Shinji Watanabe|AUTHOR Shinji Watanabe]]^^2^^
, [[Koichi Miyazaki|AUTHOR Koichi Miyazaki]]^^3^^
, [[Tomoki Hayashi|AUTHOR Tomoki Hayashi]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^LINE, Japan; ^^2^^Carnegie Mellon University, USA; ^^3^^Nagoya University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 601–605
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper proposes acoustic event detection (AED) with classifier chains, a new classifier based on the probabilistic chain rule. The proposed AED with classifier chains consists of a gated recurrent unit and performs iterative binary detection of each event one by one. In each iteration, the event’s activity is estimated and used to condition the next output based on the probabilistic chain rule to form classifier chains. Therefore, the proposed method can handle the interdependence among events upon classification, while the conventional AED methods with multiple binary classifiers with a linear layer and sigmoid function have placed an assumption of conditional independence. In the experiments with a real-recording dataset, the proposed method demonstrates its superior AED performance to a relative 14.80% improvement compared to a convolutional recurrent neural network baseline system with the multiple binary classifiers.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Helin Wang|AUTHOR Helin Wang]]^^1^^
, [[Yuexian Zou|AUTHOR Yuexian Zou]]^^1^^
, [[Wenwu Wang|AUTHOR Wenwu Wang]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Peking University, China; ^^2^^University of Surrey, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 551–555
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we present SpecAugment++, a novel data augmentation method for deep neural networks based acoustic scene classification (ASC). Different from other popular data augmentation methods such as SpecAugment and mixup that only work on the input space, SpecAugment++ is applied to both the input space and the hidden space of the deep neural networks to enhance the input and the intermediate feature representations. For an intermediate hidden state, the augmentation techniques consist of masking blocks of frequency channels and masking blocks of time frames, which improve generalization by enabling a model to attend not only to the most discriminative parts of the feature, but also the entire parts. Apart from using zeros for masking, we also examine two approaches for masking based on the use of other samples within the mini-batch, which helps introduce noises to the networks to make them more discriminative for classification. The experimental results on the DCASE 2018 Task1 dataset and DCASE 2019 Task1 dataset show that our proposed method can obtain 3.6% and 4.7% accuracy gains over a strong baseline without augmentation (i.e. CP-ResNet) respectively, and outperforms other previous data augmentation methods.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xu Zheng|AUTHOR Xu Zheng]]^^1^^
, [[Yan Song|AUTHOR Yan Song]]^^1^^
, [[Li-Rong Dai|AUTHOR Li-Rong Dai]]^^1^^
, [[Ian McLoughlin|AUTHOR Ian McLoughlin]]^^1^^
, [[Lin Liu|AUTHOR Lin Liu]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^USTC, China; ^^2^^iFLYTEK, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 556–560
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we present a novel mutual mean teaching based domain adaptation (MMT-DA) method for sound event detection (SED) task, which can effectively exploit synthetic data to improve the SED performance. Existing methods simply treat the synthetic data as strongly-labeled data in semi-supervised learning (SSL) framework. Benefiting from the strong labels of synthetic data, superior SED performance can be achieved. However, a distribution mismatch between synthetic and real data raises an evident challenge for domain adaptation (DA). In MMT-DA, convolutional recurrent neural networks (CRNN) learned from different datasets (i.e. //total data//:real+synthetic, and //real data//) are exploited for DA. Specifically, mean teacher method using CRNN is employed for utilizing the unlabeled real data. To compensate the domain diversity, an additional domain classifier with gradient reverse layer(GRL) is used for training a mean teacher for //total data//. The student CRNNs are mutually taught using the soft predictions of unlabeled data obtained from different teachers. Furthermore, a strip pooling based attention module is exploited to model the inter-dependencies between channels and time-frequency dimensions to exploit the structure information. Experimental results on Task4 of DCASE2020 demonstrate the ability of the proposed method, achieving 52.0% F1-score on the validation dataset, which outperforms the winning system’s 50.6%.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ritika Nandi|AUTHOR Ritika Nandi]], [[Shashank Shekhar|AUTHOR Shashank Shekhar]], [[Manjunath Mulimani|AUTHOR Manjunath Mulimani]]
</p><p class="cpabstractcardaffiliationlist">MAHE, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 561–565
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, a Kervolution-based SubSpectralNet model is proposed for Acoustic Scene Classification (ASC). SubSpectralNet is a competitive model which divides the mel spectrogram into horizontal slices termed as sub-spectrograms that are considered as input to the Convolutional Neural Network (CNN). In this work, the linear convolutional operation of SubSpectralNet is replaced with a non-linear operation using the kernel trick. This is also known as kervolution (kernel convolution)-based SubSpectralNet. The performance of the proposed methodology is evaluated on the DCASE (Detection and Classification of Acoustic Scenes and Events) 2018 development dataset. The proposed method achieves 73.52% and 75.76% accuracy with Polynomial and Gaussian Kernels respectively.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Harshavardhan Sundar|AUTHOR Harshavardhan Sundar]], [[Ming Sun|AUTHOR Ming Sun]], [[Chao Wang|AUTHOR Chao Wang]]
</p><p class="cpabstractcardaffiliationlist">Amazon, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 566–570
</span></p></div>
<div class="cpabstractcardabstract"><p>The concept of multi-headed self attention (MHSA) introduced as a critical building block of a Transformer Encoder/Decoder Module has made a significant impact in the areas of natural language processing (NLP), automatic speech recognition (ASR) and recently in the area of sound event detection (SED). The current state-of-the-art approaches to SED employ a shared attention mechanism achieved through a stack of MHSA blocks to detect multiple sound events. Consequently, in a multi-label SED task, a common attention mechanism would be responsible for generating relevant feature representations for each of the events to be detected. In this paper, we show through empirical evaluation that having more MHSA blocks dedicated specifically for individual events, rather than having a stack of shared MHSA blocks, improves the overall detection performance. Interestingly, this improvement in performance comes about because the event-specific attention blocks help in resolving confusions in the case of co-occurring events. The proposed “Event-specific Attention Network” (ESA-Net) can be trained in an end-to-end manner. On the DCASE 2020 Task 4 data set, we show that with ESA-Net, the best single model achieves an event-based F1 score of 52.1% on the public validation data set improving over the existing state of the art result.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yuan Gong|AUTHOR Yuan Gong]], [[Yu-An Chung|AUTHOR Yu-An Chung]], [[James Glass|AUTHOR James Glass]]
</p><p class="cpabstractcardaffiliationlist">MIT, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 571–575
</span></p></div>
<div class="cpabstractcardabstract"><p>In the past decade, convolutional neural networks (CNNs) have been widely adopted as the main building block for end-to-end audio classification models, which aim to learn a direct mapping from audio spectrograms to corresponding labels. To better capture long-range global context, a recent trend is to add a self-attention mechanism on top of the CNN, forming a CNN-attention hybrid model. However, it is unclear whether the reliance on a CNN is necessary, and if neural networks purely based on attention are sufficient to obtain good performance in audio classification. In this paper, we answer the question by introducing the //Audio Spectrogram Transformer// (AST), the first convolution-free, purely attention-based model for audio classification. We evaluate AST on various audio classification benchmarks, where it achieves new state-of-the-art results of 0.485 mAP on AudioSet, 95.6% accuracy on ESC-50, and 98.1% accuracy on Speech Commands V2.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Soonshin Seo|AUTHOR Soonshin Seo]], [[Donghyun Lee|AUTHOR Donghyun Lee]], [[Ji-Hwan Kim|AUTHOR Ji-Hwan Kim]]
</p><p class="cpabstractcardaffiliationlist">Sogang University, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 576–580
</span></p></div>
<div class="cpabstractcardabstract"><p>Convolutional neural networks (CNNs) exhibit good performance in low-complexity classification with fixed-length acoustic scenes. However, previous studies have not considered variable-length acoustic scenes in which performance degradation is prevalent. In this regard, we investigate two novel architectures — convolution-augmented transformer (Conformer) and differentiable neural computer (DNC). Both the models show desirable performance for variable-length data but require a large amount of data. In other words, small amounts of data, such as those from acoustic scenes, lead to overfitting in these models. In this paper, we propose a shallow convolution-augmented Transformer with a differentiable neural computer (shallow Conformer-DNC) for the low-complexity classification of variable-length acoustic scenes. The shallow Conformer-DNC is enabled to converge with small amounts of data. Short-term and long-term contexts of variable-length acoustic scenes are trained by using the shallow Conformer and shallow DNC, respectively. The experiments were conducted for variable-length conditions using the TAU Urban Acoustic Scenes 2020 Mobile dataset. As a result, a peak accuracy of 61.25% was confirmed for shallow Conformer-DNC with a model parameter of 34 K. It is comparable performance to state-of-the-art CNNs.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Helen L. Bear|AUTHOR Helen L. Bear]], [[Veronica Morfi|AUTHOR Veronica Morfi]], [[Emmanouil Benetos|AUTHOR Emmanouil Benetos]]
</p><p class="cpabstractcardaffiliationlist">Queen Mary University of London, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 581–585
</span></p></div>
<div class="cpabstractcardabstract"><p>Sound scene geotagging is a new topic of research which has evolved from acoustic scene classification. It is motivated by the idea of audio surveillance. Not content with only describing a scene in a recording, a machine which can locate where the recording was captured would be of use to many. In this paper we explore a series of common audio data augmentation methods to evaluate which best improves the accuracy of audio geotagging classifiers.
Our work improves on the state-of-the-art city geotagging method by 23% in terms of classification accuracy.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shu-Chuan Tseng|AUTHOR Shu-Chuan Tseng]]^^1^^
, [[Yi-Fen Liu|AUTHOR Yi-Fen Liu]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Academia Sinica, Taiwan; ^^2^^Feng Chia University, Taiwan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 606–610
</span></p></div>
<div class="cpabstractcardabstract"><p>Verbal communication in daily use is conducted in the form of continuous speech that theoretically is the ideal data format for assessing oral language ability in educational and clinical domains. But as phonetic reduction and particularly lexical tones in Chinese are greatly affected by discourse context, it is a challenging task for automatic systems to evaluate continuous speech only by acoustic features. This study analyzed repetitive and storytelling speech produced by selected Chinese-speaking hearing and hearing-impaired children with distinctively high and low speech intelligibility levels. Word-based reduction types are derived by phonological properties that characterize contraction degrees of automatically generated surface forms of disyllabic words. F0-based tonal contours are visualized using the centroid-nearest data points in the major clusters computed for tonal syllables. Our results show that primary speech characteristics across different groups of children can be differentiated by means of reduction type and tone production.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Beiming Cao|AUTHOR Beiming Cao]]^^1^^
, [[Nordine Sebkhi|AUTHOR Nordine Sebkhi]]^^2^^
, [[Arpan Bhavsar|AUTHOR Arpan Bhavsar]]^^2^^
, [[Omer T. Inan|AUTHOR Omer T. Inan]]^^2^^
, [[Robin Samlan|AUTHOR Robin Samlan]]^^3^^
, [[Ted Mau|AUTHOR Ted Mau]]^^4^^
, [[Jun Wang|AUTHOR Jun Wang]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Texas at Austin, USA; ^^2^^Georgia Tech, USA; ^^3^^University of Arizona, USA; ^^4^^UT Southwestern Medical Center, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 651–655
</span></p></div>
<div class="cpabstractcardabstract"><p>Silent speech interfaces (SSIs) are devices that convert non-audio bio-signals to speech, which hold the potential of recovering quality speech for laryngectomees (people who have undergone laryngectomy). Although significant progress has been made, most of the recent SSI works focused on data collected from healthy speakers. SSIs for laryngectomees have rarely been investigated. In this study, we investigated the reconstruction of speech for two laryngectomees who either use tracheoesophageal puncture (TEP) or electro-larynx (EL) speech as their post-surgery communication mode. We reconstructed their speech using two SSI designs (1) real-time recognition-and-synthesis and (2) directly articulation-to-speech synthesis (ATS). The reconstructed speech samples were measured in subjective evaluation by 20 listeners in terms of naturalness and intelligibility. The results indicated that both designs increased the naturalness of alaryngeal speech. The real-time recognition-and-synthesis design obtained higher intelligibility in electrolarynx speech as well, while the ATS did not. These preliminary results suggest the real-time recognition-and-synthesis design may have a better potential for clinical applications (for laryngectomees) than ATS.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Feng Wang|AUTHOR Feng Wang]]^^1^^
, [[Jing Chen|AUTHOR Jing Chen]]^^2^^
, [[Fei Chen|AUTHOR Fei Chen]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^SUSTech, China; ^^2^^Peking University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 611–615
</span></p></div>
<div class="cpabstractcardabstract"><p>For patients suffering with high-frequency hearing loss and preserving low-frequency hearing, combined electric-acoustic stimulation (EAS) may significantly improve their speech perception compared with cochlear implants (CIs). In combined EAS, a hearing aid provides low-frequency information via acoustic (A) stimulation and a CI evokes high-frequency sound sensation via electrical (E) stimulation. The present work investigated the EAS advantage when only a small number (i.e., 1 or 2) of channels were provided for electrical stimulation in a CI, and the effect of carrier bandwidth on understanding Mandarin sentences in a simulation of combined EAS experiment. The A-portion was extracted via low-pass filtering processing and the E-portion was generated with a vocoder model preserving multi-channel temporal envelope waveforms, whereas a noise-vocoder and a tone-vocoder were used to simulate the effect of carrier bandwidth. The synthesized stimuli were presented to normal-hearing listeners to recognize. Experimental results showed that while low-pass filtered Mandarin speech was not very intelligible, adding one or two E channels could significantly improve the intelligibility score to above 86.0%. Under the condition with one E channel, using a large carrier bandwidth in noise-vocoder processing provided a better intelligibility performance than using a narrow carrier bandwidth in tone-vocoder processing.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Manthan Sharma|AUTHOR Manthan Sharma]], [[Navaneetha Gaddam|AUTHOR Navaneetha Gaddam]], [[Tejas Umesh|AUTHOR Tejas Umesh]], [[Aditya Murthy|AUTHOR Aditya Murthy]], [[Prasanta Kumar Ghosh|AUTHOR Prasanta Kumar Ghosh]]
</p><p class="cpabstractcardaffiliationlist">Indian Institute of Science, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 616–620
</span></p></div>
<div class="cpabstractcardabstract"><p>Electromyography (EMG) signals have been extensively used to capture facial muscle movements while speaking since they are one of the most closely related bio-signals generated during speech production. In this work, we focus on speech acoustics to EMG prediction. We present a comparative study of ten different EMG signal-based features including Time Domain (TD) features existing in the literature to examine their effectiveness in speech acoustics to EMG inverse (AEI) mapping. We propose a novel feature based on the Hilbert envelope of the filtered EMG signal. The raw EMG signal is reconstructed from these features as well. For the AEI mapping, we use a bi-directional long short-term memory (BLSTM) network in a session-dependent manner. To estimate the raw EMG signal from the EMG features, we use a CNN-BLSTM model comprising of a convolution neural network (CNN) followed by BLSTM layers. AEI mapping performance using the BLSTM network reveals that the Hilbert envelope based feature is predicted from speech with the highest accuracy, among all the features. Therefore, it could be the most representative feature of the underlying muscle activation during speech production. The proposed Hilbert envelope feature, when used together with the existing TD features, improves the raw EMG signal reconstruction performance compared to using the TD features alone.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ajish K. Abraham|AUTHOR Ajish K. Abraham]], [[V. Sivaramakrishnan|AUTHOR V. Sivaramakrishnan]], [[N. Swapna|AUTHOR N. Swapna]], [[N. Manohar|AUTHOR N. Manohar]]
</p><p class="cpabstractcardaffiliationlist">AIISH, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 621–625
</span></p></div>
<div class="cpabstractcardabstract"><p>Correcting the deficits in jaw movements have often been ignored in assessment and treatment of speech disorders. A robotic simulation is being developed to facilitate Speech Language Pathologists to demonstrate the movement of jaw, tongue and teeth during production of speech sounds, as a part of a larger study. Profiling of jaw movement is an important aspect of articulatory simulation. The present study attempts to develop a simple and efficient technique for deriving the jaw parameters and using them to simulate jaw movements through inverse kinematics.
Three Kannada speaking male participants in the age range of 26 to 33 years were instructed to produce selected speech sounds. The image of the final position of the jaw during production of each speech sound was recorded through CT scan and video camera. Angle of ramus and angle of body of mandible were simulated through inverse kinematics using RoboAnalyzer software. The variables for inverse kinematics were derived through kinematic analysis. The Denavit-Hartenberg (D-H) parameters required for kinematic analysis were obtained from still image. Angles simulated were compared with the angles obtained from CT scan images. No significant difference was observed.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jianrong Wang|AUTHOR Jianrong Wang]]^^1^^
, [[Nan Gu|AUTHOR Nan Gu]]^^1^^
, [[Mei Yu|AUTHOR Mei Yu]]^^1^^
, [[Xuewei Li|AUTHOR Xuewei Li]]^^1^^
, [[Qiang Fang|AUTHOR Qiang Fang]]^^2^^
, [[Li Liu|AUTHOR Li Liu]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Tianjin University, China; ^^2^^CASS, China; ^^3^^CUHK, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 626–630
</span></p></div>
<div class="cpabstractcardabstract"><p>Cued Speech (CS) is a communication system for deaf people or hearing impaired people, in which a speaker uses it to aid a lipreader in phonetic level by clarifying potentially ambiguous mouth movements with hand shape and positions. Feature extraction of multi-modal CS is a key step in CS recognition. Recent supervised deep learning based methods suffer from noisy CS data annotations especially for hand shape modality. In this work, we first propose a self-supervised contrastive learning method to learn the feature representation of image without using labels. Secondly, a small amount of manually annotated CS data are used to fine-tune the first module. Thirdly, we present a module, which combines Bi-LSTM and self-attention networks to further learn sequential features with temporal and contextual information. Besides, to enlarge the volume and the diversity of the current limited CS datasets, we build a new British English dataset containing 5 native CS speakers. Evaluation results on both French and British English datasets show that our model achieves over 90% accuracy in hand shape recognition. Significant improvements of 8.75% (for French) and 10.09% (for British English) are achieved in CS phoneme recognition correctness compared with the state-of-the-art.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Judith Dineley|AUTHOR Judith Dineley]]^^1^^
, [[Grace Lavelle|AUTHOR Grace Lavelle]]^^2^^
, [[Daniel Leightley|AUTHOR Daniel Leightley]]^^2^^
, [[Faith Matcham|AUTHOR Faith Matcham]]^^2^^
, [[Sara Siddi|AUTHOR Sara Siddi]]^^3^^
, [[Maria Teresa Peñarrubia-María|AUTHOR Maria Teresa Peñarrubia-María]]^^4^^
, [[Katie M. White|AUTHOR Katie M. White]]^^2^^
, [[Alina Ivan|AUTHOR Alina Ivan]]^^2^^
, [[Carolin Oetzmann|AUTHOR Carolin Oetzmann]]^^2^^
, [[Sara Simblett|AUTHOR Sara Simblett]]^^2^^
, [[Erin Dawe-Lane|AUTHOR Erin Dawe-Lane]]^^2^^
, [[Stuart Bruce|AUTHOR Stuart Bruce]]^^2^^
, [[Daniel Stahl|AUTHOR Daniel Stahl]]^^2^^
, [[Yatharth Ranjan|AUTHOR Yatharth Ranjan]]^^2^^
, [[Zulqarnain Rashid|AUTHOR Zulqarnain Rashid]]^^2^^
, [[Pauline Conde|AUTHOR Pauline Conde]]^^2^^
, [[Amos A. Folarin|AUTHOR Amos A. Folarin]]^^2^^
, [[Josep Maria Haro|AUTHOR Josep Maria Haro]]^^3^^
, [[Til Wykes|AUTHOR Til Wykes]]^^2^^
, [[Richard J.B. Dobson|AUTHOR Richard J.B. Dobson]]^^2^^
, [[Vaibhav A. Narayan|AUTHOR Vaibhav A. Narayan]]^^5^^
, [[Matthew Hotopf|AUTHOR Matthew Hotopf]]^^2^^
, [[Björn W. Schuller|AUTHOR Björn W. Schuller]]^^1^^
, [[Nicholas Cummins|AUTHOR Nicholas Cummins]]^^1^^
, [[The RADAR-CNS Consortium|AUTHOR The RADAR-CNS Consortium]]
</p><p class="cpabstractcardaffiliationlist">^^1^^Universität Augsburg, Germany; ^^2^^King’s College London, UK; ^^3^^CIBERSAM, Spain; ^^4^^IDIAP Jordi Gol, Spain; ^^5^^Janssen, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 631–635
<a href="./IS2021/MEDIA/1240" class="externallinkbutton" target="_blank">{{$:/causal/ZIP Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>The ease of in-the-wild speech recording using smartphones has sparked considerable interest in the combined application of speech, remote measurement technology (RMT) and advanced analytics as a research and healthcare tool. For this to be realised, the acceptability of remote speech collection to the user must be established, in addition to feasibility from an analytical perspective. To understand the acceptance, facilitators, and barriers of smartphone-based speech recording, we invited 384 individuals with major depressive disorder (MDD) from the Remote Assessment of Disease and Relapse — Central Nervous System (RADAR-CNS) research programme in Spain and the UK to complete a survey on their experiences recording their speech. In this analysis, we demonstrate that study participants were more comfortable completing a scripted speech task than a free speech task. For both speech tasks, we found depression severity and country to be significant predictors of comfort. Not seeing smartphone notifications of the scheduled speech tasks, low mood and forgetfulness were the most commonly reported obstacles to providing speech recordings.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sarah R. Li|AUTHOR Sarah R. Li]], [[Colin T. Annand|AUTHOR Colin T. Annand]], [[Sarah Dugan|AUTHOR Sarah Dugan]], [[Sarah M. Schwab|AUTHOR Sarah M. Schwab]], [[Kathryn J. Eary|AUTHOR Kathryn J. Eary]], [[Michael Swearengen|AUTHOR Michael Swearengen]], [[Sarah Stack|AUTHOR Sarah Stack]], [[Suzanne Boyce|AUTHOR Suzanne Boyce]], [[Michael A. Riley|AUTHOR Michael A. Riley]], [[T. Douglas Mast|AUTHOR T. Douglas Mast]]
</p><p class="cpabstractcardaffiliationlist">University of Cincinnati, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 636–640
</span></p></div>
<div class="cpabstractcardabstract"><p>Characterizing accurate vs. misarticulated patterns of tongue movement using ultrasound can be challenging in real time because of the fast, independent movement of tongue regions. The usefulness of ultrasound for biofeedback speech therapy is limited because speakers must mentally track and compare differences between their tongue movement and available models. It is desirable to automate this interpretive task using a single parameter representing deviation from known accurate tongue movements. In this study, displacements recorded automatically by ultrasound image tracking were transformed into a single biofeedback parameter (time-dependent difference between blade and dorsum displacements). Receiver operating characteristic (ROC) curve analysis was used to evaluate this parameter as a predictor of production accuracy over a range of different vowel contexts with initial and final /r/ in American English. Areas under ROC curves were 0.8 or above, indicating that this simple parameter may provide useful real-time biofeedback on /r/ accuracy within a range of rhotic contexts.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Manuel Sam Ribeiro|AUTHOR Manuel Sam Ribeiro]]^^1^^
, [[Aciel Eshky|AUTHOR Aciel Eshky]]^^2^^
, [[Korin Richmond|AUTHOR Korin Richmond]]^^3^^
, [[Steve Renals|AUTHOR Steve Renals]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Amazon, Poland; ^^2^^Rasa Technologies, UK; ^^3^^University of Edinburgh, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 641–645
</span></p></div>
<div class="cpabstractcardabstract"><p>We investigate multi-speaker speech recognition from ultrasound images of the tongue and video images of the lips. We train our systems on imaging data from modal speech, and evaluate on matched test sets of two speaking modes: silent and modal speech. We observe that silent speech recognition from imaging data underperforms compared to modal speech recognition, likely due to a speaking-mode mismatch between training and testing. We improve silent speech recognition performance using techniques that address the domain mismatch, such as fMLLR and unsupervised model adaptation. We also analyse the properties of silent and modal speech in terms of utterance duration and the size of the articulatory space. To estimate the articulatory space, we compute the convex hull of tongue splines, extracted from ultrasound tongue images. Overall, we observe that the duration of silent speech is longer than that of modal speech, and that silent speech covers a smaller articulatory space than modal speech. Although these two properties are statistically significant across speaking modes, they do not directly correlate with word error rates from speech recognition.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[David Ferreira|AUTHOR David Ferreira]], [[Samuel Silva|AUTHOR Samuel Silva]], [[Francisco Curado|AUTHOR Francisco Curado]], [[António Teixeira|AUTHOR António Teixeira]]
</p><p class="cpabstractcardaffiliationlist">Universidade de Aveiro, Portugal</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 646–650
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech is our most natural and efficient way of communication and offers a strong potential to improve how we interact with machines. However, speech communication can sometimes be limited by environmental (e.g., ambient noise), contextual (e.g., need for privacy in a public place), or health conditions (e.g., laryngectomy), hindering the consideration of audible speech. In this regard, silent speech interfaces (SSI) have been proposed (e.g., considering video, electromyography), however, many technologies still face limitations regarding their everyday use, e.g., the need to place equipment in contact with the speaker (e.g., electrodes/ultrasound probe), and raise technical (e.g., lighting conditions for video) or privacy concerns. In this context, the consideration of technologies that can help tackle these issues, e.g, by being contactless and/or placed in the environment, can foster the widespread use of SSI. In this article, continuous-wave radar is explored to assess its potential for SSI. To this end, a corpus of 13 words was acquired, for 3 speakers, and different classifiers were tested on the resulting data. The best results, obtained using Bagging classifier, trained for each speaker, with 5-fold cross-validation, yielded an average accuracy of 0.826, an encouraging result that establishes promising grounds for further exploration of this technology for silent speech recognition.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hendrik Schröter|AUTHOR Hendrik Schröter]]^^1^^
, [[Tobias Rosenkranz|AUTHOR Tobias Rosenkranz]]^^2^^
, [[Alberto N. Escalante-B.|AUTHOR Alberto N. Escalante-B.]]^^2^^
, [[Andreas Maier|AUTHOR Andreas Maier]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^FAU Erlangen-Nürnberg, Germany; ^^2^^Sivantos, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 656–660
</span></p></div>
<div class="cpabstractcardabstract"><p>Fundamental frequency (f₀) estimation, also known as pitch tracking, has been a long-standing research topic in the speech and signal processing community. Many pitch estimation algorithms, however, fail in noisy conditions or introduce large delays due to their frame size or Viterbi decoding.
In this study, we propose a deep learning-based pitch estimation algorithm, LACOPE, which was trained in a joint pitch estimation and speech enhancement framework. In contrast to previous work, this algorithm allows for a configurable latency down to an algorithmic delay of 0. This is achieved by exploiting the smoothness properties of the pitch trajectory. That is, a recurrent neural network compensates for the delay introduced by the feature computation by predicting the pitch at a desired time point, allowing a trade-off between pitch accuracy and latency.
We integrate the pitch estimation into a speech enhancement framework for hearing aids. For this application, we allow a delay on the analysis side of approx. 5 ms. The pitch estimate is then used to construct a comb filter in the frequency domain as a post-processing step to remove intra-harmonic noise.
Our pitch estimation performance is on par with SOTA algorithms like PYIN or CREPE for spoken speech in all noise conditions while introducing minimal latency.</p></div>
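A small NumPy sketch of the comb-filter post-processing step described above: given a pitch estimate for one frame, build a frequency-domain gain that passes the harmonics and attenuates intra-harmonic bins. The raised-cosine gain shape, the floor value and the FFT settings are assumptions for illustration.

```
import numpy as np

def comb_gain(f0, fs=16000, n_fft=512, floor=0.2):
    freqs = np.fft.rfftfreq(n_fft, d=1.0 / fs)
    # raised-cosine comb: gain 1.0 at multiples of f0, 'floor' halfway between harmonics
    phase = 2 * np.pi * freqs / f0
    return floor + (1.0 - floor) * 0.5 * (1.0 + np.cos(phase))

frame = np.random.randn(512)                       # one noisy time-domain frame (placeholder)
spectrum = np.fft.rfft(frame)
enhanced = np.fft.irfft(spectrum * comb_gain(f0=120.0), n=512)
```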
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Weilong Huang|AUTHOR Weilong Huang]]^^1^^
, [[Jinwei Feng|AUTHOR Jinwei Feng]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Alibaba, China; ^^2^^Alibaba, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 701–705
</span></p></div>
<div class="cpabstractcardabstract"><p>Among different differential beamforming approaches, the minimum-norm one has received much attention as it maximizes the white noise gain(WNG). WNG measures the robustness of beamformer. But in practice, the conventional minimum-norm differential beamforming with omnidirectional elements still suffers in low white-noise-gain at the low frequencies. The major contributions of this paper are as follows: First, we extend the existing work by presenting a new solution with the use of the directional microphone elements, and show clearly the connection between the conventional beamforming and the proposed beamforming. Second, through the derivation as well as simulations, we show the proposed solution brings noticeable improvement in WNG at the low frequencies when the null positions of the directional elements coincide with the null-constraints of minimum norm solution.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mathieu Fontaine|AUTHOR Mathieu Fontaine]]^^1^^
, [[Kouhei Sekiguchi|AUTHOR Kouhei Sekiguchi]]^^1^^
, [[Aditya Arie Nugraha|AUTHOR Aditya Arie Nugraha]]^^1^^
, [[Yoshiaki Bando|AUTHOR Yoshiaki Bando]]^^2^^
, [[Kazuyoshi Yoshii|AUTHOR Kazuyoshi Yoshii]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^RIKEN, Japan; ^^2^^AIST, Japan; ^^3^^Kyoto University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 661–665
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper proposes α-stable autoregressive fast multichannel nonnegative matrix factorization (α-AR-FastMNMF), a robust joint blind speech enhancement and dereverberation method for improved automatic speech recognition in a realistic adverse environment. The state-of-the-art versatile blind source separation method called FastMNMF that assumes the short-time Fourier transform (STFT) coefficients of a direct sound to follow a circular complex Gaussian distribution with jointly-diagonalizable full-rank spatial covariance matrices was extended to AR-FastMNMF with an autoregressive reverberation model. Instead of the light-tailed Gaussian distribution, we use the heavy-tailed α-stable distribution, which also has the reproductive property useful for the additive source modeling, to better deal with the large dynamic range of the direct sound. The experimental results demonstrate that the proposed α-AR-FastMNMF works well as a front-end of an automatic speech recognition system. It outperforms α-AR-ILRMA, which is a special case of α-AR-FastMNMF, and their Gaussian counterparts, i.e., AR-FastMNMF and AR-ILRMA, in terms of the speech signal quality metrics and word error rate.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Siyuan Zhang|AUTHOR Siyuan Zhang]], [[Xiaofei Li|AUTHOR Xiaofei Li]]
</p><p class="cpabstractcardaffiliationlist">Westlake University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 666–670
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper addresses the problem of microphone array generalization for deep-learning-based end-to-end multichannel speech enhancement. We aim to train a unique deep neural network (DNN) potentially performing well on unseen microphone arrays. The microphone array geometry shapes the network’s parameters when training on a fixed microphone array, and thus restricts the generalization of the trained network to another microphone array. To resolve this problem, a single network is trained using data recorded by various microphone arrays of different geometries. We design three variants of our recently proposed narrowband network to cope with the agnostic number of microphones. Overall, the goal is to make the network learn the universal information for speech enhancement that is available for any array geometry, rather than learn the one-array-dedicated characteristics. The experiments on both simulated and real room impulse responses (RIR) demonstrate the excellent across-array generalization capability of the proposed networks, in the sense that their performance measures are very close to, or even exceed the network trained with test arrays. Moreover, they notably outperform various beamforming methods and other advanced deep-learning-based methods.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hyungchan Song|AUTHOR Hyungchan Song]], [[Jong Won Shin|AUTHOR Jong Won Shin]]
</p><p class="cpabstractcardaffiliationlist">GIST, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 671–675
</span></p></div>
<div class="cpabstractcardabstract"><p>One of the most widely used cues for sound source localization is the interchannel phase differences (IPDs) in the frequency domain. However, the spatial aliasing makes the utilization of the IPDs in the high frequencies difficult, especially when the distance between the microphones is high. Recently, the phase replication method which considers the direction-of-arrival (DoA) candidates corresponding to all the possible unwrapped phase differences in all frequency bins was proposed. However, high frequency bins with possible spatial aliasing contribute more when constructing initial DoA histograms compared with low frequency bins, which may not be desirable for source localization. In this paper, we propose to utilize the IPDs in all the frequency bins with equal weights regardless of maximum number of phase wrapping in that frequency for dual microphone sound source localization. We applied spectral masks based on local signal-to-noise ratios and coherences between microphone signals to exclude time-frequency bins without directional audio signal from the DoA histogram construction. Experimental results show that the proposed method results in more distinct peaks in the DoA histogram and outperforms the conventional method in various noisy and reverberant environments.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Pablo Pérez Zarazaga|AUTHOR Pablo Pérez Zarazaga]]^^1^^
, [[Mariem Bouafif Mansali|AUTHOR Mariem Bouafif Mansali]]^^2^^
, [[Tom Bäckström|AUTHOR Tom Bäckström]]^^1^^
, [[Zied Lachiri|AUTHOR Zied Lachiri]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Aalto University, Finland; ^^2^^Université de Tunis El Manar, Tunisia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 676–680
</span></p></div>
<div class="cpabstractcardabstract"><p>In scenarios such as remote work, open offices and call centers, multiple people may simultaneously have independent spoken interactions with their devices in the same room. The speech of competing speakers will however be picked up by all microphones, both reducing the quality of audio and exposing speakers to breaches in privacy. We propose a cooperative cross-talk cancellation solution breaking the single active speaker assumption employed by most telecommunication systems. The proposed method applies source separation on the microphone signals of independent devices, to extract the dominant speaker in each device. It is realized using a localization estimator based on a deep neural network, followed by a time-frequency mask to separate the target speech from the interfering one at each time-frequency unit referring to its orientation. By experimental evaluation, we confirm that the proposed method effectively reduces crosstalk and exceeds the baseline expectation maximization method by 10 dB in terms of interference rejection. This performance makes the proposed method a viable solution for cross-talk cancellation in near-field conditions, thus protecting the privacy of external speakers in the same acoustic space.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hao Zhang|AUTHOR Hao Zhang]], [[DeLiang Wang|AUTHOR DeLiang Wang]]
</p><p class="cpabstractcardaffiliationlist">Ohio State University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 681–685
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper addresses multi-channel active noise control (MCANC) on the basis of deep ANC, which performs active noise control by employing deep learning to encode the optimal control parameters corresponding to different noises and environments. The proposed method trains a convolutional recurrent network (CRN) to estimate the real and imaginary spectrograms of all the canceling signals simultaneously from the reference signals so that the corresponding anti-noises cancel or attenuate the primary noises in an MCANC system. We evaluate the proposed method under multiple MCANC setups and investigate the impact of the number of canceling loudspeakers and error microphones on the overall performance. Experimental results show that deep ANC is effective for MCANC in various scenarios. Moreover, the proposed method is robust against untrained noises and works well in the presence of loudspeaker nonlinearity.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Simone Graetzer|AUTHOR Simone Graetzer]]^^1^^
, [[Jon Barker|AUTHOR Jon Barker]]^^2^^
, [[Trevor J. Cox|AUTHOR Trevor J. Cox]]^^1^^
, [[Michael Akeroyd|AUTHOR Michael Akeroyd]]^^3^^
, [[John F. Culling|AUTHOR John F. Culling]]^^4^^
, [[Graham Naylor|AUTHOR Graham Naylor]]^^3^^
, [[Eszter Porter|AUTHOR Eszter Porter]]^^3^^
, [[Rhoddy Viveros Muñoz|AUTHOR Rhoddy Viveros Muñoz]]^^4^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Salford, UK; ^^2^^University of Sheffield, UK; ^^3^^University of Nottingham, UK; ^^4^^Cardiff University, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 686–690
</span></p></div>
<div class="cpabstractcardabstract"><p>In recent years, rapid advances in speech technology have been made possible by machine learning challenges such as CHiME, REVERB, Blizzard, and Hurricane. In the Clarity project, the machine learning approach is applied to the problem of hearing aid processing of speech-in-noise, where current technology in enhancing the speech signal for the hearing aid wearer is often ineffective. The scenario is a (simulated) cuboid-shaped living room in which there is a single listener, a single target speaker and a single interferer, which is either a competing talker or domestic noise. All sources are static, the target is always within ±30° azimuth of the listener and at the same elevation, and the interferer is an omnidirectional point source at the same elevation. The target speech comes from an open source 40-speaker British English speech database collected for this purpose. This paper provides a baseline description of the round one Clarity challenges for both enhancement (CEC1) and prediction (CPC1). To the authors’ knowledge, these are the first machine learning challenges to consider the problem of hearing aid speech signal processing.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zehai Tu|AUTHOR Zehai Tu]], [[Ning Ma|AUTHOR Ning Ma]], [[Jon Barker|AUTHOR Jon Barker]]
</p><p class="cpabstractcardaffiliationlist">University of Sheffield, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 691–695
</span></p></div>
<div class="cpabstractcardabstract"><p>Current hearing aids normally provide amplification based on a general prescriptive fitting, and the benefits provided by the hearing aids vary among different listening environments despite the inclusion of noise suppression feature. Motivated by this fact, this paper proposes a data-driven machine learning technique to develop hearing aid fittings that are customised to speech in different noisy environments. A differentiable hearing loss model is proposed and used to optimise fittings with back-propagation. The customisation is reflected on the data of speech in different noise with also the consideration of noise suppression. The objective evaluation shows the advantages of optimised custom fittings over general prescriptive fittings.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sunit Sivasankaran|AUTHOR Sunit Sivasankaran]]^^1^^
, [[Emmanuel Vincent|AUTHOR Emmanuel Vincent]]^^2^^
, [[Dominique Fohr|AUTHOR Dominique Fohr]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Microsoft, USA; ^^2^^Loria (UMR 7503), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 696–700
</span></p></div>
<div class="cpabstractcardabstract"><p>We consider the problem of explaining the robustness of neural networks used to compute time-frequency masks for speech enhancement to mismatched noise conditions. We employ the Deep SHapley Additive exPlanations (DeepSHAP) feature attribution method to quantify the contribution of every time-frequency bin in the input noisy speech signal to every time-frequency bin in the output time-frequency mask. We define an objective metric — referred to as the speech relevance score — that summarizes the obtained SHAP values and show that it correlates with the enhancement performance, as measured by the word error rate on the CHiME-4 real evaluation dataset. We use the speech relevance score to explain the generalization ability of three speech enhancement models trained using synthetically generated speech-shaped noise, noise from a professional sound effects library, or real CHiME-4 noise. To the best of our knowledge, this is the first study on neural network explainability in the context of speech enhancement.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Songjun Cao|AUTHOR Songjun Cao]], [[Yueteng Kang|AUTHOR Yueteng Kang]], [[Yanzhe Fu|AUTHOR Yanzhe Fu]], [[Xiaoshuo Xu|AUTHOR Xiaoshuo Xu]], [[Sining Sun|AUTHOR Sining Sun]], [[Yike Zhang|AUTHOR Yike Zhang]], [[Long Ma|AUTHOR Long Ma]]
</p><p class="cpabstractcardaffiliationlist">Tencent, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 706–710
</span></p></div>
<div class="cpabstractcardabstract"><p>Recently self-supervised learning has emerged as an effective approach to improve the performance of automatic speech recognition (ASR). Under such a framework, the neural network is usually pre-trained with massive unlabeled data and then fine-tuned with limited labeled data. However, the non-streaming architecture like bidirectional transformer is usually adopted by the neural network to achieve competitive results, which cannot be used in streaming scenarios. In this paper, we mainly focus on improving the performance of streaming transformer under the self-supervised learning framework. Specifically, we propose a novel two-stage training method during fine-tuning, which combines knowledge distilling and self-training. The proposed training method achieves 16.3% relative word error rate (WER) reduction on Librispeech noisy test set. Finally, by only using the 100h clean subset of Librispeech as the labeled data and the rest (860h) as the unlabeled data, our streaming transformer based model obtains competitive WERs 3.5/8.7 on Librispeech clean/noisy test sets.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yan Deng|AUTHOR Yan Deng]]^^1^^
, [[Rui Zhao|AUTHOR Rui Zhao]]^^2^^
, [[Zhong Meng|AUTHOR Zhong Meng]]^^2^^
, [[Xie Chen|AUTHOR Xie Chen]]^^2^^
, [[Bing Liu|AUTHOR Bing Liu]]^^1^^
, [[Jinyu Li|AUTHOR Jinyu Li]]^^2^^
, [[Yifan Gong|AUTHOR Yifan Gong]]^^2^^
, [[Lei He|AUTHOR Lei He]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Microsoft, China; ^^2^^Microsoft, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 751–755
</span></p></div>
<div class="cpabstractcardabstract"><p>Recurrent neural network transducer (RNN-T) has shown to be comparable with conventional hybrid model for speech recognition. However, there is still a challenge in out-of-domain scenarios with context or words different from training data. In this paper, we explore the semi-supervised training which optimizes RNN-T jointly with neural text-to-speech (TTS) to better generalize to new domains using domain-specific text data. We apply the method to two tasks: one with out-of-domain context and the other with significant out-of-vocabulary (OOV) words. The results show that the proposed method significantly improves the recognition accuracy in both tasks, resulting in 61.4% and 53.8% relative word error rate (WER) reductions respectively, from a well-trained RNN-T with 65 thousand hours of training data. We do further study on the semi-supervised training methodology: 1) which modules of RNN-T model to be updated; 2) the impact of using different neural TTS models; 3) the performance of using text with different relevancy to target domain. Finally, we compare several RNN-T customization methods, and conclude that semi-supervised training with neural TTS is comparable and complementary with Internal Language Model Estimation (ILME) or biasing.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Samik Sadhu|AUTHOR Samik Sadhu]]^^1^^
, [[Di He|AUTHOR Di He]]^^2^^
, [[Che-Wei Huang|AUTHOR Che-Wei Huang]]^^2^^
, [[Sri Harish Mallidi|AUTHOR Sri Harish Mallidi]]^^2^^
, [[Minhua Wu|AUTHOR Minhua Wu]]^^2^^
, [[Ariya Rastrow|AUTHOR Ariya Rastrow]]^^2^^
, [[Andreas Stolcke|AUTHOR Andreas Stolcke]]^^2^^
, [[Jasha Droppo|AUTHOR Jasha Droppo]]^^2^^
, [[Roland Maas|AUTHOR Roland Maas]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Johns Hopkins University, USA; ^^2^^Amazon, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 711–715
</span></p></div>
<div class="cpabstractcardabstract"><p>wav2vec-C introduces a novel representation learning technique combining elements from wav2vec 2.0 and VQ-VAE. Our model learns to reproduce quantized representations from partially masked speech encoding using a contrastive loss in a way similar to wav2vec 2.0. However, the quantization process is regularized by an additional consistency network that learns to reconstruct the input features to the wav2vec 2.0 network from the quantized representations in a way similar to a VQ-VAE model. The proposed self-supervised model is trained on 10k hours of unlabeled data and subsequently used as the speech encoder in a RNN-T ASR model and fine-tuned with 1k hours of labeled data. This work is one of the very few studies of self-supervised learning on speech tasks with a large volume of real far-field labeled data. The wav2vec-C encoded representations achieve, on average, twice the error reduction over baseline and a higher codebook utilization in comparison to wav2vec 2.0.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Electra Wallington|AUTHOR Electra Wallington]], [[Benji Kershenbaum|AUTHOR Benji Kershenbaum]], [[Ondřej Klejch|AUTHOR Ondřej Klejch]], [[Peter Bell|AUTHOR Peter Bell]]
</p><p class="cpabstractcardaffiliationlist">University of Edinburgh, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 716–720
</span></p></div>
<div class="cpabstractcardabstract"><p>The use of semi-supervised training (SST) has become an increasingly popular way of increasing the performance of ASR acoustic models without the need for further transcribed speech data. However, the performance of the technique can be very sensitive to the quality of the initial ASR system. This paper undertakes a comprehensive study of the improvements gained with respect to variation in the initial systems, the quantity of untranscribed data used, and the learning schedules. We postulate that the reason SST can be effective even when the initial model is poor is because it enables utterance-level information to be propagated to the frame level, and hence hypothesise that the quality of the language model plays a much larger role than the quality of the acoustic model. In experiments on Tagalog data from the IARPA MATERIAL programme, we find that indeed this is the case, and show that with an appropriately chosen recipe it is possible to achieve over 50% relative WER reductions from SST, even when the WER of the initial system is more than 80%.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Wei-Ning Hsu|AUTHOR Wei-Ning Hsu]]^^1^^
, [[Anuroop Sriram|AUTHOR Anuroop Sriram]]^^1^^
, [[Alexei Baevski|AUTHOR Alexei Baevski]]^^1^^
, [[Tatiana Likhomanenko|AUTHOR Tatiana Likhomanenko]]^^1^^
, [[Qiantong Xu|AUTHOR Qiantong Xu]]^^1^^
, [[Vineel Pratap|AUTHOR Vineel Pratap]]^^1^^
, [[Jacob Kahn|AUTHOR Jacob Kahn]]^^1^^
, [[Ann Lee|AUTHOR Ann Lee]]^^1^^
, [[Ronan Collobert|AUTHOR Ronan Collobert]]^^1^^
, [[Gabriel Synnaeve|AUTHOR Gabriel Synnaeve]]^^2^^
, [[Michael Auli|AUTHOR Michael Auli]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Facebook, USA; ^^2^^Facebook, France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 721–725
</span></p></div>
<div class="cpabstractcardabstract"><p>Self-supervised learning of speech representations has been a very active research area but most work is focused on a single domain such as read audio books for which there exist large quantities of labeled and unlabeled data. In this paper, we explore more general setups where the domain of the unlabeled data for pre-training data differs from the domain of the labeled data for fine-tuning, which in turn may differ from the test data domain. Our experiments show that using target domain data during pre-training leads to large performance improvements across a variety of setups. With no access to in-domain labeled data, pre-training on unlabeled in-domain data closes 66–73% of the performance gap between the ideal setting of in-domain labeled data and a competitive supervised out-of-domain model. This has obvious practical implications since it is much easier to obtain unlabeled target domain data than labeled data. Moreover, we find that pre-training on multiple domains improves generalization performance on domains not seen during training. We will release pre-trained models.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yosuke Higuchi|AUTHOR Yosuke Higuchi]], [[Niko Moritz|AUTHOR Niko Moritz]], [[Jonathan Le Roux|AUTHOR Jonathan Le Roux]], [[Takaaki Hori|AUTHOR Takaaki Hori]]
</p><p class="cpabstractcardaffiliationlist">MERL, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 726–730
</span></p></div>
<div class="cpabstractcardabstract"><p>Pseudo-labeling (PL) has been shown to be effective in semi-supervised automatic speech recognition (ASR), where a base model is self-trained with pseudo-labels generated from unlabeled data. While PL can be further improved by iteratively updating pseudo-labels as the model evolves, most of the previous approaches involve inefficient retraining of the model or intricate control of the label update. We present //momentum pseudo-labeling// (MPL), a simple yet effective strategy for semi-supervised ASR. MPL consists of a pair of //online// and //offline// models that interact and learn from each other, inspired by the mean teacher method. The online model is trained to predict pseudo-labels generated on the fly by the offline model. The offline model maintains a momentum-based moving average of the online model. MPL is performed in a single training process and the interaction between the two models effectively helps them reinforce each other to improve the ASR performance. We apply MPL to an end-to-end ASR model based on the connectionist temporal classification. The experimental results demonstrate that MPL effectively improves over the base model and is scalable to different semi-supervised scenarios with varying amounts of data or domain mismatch.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ananya Misra|AUTHOR Ananya Misra]], [[Dongseong Hwang|AUTHOR Dongseong Hwang]], [[Zhouyuan Huo|AUTHOR Zhouyuan Huo]], [[Shefali Garg|AUTHOR Shefali Garg]], [[Nikhil Siddhartha|AUTHOR Nikhil Siddhartha]], [[Arun Narayanan|AUTHOR Arun Narayanan]], [[Khe Chai Sim|AUTHOR Khe Chai Sim]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 731–735
</span></p></div>
<div class="cpabstractcardabstract"><p>In the absence of large-scale in-domain supervised training data, ASR models can achieve reasonable performance through pre-training on additional data that is unlabeled, mismatched or both. Given such data constraints, we compare pre-training end-to-end models on matched but unlabeled data (unsupervised) and on labeled but mismatched data (supervised), where the labeled data is mismatched in either domain or language. Across encoder architectures, pre-training methods and languages, our experiments indicate that both types of pre-training improve performance, with relative WER reductions of 15–30% in the domain mismatch case and up to 15% in the language mismatch condition. We further find that the advantage from unsupervised pre-training is most prominent when there is no matched and labeled fine-tuning data, provided that a sufficient amount of mismatched data is still available for supervised fine-tuning.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zhehuai Chen|AUTHOR Zhehuai Chen]]^^1^^
, [[Andrew Rosenberg|AUTHOR Andrew Rosenberg]]^^1^^
, [[Yu Zhang|AUTHOR Yu Zhang]]^^1^^
, [[Heiga Zen|AUTHOR Heiga Zen]]^^2^^
, [[Mohammadreza Ghodsi|AUTHOR Mohammadreza Ghodsi]]^^1^^
, [[Yinghui Huang|AUTHOR Yinghui Huang]]^^1^^
, [[Jesse Emond|AUTHOR Jesse Emond]]^^1^^
, [[Gary Wang|AUTHOR Gary Wang]]^^1^^
, [[Bhuvana Ramabhadran|AUTHOR Bhuvana Ramabhadran]]^^1^^
, [[Pedro J. Moreno|AUTHOR Pedro J. Moreno]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Google, USA; ^^2^^Google, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 736–740
</span></p></div>
<div class="cpabstractcardabstract"><p>Semi and self-supervised training techniques have the potential to improve performance of speech recognition systems without additional transcribed speech data. In this work, we demonstrate the efficacy of two approaches to semi-supervision for automated speech recognition. The two approaches leverage vast amounts of available unspoken text and untranscribed audio. First, we present //factorized multilingual speech synthesis// to improve data augmentation on unspoken text. Next, we propose the //Sequential MixMatch// algorithm with //iterative learning// to learn from untranscribed speech. The algorithm is built on top of our online implementation of Noisy Student Training. We demonstrate the compatibility of these techniques yielding an overall relative reduction of word error rate of up to 14.4% on the voice search tasks on 4 Indic languages.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tatiana Likhomanenko|AUTHOR Tatiana Likhomanenko]]^^1^^
, [[Qiantong Xu|AUTHOR Qiantong Xu]]^^1^^
, [[Jacob Kahn|AUTHOR Jacob Kahn]]^^1^^
, [[Gabriel Synnaeve|AUTHOR Gabriel Synnaeve]]^^2^^
, [[Ronan Collobert|AUTHOR Ronan Collobert]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Facebook, USA; ^^2^^Facebook, France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 741–745
</span></p></div>
<div class="cpabstractcardabstract"><p>Recent results in end-to-end automatic speech recognition have demonstrated the efficacy of pseudo-labeling for semi-supervised models trained both with Connectionist Temporal Classification (CTC) and Sequence-to-Sequence (seq2seq) losses. Iterative Pseudo-Labeling (IPL), which continuously trains a single model using pseudo-labels iteratively re-generated as the model learns, has been shown to further improve performance in ASR. We improve upon the IPL algorithm: as the model learns, we propose to iteratively re-generate transcriptions with hard labels (the most probable tokens), that is, //without// a language model. We call this approach Language-Model-Free IPL (slimIPL) and give a resultant training setup for low-resource settings with CTC-based models. slimIPL features a dynamic cache for pseudo-labels which reduces sensitivity to changes in relabeling hyperparameters and results in improved training stability. slimIPL is also highly-efficient and requires 3.5–4× fewer computational resources to converge than other state-of-the-art semi/self-supervised approaches. With only 10 hours of labeled audio, slimIPL is competitive with self-supervised approaches, and is state-of-the-art with 100 hours of labeled audio without the use of a language model both at test time and during pseudo-label generation.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xianghu Yue|AUTHOR Xianghu Yue]], [[Haizhou Li|AUTHOR Haizhou Li]]
</p><p class="cpabstractcardaffiliationlist">NUS, Singapore</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 746–750
</span></p></div>
<div class="cpabstractcardabstract"><p>Self-supervised representation learning has seen remarkable success in encoding high-level semantic information from unlabelled speech data. The studies have been focused on exploring new pretext tasks to improve the learned speech representation and various masking schemes with reference to speech frames. We consider effective latent speech representation should be phonetically informed. In this work, we propose a novel phonetically motivated masking scheme. Specifically, we select the masked speech frames according to the phonetic segmentation in an utterance. The phonetically motivated self-supervised representation learns the speech representation that benefits downstream speech processing tasks. We evaluate the proposed learning algorithm on phoneme classification, speech recognition, and speaker recognition, and show that it consistently outperforms competitive baselines.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Scott Seyfarth|AUTHOR Scott Seyfarth]], [[Sundararajan Srinivasan|AUTHOR Sundararajan Srinivasan]], [[Katrin Kirchhoff|AUTHOR Katrin Kirchhoff]]
</p><p class="cpabstractcardaffiliationlist">Amazon, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 756–760
</span></p></div>
<div class="cpabstractcardabstract"><p>Speaker diarization accuracy can be affected by both acoustics and conversation characteristics. Determining the cause of diarization errors is difficult because speaker voice acoustics and conversation structure co-vary, and the interactions between acoustics, conversational structure, and diarization accuracy are complex. This paper proposes a methodology that can distinguish independent marginal effects of acoustic and conversation characteristics on diarization accuracy by remixing conversations in a factorial design. As an illustration, this approach is used to investigate gender-related and language-related accuracy differences with three diarization systems: a baseline system using subsegment x-vector clustering, a variant of it with shorter subsegments, and a third system based on a Bayesian hidden Markov model. Our analysis shows large accuracy disparities for the baseline system primarily due to conversational structure, which are partially mitigated in the other two systems. The illustration thus demonstrates how the methodology can be used to identify and guide diarization model improvements.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Marcin Włodarczak|AUTHOR Marcin Włodarczak]]^^1^^
, [[Emer Gilmartin|AUTHOR Emer Gilmartin]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Stockholm University, Sweden; ^^2^^Trinity College Dublin, Ireland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 801–805
</span></p></div>
<div class="cpabstractcardabstract"><p>During conversation, speakers hold and relinquish the floor, resulting in turn yield and retention. We examine these phenomena in three-party conversations in English, Swedish, and Estonian. We define within- and between-speaker transitions in terms of shorter intervals of speech, silence and overlap bounded by stretches of one-party speech longer than 1 second by the same or different speakers. This method gives us insights into how turn change and retention proceed, revealing that the majority of speaker transitions are more complex and involve more intermediate activity than a single silence or overlap. We examine the composition of within and between transitions in terms of number of speakers involved, incidence and proportion of solo speech, silence and overlap. We derive the most common within- and between-speaker transitions in the three languages, finding evidence of striking commonalities in how the floor is managed. Our findings suggest that current models of turn-taking used in dialogue technology could be extended using these results to more accurately reflect the realities of human-human dialogue.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ross McGowan|AUTHOR Ross McGowan]], [[Jinru Su|AUTHOR Jinru Su]], [[Vince DiCocco|AUTHOR Vince DiCocco]], [[Thejaswi Muniyappa|AUTHOR Thejaswi Muniyappa]], [[Grant P. Strimel|AUTHOR Grant P. Strimel]]
</p><p class="cpabstractcardaffiliationlist">Amazon, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 761–765
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper we introduce SmallER, a scalable neural entity resolution system capable of running directly on edge devices. SmallER addresses constraints imposed by the on-device setting such as bounded memory consumption for both model and catalog storage, limited compute resources, and related latency challenges introduced by those restrictions. Our model includes distinct modules to learn syntactic and semantic information and is trained to handle multiple domains within one compact architecture. We use compressed tries to reduce the space required to store catalogs and a novel implementation of spatial partitioning trees to strike a balance between reducing runtime latency and preserving recall relative to full catalog search. Our final model consumes only 3MB of memory at inference time with classification accuracy surpassing that of previously established, domain-specific baseline models on live customer utterances. For the largest catalogs we consider (300 or more entries), our proxy metric for runtime latency is reduced by more than 90%.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Johann C. Rocholl|AUTHOR Johann C. Rocholl]], [[Vicky Zayats|AUTHOR Vicky Zayats]], [[Daniel D. Walker|AUTHOR Daniel D. Walker]], [[Noah B. Murad|AUTHOR Noah B. Murad]], [[Aaron Schneider|AUTHOR Aaron Schneider]], [[Daniel J. Liebling|AUTHOR Daniel J. Liebling]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 766–770
</span></p></div>
<div class="cpabstractcardabstract"><p>Disfluency detection models now approach high accuracy on English text. However, little exploration has been done in improving the size and inference time of the model. At the same time, Automatic Speech Recognition (ASR) models are moving from server-side inference to local, on-device inference. Supporting models in the transcription pipeline (like disfluency detection) must follow suit. In this work we concentrate on the disfluency detection task, focusing on small, fast, on-device models based on the BERT architecture. We demonstrate it is possible to train disfluency detection models as small as 1.3 MiB, while retaining high performance. We build on previous work that showed the benefit of data augmentation approaches such as self-training. Then, we evaluate the effect of domain mismatch between conversational and written text on model performance. We find that domain adaptation and data augmentation strategies have a more pronounced effect on these smaller models, as compared to conventional BERT models.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Qian Chen|AUTHOR Qian Chen]], [[Wen Wang|AUTHOR Wen Wang]], [[Mengzhe Chen|AUTHOR Mengzhe Chen]], [[Qinglin Zhang|AUTHOR Qinglin Zhang]]
</p><p class="cpabstractcardaffiliationlist">Alibaba, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 771–775
</span></p></div>
<div class="cpabstractcardabstract"><p>Punctuation prediction for automatic speech recognition (ASR) output transcripts plays a crucial role for improving the readability of the ASR transcripts and for improving the performance of downstream natural language processing applications. However, achieving good performance on punctuation prediction often requires large amounts of labeled speech transcripts, which is expensive and laborious. In this paper, we propose a Discriminative Self-Training approach with weighted loss and discriminative label smoothing to exploit unlabeled speech transcripts. Experimental results on the English IWSLT2011 benchmark test set and an internal Chinese spoken language dataset demonstrate that the proposed approach achieves significant improvement on punctuation prediction accuracy over strong baselines including BERT, RoBERTa, and ELECTRA models. The proposed Discriminative Self-Training approach outperforms the vanilla self-training approach. We establish a new state-of-the-art (SOTA) on the IWSLT2011 test set, outperforming the current SOTA model by 1.3% absolute gain on F₁.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mana Ihori|AUTHOR Mana Ihori]], [[Naoki Makishima|AUTHOR Naoki Makishima]], [[Tomohiro Tanaka|AUTHOR Tomohiro Tanaka]], [[Akihiko Takashima|AUTHOR Akihiko Takashima]], [[Shota Orihashi|AUTHOR Shota Orihashi]], [[Ryo Masumura|AUTHOR Ryo Masumura]]
</p><p class="cpabstractcardaffiliationlist">NTT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 776–780
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we propose a novel spoken-text-style conversion method that can simultaneously execute multiple style conversion modules such as punctuation restoration and disfluency deletion without preparing matched datasets. In practice, transcriptions generated by automatic speech recognition systems are not highly readable because they often include many disfluencies and do not include punctuation marks. To improve their readability, multiple spoken-text-style conversion modules that individually model a single conversion task are cascaded because matched datasets that simultaneously handle multiple conversion tasks are often unavailable. However, the cascading is unstable against the order of tasks because of the chain of conversion errors. Besides, the computation cost of the cascading must be higher than the single conversion. To execute multiple conversion tasks simultaneously without preparing matched datasets, our key idea is to distinguish individual conversion tasks using the //on-off switch//. In our proposed zero-shot joint modeling, we switch the individual tasks using multiple switching tokens, enabling us to utilize a zero-shot learning approach to executing simultaneous conversions. Our experiments on joint modeling of disfluency deletion and punctuation restoration demonstrate the effectiveness of our method.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Binghuai Lin|AUTHOR Binghuai Lin]], [[Liyuan Wang|AUTHOR Liyuan Wang]]
</p><p class="cpabstractcardaffiliationlist">Tencent, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 781–785
</span></p></div>
<div class="cpabstractcardabstract"><p>The common approach for pronunciation evaluation is based on Goodness of pronunciation (GOP). It has been found that GOP may perform worse under noise conditions. Traditional methods compensate pronunciation features to improve the performance of pronunciation assessment in noise situations. This paper proposed a noise robust model for word-level pronunciation assessment based on a domain adversarial training (DAT) method. We treat the pronunciation assessment in the clean and noise situations as the source and target domains. The network is optimized by incorporating both the pronunciation assessment and noise domain discrimination. The domain labels are generated from unsupervised methods to adapt to various noise situations. We evaluate the model performance based on English words recorded by Chinese English learners and labeled by three experts. Experimental results show on average the proposed model outperforms the baseline by 3% in Pearson correlation coefficients (PCC) and 4% in accuracy under different noise conditions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jonathan Wintrode|AUTHOR Jonathan Wintrode]]
</p><p class="cpabstractcardaffiliationlist">Raytheon, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 786–790
</span></p></div>
<div class="cpabstractcardabstract"><p>We present a novel framework for spoken topic identification that simultaneously learns both topic-specific keywords and acoustic keyword filters from only document-level topic labels. At inference time, only audio segments likely to contain topic-salient keywords are fully decoded, reducing the system’s overall computation cost. We show that this filtering allows for effective topic classification while decoding only 50% of ASR output word lattices, and achieves error rates within 1.2% and precision within 2.6% of an unfiltered baseline system.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shruti Palaskar|AUTHOR Shruti Palaskar]], [[Ruslan Salakhutdinov|AUTHOR Ruslan Salakhutdinov]], [[Alan W. Black|AUTHOR Alan W. Black]], [[Florian Metze|AUTHOR Florian Metze]]
</p><p class="cpabstractcardaffiliationlist">Carnegie Mellon University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 791–795
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose a cascaded multimodal abstractive speech summarization model that generates semantic concepts as an intermediate step towards summarization. We describe a method to leverage existing multimodal dataset annotations to curate groundtruth labels for such intermediate concept modeling. In addition to cascaded training, the concept labels also provide an interpretable intermediate output level that helps improve performance on the downstream summarization task. On the open-domain How2 data, we conduct utterance-level and video-level experiments for two granularities of concepts: Specific and Abstract. We compare various multimodal fusion models for concept generation based on the respective input modalities. We observe consistent improvements in concept modeling by using multimodal adaptation models over unimodal models. Using the cascaded multimodal speech summarization model, we see a significant improvement of 7.5 METEOR points and 5.1 ROUGE-L points compared to previous methods of speech summarization. Finally, we show the benefits of scalability of the proposed approaches on 2000 h of video data.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hyunjae Lee|AUTHOR Hyunjae Lee]], [[Jaewoong Yun|AUTHOR Jaewoong Yun]], [[Hyunjin Choi|AUTHOR Hyunjin Choi]], [[Seongho Joe|AUTHOR Seongho Joe]], [[Youngjune L. Gwon|AUTHOR Youngjune L. Gwon]]
</p><p class="cpabstractcardaffiliationlist">Samsung, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 796–800
</span></p></div>
<div class="cpabstractcardabstract"><p>Contextualized word embeddings can lead to state-of-the-art performances in natural language understanding. Recently, a pre-trained deep contextualized text encoder such as BERT has shown its potential in improving natural language tasks including abstractive summarization. Existing approaches in dialogue summarization focus on incorporating a large language model into summarization task trained on large-scale corpora consisting of news articles rather than dialogues of multiple speakers. In this paper, we introduce self-supervised methods to compensate shortcomings to train a dialogue summarization model. Our principle is to detect incoherent information flows using pretext dialogue text to enhance BERT’s ability to contextualize the dialogue text representations. We build and fine-tune an abstractive dialogue summarization model on a shared encoder-decoder architecture using the enhanced BERT. We empirically evaluate our abstractive dialogue summarizer with the SAMSum corpus, a recently introduced dataset with abstractive dialogue summaries. All of our methods have contributed improvements to abstractive summary measured in ROUGE scores. Through an extensive ablation study, we also present a sensitivity analysis to critical model hyperparameters, probabilities of switching utterances and masking interlocutors.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Samuel J. Broughton|AUTHOR Samuel J. Broughton]]^^1^^
, [[Md. Asif Jalal|AUTHOR Md. Asif Jalal]]^^2^^
, [[Roger K. Moore|AUTHOR Roger K. Moore]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^NUS, Singapore; ^^2^^University of Sheffield, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 806–810
</span></p></div>
<div class="cpabstractcardabstract"><p>Generative Adversarial Networks (GANs) are machine learning networks based around creating synthetic data. Voice Conversion (VC) is a subset of voice translation that involves translating the paralinguistic features of a source speaker to a target speaker while preserving the linguistic information. The aim of non-parallel conditional GANs for VC is to translate an acoustic speech feature sequence from one domain to another without the use of paired data. In the study reported here, we investigated the interpretability of state-of-the-art implementations of non-parallel GANs in the domain of VC. We show that the learned representations in the repeating layers of a particular GAN architecture remain close to their original random initialised parameters, demonstrating that it is the number of repeating layers that is more responsible for the quality of the output. We also analysed the learned representations of a model trained on one particular dataset when used during transfer learning on another dataset. This also showed high levels of similarity in the repeating layers. Together, these results provide new insight into how the learned representations of deep generative networks change during learning and the importance of the number of layers, which would help build better GAN-based speech conversion models.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Manh Luong|AUTHOR Manh Luong]]^^1^^
, [[Viet Anh Tran|AUTHOR Viet Anh Tran]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^VinAI Research, Vietnam; ^^2^^Deezer, France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 851–855
</span></p></div>
<div class="cpabstractcardabstract"><p>Voice conversion is a challenging task which transforms the voice characteristics of a source speaker to a target speaker without changing linguistic content. Recently, there have been many works on many-to-many Voice Conversion (VC) based on Variational Autoencoder (VAEs) achieving good results, however, these methods lack the ability to disentangle speaker identity and linguistic content to achieve good performance on unseen speaker’s scenarios. In this paper, we propose a new method based on feature disentanglement to tackle many-to-many voice conversion. The method has the capability to disentangle speaker identity and linguistic content from utterances, it can convert from many source speakers to many target speakers with a single autoencoder network. Moreover, it naturally deals with the unseen target speaker’s scenarios. We perform both objective and subjective evaluations to show the competitive performance of our proposed method compared with other state-of-the-art models in terms of naturalness and target speaker similarity.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kun Zhou|AUTHOR Kun Zhou]]^^1^^
, [[Berrak Sisman|AUTHOR Berrak Sisman]]^^2^^
, [[Haizhou Li|AUTHOR Haizhou Li]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^NUS, Singapore; ^^2^^SUTD, Singapore</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 811–815
</span></p></div>
<div class="cpabstractcardabstract"><p>Emotional voice conversion (EVC) aims to change the emotional state of an utterance while preserving the linguistic content and speaker identity. In this paper, we propose a novel 2-stage training strategy for sequence-to-sequence emotional voice conversion with a limited amount of emotional speech data. We note that the proposed EVC framework leverages text-to-speech (TTS) as they share a common goal that is to generate high-quality expressive voice. In stage 1, we perform style initialization with a multi-speaker TTS corpus, to disentangle speaking style and linguistic content. In stage 2, we perform emotion training with a limited amount of emotional speech data, to learn how to disentangle emotional style and linguistic information from the speech. The proposed framework can perform both spectrum and prosody conversion and achieves significant improvement over the state-of-the-art baselines in both objective and subjective evaluation.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yi-Yang Ding|AUTHOR Yi-Yang Ding]]^^1^^
, [[Li-Juan Liu|AUTHOR Li-Juan Liu]]^^2^^
, [[Yu Hu|AUTHOR Yu Hu]]^^1^^
, [[Zhen-Hua Ling|AUTHOR Zhen-Hua Ling]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^USTC, China; ^^2^^iFLYTEK, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 816–820
</span></p></div>
<div class="cpabstractcardabstract"><p>The naturalness and similarity of voice conversion have been significantly improved in recent years with the development of deep-learning-based conversion models and neural vocoders. Accordingly, the task of detecting spoofing speech also attracts research attention. In the latest ASVspoof 2019 challenge, the best spoofing detection model can distinguish most artificial utterances from natural ones. Inspired by recent progress of adversarial example generation, this paper proposes an adversarial post-processing network (APN) which generates adversarial examples against a neural-network-based spoofing detector by white-box attack. The APN model post-processes the speech waveforms generated by a baseline voice conversion system. An adversarial loss derived from the spoofing detector together with two regularization losses are applied to optimize the parameters of APN. In our experiments, using the logical access (LA) dataset of ASVspoof 2019, results show that our proposed method can improve the adversarial ability of converted speech against the spoofing detectors based on light convolution neural networks (LCNNs) effectively without degrading its subjective quality.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xiangheng He|AUTHOR Xiangheng He]]^^1^^
, [[Junjie Chen|AUTHOR Junjie Chen]]^^2^^
, [[Georgios Rizos|AUTHOR Georgios Rizos]]^^1^^
, [[Björn W. Schuller|AUTHOR Björn W. Schuller]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Imperial College London, UK; ^^2^^University of Tokyo, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 821–825
</span></p></div>
<div class="cpabstractcardabstract"><p>Emotional Voice Conversion (EVC) aims to convert the emotional style of a source speech signal to a target style while preserving its content and speaker identity information. Previous emotional conversion studies do not disentangle emotional information from emotion-independent information that should be preserved, thus transforming it all in a monolithic manner and generating audio of low quality, with linguistic distortions. To address this distortion problem, we propose a novel StarGAN framework along with a two-stage training process that separates emotional features from those independent of emotion by using an autoencoder with two encoders as the generator of the Generative Adversarial Network (GAN). The proposed model achieves favourable results in both the objective evaluation and the subjective evaluation in terms of distortion, which reveals that the proposed model can effectively reduce distortion. Furthermore, in data augmentation experiments for end-to-end speech emotion recognition, the proposed StarGAN model achieves an increase of 2% in Micro-F1 and 5% in Macro-F1 compared to the baseline StarGAN model, which indicates that the proposed model is more valuable for data augmentation.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ziyi Chen|AUTHOR Ziyi Chen]], [[Pengyuan Zhang|AUTHOR Pengyuan Zhang]]
</p><p class="cpabstractcardaffiliationlist">CAS, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 826–830
</span></p></div>
<div class="cpabstractcardabstract"><p>Techniques of voice conversion (VC) aim to modify the speaker identity and style of an utterance while preserving the linguistic content. Although there are lots of VC methods, the state of the art of VC is still cascading automatic speech recognition (ASR) and text-to-speech (TTS). This paper presents a new structure of vector-quantized autoencoder based on transformer with CTC loss for non-parallel VC, which inspired by cascading ASR and TTS VC method. Our proposed method combines CTC loss and vector quantization to get high-level linguistic information without speaker information. Objective and subjective evaluations on the mandarin datasets show that the converted speech of our proposed model is better than baselines on naturalness, rhythm and speaker similarity.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zhichao Wang|AUTHOR Zhichao Wang]]^^1^^
, [[Xinyong Zhou|AUTHOR Xinyong Zhou]]^^1^^
, [[Fengyu Yang|AUTHOR Fengyu Yang]]^^1^^
, [[Tao Li|AUTHOR Tao Li]]^^1^^
, [[Hongqiang Du|AUTHOR Hongqiang Du]]^^1^^
, [[Lei Xie|AUTHOR Lei Xie]]^^1^^
, [[Wendong Gan|AUTHOR Wendong Gan]]^^2^^
, [[Haitao Chen|AUTHOR Haitao Chen]]^^2^^
, [[Hai Li|AUTHOR Hai Li]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Northwestern Polytechnical University, China; ^^2^^iQIYI, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 831–835
</span></p></div>
<div class="cpabstractcardabstract"><p>Current voice conversion (VC) methods can successfully convert timbre of the audio. As modeling source audio’s prosody effectively is a challenging task, there are still limitations of transferring source style to the converted speech. This study proposes a source style transfer method based on recognition-synthesis framework. Previously in speech generation task, prosody can be modeled explicitly with prosodic features or implicitly with a latent prosody extractor. In this paper, taking advantages of both, we model the prosody in a hybrid manner, which effectively combines explicit and implicit methods in a proposed prosody module. Specifically, prosodic features are used to explicit model prosody, while VAE and reference encoder are used to implicitly model prosody, which take Mel spectrum and bottleneck feature as input respectively. Furthermore, adversarial training is introduced to remove speaker-related information from the VAE outputs, avoiding leaking source speaker information while transferring style. Finally, we use a modified self-attention based encoder to extract sentential context from bottleneck features, which also implicitly aggregates the prosodic aspects of source speech from the layered representations. Experiments show that our approach is superior to the baseline and a competitive system in terms of style transfer; meanwhile, the speech quality and speaker similarity are well maintained.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jheng-hao Lin|AUTHOR Jheng-hao Lin]], [[Yist Y. Lin|AUTHOR Yist Y. Lin]], [[Chung-Ming Chien|AUTHOR Chung-Ming Chien]], [[Hung-yi Lee|AUTHOR Hung-yi Lee]]
</p><p class="cpabstractcardaffiliationlist">National Taiwan University, Taiwan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 836–840
</span></p></div>
<div class="cpabstractcardabstract"><p>Any-to-any voice conversion (VC) aims to convert the timbre of utterances from and to any speakers seen or unseen during training. Various any-to-any VC approaches have been proposed like AUTOVC, AdaINVC, and FragmentVC. AUTOVC, and AdaINVC utilize source and target encoders to disentangle the content and speaker information of the features. FragmentVC utilizes two encoders to encode source and target information and adopts cross attention to align the source and target features with similar phonetic content. Moreover, pretrained features are adopted. AUTOVC used d-vector to extract speaker information, and self-supervised learning (SSL) features like wav2vec 2.0 is used in FragmentVC to extract the phonetic content information. Different from previous works, we proposed S2VC that utilizes Self-Supervised features as both source and target features for the VC model. Supervised phoneme posteriorgram (PPG), which is believed to be speaker-independent and widely used in VC to extract content information, is chosen as a strong baseline for SSL features. The objective evaluation and subjective evaluation both show models taking SSL feature CPC as both source and target features outperforms that taking PPG as source feature, suggesting that SSL features have great potential in improving VC.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Christopher Liberatore|AUTHOR Christopher Liberatore]], [[Ricardo Gutierrez-Osuna|AUTHOR Ricardo Gutierrez-Osuna]]
</p><p class="cpabstractcardaffiliationlist">Texas A&M University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 841–845
</span></p></div>
<div class="cpabstractcardabstract"><p>We present an algorithm for selecting exemplars for native-to-nonnative voice conversion (VC) using a Sparse, Anchor-Based Representation of speech (SABR). The algorithm uses phoneme labels and clustering to learn optimal exemplars when source and target speakers are affected by poor time alignment, as is common in in native-to-nonnative voice conversion. We evaluate the method on speech from the ARCTIC and L2-ARCTIC corpora and compare it to a baseline exemplar-based VC algorithm. The proposed algorithm significantly improves synthesis quality and more than doubles that of a baseline exemplar-based VC system while using two orders of magnitude fewer atoms. Additionally, the proposed algorithm significantly reduces the VC error and improves the synthesis quality as compared to unoptimized SABR models. We discuss the implications of both optimization algorithms for SABR and broader exemplar-based VC systems.Index terms should be included as shown below.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jie Wang|AUTHOR Jie Wang]]^^1^^
, [[Jingbei Li|AUTHOR Jingbei Li]]^^1^^
, [[Xintao Zhao|AUTHOR Xintao Zhao]]^^1^^
, [[Zhiyong Wu|AUTHOR Zhiyong Wu]]^^2^^
, [[Shiyin Kang|AUTHOR Shiyin Kang]]^^3^^
, [[Helen Meng|AUTHOR Helen Meng]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Tsinghua University, China; ^^2^^Tsinghua University, China; ^^3^^Huya, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 846–850
</span></p></div>
<div class="cpabstractcardabstract"><p>Factorizing speech as disentangled speech representations is vital to achieve highly controllable style transfer in voice conversion (VC). Conventional speech representation learning methods in VC only factorize speech as speaker and content, lacking controllability on other prosody-related factors. State-of-the-art speech representation learning methods for more speech factors are using primary disentangle algorithms such as random resampling and ad-hoc bottleneck layer size adjustment, which however is hard to ensure robust speech representation disentanglement. To increase the robustness of highly controllable style transfer on multiple factors in VC, we propose a disentangled speech representation learning framework based on adversarial learning. Four speech representations characterizing content, timbre, rhythm and pitch are extracted, and further disentangled by an adversarial Mask-And-Predict (MAP) network inspired by BERT. The adversarial network is used to minimize the correlations between the speech representations, by randomly masking and predicting one of the representations from the others. Experimental results show that the proposed framework significantly improves the robustness of VC on multiple factors by increasing the speech quality MOS from 2.79 to 3.30 and decreasing the MCD from 3.89 to 3.58.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jan Chorowski|AUTHOR Jan Chorowski]]^^1^^
, [[Grzegorz Ciesielski|AUTHOR Grzegorz Ciesielski]]^^1^^
, [[Jarosław Dzikowski|AUTHOR Jarosław Dzikowski]]^^1^^
, [[Adrian Łańcucki|AUTHOR Adrian Łańcucki]]^^2^^
, [[Ricard Marxer|AUTHOR Ricard Marxer]]^^3^^
, [[Mateusz Opala|AUTHOR Mateusz Opala]]^^1^^
, [[Piotr Pusz|AUTHOR Piotr Pusz]]^^1^^
, [[Paweł Rychlikowski|AUTHOR Paweł Rychlikowski]]^^1^^
, [[Michał Stypułkowski|AUTHOR Michał Stypułkowski]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Wrocław, Poland; ^^2^^NVIDIA, Poland; ^^3^^LIS (UMR 7020), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 971–975
</span></p></div>
<div class="cpabstractcardabstract"><p>We present a number of low-resource approaches to the tasks of the Zero Resource Speech Challenge 2021. We build on the unsupervised representations of speech proposed by the organizers as a baseline, derived from CPC and clustered with the k-means algorithm. We demonstrate that simple methods of refining those representations can narrow the gap, or even improve upon the solutions which use a high computational budget. The results lead to the conclusion that the CPC-derived representations are still too noisy for training language models, but stable enough for simpler forms of pattern matching and retrieval.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jan Chorowski|AUTHOR Jan Chorowski]]^^1^^
, [[Grzegorz Ciesielski|AUTHOR Grzegorz Ciesielski]]^^1^^
, [[Jarosław Dzikowski|AUTHOR Jarosław Dzikowski]]^^1^^
, [[Adrian Łańcucki|AUTHOR Adrian Łańcucki]]^^2^^
, [[Ricard Marxer|AUTHOR Ricard Marxer]]^^3^^
, [[Mateusz Opala|AUTHOR Mateusz Opala]]^^1^^
, [[Piotr Pusz|AUTHOR Piotr Pusz]]^^1^^
, [[Paweł Rychlikowski|AUTHOR Paweł Rychlikowski]]^^1^^
, [[Michał Stypułkowski|AUTHOR Michał Stypułkowski]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Wrocław, Poland; ^^2^^NVIDIA, Poland; ^^3^^LIS (UMR 7020), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 976–980
</span></p></div>
<div class="cpabstractcardabstract"><p>We investigate the possibility of forcing a self-supervised model trained using a contrastive predictive loss, to extract slowly varying latent representations. Rather than producing individual predictions for each of the future representations, the model emits a sequence of predictions shorter than the sequence of upcoming representations to which they will be aligned. In this way, the prediction network solves a simpler task of predicting the next symbols, but not their exact timing, while the encoding network is trained to produce piece-wise constant latent codes. We evaluate the model on a speech coding task and demonstrate that the proposed Aligned Contrastive Predictive Coding (ACPC) leads to higher linear phone prediction accuracy and lower ABX error rates, while being slightly faster to train due to the reduced number of prediction heads.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Benjamin Suter|AUTHOR Benjamin Suter]], [[Josef Novak|AUTHOR Josef Novak]]
</p><p class="cpabstractcardaffiliationlist">Spitch, Switzerland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 981–985
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents a simple sequence-to-sequence approach to restore standard orthography in raw, normalized speech transcripts, including insertion of punctuation marks, prediction of capitalization, restoration of numeric forms, formatting of dates and times, and other, fully data-driven adjustments. We further describe our method to generate synthetic parallel training data, and explore suitable performance metrics, which we align with human judgment through subjective MOS-like evaluations.
Our models for English, Russian, and German have a word error rate of 6.36%, 4.88%, and 5.23%, respectively. We focus on simplicity and reproducibility, make our framework available under a BSD license, and share our base models for English and Russian.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Aditya Joglekar|AUTHOR Aditya Joglekar]]^^1^^
, [[Seyed Omid Sadjadi|AUTHOR Seyed Omid Sadjadi]]^^2^^
, [[Meena Chandra-Shekar|AUTHOR Meena Chandra-Shekar]]^^1^^
, [[Christopher Cieri|AUTHOR Christopher Cieri]]^^3^^
, [[John H.L. Hansen|AUTHOR John H.L. Hansen]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Texas at Dallas, USA; ^^2^^NIST, USA; ^^3^^University of Pennsylvania, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 986–990
</span></p></div>
<div class="cpabstractcardabstract"><p>The Fearless Steps Challenge (FSC) initiative was designed to host a series of progressively complex tasks to promote advanced speech research across naturalistic “Big Data” corpora. The Center for Robust Speech Systems at UT-Dallas in collaboration with the National Institute of Standards and Technology (NIST) and Linguistic Data Consortium (LDC) conducted Phase-3 of the FSC series (FSC P3), with a focus on motivating speech and language technology (SLT) system generalizability across channel and mission diversity under the same training conditions as in Phase-2. The FSC P3 introduced 10 hours of previously unseen channel audio from Apollo-11 and 5 hours of novel audio from Apollo-13 to be evaluated over both previously established and newly introduced SLT tasks with streamlined tracks. This paper presents an overview of the newly introduced conversational analysis tracks, Apollo-13 data, and analysis of system performance for matched and mismatched challenge conditions. We also discuss the Phase-3 challenge results, evolution of system performance across the three Phases, and next steps in the Challenge Series.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hannah Leykum|AUTHOR Hannah Leykum]]
</p><p class="cpabstractcardaffiliationlist">Austrian Academy of Sciences, Austria</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 991–995
</span></p></div>
<div class="cpabstractcardabstract"><p>When using verbal irony in interpersonal communication, paraverbal cues can reduce the risk of misunderstandings. Besides fundamental frequency, intensity and duration, speakers could use voice quality parameters to disambiguate between ironic and literal utterances. How these paraverbal cues are used to mark irony appears to be language- and/or culture-specific. Since the role of voice quality in ironic utterances has not yet been investigated in Austrian German, the present study addresses this issue. In addition to the acoustic signal, the vocal fold vibration is recorded via electroglottography (EGG). The detailed analysis of the EGG data as well as the acoustic data, provides insight into voice quality characteristics of ironic and literal realisations of short utterances. The analyses reveal that, in Standard Austrian German, some differences in voice quality exist between ironic and literal realisations of utterances: When being ironic, speakers’ voices tend to be breathier, creakier or rougher. Differences are more pronounced in the older age group and in male speakers.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mathilde Hutin|AUTHOR Mathilde Hutin]]^^1^^
, [[Yaru Wu|AUTHOR Yaru Wu]]^^1^^
, [[Adèle Jatteau|AUTHOR Adèle Jatteau]]^^2^^
, [[Ioana Vasilescu|AUTHOR Ioana Vasilescu]]^^1^^
, [[Lori Lamel|AUTHOR Lori Lamel]]^^1^^
, [[Martine Adda-Decker|AUTHOR Martine Adda-Decker]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^LISN (UMR 9015), France; ^^2^^STL (UMR 8163), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 996–1000
</span></p></div>
<div class="cpabstractcardabstract"><p>Devoicing is a process whereby a voiced consonant such as /bdg/ is realized as voiceless [ptk]. Some theorists [1,2] propose that this phenomenon is an instance of fortition, or consonant strengthening, especially when it occurs word-initially. This study proposes an in-depth exploration of voicing alternations in word-initial position in five Romance languages (Portuguese, Spanish, French, Italian, Romanian) using large corpora (ca. 1000h of speech) and automatic alignment. Our results show that (i) there is initial devoicing in all languages, and (ii) this devoicing is conditioned by the preceding context. This allows the languages to be divided into those displaying (a) only phrase-initial fortition (Spanish), (b) phrase-initial and post-obstruent fortition (French, Romanian and possibly Italian) and (c) generalized word-initial fortition (Portuguese).</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ivan Kraljevski|AUTHOR Ivan Kraljevski]]^^1^^
, [[Maria Paola Bissiri|AUTHOR Maria Paola Bissiri]]^^2^^
, [[Frank Duckhorn|AUTHOR Frank Duckhorn]]^^1^^
, [[Constanze Tschoepe|AUTHOR Constanze Tschoepe]]^^1^^
, [[Matthias Wolff|AUTHOR Matthias Wolff]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Fraunhofer IKTS, Germany; ^^2^^Università dell’Insubria, Italy; ^^3^^Brandenburgische Technische Universität, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1001–1005
</span></p></div>
<div class="cpabstractcardabstract"><p>We present a data-driven approach for the quantitative analysis of glottal stops before word-initial vowels in Upper Sorbian, a West Slavic minority language spoken in Germany. Glottal stops are word-boundary markers and their detection can improve the performance of automatic speech recognition and speech synthesis systems.
We employed cross-language transfer using an acoustic model in German to develop a forced-alignment method for the phonetic segmentation of a read-speech corpus in Upper Sorbian. The missing phonemic units were created by combining the existing phoneme models. In the forced-alignment procedure, the glottal stops were considered optional in front of word-initial vowels.
To investigate the influence of speaker type (males, females, and children) and vowel on the occurrence of glottal stops, binomial regression analysis with a generalized linear mixed model was performed. Results show that children glottalize word-initial vowels more frequently than adults, and that glottal stop occurrences are influenced by vowel quality.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Bogdan Ludusan|AUTHOR Bogdan Ludusan]]^^1^^
, [[Petra Wagner|AUTHOR Petra Wagner]]^^1^^
, [[Marcin Włodarczak|AUTHOR Marcin Włodarczak]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universität Bielefeld, Germany; ^^2^^Stockholm University, Sweden</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1006–1010
</span></p></div>
<div class="cpabstractcardabstract"><p>Voice quality is an important dimension in human communication, used to mark a variety of phenomena in speech, including prosodic prominence. Even though numerous studies have shown that speakers modify their voice quality parameters for marking prosodic prominence, the impact of these modifications on perceived prominence is less studied. Our investigation looks at the effect of a well-known measure of voice quality, cepstral peak prominence (CPP), on syllabic prominence ratings given by both naive and expert listeners. Employing read speech materials in German, we quantify the role of CPP alone and in combination with other acoustic cues marking prominence, namely intensity, duration and fundamental frequency. While CPP, by itself, had a significant effect on the perceived prominence for most of the listeners, when used in conjunction with the other cues, its impact was reduced. Moreover, when assessing the importance of each of these four cues for determining the perceived prominence score we found important individual variation, as well as differences between naive and expert listeners.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jenifer Vega Rodriguez|AUTHOR Jenifer Vega Rodriguez]], [[Nathalie Vallée|AUTHOR Nathalie Vallée]]
</p><p class="cpabstractcardaffiliationlist">GIPSA-lab (UMR 5216), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1011–1014
</span></p></div>
<div class="cpabstractcardabstract"><p>Korebaju (ISO639-3: coe) [́kòrèβàhɨ́] is a tonal language spoken in the foothills of the Colombian Amazon. Three field surveys carried out between 2017 and 2019 with six native speakers (3 females and 3 males) from the same village provide a set of glottal productions at both phonetic and phonological levels. This study focuses on the four types of glottal units we have found in this language: A set of vowels /a^^ʔ^^/, /e^^ʔ^^/, /o^^ʔ^^/, [i^^ʔ^^] and [ɨ^^ʔ^^] including 3 phonemes; the glottal stop [ʔ] and the consonant [*] transcribed and described as a //creaky voiced glottal approximant// by [1]. Both consonants occurred in intervocalic contexts and can be analyzed as a suprasegmental feature [constricted glottis] which marks the syllable onset. Finally, we have also found a clear and systematic burst which accompanies the release of the nasal consonants [m^^ʔ^^, n^^ʔ^^, ɲ^^ʔ^^]. No change was found in the EGG signal for these consonants suggesting an abrupt release of the aeroacoustic pressure.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Anaïs Chanclu|AUTHOR Anaïs Chanclu]]^^1^^
, [[Imen Ben Amor|AUTHOR Imen Ben Amor]]^^1^^
, [[Cédric Gendrot|AUTHOR Cédric Gendrot]]^^2^^
, [[Emmanuel Ferragne|AUTHOR Emmanuel Ferragne]]^^2^^
, [[Jean-François Bonastre|AUTHOR Jean-François Bonastre]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^LIA (EA 4128), France; ^^2^^LPP (UMR 7018), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1015–1018
</span></p></div>
<div class="cpabstractcardabstract"><p>Voice quality is known to be an important factor for the characterization of a speaker’s voice, both in terms of physiological features (mainly laryngeal and supralaryngeal) and of the speaker’s habits (sociolinguistic factors). This paper is devoted to one of the main components of voice quality: phonation type. It proposes neural representations of speech followed by a cascade of two binary neural network-based classifiers, one dedicated to the detection of modal and nonmodal vowels, and one for the classification of nonmodal vowels into creaky and breathy types. This approach is evaluated on the spontaneous part of the PTSVOX database, following an expert manual labelling of the data by phonation type. The results of the proposed classifiers reaches on average 85%accuracy at the frame-level and up to 95% accuracy at the segment-level. Further research is planned to generalize the classifiers on more contexts and speakers, and thus pave the way for a new workflow aimed at characterizing phonation types.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Rob J.J.H. van Son|AUTHOR Rob J.J.H. van Son]]
</p><p class="cpabstractcardaffiliationlist">Netherlands Cancer Institute, The Netherlands</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1019–1023
</span></p></div>
<div class="cpabstractcardabstract"><p>Collecting and sharing speech resources is important for progress in speech science and technology. Often, speech resources cannot be shared because of concerns over the privacy of the speakers, e.g., minors or people with medical conditions. Current technologies for pseudonymizing speech have only been tested on “standard” speech for which pseudonymization methods are evaluated on speaker identification risk, intelligibility, and naturalness. For many applications, the important characteristics are para-linguistic aspects of the speech, e.g., voice quality, emotion, or disease progression. Little information is available about the extent to which speaker pseudonymization methods preserve such paralinguistic information. The current study investigates how well voice quality parameters are preserved by an example speech pseudonymization application. Correlations prove to be high between original and pseudonymized recordings for seven acoustic parameters and a composite measure of dysphonia, the //AVQI//. Root mean square errors for these parameters were reasonably small. A linear mixed effect model shows a link between the difference between source and target speaker and the size of the absolute difference in the //AVQI//. It is argued that new measures of quality are needed for pseudonymized non-standard speech before wide-spread application of pseudonymized speech can be considered in research and clinical practise.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Lars Steinert|AUTHOR Lars Steinert]], [[Felix Putze|AUTHOR Felix Putze]], [[Dennis Küster|AUTHOR Dennis Küster]], [[Tanja Schultz|AUTHOR Tanja Schultz]]
</p><p class="cpabstractcardaffiliationlist">Universität Bremen, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1024–1028
</span></p></div>
<div class="cpabstractcardabstract"><p>Dementia places an immeasurable burden on affected individuals and caregivers. In addition to general cognitive decline, dementia has a negative impact on communication. Technical activation systems are thus in high demand, as cognitive activation may help to moderate the decline. However, effective activation requires sustained engagement — which, in turn, first needs to be reliably recognized. In this study, we examine emotional engagement recognition for People with Dementia (PwD) using non-intrusive biosignals resulting from speech communication and facial expressions. PwD suffering from mild to severe dementia used a tablet-based activation system over multiple sessions. We demonstrate that they retained their ability to verbally express emotional engagement even at severe stages of the disease. For recognition of emotional engagement, we propose an architecture of Bidirectional Long-Short-Term-Memory Networks that combines video information with up to three speech-based feature sets (eGeMAPS, ComParE’13, DeepSpectrum). Using data of 24 PwD, we show that adding speech improves recognition performance significantly compared to a video-only model. Interestingly, disease-progression did not appear to have a substantial impact on recognition performance in this sample. We further discuss the opportunities and challenges of detecting emotional engagement from speech in PwD.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Pascal Hecker|AUTHOR Pascal Hecker]]^^1^^
, [[Florian B. Pokorny|AUTHOR Florian B. Pokorny]]^^2^^
, [[Katrin D. Bartl-Pokorny|AUTHOR Katrin D. Bartl-Pokorny]]^^2^^
, [[Uwe Reichel|AUTHOR Uwe Reichel]]^^1^^
, [[Zhao Ren|AUTHOR Zhao Ren]]^^2^^
, [[Simone Hantke|AUTHOR Simone Hantke]]^^1^^
, [[Florian Eyben|AUTHOR Florian Eyben]]^^1^^
, [[Dagmar M. Schuller|AUTHOR Dagmar M. Schuller]]^^1^^
, [[Bert Arnrich|AUTHOR Bert Arnrich]]^^3^^
, [[Björn W. Schuller|AUTHOR Björn W. Schuller]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^audEERING, Germany; ^^2^^Universität Augsburg, Germany; ^^3^^Universität Potsdam, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1029–1033
</span></p></div>
<div class="cpabstractcardabstract"><p>With the COVID-19 pandemic, several research teams have reported successful advances in automated recognition of COVID-19 by voice. Resulting voice-based screening tools for COVID-19 could support large-scale testing efforts. While capabilities of machines on this task are progressing, we approach the so far unexplored aspect whether human raters can distinguish COVID-19 positive and negative tested speakers from voice samples, and compare their performance to a machine learning baseline. To account for the challenging symptom similarity between COVID-19 and other respiratory diseases, we use a carefully balanced dataset of voice samples, in which COVID-19 positive and negative tested speakers are matched by their symptoms alongside COVID-19 negative speakers without symptoms. Both human raters and the machine struggle to reliably identify COVID-19 positive speakers in our dataset. These results indicate that particular attention should be paid to the distribution of symptoms across all speakers of a dataset when assessing the capabilities of existing systems. The identification of acoustic aspects of COVID-19-related symptom manifestations might be the key for a reliable voice-based COVID-19 detection in the future by both trained human raters and machine learning models.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Huyen Nguyen|AUTHOR Huyen Nguyen]]^^1^^
, [[Ralph Vente|AUTHOR Ralph Vente]]^^2^^
, [[David Lupea|AUTHOR David Lupea]]^^3^^
, [[Sarah Ita Levitan|AUTHOR Sarah Ita Levitan]]^^2^^
, [[Julia Hirschberg|AUTHOR Julia Hirschberg]]^^4^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universität Hamburg, Germany; ^^2^^CUNY Hunter College, USA; ^^3^^NYU, USA; ^^4^^Columbia University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1034–1038
</span></p></div>
<div class="cpabstractcardabstract"><p>We analyze the acoustic-prosodic and lexical correlates of persuasiveness, taking into account speaker, judge and debate characteristics in a novel data set of 674 audio profiles, transcripts, evaluation scores and demographic data from professional debate tournament speeches. By conducting 10-fold cross validation experiments with linear, LASSO and random forest regression, we predict how different feature combinations contribute toward speech scores (i.e. persuasiveness) between men and women. Overall, lexical features, i.e. word complexity, nouns, fillers and hedges, are the most predictive features of speech evaluation scores; in addition to the gender composition of judge panels and opponents. In a combined lexical and demographic feature model, we achieve an R² of 0.40. Different lexical features predict speech evaluation scores for male vs. female speakers, and further investigation is necessary to understand whether differential evaluation standards applied across genders. This work contributes a larger-scale debate data set in a democratically relevant, competitive format with high external relevance to persuasive speech education in other competitive settings.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hannah White|AUTHOR Hannah White]], [[Joshua Penney|AUTHOR Joshua Penney]], [[Andy Gibson|AUTHOR Andy Gibson]], [[Anita Szakay|AUTHOR Anita Szakay]], [[Felicity Cox|AUTHOR Felicity Cox]]
</p><p class="cpabstractcardaffiliationlist">Macquarie University, Australia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1384–1388
</span></p></div>
<div class="cpabstractcardabstract"><p>Creaky voice is a nonmodal phonation type that has various linguistic and sociolinguistic functions. Manually annotating creaky voice for phonetic analysis is time-consuming and labor-intensive. In recent years, automatic tools for detecting creaky voice have been proposed, which present the possibility for easier, faster and more consistent creak identification. One of these proposed tools is a Creak Detector algorithm that uses an automatic neural network taking its input from several acoustic cues to identify creaky voice. Previous work has suggested that the creak probability threshold at which this tool determines an instance to be creaky may vary depending on the speaker population. The present study investigates the optimal creak detection threshold for female Australian English speakers.
Results show further support for the practice of first finding the optimal threshold when using the Creak Detection algorithm on new data sets. Additionally, results show that accuracy of creaky voice detection using the Creak Detection algorithm can be significantly improved by excluding non-sonorant data.</p></div>
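The threshold-optimisation step described above can be pictured as a simple sweep over candidate creak-probability cut-offs, keeping the one that best agrees with manual annotation. The sketch below uses F1 as the agreement metric; the grid and metric are assumptions, not the study's protocol.

```python
import numpy as np
from sklearn.metrics import f1_score

def best_creak_threshold(creak_prob, manual_creak, grid=np.linspace(0.05, 0.95, 19)):
    """Return the (threshold, F1) pair maximising agreement between the detector's
    creak probabilities and binary manual creak labels."""
    scored = [(thr, f1_score(manual_creak, creak_prob >= thr)) for thr in grid]
    return max(scored, key=lambda t: t[1])
```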
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Joshua Penney|AUTHOR Joshua Penney]], [[Andy Gibson|AUTHOR Andy Gibson]], [[Felicity Cox|AUTHOR Felicity Cox]], [[Michael Proctor|AUTHOR Michael Proctor]], [[Anita Szakay|AUTHOR Anita Szakay]]
</p><p class="cpabstractcardaffiliationlist">Macquarie University, Australia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1389–1393
</span></p></div>
<div class="cpabstractcardabstract"><p>There has been a recent increase in speech research utilizing data recorded with participants’ personal devices, particularly in light of the COVID-19 pandemic and restrictions on face-to-face interactions. This raises important questions about whether these recordings are comparable to those made in traditional lab-based settings. Some previous studies have compared the viability of recordings made with personal devices for the clinical evaluation of voice quality. However, these studies rely on simple statistical analyses and do not examine acoustic correlates of voice quality typically examined in the (socio-) phonetic literature (e.g. H1-H2). In this study, we compare recordings from a set of smartphones/laptops and a solid-state recorder to assess the reliability of a range of acoustic correlates of voice quality. The results show significant differences for many acoustic measures of voice quality across devices. Further exploratory analyses demonstrate that these differences are not simple offsets, but rather that their magnitude depends on the value of the measurement of interest. We therefore urge researchers to exercise caution when examining voice quality based on recordings made with participants’ devices, particularly when interested in small effect sizes. We also call on the speech research community to investigate these issues more thoroughly.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Anna Sfakianaki|AUTHOR Anna Sfakianaki]], [[George P. Kafentzis|AUTHOR George P. Kafentzis]]
</p><p class="cpabstractcardaffiliationlist">University of Crete, Greece</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1394–1398
</span></p></div>
<div class="cpabstractcardabstract"><p>The current study investigates voice quality characteristics of Greek adults with normal hearing and hearing loss, automatically obtained from glottal inverse filtering analysis using the Aalto Aparat toolkit. Aalto Aparat has been employed in glottal flow analysis of disordered speech, but to the best of the authors’ knowledge, not as yet in hearing impaired voice analysis and assessment. Five speakers, three women and two men, with normal hearing (NH) and five speakers with prelingual profound hearing impairment (HI), matched for age and sex, produced symmetrical /ˈpVpV/ disyllables, where V=/i, a, u/. A state-of-the-art method named quasi-closed phase analysis (QCP) is offered in Aparat and it is used to estimate the glottal source signal. Glottal source features were obtained using time- and frequency-domain parametrization methods and analysed statistically. The interpretation of the results attempts to shed light on potential differences between HI and NH phonation strategies, while advantages and limitations of inverse filtering methods in HI voice assessment are discussed.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mark Huckvale|AUTHOR Mark Huckvale]], [[Catinca Buciuleac|AUTHOR Catinca Buciuleac]]
</p><p class="cpabstractcardaffiliationlist">University College London, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1399–1403
</span></p></div>
<div class="cpabstractcardabstract"><p>The Saarbrücken Voice Database contains speech and simultaneous electroglottography recordings of 1002 speakers exhibiting a wide range of voice disorders, together with recordings of 851 controls. Previous studies have used this database to build systems for automated detection of voice disorders and for differential diagnosis. These studies have varied considerably in the subset of pathologies tested, the audio materials analyzed, the cross-validation method used and the performance metric reported. This variation has made it hard to determine the most promising approaches to the problem of detecting voice disorders. In this study we re-implement three recently published systems that have been trained to detect pathology using the SVD and compare their performance on the same pathologies with the same audio materials using a common cross-validation protocol and performance metric. We show that under this approach, there is much less difference in performance across systems than in their original publication. We also show that voice disorder detection on the basis of a short phrase gives similar performance to that based on a sequence of vowels of different pitch. Our evaluation protocol may be useful for future studies on voice disorder detection with the SVD.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Steven M. Lulich|AUTHOR Steven M. Lulich]], [[Rita R. Patel|AUTHOR Rita R. Patel]]
</p><p class="cpabstractcardaffiliationlist">Indiana University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1404–1408
</span></p></div>
<div class="cpabstractcardabstract"><p>Non-invasive measures of voice quality, such as H1-H2, rely on oral flow signals, inverse filtered speech signals, or corrections for the effects of formants. Voice quality measures play especially important roles in the assessment of voice disorders and the evaluation of treatment efficacy. One type of treatment that is increasingly common in voice therapy, as well as in voice training for singers and actors, is semi-occluded vocal tract exercises (SOVTEs). The goal of SOVTEs is to change patterns of vocal fold vibration and thereby improve voice quality and vocal efficiency. Accelerometers applied to the skin of the neck have been used to investigate subglottal acoustics, to inverse-filter speech signals, and to obtain voice quality metrics. This paper explores the application of neck-skin accelerometers to measure voice quality without oral flow, inverse filtering, or formant correction. Accelerometer-based measures (uncorrected K1-K2 and corrected K1*-K2*, analogous to microphone-based H1-H2 and H1*-H2*) were obtained from typically developing children with healthy voice, before and during SOVTEs. Traditional microphone-based H1-H2 measures (corrected and uncorrected) were also obtained. Results showed that K1-K2 and K1*-K2* were not substantially affected by vocal tract acoustic changes in formant frequencies.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Matthew Perez|AUTHOR Matthew Perez]]^^1^^
, [[Amrit Romana|AUTHOR Amrit Romana]]^^1^^
, [[Angela Roberts|AUTHOR Angela Roberts]]^^2^^
, [[Noelle Carlozzi|AUTHOR Noelle Carlozzi]]^^1^^
, [[Jennifer Ann Miner|AUTHOR Jennifer Ann Miner]]^^1^^
, [[Praveen Dayalu|AUTHOR Praveen Dayalu]]^^1^^
, [[Emily Mower Provost|AUTHOR Emily Mower Provost]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Michigan, USA; ^^2^^Northwestern University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1409–1413
</span></p></div>
<div class="cpabstractcardabstract"><p>Huntington Disease (HD) is a progressive disorder which often manifests in motor impairment. Motor severity (captured via motor score) is a key component in assessing overall HD severity. However, motor score evaluation involves in-clinic visits with a trained medical professional, which are expensive and not always accessible. Speech analysis provides an attractive avenue for tracking HD severity because speech is easy to collect remotely and provides insight into motor changes. HD speech is typically characterized as having irregular articulation. With this in mind, acoustic features that can capture vocal tract movement and articulatory coordination are particularly promising for characterizing motor symptom progression in HD. In this paper, we present an experiment that uses Vocal Tract Coordination (VTC) features extracted from read speech to estimate a motor score. When using an elastic-net regression model, we find that VTC features significantly outperform other acoustic features across varied-length audio segments, which highlights the effectiveness of these features for both short- and long-form reading tasks. Lastly, we analyze the F-value scores of VTC features to visualize which channels are most related to motor score. This work enables future research efforts to consider VTC features for acoustic analyses which target HD motor symptomatology tracking.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Carlos A. Ferrer|AUTHOR Carlos A. Ferrer]]^^1^^
, [[Efren Aragón|AUTHOR Efren Aragón]]^^1^^
, [[María E. Hdez-Díaz|AUTHOR María E. Hdez-Díaz]]^^2^^
, [[Marc S. de Bodt|AUTHOR Marc S. de Bodt]]^^2^^
, [[Roman Cmejla|AUTHOR Roman Cmejla]]^^3^^
, [[Marina Englert|AUTHOR Marina Englert]]^^4^^
, [[Mara Behlau|AUTHOR Mara Behlau]]^^4^^
, [[Elmar Nöth|AUTHOR Elmar Nöth]]^^5^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universidad Central de Las Villas, Cuba; ^^2^^Universiteit Antwerpen, Belgium; ^^3^^Czech Technical University in Prague, Czechia; ^^4^^Universidade Federal de São Paulo, Brazil; ^^5^^FAU Erlangen-Nürnberg, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1414–1418
</span></p></div>
<div class="cpabstractcardabstract"><p>Dysphonia comprises many perceptually deviating aspects of voice, and its overall severity perception is made by the listener according to methods of aggregating the single dimensions which are personally conceived and not well studied. Roughness and breathiness are constituent dimensions in most devised rating scales in clinical use. In this paper, we evaluate several ways to model the mapping of the overall severity as a function of the particular ratings of roughness and breathiness. The models include the simple linear averaging as well as several non-linear variants suggested elsewhere, and some minor adjustments. The models are evaluated on four datasets from different countries, allowing a more global evaluation of how the mapping is conceived.
Results show the limitations of the most widely assumed linear approach, while also hinting at a need for a more uniform coverage of the sample space in voice pathology datasets. The models explored in this paper can be expanded to higher-dimensional scales.</p></div>
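The competing aggregation models can be illustrated with a few candidate mappings from roughness (R) and breathiness (B) ratings to overall severity (S); the functional forms below are generic examples, not the exact variants evaluated in the paper.

```latex
% Candidate mappings from roughness R and breathiness B to overall severity S
S_{\text{linear}} = \tfrac{1}{2}(R + B), \qquad
S_{\max} = \max(R, B), \qquad
S_{\text{rms}} = \sqrt{\tfrac{1}{2}\left(R^{2} + B^{2}\right)}
```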
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Bengt J. Borgström|AUTHOR Bengt J. Borgström]]
</p><p class="cpabstractcardaffiliationlist">MIT Lincoln Laboratory, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1039–1043
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents a Bayesian framework for unsupervised domain adaptation of Probabilistic Linear Discriminant Analysis (PLDA). By interpreting class labels as latent random variables, Variational Bayes (VB) is used to derive a maximum //a posterior// (MAP) solution of the adapted PLDA model when labels are missing, referred to as VB-MAP. The VB solution iteratively infers class labels and updates PLDA hyperparameters, offering a systematic framework for dealing with unlabeled data. While presented as a general solution, this paper includes experimental results for domain adaptation in speaker verification. VB-MAP estimation is applied to the 2016 and 2018 NIST Speaker Recognition Evaluations (SREs), both of which included small and unlabeled in-domain data sets, and is shown to provide performance improvements over a variety of state-of-the-art domain adaptation methods. Additionally, VB-MAP estimation is used to train a fully unsupervised PLDA model, suffering only minor performance degradation relative to conventional supervised training, offering promise for training PLDA models when no relevant labeled data exists.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jie Pu|AUTHOR Jie Pu]]^^1^^
, [[Yuguang Yang|AUTHOR Yuguang Yang]]^^2^^
, [[Ruirui Li|AUTHOR Ruirui Li]]^^2^^
, [[Oguz Elibol|AUTHOR Oguz Elibol]]^^2^^
, [[Jasha Droppo|AUTHOR Jasha Droppo]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Cambridge, UK; ^^2^^Amazon, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1084–1088
</span></p></div>
<div class="cpabstractcardabstract"><p>The success of modern deep learning systems is built on two cornerstones, massive amount of annotated training data and advanced computational infrastructure to support large-scale computation. In recent years, the model size of state-of-the-art deep learning systems has rapidly increased and sometimes reached to billions of parameters. Herein we take a close look into this phenomenon and present an empirical study on the scaling effect of model size for self-supervised speech models. In particular, we investigate the quantitative relationship between the model size and the loss/accuracy performance on speech tasks. First, the power-law scaling property between the number of parameters and the L₁ self-supervised loss is verified for speech models. Then the advantage of large speech models in learning effective speech representations is demonstrated in two downstream tasks: i) speaker recognition and ii) phoneme classification. Moreover, it has been shown that the model size of self-supervised speech networks is able to compensate the lack of annotation when there is insufficient training data.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yibo Wu|AUTHOR Yibo Wu]]^^1^^
, [[Longbiao Wang|AUTHOR Longbiao Wang]]^^1^^
, [[Kong Aik Lee|AUTHOR Kong Aik Lee]]^^2^^
, [[Meng Liu|AUTHOR Meng Liu]]^^1^^
, [[Jianwu Dang|AUTHOR Jianwu Dang]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Tianjin University, China; ^^2^^A*STAR, Singapore</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1089–1093
</span></p></div>
<div class="cpabstractcardabstract"><p>Recently, increasing attention has been paid to the joint training of upstream and downstream tasks, and to address the challenge of how to synchronize various loss functions in a multi-objective scenario. In this paper, to address the competing gradient directions between the speaker classification loss and the feature enhancement loss, we propose an asynchronous subregion optimization approach for the joint training of feature enhancement and speaker embedding neural networks. For the asynchronous subregion optimization, the squeeze and excitation (SE) method is introduced in the enhancement network to adaptively select important channels for speaker embedding. Furthermore, channel-wise feature concatenation is applied between the input feature and the enhanced feature to address the distortion of speaker information that is caused by enhancement loss. By using the proposed joint training network with asynchronous subregion optimization and channel-wise feature concatenation, we obtained relative gains of 11.95% and 6.43% in equal error rate on a noisy version of Voxceleb1 and VOiCES corpus, respectively.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Li Zhang|AUTHOR Li Zhang]]^^1^^
, [[Qing Wang|AUTHOR Qing Wang]]^^1^^
, [[Kong Aik Lee|AUTHOR Kong Aik Lee]]^^2^^
, [[Lei Xie|AUTHOR Lei Xie]]^^1^^
, [[Haizhou Li|AUTHOR Haizhou Li]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Northwestern Polytechnical University, China; ^^2^^A*STAR, Singapore; ^^3^^NUS, Singapore</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1094–1098
</span></p></div>
<div class="cpabstractcardabstract"><p>In far-field speaker verification, the performance of speaker embeddings is susceptible to degradation when there is a mismatch between the conditions of enrollment and test speech. To solve this problem, we propose the feature-level and instance-level transfer learning in the teacher-student framework to learn a domain-invariant embedding space. For the feature-level knowledge transfer, we develop the contrastive loss to transfer knowledge from teacher model to student model, which not only decrease the intra-class distance, but also enlarge the inter-class distance. Moreover, we propose the instance-level pairwise distance transfer method to force the student model to preserve pairwise instances distance from the well optimized embedding space of the teacher model. On FFSVC 2020 evaluation set, our EER on Full-eval trials is relatively reduced by 13.9% compared with the fusion system result on Partial-eval trials of Task2. On Task1, compared with the winner’s DenseNet result on Partial-eval trials, our minDCF on Full-eval trials is relatively reduced by 6.3%. On Task3, the EER and minDCF of our proposed method on Full-eval trials are very close to the result of the fusion system on Partial-eval trials. Our results also outperform other competitive domain adaptation methods.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jose Patino|AUTHOR Jose Patino]]^^1^^
, [[Natalia Tomashenko|AUTHOR Natalia Tomashenko]]^^2^^
, [[Massimiliano Todisco|AUTHOR Massimiliano Todisco]]^^1^^
, [[Andreas Nautsch|AUTHOR Andreas Nautsch]]^^1^^
, [[Nicholas Evans|AUTHOR Nicholas Evans]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^EURECOM, France; ^^2^^LIA (EA 4128), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1099–1103
</span></p></div>
<div class="cpabstractcardabstract"><p>Anonymisation has the goal of manipulating speech signals in order to degrade the reliability of automatic approaches to speaker recognition, while preserving other aspects of speech, such as those relating to intelligibility and naturalness. This paper reports an approach to anonymisation that, unlike other current approaches, requires no training data, is based upon well-known signal processing techniques and is both efficient and effective. The proposed solution uses the McAdams coefficient to transform the spectral envelope of speech signals. Results derived using common VoicePrivacy 2020 databases and protocols show that random, optimised transformations can outperform competing solutions in terms of anonymisation while causing only modest, additional degradations to intelligibility, even in the case of a semi-informed privacy adversary.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Weiqing Wang|AUTHOR Weiqing Wang]]^^1^^
, [[Danwei Cai|AUTHOR Danwei Cai]]^^1^^
, [[Jin Wang|AUTHOR Jin Wang]]^^2^^
, [[Qingjian Lin|AUTHOR Qingjian Lin]]^^2^^
, [[Xuyang Wang|AUTHOR Xuyang Wang]]^^2^^
, [[Mi Hong|AUTHOR Mi Hong]]^^2^^
, [[Ming Li|AUTHOR Ming Li]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Duke Kunshan University, China; ^^2^^Lenovo, China; ^^3^^Duke Kunshan University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1044–1048
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper describes the systems developed by the DKU-Duke-Lenovo team for the Fearless Steps Challenge Phase III. For the speech activity detection (SAD) task, we employ the U-Net-based model which has not been used for SAD before, observing a DCF of 1.915% on the eval set. For the speaker identification (SID) task, we adopt the ResNet-SE and ECAPA-TDNN model, and we obtain a Top-5 accuracy of 86.21%. For the speaker diarization (SD) task, we employ several different clustering methods. Besides, domain adaptation, system fusion, and Target-Speaker Voice Activity Detection (TS-VAD) significantly improve the SD performance. We obtain a DER of 12.32% on track 2, and the major contribution is from our ResNet-based TS-VAD model. We finally achieve a first-place ranking for SD and SID and a second-place for SAD in the challenge.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yafeng Chen|AUTHOR Yafeng Chen]], [[Wu Guo|AUTHOR Wu Guo]], [[Bin Gu|AUTHOR Bin Gu]]
</p><p class="cpabstractcardaffiliationlist">USTC, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1049–1053
</span></p></div>
<div class="cpabstractcardabstract"><p>Meta-learning (ML) has recently become a research hotspot in speaker verification (SV). We introduce two methods to improve the meta-learning training for SV in this paper. For the first method, a backbone embedding network is first jointly trained with the conventional cross entropy loss and prototypical networks (PN) loss. Then, inspired by speaker adaptive training in speech recognition, additional transformation coefficients are trained with only the PN loss. The transformation coefficients are used to modify the original backbone embedding network in the x-vector extraction process. Furthermore, the random erasing (RE) data augmentation technique is applied to all support samples in each episode to construct positive pairs, and a contrastive loss between the augmented and the original support samples is added to the objective in model training. Experiments are carried out on the Speaker in the Wild (SITW) and VOiCES databases. Both of the methods can obtain consistent improvements over existing meta-learning training frameworks. By combining these two methods, we can observe further improvements on these two databases.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Dan Wang|AUTHOR Dan Wang]], [[Yuanjie Dong|AUTHOR Yuanjie Dong]], [[Yaxing Li|AUTHOR Yaxing Li]], [[Yunfei Zi|AUTHOR Yunfei Zi]], [[Zhihui Zhang|AUTHOR Zhihui Zhang]], [[Xiaoqi Li|AUTHOR Xiaoqi Li]], [[Shengwu Xiong|AUTHOR Shengwu Xiong]]
</p><p class="cpabstractcardaffiliationlist">WHUT, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1054–1058
</span></p></div>
<div class="cpabstractcardabstract"><p>Speaker recognition (SR) is inevitably affected by noise in real-life scenarios, resulting in decreased recognition accuracy. In this paper, we introduce a novel regularization method, variable information bottleneck (VIB), in speaker recognition to extract robust speaker embeddings. VIB prompts the neural network to ignore as much speaker-identity irrelevant information as possible. We also propose a more effective network, VovNet with an ultra-lightweight subspace attention module (ULSAM), as a feature extractor. ULSAM infers different attention maps for each feature map subspace, enabling efficient learning of cross-channel information along with multi-scale and multi-frequency feature representation. The experimental results demonstrate that our proposed framework outperforms the ResNet-based baseline by 11.4% in terms of equal error rate (EER). The VIB regularization method gives a further performance boost with an 18.9% EER decrease.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Niko Brümmer|AUTHOR Niko Brümmer]]^^1^^
, [[Luciana Ferrer|AUTHOR Luciana Ferrer]]^^2^^
, [[Albert Swart|AUTHOR Albert Swart]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Phonexia, South Africa; ^^2^^UBA-CONICET ICC, Argentina</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1059–1063
</span></p></div>
<div class="cpabstractcardabstract"><p>Out of a hundred trials, how many errors does your speaker verifier make? For the user this is an important, practical question, but researchers and vendors typically sidestep it and supply instead the conditional error-rates that are given by the ROC/DET curve. We posit that the user’s question is answered by the Bayes error-rate. We present a tutorial to show how to compute the error-rate that results when making Bayes decisions with calibrated likelihood ratios, supplied by the verifier, and an hypothesis prior, supplied by the user. For perfect calibration, the Bayes error-rate is upper bounded by min(EER,P,1-P), where EER is the equal-error-rate and P, 1-P are the prior probabilities of the competing hypotheses. The EER represents the accuracy of the verifier, while min(P,1-P) represents the hardness of the classification problem. We further show how the Bayes error-rate can be computed also for non-perfect calibration and how to generalize from error-rate to expected cost. We offer some criticism of decisions made by direct score thresholding. Finally, we demonstrate by analyzing error-rates of the recently published DCA-PLDA speaker verifier.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Roza Chojnacka|AUTHOR Roza Chojnacka]], [[Jason Pelecanos|AUTHOR Jason Pelecanos]], [[Quan Wang|AUTHOR Quan Wang]], [[Ignacio Lopez Moreno|AUTHOR Ignacio Lopez Moreno]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1064–1068
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we describe //SpeakerStew// — a hybrid system to perform speaker verification on 46 languages. Two core ideas were explored in this system: (1) Pooling training data of different languages together for multilingual generalization and reducing development cycles; (2) A novel triage mechanism between text-dependent and text-independent models to reduce runtime cost and expected latency. To the best of our knowledge, this is the first study of speaker verification systems at the scale of 46 languages. The problem is framed from the perspective of using a smart speaker device with interactions consisting of a wake-up keyword (text-dependent) followed by a speech query (text-independent). Experimental evidence suggests that training on multiple languages can generalize to unseen varieties while maintaining performance on seen varieties. We also found that it can reduce computational requirements for training models by an order of magnitude. Furthermore, during model inference on English data, we observe that leveraging a triage framework can reduce the number of calls to the more computationally expensive text-independent system by 73% (and reduce latency by 59%) while maintaining an EER no worse than the text-independent setup.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zhiming Wang|AUTHOR Zhiming Wang]], [[Furong Xu|AUTHOR Furong Xu]], [[Kaisheng Yao|AUTHOR Kaisheng Yao]], [[Yuan Cheng|AUTHOR Yuan Cheng]], [[Tao Xiong|AUTHOR Tao Xiong]], [[Huijia Zhu|AUTHOR Huijia Zhu]]
</p><p class="cpabstractcardaffiliationlist">Ant, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1069–1073
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents a comprehensive description of the AntVoice system for the first two tracks of far-field speaker verification from single microphone array in FFSVC 2020 [1]. The system is based on neural speaker embeddings from deep neural network-based encoder networks. These encoder networks for acoustic modeling include 2D convolutional residual-like networks that are shown to be effective on the tasks. Specifically, we apply the Squeeze-and-Excitation residual network (SE-ResNet) [2] to model cross-channel inter-dependency information. On short utterances, we observe that SE-ResNet outperforms alternative methods in the text-dependent verification task. The system adopts a joint loss function that combines the additive cosine margin softmax loss [3] with the equidistant triplet-based loss[4]. This loss function results in performance gains with more discriminative speaker embeddings from enhanced intra-class similarity and increased inter-class variances. We also apply speech enhancement and data augmentation to improve data quality and diversity. Even without using model ensembles, the proposed system significantly outperforms the baselines [1] in both tracks of the speaker verification challenge. With fusion of several encoder neural networks, this system is able to achieve further performance improvements consistently. In the end, the AntVoice system achieves the third place in the text-dependent verification task.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jianchen Li|AUTHOR Jianchen Li]], [[Jiqing Han|AUTHOR Jiqing Han]], [[Hongwei Song|AUTHOR Hongwei Song]]
</p><p class="cpabstractcardaffiliationlist">Harbin Institute of Technology, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1074–1078
</span></p></div>
<div class="cpabstractcardabstract"><p>Noise robustness is a challenge for speaker recognition systems. To solve this problem, one of the most common approaches is to joint-train a model by using both clean and noisy utterances. However, the gradients calculated on noisy utterances generally contain speaker-irrelevant noisy components, resulting in overfitting for the seen noisy data and poor generalization for the unseen noisy environments. To alleviate this problem, we propose the gradient regularization method to reduce the speaker-irrelevant noisy components by aligning the gradients among the noisy utterances and their clean counterparts. Specifically, the gradients on noisy utterances are forced to follow the directions of the gradients calculated on their clean counterparts, and the gradients across different types of noisy utterances are also aligned to point in similar directions. Since the noise-related components of the gradients can be reduced by the above alignment, the speaker model can be prevented from encoding irrelevant noisy information. To achieve the gradient regularization goals, a novel sequential inner training strategy is also proposed. Experiments on the VoxCeleb1 dataset indicate that our method achieves the best performance in seen and unseen noisy environments.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Saurabh Kataria|AUTHOR Saurabh Kataria]], [[Jesús Villalba|AUTHOR Jesús Villalba]], [[Piotr Żelasko|AUTHOR Piotr Żelasko]], [[Laureano Moro-Velázquez|AUTHOR Laureano Moro-Velázquez]], [[Najim Dehak|AUTHOR Najim Dehak]]
</p><p class="cpabstractcardaffiliationlist">Johns Hopkins University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1079–1083
</span></p></div>
<div class="cpabstractcardabstract"><p>With the increase in the availability of speech from varied domains, it is imperative to use such out-of-domain data to improve existing speech systems. Domain adaptation is a prominent pre-processing approach for this. We investigate it to adapt microphone speech to the telephone domain. Specifically, we explore CycleGAN-based unpaired translation of microphone data to improve the x-vector/speaker embedding network for Telephony Speaker Verification. We first demonstrate the efficacy of this on real challenging data and then, to improve further, we modify the CycleGAN formulation to make the adaptation //task-specific//. We modify CycleGAN’s identity loss, cycle-consistency loss, and adversarial loss to operate in the //deep feature// space. //Deep features// of a signal are extracted from an auxiliary (speaker embedding) network and, hence, preserves speaker identity. Our 3D convolution-based Deep Feature Discriminators (DFD) show relative improvements of 5–10% in terms of equal error rate. To dive deeper, we study a challenging scenario of pooling (adapted) microphone and telephone data with data augmentations and telephone codecs. Finally, we highlight the sensitivity of CycleGAN hyper-parameters and introduce a parameter called //probability of adaptation//.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yiyu Luo|AUTHOR Yiyu Luo]]^^1^^
, [[Jing Wang|AUTHOR Jing Wang]]^^1^^
, [[Liang Xu|AUTHOR Liang Xu]]^^1^^
, [[Lidong Yang|AUTHOR Lidong Yang]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^BIT, China; ^^2^^IMUST, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1104–1108
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech separation is the task of extracting target speech from noisy mixture. In applications like video telephones or video conferencing, lip movements of the target speaker are accessible, which can be leveraged for speech separation. This paper proposes a time-domain audio-visual speech separation model under multi-talker environments. The model receives audio-visual inputs including noisy mixture and speaker lip embedding, and reconstructs clean speech waveform for the target speaker. Once trained, the model can be flexibly applied to unknown number of total speakers. This paper introduces and investigates the multi-stream gating mechanism and pyramidal convolution in temporal convolutional neural networks for audio-visual speech separation task. Speaker- and noise-independent multi-talker separation experiments are conducted on GRID benchmark dataset. The experimental results demonstrate the proposed method achieves 3.9 dB and 1.0 dB SI-SNRi improvement when compared with audio-only and audio-visual baselines respectively, showing effectiveness of the proposed method.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hiroshi Sato|AUTHOR Hiroshi Sato]], [[Tsubasa Ochiai|AUTHOR Tsubasa Ochiai]], [[Marc Delcroix|AUTHOR Marc Delcroix]], [[Keisuke Kinoshita|AUTHOR Keisuke Kinoshita]], [[Takafumi Moriya|AUTHOR Takafumi Moriya]], [[Naoyuki Kamo|AUTHOR Naoyuki Kamo]]
</p><p class="cpabstractcardaffiliationlist">NTT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1149–1153
</span></p></div>
<div class="cpabstractcardabstract"><p>Although recent advances in deep learning technology improved automatic speech recognition (ASR), it remains difficult to recognize speech when it overlaps other people’s voices. Speech separation or extraction is often used as a front-end to ASR to handle such overlapping speech. However, deep neural network-based speech enhancement can generate ‘processing artifacts’ as a side effect of the enhancement, which degrades ASR performance. For example, it is well known that single-channel noise reduction for non-speech noise (non-overlapping speech) often does not improve ASR. Likewise, the processing artifacts may also be detrimental to ASR in some conditions when processing overlapping speech with a separation/extraction method, although it is usually believed that separation/extraction improves ASR. In order to answer the question ‘Do we always have to separate/extract speech from mixtures?’, we analyze ASR performance on observed and enhanced speech at various noise and interference conditions, and show that speech enhancement degrades ASR under some conditions even for overlapping speech. Based on these findings, we propose a simple switching algorithm between observed and enhanced speech based on the estimated signal-to-interference ratio and signal-to-noise ratio. We demonstrated experimentally that such a simple switching mechanism can improve recognition performance when processing artifacts are detrimental to ASR.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Helin Wang|AUTHOR Helin Wang]]^^1^^
, [[Bo Wu|AUTHOR Bo Wu]]^^2^^
, [[Lianwu Chen|AUTHOR Lianwu Chen]]^^2^^
, [[Meng Yu|AUTHOR Meng Yu]]^^3^^
, [[Jianwei Yu|AUTHOR Jianwei Yu]]^^4^^
, [[Yong Xu|AUTHOR Yong Xu]]^^3^^
, [[Shi-Xiong Zhang|AUTHOR Shi-Xiong Zhang]]^^3^^
, [[Chao Weng|AUTHOR Chao Weng]]^^2^^
, [[Dan Su|AUTHOR Dan Su]]^^2^^
, [[Dong Yu|AUTHOR Dong Yu]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Peking University, China; ^^2^^Tencent, China; ^^3^^Tencent, USA; ^^4^^Tencent, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1109–1113
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we exploit the effective way to leverage contextual information to improve the speech dereverberation performance in real-world reverberant environments. We propose a temporal-contextual attention approach on the deep neural network (DNN) for environment-aware speech dereverberation, which can adaptively attend to the contextual information. More specifically, a FullBand based Temporal Attention approach (FTA) is proposed, which models the correlations between the fullband information of the context frames. In addition, considering the difference between the attenuation of high frequency bands and low frequency bands (high frequency bands attenuate faster than low frequency bands) in the room impulse response (RIR), we also propose a SubBand based Temporal Attention approach (STA). In order to guide the network to be more aware of the reverberant environments, we jointly optimize the dereverberation network and the reverberation time (RT60) estimator in a multi-task manner. Our experimental results indicate that the proposed method outperforms our previously proposed reverberation-time-aware DNN and the learned attention weights are fully physical consistent. We also report a preliminary yet promising dereverberation and recognition experiment on real test data.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jianjun Gu|AUTHOR Jianjun Gu]], [[Longbiao Cheng|AUTHOR Longbiao Cheng]], [[Xingwei Sun|AUTHOR Xingwei Sun]], [[Junfeng Li|AUTHOR Junfeng Li]], [[Yonghong Yan|AUTHOR Yonghong Yan]]
</p><p class="cpabstractcardaffiliationlist">CAS, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1114–1118
</span></p></div>
<div class="cpabstractcardabstract"><p>For real-time acoustic echo cancellation in noisy environments, the classical linear adaptive filters (LAFs) can only remove the linear components of acoustic echo. To further attenuate the non-linear echo components and background noise, this paper proposes a deep learning-based residual echo and noise cancellation (RENC) model, where multiple inputs are utilized and weighted by a feature attention module. More specifically, input features extracted from the far-end reference and the echo estimated by the LAF are scaled with time-frequency attention weights, depending on their correlation with the residual interference in LAF’s output. Moreover, a scale-independent mean square error and perceptual loss function are further suggested for training the RENC model. Experimental results validate the efficacy of the proposed feature attention module and multi-domain loss function, which achieve an 8.4%, 14.9% and 29.5% improvement in perceptual evaluation of speech quality (PESQ), scale-invariant signal-to-distortion ratio (SI-SDR) and echo return loss enhancement (ERLE), respectively.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xiyun Li|AUTHOR Xiyun Li]]^^1^^
, [[Yong Xu|AUTHOR Yong Xu]]^^2^^
, [[Meng Yu|AUTHOR Meng Yu]]^^2^^
, [[Shi-Xiong Zhang|AUTHOR Shi-Xiong Zhang]]^^2^^
, [[Jiaming Xu|AUTHOR Jiaming Xu]]^^1^^
, [[Bo Xu|AUTHOR Bo Xu]]^^1^^
, [[Dong Yu|AUTHOR Dong Yu]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^CAS, China; ^^2^^Tencent, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1119–1123
</span></p></div>
<div class="cpabstractcardabstract"><p>Recently, our proposed recurrent neural network (RNN) based all deep learning minimum variance distortionless response (ADL-MVDR) beamformer method yielded superior performance over the conventional MVDR by replacing the matrix inversion and eigenvalue decomposition with two RNNs. In this work, we present a self-attentive RNN beamformer to further improve our previous RNN-based beamformer by leveraging on the powerful modeling capability of self-attention. Temporal-spatial self-attention module is proposed to better learn the beamforming weights from the speech and noise spatial covariance matrices. The temporal self-attention module could help RNN to learn global statistics of covariance matrices. The spatial self-attention module is designed to attend on the cross-channel correlation in the covariance matrices. Furthermore, a multi-channel input with multi-speaker directional features and multi-speaker speech separation outputs (MIMO) model is developed to improve the inference efficiency. The evaluations demonstrate that our proposed MIMO self-attentive RNN beamformer improves both the automatic speech recognition (ASR) accuracy and the perceptual estimation of speech quality (PESQ) against prior arts.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ritwik Giri|AUTHOR Ritwik Giri]]^^1^^
, [[Shrikant Venkataramani|AUTHOR Shrikant Venkataramani]]^^1^^
, [[Jean-Marc Valin|AUTHOR Jean-Marc Valin]]^^2^^
, [[Umut Isik|AUTHOR Umut Isik]]^^1^^
, [[Arvindh Krishnaswamy|AUTHOR Arvindh Krishnaswamy]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Amazon, USA; ^^2^^Amazon, Canada</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1124–1128
</span></p></div>
<div class="cpabstractcardabstract"><p>The presence of multiple talkers in the surrounding environment poses a difficult challenge for real-time speech communication systems considering the constraints on network size and complexity. In this paper, we present Personalized PercepNet, a real-time speech enhancement model that separates a target speaker from a noisy multi-talker mixture without compromising on complexity of the recently proposed PercepNet. To enable speaker-dependent speech enhancement, we first show how we can train a perceptually motivated speaker embedder network to produce a representative embedding vector for the given speaker. Personalized PercepNet uses the target speaker embedding as additional information to pick out and enhance only the target speaker while suppressing all other competing sounds. Our experiments show that the proposed model significantly outperforms PercepNet and other baselines, both in terms of objective speech enhancement metrics and human opinion scores.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yochai Yemini|AUTHOR Yochai Yemini]]^^1^^
, [[Ethan Fetaya|AUTHOR Ethan Fetaya]]^^1^^
, [[Haggai Maron|AUTHOR Haggai Maron]]^^2^^
, [[Sharon Gannot|AUTHOR Sharon Gannot]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Bar-Ilan University, Israel; ^^2^^NVIDIA, Israel</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1129–1133
<a href="./IS2021/MEDIA/0889" class="externallinkbutton" target="_blank">{{$:/causal/ZIP Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>Neural networks (NNs) have been widely applied in speech processing tasks, and, in particular, those employing microphone arrays. Nevertheless, most existing NN architectures can only deal with fixed and position-specific microphone arrays. In this paper, we present an NN architecture that can cope with microphone arrays whose number and positions of the microphones are unknown, and demonstrate its applicability in the speech dereverberation task. To this end, our approach harnesses recent advances in deep learning on set-structured data to design an architecture that enhances the reverberant log-spectrum. We use noisy and noiseless versions of a simulated reverberant dataset to test the proposed architecture. Our experiments on the noisy data show that the proposed scene-agnostic setup outperforms a powerful scene-aware framework, sometimes even with fewer microphones. With the noiseless dataset we show that, in most cases, our method outperforms the position-aware network as well as the state-of-the-art weighted linear prediction error (WPE) algorithm.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Keitaro Tanaka|AUTHOR Keitaro Tanaka]]^^1^^
, [[Ryosuke Sawata|AUTHOR Ryosuke Sawata]]^^2^^
, [[Shusuke Takahashi|AUTHOR Shusuke Takahashi]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Waseda University, Japan; ^^2^^Sony, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1134–1138
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents a new deep clustering (DC) method called manifold-aware DC (M-DC) that can enhance hyperspace utilization more effectively than the original DC. The original DC has a limitation in that a pair of two speakers has to be embedded having an orthogonal relationship due to its use of the one-hot vector-based loss function, while our method derives a unique loss function aimed at maximizing the target angle in the hyperspace based on the nature of a regular simplex. Our proposed loss imposes a higher penalty than the original DC when the speaker is assigned incorrectly. The change from DC to M-DC can be easily achieved by rewriting just one term in the loss function of DC, without any other modifications to the network architecture or model parameters. As such, our method has high practicability because it does not affect the original inference part. The experimental results show that the proposed method improves the performances of the original DC and its expansion method.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hao Zhang|AUTHOR Hao Zhang]], [[DeLiang Wang|AUTHOR DeLiang Wang]]
</p><p class="cpabstractcardaffiliationlist">Ohio State University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1139–1143
</span></p></div>
<div class="cpabstractcardabstract"><p>Building on deep learning based acoustic echo cancellation (AEC) in the single-loudspeaker (single-channel) and single-microphone setup, this paper investigates multi-channel (multi-loudspeaker) AEC (MCAEC) and multi-microphone AEC (MMAEC). A convolutional recurrent network (CRN) is trained to predict the near-end speech from microphone signals with far-end signals used as additional information. We find that the deep learning based MCAEC approach avoids the non-uniqueness problem in traditional MCAEC algorithms. For the AEC setup with multiple microphones, rather than employing AEC for each microphone, we propose to train a single network to achieve echo removal for all microphones. Combining deep learning based AEC with supervised beamforming further improves the system performance. Experimental results show the effectiveness of deep learning approach to MCAEC and MMAEC. Furthermore, deep learning based methods are capable of removing echo and noise simultaneously and work well in the presence of nonlinear distortions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yueyue Na|AUTHOR Yueyue Na]], [[Ziteng Wang|AUTHOR Ziteng Wang]], [[Zhang Liu|AUTHOR Zhang Liu]], [[Biao Tian|AUTHOR Biao Tian]], [[Qiang Fu|AUTHOR Qiang Fu]]
</p><p class="cpabstractcardaffiliationlist">Alibaba, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1144–1148
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents a joint source separation algorithm that simultaneously reduces acoustic echo, reverberation and interfering sources. Target speeches are separated from the mixture by maximizing independence with respect to the other sources. It is shown that the separation process can be decomposed into cascading sub-processes that separately relate to acoustic echo cancellation, speech dereverberation and source separation, all of which are solved using the auxiliary function based independent component/vector analysis techniques, and their solving orders are exchangeable. The cascaded solution not only leads to lower computational complexity but also better separation performance than the vanilla joint algorithm.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sathvik Udupa|AUTHOR Sathvik Udupa]]^^1^^
, [[Anwesha Roy|AUTHOR Anwesha Roy]]^^1^^
, [[Abhayjeet Singh|AUTHOR Abhayjeet Singh]]^^1^^
, [[Aravind Illa|AUTHOR Aravind Illa]]^^2^^
, [[Prasanta Kumar Ghosh|AUTHOR Prasanta Kumar Ghosh]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Indian Institute of Science, India; ^^2^^Amazon, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1154–1158
</span></p></div>
<div class="cpabstractcardabstract"><p>We estimate articulatory movements in speech production from different modalities - acoustics and phonemes. Acoustic-to-articulatory inversion (AAI) is a sequence-to-sequence task. On the other hand, phoneme to articulatory (PTA) motion estimation faces a key challenge in reliably aligning the text and the articulatory movements. To address this challenge, we explore the use of a transformer architecture — FastSpeech, with explicit duration modelling to learn hard alignments between the phonemes and articulatory movements. We also train a transformer model on AAI. We use correlation coefficient (CC) and root mean squared error (rMSE) to assess the estimation performance in comparison to existing methods on both tasks. We observe 154%, 11.8% & 4.8% relative improvement in CC with subject-dependent, pooled and fine-tuning strategies, respectively, for PTA estimation. Additionally, on the AAI task, we obtain 1.5%, 3% and 3.1% relative gain in CC on the same setups compared to the state-of-the-art baseline. We further present the computational benefits of having transformer architecture as representation blocks.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Cong Zhang|AUTHOR Cong Zhang]]^^1^^
, [[Jian Zhu|AUTHOR Jian Zhu]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Radboud Universiteit, The Netherlands; ^^2^^University of Michigan, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1199–1203
</span></p></div>
<div class="cpabstractcardabstract"><p>Generating synthesised singing voice with models trained on speech data has many advantages due to the models’ flexibility and controllability. However, since the information about the temporal relationship between segments and beats are lacking in speech training data, the synthesised singing may sound off-beat at times. Therefore, the availability of the information on the temporal relationship between speech segments and music beats is crucial. The current study investigated the segment-beat synchronisation in singing data, with hypotheses formed based on the linguistics theories of P-centre and sonority hierarchy. A Mandarin corpus and an English corpus of professional singing data were manually annotated and analysed. The results showed that the presence of musical beats was more dependent on segment duration than sonority. However, the sonority hierarchy and the P-centre theory were highly related to the location of beats. Mandarin and English demonstrated cross-linguistic variations despite exhibiting common patterns.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jacob Peplinski|AUTHOR Jacob Peplinski]]^^1^^
, [[Joel Shor|AUTHOR Joel Shor]]^^2^^
, [[Sachin Joglekar|AUTHOR Sachin Joglekar]]^^3^^
, [[Jake Garrison|AUTHOR Jake Garrison]]^^3^^
, [[Shwetak Patel|AUTHOR Shwetak Patel]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Washington, USA; ^^2^^Google, Japan; ^^3^^Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1204–1208
</span></p></div>
<div class="cpabstractcardabstract"><p>Learned speech representations can drastically improve performance on tasks with limited labeled data. However, due to their size and complexity, learned representations have limited utility in mobile settings where run-time performance can be a significant bottleneck. In this work, we propose a class of lightweight non-semantic speech embedding models that run efficiently on mobile devices based on the recently proposed TRILL speech embedding. We combine novel architectural modifications with existing speed-up techniques to create embedding models that are fast enough to run in real-time on a mobile device and exhibit minimal performance degradation on a benchmark of non-semantic speech tasks. One such model (FRILL) is 32× faster on a Pixel 1 smartphone and 40% the size of TRILL, with an average decrease in accuracy of only 2%. To our knowledge, FRILL is the highest-quality non-semantic embedding designed for use on mobile devices. Furthermore, we demonstrate that these representations are useful for mobile health tasks such as non-speech human sounds detection and face-masked speech detection. Our models and code are publicly available.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hiroki Mori|AUTHOR Hiroki Mori]]
</p><p class="cpabstractcardaffiliationlist">Utsunomiya University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1209–1213
</span></p></div>
<div class="cpabstractcardabstract"><p>In everyday conversation, speakers’ utterances often overlap. For conversation corpora that are recorded in diverse environments, results of pitch extraction in the overlapping parts may be incorrect. The goal of this study is to establish the technique of separating each speaker’s pitch contour from an overlapping speech in conversation. The proposed method estimates statistically most plausible f,,o,, contour from the spectrogram of overlapping speech, along with the information of the speaker to extract. Visual inspection of the separation results showed that the proposed model was able to extract accurate f,,o,, contours from overlapping speeches of specified speakers. By applying this method, voicing decision errors and gross pitch errors were reduced by 63% compared to simple pitch extraction for overlapping speech.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Anurag Kumar|AUTHOR Anurag Kumar]], [[Yun Wang|AUTHOR Yun Wang]], [[Vamsi Krishna Ithapu|AUTHOR Vamsi Krishna Ithapu]], [[Christian Fuegen|AUTHOR Christian Fuegen]]
</p><p class="cpabstractcardaffiliationlist">Facebook, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1214–1218
</span></p></div>
<div class="cpabstractcardabstract"><p>Transfer learning is critical for efficient information transfer across multiple related learning problems. A simple, yet effective transfer learning approach utilizes deep neural networks trained on a large-scale task for feature extraction. Such representations are then used to learn related downstream tasks. In this paper, we investigate transfer learning capacity of audio representations obtained from neural networks trained on a large-scale sound event detection dataset. We build and evaluate these representations across a wide range of other audio tasks, via a simple linear classifier transfer mechanism. We show that such simple linear transfer is already powerful enough to achieve high performance on the downstream tasks. We also provide insights into the attributes of sound event representations that enable such efficient information transfer.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Dongchao Yang|AUTHOR Dongchao Yang]], [[Helin Wang|AUTHOR Helin Wang]], [[Yuexian Zou|AUTHOR Yuexian Zou]]
</p><p class="cpabstractcardaffiliationlist">Peking University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1159–1163
</span></p></div>
<div class="cpabstractcardabstract"><p>It is well known that the mismatch between training (source) and test (target) data distribution will significantly decrease the performance of acoustic scene classification (ASC) systems. To address this issue, domain adaptation (DA) is one solution and many unsupervised DA methods have been proposed. These methods focus on a scenario of single source domain to single target domain. However, we will face such problem that test data comes from multiple target domains. This problem can be addressed by producing one model per target domain, but this solution is too costly. In this paper, we propose a novel unsupervised multi-target domain adaption (MTDA) method for ASC, which can adapt to multiple target domains simultaneously and make use of the underlying relation among multiple domains. Specifically, our approach combines traditional adversarial adaptation with two novel discriminator tasks that learns a common subspace shared by all domains. Furthermore, we propose to divide the target domain into the easy-to-adapt and hard-to-adapt domain, which enables the system to pay more attention to hard-to-adapt domain in training. The experimental results on the DCASE 2020 Task 1-A dataset and the DCASE 2019 Task 1-B dataset show that our proposed method significantly outperforms the previous unsupervised DA methods.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Alfredo Esquivel Jaramillo|AUTHOR Alfredo Esquivel Jaramillo]], [[Jesper Kjær Nielsen|AUTHOR Jesper Kjær Nielsen]], [[Mads Græsbøll Christensen|AUTHOR Mads Græsbøll Christensen]]
</p><p class="cpabstractcardaffiliationlist">Aalborg University, Denmark</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1164–1168
</span></p></div>
<div class="cpabstractcardabstract"><p>In a hybrid speech model, both voiced and unvoiced components can coexist in a segment. Often, the voiced speech is regarded as the deterministic component, and the unvoiced speech and additive noise are the stochastic components. Typically, the speech signal is considered stationary within fixed segments of 20–40 ms, but the degree of stationarity varies over time. For decomposing noisy speech into its voiced and unvoiced components, a fixed segmentation may be too crude, and we here propose to adapt the segment length according to the signal local characteristics. The segmentation relies on parameter estimates of a hybrid speech model and the maximum a posteriori (MAP) and log-likelihood criteria as rules for model selection among the possible segment lengths, for voiced and unvoiced speech, respectively. Given the optimal segmentation markers and the estimated statistics, both components are estimated using linear filtering. A codebook-based approach differentiates between unvoiced speech and noise. A better extraction of the components is possible by taking into account the adaptive segmentation, compared to a fixed one. Also, a lower distortion for voiced speech and higher segSNR for both components is possible, as compared to other decomposition methods.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jian Luo|AUTHOR Jian Luo]], [[Jianzong Wang|AUTHOR Jianzong Wang]], [[Ning Cheng|AUTHOR Ning Cheng]], [[Jing Xiao|AUTHOR Jing Xiao]]
</p><p class="cpabstractcardaffiliationlist">Ping An Technology, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1169–1173
</span></p></div>
<div class="cpabstractcardabstract"><p>Predicting the altered acoustic frames is an effective way of self-supervised learning for speech representation. However, it is challenging to prevent the pretrained model from overfitting. In this paper, we proposed to introduce two dropout regularization methods into the pretraining of transformer encoder: (1) attention dropout, (2) layer dropout. Both of the two dropout methods encourage the model to utilize global speech information, and avoid just copying local spectrum features when reconstructing the masked frames. We evaluated the proposed methods on phoneme classification and speaker recognition tasks. The experiments demonstrate that our dropout approaches achieve competitive results, and improve the performance of classification accuracy on downstream tasks.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Chiranjeevi Yarra|AUTHOR Chiranjeevi Yarra]]^^1^^
, [[Prasanta Kumar Ghosh|AUTHOR Prasanta Kumar Ghosh]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^IIIT Hyderabad, India; ^^2^^Indian Institute of Science, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1174–1178
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose a pitch stylization technique in the presence of pitch halving and doubling errors. The technique uses an optimization criterion based on a minimum mean absolute error to make the stylization robust to such pitch estimation errors, particularly under noisy conditions. We obtain segments for the stylization automatically using dynamic programming. Experiments are performed at the frame level and the syllable level. At the frame level, the closeness of stylized pitch is analyzed with the ground truth pitch, which is obtained using a laryngograph signal, considering root mean square error (RMSE) measure. At the syllable level, the effectiveness of perceptual relevant embeddings in the stylized pitch is analyzed by estimating syllabic tones and comparing those with manual tone markings using the Levenshtein distance measure. The proposed approach performs better than a minimum mean squared error criterion based pitch stylization scheme at the frame level and a knowledge-based tone estimation scheme at the syllable level under clean and 20dB, 10dB and 0dB SNR conditions with five noises and four pitch estimation techniques. Among all the combinations of SNR, noise and pitch estimation techniques, the highest absolute RMSE and mean distance improvements are found to be 6.49Hz and 0.23, respectively.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yu-Lin Huang|AUTHOR Yu-Lin Huang]], [[Bo-Hao Su|AUTHOR Bo-Hao Su]], [[Y.-W. Peter Hong|AUTHOR Y.-W. Peter Hong]], [[Chi-Chun Lee|AUTHOR Chi-Chun Lee]]
</p><p class="cpabstractcardaffiliationlist">National Tsing Hua University, Taiwan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1179–1183
</span></p></div>
<div class="cpabstractcardabstract"><p>Advancement in speech technology has brought convenience to our life. However, the concern is on the rise as speech signal contains multiple personal attributes, which would lead to either sensitive information leakage or bias toward decision. In this work, we propose an attribute-aligned learning strategy to derive speech representation that can flexibly address these issues by attribute-selection mechanism. Specifically, we propose a layered-representation variational autoencoder (LR-VAE), which factorizes speech representation into attribute-sensitive nodes, to derive an identity-free representation for speech emotion recognition (SER), and an emotionless representation for speaker verification (SV). Our proposed method achieves competitive performances on identity-free SER and a better performance on emotionless SV, comparing to the current state-of-the-art method of using adversarial learning applied on a large emotion corpora, the MSP-Podcast. Also, our proposed learning strategy reduces the model and training process needed to achieve multiple privacy-preserving tasks.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Abdolreza Sabzi Shahrebabaki|AUTHOR Abdolreza Sabzi Shahrebabaki]]^^1^^
, [[Sabato Marco Siniscalchi|AUTHOR Sabato Marco Siniscalchi]]^^2^^
, [[Torbjørn Svendsen|AUTHOR Torbjørn Svendsen]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^NTNU, Norway; ^^2^^NTNU, Norway</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1184–1188
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose a novel sequence-to-sequence acoustic-to-articulatory inversion (AAI) neural architecture in the temporal waveform domain. In contrast to traditional AAI approaches that leverage hand-crafted short-time spectral features obtained from the windowed signal, such as LSFs, or MFCCs, our solution directly process the input speech signal in the time domain, avoiding any intermediate signal transformation, using a cascade of 1D convolutional filters in a deep model. The time-rate synchronization between raw speech signal and the articulatory signal is obtained through a decimation process that acts upon each convolution step. Decimation in time thus avoids degradation phenomena observed in the conventional AAI procedure, caused by the need of framing the speech signal to produce a feature sequence that perfectly matches the articulatory data rate. Experimental evidence on the “Haskins Production Rate Comparison” corpus demonstrates the effectiveness of the proposed solution, which outperforms a conventional state-of-the-art AAI system leveraging MFCCs with an 20% relative improvement in terms of Pearson correlation coefficient (PCC) in mismatched speaking rate conditions. Finally, the proposed approach attains the same accuracy as the conventional AAI solution in the typical matched speaking rate condition.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jason Lilley|AUTHOR Jason Lilley]], [[H. Timothy Bunnell|AUTHOR H. Timothy Bunnell]]
</p><p class="cpabstractcardaffiliationlist">Nemours, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1189–1193
</span></p></div>
<div class="cpabstractcardabstract"><p>Phonetic analysis often requires reliable estimation of formants, but estimates provided by popular programs can be unreliable. Recently, Dissen et al. [1] described DNN-based formant trackers that produced more accurate frequency estimates than several others, but require manually-corrected formant data for training. Here we describe a novel unsupervised training method for corpus-based DNN formant parameter estimation and tracking with accuracy similar to [1]. Frame-wise spectral envelopes serve as the input. The output is estimates of the frequencies and bandwidths plus amplitude adjustments for a prespecified number of poles and zeros, hereafter referred to as “formant parameters.” A custom loss measure based on the difference between the input envelope and one generated from the estimated formant parameters is calculated and back-propagated through the network to establish the gradients with respect to the formant parameters. The approach is similar to that of autoencoders, in that the model is trained to reproduce its input in order to discover latent features, in this case, the formant parameters. Our results demonstrate that a reliable formant tracker can be constructed for a speech corpus without the need for hand-corrected training data.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shu-wen Yang|AUTHOR Shu-wen Yang]]^^1^^
, [[Po-Han Chi|AUTHOR Po-Han Chi]]^^1^^
, [[Yung-Sung Chuang|AUTHOR Yung-Sung Chuang]]^^1^^
, [[Cheng-I Jeff Lai|AUTHOR Cheng-I Jeff Lai]]^^2^^
, [[Kushal Lakhotia|AUTHOR Kushal Lakhotia]]^^3^^
, [[Yist Y. Lin|AUTHOR Yist Y. Lin]]^^1^^
, [[Andy T. Liu|AUTHOR Andy T. Liu]]^^1^^
, [[Jiatong Shi|AUTHOR Jiatong Shi]]^^4^^
, [[Xuankai Chang|AUTHOR Xuankai Chang]]^^5^^
, [[Guan-Ting Lin|AUTHOR Guan-Ting Lin]]^^1^^
, [[Tzu-Hsien Huang|AUTHOR Tzu-Hsien Huang]]^^1^^
, [[Wei-Cheng Tseng|AUTHOR Wei-Cheng Tseng]]^^1^^
, [[Ko-tik Lee|AUTHOR Ko-tik Lee]]^^1^^
, [[Da-Rong Liu|AUTHOR Da-Rong Liu]]^^1^^
, [[Zili Huang|AUTHOR Zili Huang]]^^4^^
, [[Shuyan Dong|AUTHOR Shuyan Dong]]^^6^^
, [[Shang-Wen Li|AUTHOR Shang-Wen Li]]^^6^^
, [[Shinji Watanabe|AUTHOR Shinji Watanabe]]^^5^^
, [[Abdelrahman Mohamed|AUTHOR Abdelrahman Mohamed]]^^3^^
, [[Hung-yi Lee|AUTHOR Hung-yi Lee]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^National Taiwan University, Taiwan; ^^2^^MIT, USA; ^^3^^Facebook, USA; ^^4^^Johns Hopkins University, USA; ^^5^^Carnegie Mellon University, USA; ^^6^^Amazon, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1194–1198
</span></p></div>
<div class="cpabstractcardabstract"><p>Self-supervised learning (SSL) has proven vital for advancing research in natural language processing (NLP) and computer vision (CV). The paradigm pretrains a //shared model// on large volumes of unlabeled data and achieves state-of-the-art (SOTA) //for various tasks with minimal adaptation//. However, the speech processing community lacks a similar setup to systematically explore the paradigm. To bridge this gap, we introduce Speech processing Universal PERformance Benchmark (SUPERB). SUPERB is a leaderboard to benchmark the performance of a shared model across a wide range of speech processing tasks with minimal architecture changes and labeled data. Among multiple usages of the shared model, we especially focus on extracting the representation learned from SSL for its preferable re-usability. We present a simple framework to solve SUPERB tasks by learning task-specialized //lightweight// prediction heads on top of the //frozen shared// model. Our results demonstrate that the framework is promising as SSL representations show competitive generalizability and accessibility across SUPERB tasks. We release SUPERB as a challenge with a leaderboard and a benchmark toolkit to fuel the research in representation learning and general speech processing.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Baolin Peng|AUTHOR Baolin Peng]], [[Chenguang Zhu|AUTHOR Chenguang Zhu]], [[Michael Zeng|AUTHOR Michael Zeng]], [[Jianfeng Gao|AUTHOR Jianfeng Gao]]
</p><p class="cpabstractcardaffiliationlist">Microsoft, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1219–1223
</span></p></div>
<div class="cpabstractcardabstract"><p>The training of spoken language understanding (SLU) models often faces the problem of data scarcity. In this paper, we put forward a data augmentation method using pretrained language models to boost the variability and accuracy of generated utterances. Furthermore, we investigate and propose solutions to two previously overlooked semi-supervised learning scenarios of data scarcity in SLU: i) //Rich-in-Ontology//: ontology information with numerous valid dialogue acts is given; ii) //Rich-in-Utterance//: a large number of unlabelled utterances are available. Empirical results show that our method can produce synthetic training data that boosts the performance of language understanding models in various scenarios.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Siddhant Arora|AUTHOR Siddhant Arora]], [[Alissa Ostapenko|AUTHOR Alissa Ostapenko]], [[Vijay Viswanathan|AUTHOR Vijay Viswanathan]], [[Siddharth Dalmia|AUTHOR Siddharth Dalmia]], [[Florian Metze|AUTHOR Florian Metze]], [[Shinji Watanabe|AUTHOR Shinji Watanabe]], [[Alan W. Black|AUTHOR Alan W. Black]]
</p><p class="cpabstractcardaffiliationlist">Carnegie Mellon University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1264–1268
</span></p></div>
<div class="cpabstractcardabstract"><p>Decomposable tasks are complex and comprise of a hierarchy of sub-tasks. Spoken intent prediction, for example, combines automatic speech recognition and natural language understanding. Existing benchmarks, however, typically hold out examples for only the surface-level sub-task. As a result, models with similar performance on these benchmarks may have unobserved performance differences on the other sub-tasks. To allow insightful comparisons between competitive end-to-end architectures, we propose a framework to construct robust test sets using coordinate ascent over sub-task specific utility functions. Given a dataset for a decomposable task, our method optimally creates a test set for each sub-task to individually assess sub-components of the end-to-end model. Using spoken language understanding as a case study, we generate new splits for the Fluent Speech Commands and Snips SmartLights datasets. Each split has two test sets: one with held-out utterances assessing natural language understanding abilities, and one with held-out speakers to test speech processing skills. Our splits identify performance gaps up to 10% between end-to-end systems that were within 1% of each other on the original test sets. These performance gaps allow more realistic and actionable comparisons between different architectures, driving future model development. We release our splits and tools for the community.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Martin Radfar|AUTHOR Martin Radfar]], [[Athanasios Mouchtaris|AUTHOR Athanasios Mouchtaris]], [[Siegfried Kunzmann|AUTHOR Siegfried Kunzmann]], [[Ariya Rastrow|AUTHOR Ariya Rastrow]]
</p><p class="cpabstractcardaffiliationlist">Amazon, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1224–1228
</span></p></div>
<div class="cpabstractcardabstract"><p>Spoken language understanding (SLU) systems translate voice input commands to semantics which are encoded as an intent and pairs of slot tags and values. Most current SLU systems deploy a cascade of two neural models where the first one maps the input audio to a transcript (ASR) and the second predicts the intent and slots from the transcript (NLU). In this paper, we introduce FANS, a new end-to-end SLU model that fuses an ASR audio encoder to a multi-task NLU decoder to infer the intent, slot tags, and slot values directly from a given input audio, obviating the need for transcription. FANS consists of a shared audio encoder and three decoders, two of which are seq-to-seq decoders that predict non null slot tags and slot values in parallel and in an auto-regressive manner. FANS neural encoder and decoders architectures are flexible which allows us to leverage different combinations of LSTM, self-attention, and attenders. Our experiments show compared to the state-of-the-art end-to-end SLU models, FANS reduces ICER and IRER errors relatively by 30% and 7%, respectively, when tested on an in-house SLU dataset and by 0.86% and 2% absolute when tested on a public SLU dataset.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yiran Cao|AUTHOR Yiran Cao]]^^1^^
, [[Nihal Potdar|AUTHOR Nihal Potdar]]^^1^^
, [[Anderson R. Avila|AUTHOR Anderson R. Avila]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Waterloo, Canada; ^^2^^Huawei Technologies, Canada</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1229–1233
</span></p></div>
<div class="cpabstractcardabstract"><p>Human-computer interaction (HCI) is significantly impacted by delayed responses from a spoken dialogue system. Hence, end-to-end (e2e) spoken language understanding (SLU) solutions have recently been proposed to decrease latency. Such approaches allow for the extraction of semantic information directly from the speech signal, thus bypassing the need for a transcript from an automatic speech recognition (ASR) system. In this paper, we propose a compact e2e SLU architecture for streaming scenarios, where chunks of the speech signal are processed continuously to predict intent and slot values. Our model is based on a 3D convolutional neural network (3D-CNN) and a unidirectional long short-term memory (LSTM). We compare the performance of two alignment-free losses: the connectionist temporal classification (CTC) method and its adapted version, namely connectionist temporal localization (CTL). The latter performs not only the classification but also localization of sequential audio events. The proposed solution is evaluated on the Fluent Speech Command dataset and results show our model ability to process incoming speech signal, reaching accuracy as high as 98.97% for CTC and 98.78% for CTL on single-label classification, and as high as 95.69% for CTC and 95.28% for CTL on two-label prediction.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Deepak Muralidharan|AUTHOR Deepak Muralidharan]]^^1^^
, [[Joel Ruben Antony Moniz|AUTHOR Joel Ruben Antony Moniz]]^^1^^
, [[Weicheng Zhang|AUTHOR Weicheng Zhang]]^^1^^
, [[Stephen Pulman|AUTHOR Stephen Pulman]]^^2^^
, [[Lin Li|AUTHOR Lin Li]]^^1^^
, [[Megan Barnes|AUTHOR Megan Barnes]]^^3^^
, [[Jingjing Pan|AUTHOR Jingjing Pan]]^^1^^
, [[Jason Williams|AUTHOR Jason Williams]]^^1^^
, [[Alex Acero|AUTHOR Alex Acero]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Apple, USA; ^^2^^Apple, UK; ^^3^^University of Washington, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1234–1238
</span></p></div>
<div class="cpabstractcardabstract"><p>Named entity recognition (NER) is usually developed and tested on text from well-written sources. However, in intelligent voice assistants, where NER is an important component, input to NER may be noisy because of user or speech recognition error. In applications, entity labels may change frequently, and non-textual properties like topicality or popularity may be needed to choose among alternatives.
We describe a NER system intended to address these problems. We test and train this system on a proprietary user-derived dataset. We compare with a baseline text-only NER system; the baseline enhanced with external gazetteers; and the baseline enhanced with the search and indirect labelling techniques we describe below. The final configuration gives around 6% reduction in NER error rate. We also show that this technique improves related tasks, such as semantic parsing, with an improvement of up to 5% in error rate.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ting-Wei Wu|AUTHOR Ting-Wei Wu]], [[Ruolin Su|AUTHOR Ruolin Su]], [[Biing-Hwang Juang|AUTHOR Biing-Hwang Juang]]
</p><p class="cpabstractcardaffiliationlist">Georgia Tech, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1239–1243
<a href="./IS2021/MEDIA/0095" class="externallinkbutton" target="_blank">{{$:/causal/ZIP Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>The success of interactive dialog systems is usually associated with the quality of the spoken language understanding (SLU) task, which mainly identifies the corresponding dialog acts and slot values in each turn. By treating utterances in isolation, most SLU systems often overlook the semantic context in which a dialog act is expected. The act dependency between turns is nontrivial and yet critical to the identification of the correct semantic representations. Previous works with limited context awareness have exposed the inadequacy of dealing with complexity in multiproned user intents, which are subject to spontaneous change during turn transitions. In this work, we propose to enhance SLU in multi-turn dialogs, employing a context-aware hierarchical BERT fusion Network (CaBERT-SLU) to not only discern context information within a dialog but also jointly identify multiple dialog acts and slots in each utterance. Experimental results show that our approach reaches new state-of-the-art (SOTA) performances in two complicated multi-turn dialogue datasets with considerable improvements compared with previous methods, which only consider single utterances for multiple intents and slot filling.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Qian Chen|AUTHOR Qian Chen]], [[Wen Wang|AUTHOR Wen Wang]], [[Qinglin Zhang|AUTHOR Qinglin Zhang]]
</p><p class="cpabstractcardaffiliationlist">Alibaba, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1244–1248
</span></p></div>
<div class="cpabstractcardabstract"><p>In the traditional cascading architecture for spoken language understanding (SLU), it has been observed that automatic speech recognition errors could be detrimental to the performance of natural language understanding. End-to-end (E2E) SLU models have been proposed to directly map speech input to desired semantic frame with a single model, hence mitigating ASR error propagation. Recently, pre-training technologies have been explored for these E2E models. In this paper, we propose a novel joint textual-phonetic pre-training approach for learning spoken language representations, aiming at exploring the full potentials of phonetic information to improve SLU robustness to ASR errors. We explore phoneme labels as high-level speech features, and design and compare pre-training tasks based on conditional masked language model objectives and inter-sentence relation objectives. We also investigate the efficacy of combining textual and phonetic information during fine-tuning. Experimental results on spoken language understanding benchmarks, Fluent Speech Commands and SNIPS, show that the proposed approach significantly outperforms strong baseline models and improves robustness of spoken language understanding to ASR errors.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Quynh Do|AUTHOR Quynh Do]], [[Judith Gaspers|AUTHOR Judith Gaspers]], [[Daniil Sorokin|AUTHOR Daniil Sorokin]], [[Patrick Lehnen|AUTHOR Patrick Lehnen]]
</p><p class="cpabstractcardaffiliationlist">Amazon, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1249–1253
</span></p></div>
<div class="cpabstractcardabstract"><p>In deployed real-world spoken language understanding (SLU) applications, data continuously flows into the system. This leads to distributional differences between training and application data that can deteriorate model performance. While regularly retraining the deployed model with new data helps mitigating this problem, it implies significant computational and human costs. In this paper, we develop a method, which can help guiding decisions on whether a model is safe to keep in production without notable performance loss or needs to be retrained. Towards this goal, we build a performance drop regression model for an SLU model that was trained offline to detect a potential model drift in the production phase. We present a wide range of experiments on multiple real-world datasets, indicating that our method is useful for guiding decisions in the SLU model development cycle and to reduce costs for model retraining.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jatin Ganhotra|AUTHOR Jatin Ganhotra]], [[Samuel Thomas|AUTHOR Samuel Thomas]], [[Hong-Kwang J. Kuo|AUTHOR Hong-Kwang J. Kuo]], [[Sachindra Joshi|AUTHOR Sachindra Joshi]], [[George Saon|AUTHOR George Saon]], [[Zoltán Tüske|AUTHOR Zoltán Tüske]], [[Brian Kingsbury|AUTHOR Brian Kingsbury]]
</p><p class="cpabstractcardaffiliationlist">IBM, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1254–1258
</span></p></div>
<div class="cpabstractcardabstract"><p>End-to-end spoken language understanding (SLU) systems that process human-human or human-computer interactions are often context independent and process each turn of a conversation independently. Spoken conversations on the other hand, are very much context dependent, and dialog history contains useful information that can improve the processing of each conversational turn. In this paper, we investigate the importance of dialog history and how it can be effectively integrated into end-to-end SLU systems. While processing a spoken utterance, our proposed RNN transducer (RNN-T) based SLU model has access to its dialog history in the form of decoded transcripts and SLU labels of previous turns. We encode the dialog history as BERT embeddings, and use them as an additional input to the SLU model along with the speech features for the current utterance. We evaluate our approach on a recently released spoken dialog data set, the HARPERVALLEYBANK corpus. We observe significant improvements: 8% for dialog action and 30% for caller intent recognition tasks, in comparison to a competitive context independent end-to-end baseline system.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ting Han|AUTHOR Ting Han]]^^1^^
, [[Chongxuan Huang|AUTHOR Chongxuan Huang]]^^2^^
, [[Wei Peng|AUTHOR Wei Peng]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Illinois at Chicago, USA; ^^2^^Huawei Technologies, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1259–1263
</span></p></div>
<div class="cpabstractcardabstract"><p>Dialogue State Tracking (DST), which is the process of inferring user goals by estimating belief states given the dialogue history, plays a critical role in task-oriented dialogue systems. A coreference phenomenon observed in multi-turn conversations is not addressed by existing DST models, leading to suboptimal performances. In this paper, we propose Coreference Dialogue State Tracker (CDST) that explicitly models the coreference feature. In particular, at each turn, the proposed model jointly predicts the coreferred domain-slot pair and extracts the coreference values from the dialogue context. Experimental results on MultiWOZ 2.1 dataset show that the proposed model achieves the state-of-the-art joint goal accuracy of 56.47%.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jianwei Sun|AUTHOR Jianwei Sun]]^^1^^
, [[Zhiyuan Tang|AUTHOR Zhiyuan Tang]]^^1^^
, [[Hengxin Yin|AUTHOR Hengxin Yin]]^^1^^
, [[Wei Wang|AUTHOR Wei Wang]]^^1^^
, [[Xi Zhao|AUTHOR Xi Zhao]]^^1^^
, [[Shuaijiang Zhao|AUTHOR Shuaijiang Zhao]]^^1^^
, [[Xiaoning Lei|AUTHOR Xiaoning Lei]]^^1^^
, [[Wei Zou|AUTHOR Wei Zou]]^^2^^
, [[Xiangang Li|AUTHOR Xiangang Li]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^KE, China; ^^2^^KE, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1269–1273
</span></p></div>
<div class="cpabstractcardabstract"><p>End-to-end models have gradually become the preferred option for automatic speech recognition (ASR) applications. During the training of end-to-end ASR, data augmentation is a quite effective technique for regularizing the neural networks. This paper proposes a novel data augmentation technique based on semantic transposition of the transcriptions via syntax rules for end-to-end Mandarin ASR. Specifically, we first segment the transcriptions based on part-of-speech tags. Then transposition strategies, such as placing the object in front of the subject or swapping the subject and the object, are applied on the segmented sentences. Finally, the acoustic features corresponding to the transposed transcription are reassembled based on the audio-to-text forced-alignment produced by a pre-trained ASR system. The combination of original data and augmented one is used for training a new ASR system. The experiments are conducted on the Transformer[2] and Conformer[3] based ASR. The results show that the proposed method can give consistent performance gain to the system. Augmentation related issues, such as comparison of different strategies and ratios for data combination are also investigated.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Nilaksh Das|AUTHOR Nilaksh Das]]^^1^^
, [[Sravan Bodapati|AUTHOR Sravan Bodapati]]^^2^^
, [[Monica Sunkara|AUTHOR Monica Sunkara]]^^2^^
, [[Sundararajan Srinivasan|AUTHOR Sundararajan Srinivasan]]^^2^^
, [[Duen Horng Chau|AUTHOR Duen Horng Chau]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Georgia Tech, USA; ^^2^^Amazon, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1314–1318
</span></p></div>
<div class="cpabstractcardabstract"><p>Training deep neural networks for automatic speech recognition (ASR) requires large amounts of transcribed speech. This becomes a bottleneck for training robust models for //accented// speech which typically contains high variability in pronunciation and other semantics, since obtaining large amounts of annotated accented data is both tedious and costly. Often, we only have access to large amounts of //unannotated// speech from different accents. In this work, we leverage this unannotated data to provide semantic regularization to an ASR model that has been trained only on one accent, to improve its performance for multiple accents. We propose Accent Pre-Training (Acc-PT), a semi-supervised training strategy that combines transfer learning and adversarial training. Our approach improves the performance of a state-of-the-art ASR model by 33% on average over the baseline across multiple accents, training only on annotated samples from one standard accent, and as little as 105 minutes of //unannotated// speech from a target accent.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Wei Chu|AUTHOR Wei Chu]], [[Peng Chang|AUTHOR Peng Chang]], [[Jing Xiao|AUTHOR Jing Xiao]]
</p><p class="cpabstractcardaffiliationlist">PAII, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1319–1323
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper proposed to automatically detect mispronounced words over the regions that have low Goodness-of-Pronunciation scores through a constrained phone decoder, then add these word mispronunciations into the orthodox lexicon without colliding with existing pronunciations, finally use the expanded lexicon for decoding non-native speech. The constrained phone decoder is compiled by using a phone-level automatically generated one-edit-distance network to eliminate the need of extended recognition networks designed by phonologists. Results and analysis have shown that the pronunciation dictionary extension is effective in improving WER performance for non-native speech recognition. This paper also described the details of PAII’s single-pass fusion-free hybrid system for this Interspeech 2021 non-native children English close track ASR challenge, especially showed the effective use of non-speech segments in the training set as noise sources to perform noise augmentation on the training data, and also conducted a comparison of acoustic models with different neural network architectures with analysis. Final WERs of 12.10%/28.25% are obtained compared to a well-optimized baseline with WERs of 13.37%/33.51% on development/evaluation set, respectively.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xun Gong|AUTHOR Xun Gong]], [[Yizhou Lu|AUTHOR Yizhou Lu]], [[Zhikai Zhou|AUTHOR Zhikai Zhou]], [[Yanmin Qian|AUTHOR Yanmin Qian]]
</p><p class="cpabstractcardaffiliationlist">SJTU, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1274–1278
</span></p></div>
<div class="cpabstractcardabstract"><p>Accent variability has posed a huge challenge to automatic speech recognition (ASR) modeling. Although one-hot accent vector based adaptation systems are commonly used, they require prior knowledge about the target accent and cannot handle unseen accents. Furthermore, simply concatenating accent embeddings does not make good use of accent knowledge, which has limited improvements. In this work, we aim to tackle these problems with a novel layer-wise adaptation structure injected into the E2E ASR model encoder. The adapter layer encodes an arbitrary accent in the accent space and assists the ASR model in recognizing accented speech. Given an utterance, the adaptation structure extracts the corresponding accent information and transforms the input acoustic feature into an accent-related feature through the linear combination of all accent bases. We further explore the injection position of the adaptation layer, the number of accent bases, and different types of accent bases to achieve better accent adaptation. Experimental results show that the proposed adaptation structure brings 12% and 10% relative word error rate (WER) reduction on the AESRC2020 accent dataset and the Librispeech dataset, respectively, compared to the baseline.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jinhan Wang|AUTHOR Jinhan Wang]]^^1^^
, [[Yunzheng Zhu|AUTHOR Yunzheng Zhu]]^^1^^
, [[Ruchao Fan|AUTHOR Ruchao Fan]]^^1^^
, [[Wei Chu|AUTHOR Wei Chu]]^^2^^
, [[Abeer Alwan|AUTHOR Abeer Alwan]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of California at Los Angeles, USA; ^^2^^PAII, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1279–1283
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper describes the SPAPL system for the INTERSPEECH 2021 Challenge: Shared Task on Automatic Speech Recognition for Non-Native Children’s Speech in German. ~5 hours of transcribed data and ~60 hours of untranscribed data are provided to develop a German ASR system for children. For the training of the transcribed data, we propose a non-speech state discriminative loss (NSDL) to mitigate the influence of long-duration non-speech segments within speech utterances. In order to explore the use of the untranscribed data, various approaches are implemented and combined together to incrementally improve the system performance. First, bidirectional autoregressive predictive coding (Bi-APC) is used to learn initial parameters for acoustic modelling using the provided untranscribed data. Second, incremental semi-supervised learning is further used to iteratively generate pseudo-transcribed data. Third, different data augmentation schemes are used at different training stages to increase the variability and size of the training data. Finally, a recurrent neural network language model (RNNLM) is used for rescoring. Our system achieves a word error rate (WER) of 39.68% on the evaluation data, an approximately 12% relative improvement over the official baseline (45.21%).</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Khe Chai Sim|AUTHOR Khe Chai Sim]], [[Angad Chandorkar|AUTHOR Angad Chandorkar]], [[Fan Gao|AUTHOR Fan Gao]], [[Mason Chua|AUTHOR Mason Chua]], [[Tsendsuren Munkhdalai|AUTHOR Tsendsuren Munkhdalai]], [[Françoise Beaufays|AUTHOR Françoise Beaufays]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1284–1288
</span></p></div>
<div class="cpabstractcardabstract"><p>On-device personalization of an all-neural automatic speech recognition (ASR) model can be achieved efficiently by fine-tuning the last few layers of the model. This approach has been shown to be effective for adapting the model to recognize rare named entities using only a small amount of data. To reliably perform continuous on-device learning, it is important for the training process to be completely autonomous without manual intervention. Our simulation studies show that training over many rounds may eventually lead to a significant model drift if the personalized model is indiscriminately accepted at the end of each training round. It is important to have appropriate acceptance criteria in place to guard the model against drifting. Moreover, for storage efficiency, it is desirable to persist the model weights in quantized form. We found that quantizing and dequantizing the model weights in between training rounds can prevent the model from learning effectively. This issue can be circumvented by adding noise to the quantized weights at the start of each training round.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shashi Kumar|AUTHOR Shashi Kumar]]^^1^^
, [[Shakti P. Rath|AUTHOR Shakti P. Rath]]^^2^^
, [[Abhishek Pandey|AUTHOR Abhishek Pandey]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Samsung, India; ^^2^^Reverie Language Technologies, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1289–1293
</span></p></div>
<div class="cpabstractcardabstract"><p>Speaker adaptation is known to provide significant improvement in speech recognition accuracy. However, in practical scenario, only a few seconds of audio is available due to which it may be infeasible to apply speaker adaptation methods such as i-vector and fMLLR robustly. Also, decoding with fMLLR transformation happens in two-passes which is impractical for real-time applications. In recent past, mapping speech features from speaker independent (SI) space to fMLLR normalized space using denoising autoencoder (DA) has been explored. To the best of our knowledge, such mapping generally does not yield consistent improvement. In this paper, we show that our proposed joint VAE based mapping achieves a large improvements over ASR models trained using filterbank SI features. We also show that joint VAE outperforms DA by a large margin. We observe a relative improvement of 17% in word error rate (WER) compared to ASR model trained using filterbank features with i-vectors and 23% without i-vectors.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Gaopeng Xu|AUTHOR Gaopeng Xu]], [[Song Yang|AUTHOR Song Yang]], [[Lu Ma|AUTHOR Lu Ma]], [[Chengfei Li|AUTHOR Chengfei Li]], [[Zhongqin Wu|AUTHOR Zhongqin Wu]]
</p><p class="cpabstractcardaffiliationlist">TAL, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1294–1298
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper describes TAL’s system for the INTERSPEECH 2021 shared task on Automatic Speech Recognition (ASR) for non-native children’s speech. In this work, we attempt to apply the self-supervised approach to non-native German children’s ASR. First, we conduct some baseline experiments to indicate that self-supervised learning can capture more acoustic information on non-native children’s speech. Then, we apply the 11-fold data augmentation and combine it with data clean-up to supplement to the limited training data. Moreover, an in-domain semi-supervised VAD model is utilized to segment untranscribed audio. These strategies can significantly improve the system performance. Furthermore, we use two types of language models to further improve performance, i.e., a 4-gram LM with CTC beam-search and a Transformer LM for 2-pass rescoring. Our ASR system reduces the Word Error Rate (WER) by about 48% relatively in comparison with the baseline, achieving 1st in the evaluation period with the WER of 23.5%.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tsz Kin Lam|AUTHOR Tsz Kin Lam]], [[Mayumi Ohta|AUTHOR Mayumi Ohta]], [[Shigehiko Schamoni|AUTHOR Shigehiko Schamoni]], [[Stefan Riezler|AUTHOR Stefan Riezler]]
</p><p class="cpabstractcardaffiliationlist">Universität Heidelberg, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1299–1303
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose an on-the-fly data augmentation method for automatic speech recognition (ASR) that uses alignment information to generate effective training samples. Our method, called Aligned Data Augmentation (ADA) for ASR, replaces transcribed tokens and the speech representations in an aligned manner to generate previously unseen training pairs. The speech representations are sampled from an audio dictionary that has been extracted from the training corpus and inject speaker variations into the training examples. The transcribed tokens are either predicted by a language model such that the augmented data pairs are semantically close to the original data, or randomly sampled. Both strategies result in training pairs that improve robustness in ASR training. Our experiments on a Seq-to-Seq architecture show that ADA can be applied on top of SpecAugment, and achieves about 9–23% and 4–15% relative improvements in WER over SpecAugment alone on LibriSpeech 100h and LibriSpeech 960h test datasets, respectively.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Heting Gao|AUTHOR Heting Gao]]^^1^^
, [[Junrui Ni|AUTHOR Junrui Ni]]^^1^^
, [[Yang Zhang|AUTHOR Yang Zhang]]^^2^^
, [[Kaizhi Qian|AUTHOR Kaizhi Qian]]^^2^^
, [[Shiyu Chang|AUTHOR Shiyu Chang]]^^2^^
, [[Mark Hasegawa-Johnson|AUTHOR Mark Hasegawa-Johnson]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Illinois at Urbana-Champaign, USA; ^^2^^MIT-IBM Watson AI Lab, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1304–1308
</span></p></div>
<div class="cpabstractcardabstract"><p>Many existing languages are too sparsely resourced for monolingual deep learning networks to achieve high accuracy. Multilingual phonetic recognition systems mitigate data sparsity issues by training models on data from multiple languages and learning a speech-to-phone or speech-to-text model universal to all languages. However, despite their good performance on the seen training languages, multilingual systems have poor performance on unseen languages. This paper argues that in the real world, even an unseen language has metadata: linguists can tell us the language name, its language family and, usually, its phoneme inventory. Even with no transcribed speech, it is possible to train a language embedding using only data from language typologies (phylogenetic node and phoneme inventory) that reduces ASR error rates. Experiments on a 20-language corpus show that our methods achieve phonetic token error rate (PTER) reduction on all the unseen test languages. An ablation study shows that using the wrong language embedding usually harms PTER if the two languages are from different language families. However, even the wrong language embedding often improves PTER if the language embedding belongs to another member of the same language family.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yan Huang|AUTHOR Yan Huang]], [[Guoli Ye|AUTHOR Guoli Ye]], [[Jinyu Li|AUTHOR Jinyu Li]], [[Yifan Gong|AUTHOR Yifan Gong]]
</p><p class="cpabstractcardaffiliationlist">Microsoft, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1309–1313
</span></p></div>
<div class="cpabstractcardabstract"><p>Conformer transducer achieves new state-of-the-art end-to-end (E2E) system performance and has become increasingly appealing for production. In this paper, we study how to effectively perform rapid speaker adaptation in a conformer transducer and how it compares with the RNN transducer. We hierarchically decompose the conformer transducer and compare adapting each component through fine-tuning. Among various interesting observations, there are three distinct findings: First, adapting the self-attention can achieve more than 80% gain of the full network adaptation. When the adaptation data is extremely scarce, attention is all you need to adapt. Second, within the self-attention, adapting the value projection outperforms adapting the key or the query projection. Lastly, bias adaptation, despite of its compact parameter space, is surprisingly effective. We conduct experiments on a state-of-the-art conformer transducer for an email dictation task. With 3 to 5 min source speech and 200 minute personalized TTS speech, the best performing encoder and joint network adaptation yields 38.37% and 19.90% relative word error rate (WER) reduction. Combining the attention and bias adaptation can achieve 90% of the gain with significantly smaller footprint. Further comparison with the RNN-T suggests the new state-of-the-art conformer transducer can benefit as much as if not more from personalization.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tingle Li|AUTHOR Tingle Li]], [[Yichen Liu|AUTHOR Yichen Liu]], [[Chenxu Hu|AUTHOR Chenxu Hu]], [[Hang Zhao|AUTHOR Hang Zhao]]
</p><p class="cpabstractcardaffiliationlist">Tsinghua University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1324–1328
<a href="./IS2021/MEDIA/0137" class="externallinkbutton" target="_blank">{{$:/causal/ZIP Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>Cycle consistent generative adversarial network (CycleGAN) and variational autoencoder (VAE) based models have gained popularity in non-parallel voice conversion recently. However, they often suffer from difficult training process and unsatisfactory results. In this paper, we propose a contrastive learning-based adversarial approach for voice conversion, namely contrastive voice conversion (CVC). Compared to previous CycleGAN-based methods, CVC only requires an efficient one-way GAN training by taking the advantage of contrastive learning. When it comes to non-parallel one-to-one voice conversion, CVC is on par or better than CycleGAN and VAE while effectively reducing training time. CVC further demonstrates superior performance in many-to-one voice conversion, enabling the conversion from unseen speakers.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yufei Liu|AUTHOR Yufei Liu]]^^1^^
, [[Chengzhu Yu|AUTHOR Chengzhu Yu]]^^2^^
, [[Wang Shuai|AUTHOR Wang Shuai]]^^2^^
, [[Zhenchuan Yang|AUTHOR Zhenchuan Yang]]^^2^^
, [[Yang Chao|AUTHOR Yang Chao]]^^2^^
, [[Weibin Zhang|AUTHOR Weibin Zhang]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Tencent, China; ^^2^^Tencent, China; ^^3^^SCUT, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1369–1373
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper proposes a non-parallel any-to-many voice conversion (VC) approach with a novel statistics replacement layer. Non-parallel VC is usually achieved by firstly disentangling linguistic and speaker representations, and then concatenating the linguistic content with the learned target speaker’s embedding at the conversion stage. While such a concatenation-based approach could introduce speaker-specific characteristics into the network, it is not very effective as it entirely relies on the network to learn to combine the linguistic content and the speaker characteristics. Inspired by X-vectors, where the statistics of hidden representation such as means and standard deviations are used for speaker differentiation, we propose a statistics replacement layer in VC systems to directly modify the hidden states to have the target speaker’s statistics. The speaker-specific statistics of hidden states are learned for each target speaker during training and are used as guidance for the statistics replacement layer during inference. Moreover, to better concentrate the speaker information into the statistics of hidden representation, a multitask training with X-vector based speaker classification is also performed. Experimental results with Librispeech and VCTK datasets show that the proposed method can effectively improve the converted speech’s naturalness and similarity.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yi Zhou|AUTHOR Yi Zhou]]^^1^^
, [[Xiaohai Tian|AUTHOR Xiaohai Tian]]^^1^^
, [[Zhizheng Wu|AUTHOR Zhizheng Wu]]^^2^^
, [[Haizhou Li|AUTHOR Haizhou Li]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^NUS, Singapore; ^^2^^Facebook, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1374–1378
</span></p></div>
<div class="cpabstractcardabstract"><p>Cross-Lingual Voice Conversion (XVC) aims to modify a source speaker identity towards a target while preserving the source linguistic content. This paper introduces a cycle consistency loss on linguistic representation to ensure the speech content unchanged after conversion. The proposed XVC model consists of two loss functions during optimization: a spectral reconstruction loss and a linguistic cycle consistency loss. The cycle consistency loss seeks to maintain the source speech’s linguistic content. Specifically, we utilize Phonetic PosteriorGram (PPG) to represent the linguistic content. XVC experiments were conducted between English and Mandarin. Both objective and subjective evaluations demonstrated that with the proposed cycle consistency loss, converted speech is more intelligible.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hongqiang Du|AUTHOR Hongqiang Du]], [[Lei Xie|AUTHOR Lei Xie]]
</p><p class="cpabstractcardaffiliationlist">Northwestern Polytechnical University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1379–1383
</span></p></div>
<div class="cpabstractcardabstract"><p>One-shot voice conversion has received significant attention since only one utterance from source speaker and target speaker respectively is required. Moreover, source speaker and target speaker do not need to be seen during training. However, available one-shot voice conversion approaches are not stable for unseen speakers as the speaker embedding extracted from one utterance of an unseen speaker is not reliable. In this paper, we propose a deep discriminative speaker encoder to extract speaker embedding from one utterance more effectively. Specifically, the speaker encoder first integrates residual network and squeeze-and-excitation network to extract discriminative speaker information in frame level by modeling frame-wise and channel-wise interdependence in features. Then attention mechanism is introduced to further emphasize speaker related information via assigning different weights to frame level speaker information. Finally a statistic pooling layer is used to aggregate weighted frame level speaker information to form utterance level speaker embedding. The experimental results demonstrate that our proposed speaker encoder can improve the robustness of one-shot voice conversion for unseen speakers and outperforms baseline systems in terms of speech quality and speaker similarity.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Wen-Chin Huang|AUTHOR Wen-Chin Huang]]^^1^^
, [[Kazuhiro Kobayashi|AUTHOR Kazuhiro Kobayashi]]^^1^^
, [[Yu-Huai Peng|AUTHOR Yu-Huai Peng]]^^2^^
, [[Ching-Feng Liu|AUTHOR Ching-Feng Liu]]^^3^^
, [[Yu Tsao|AUTHOR Yu Tsao]]^^2^^
, [[Hsin-Min Wang|AUTHOR Hsin-Min Wang]]^^2^^
, [[Tomoki Toda|AUTHOR Tomoki Toda]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Nagoya University, Japan; ^^2^^Academia Sinica, Taiwan; ^^3^^Chi Mei Hospital, Taiwan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1329–1333
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose a new paradigm for maintaining speaker identity in dysarthric voice conversion (DVC). The poor quality of dysarthric speech can be greatly improved by statistical VC, but as the normal speech utterances of a dysarthria patient are nearly impossible to collect, previous work failed to recover the individuality of the patient. In light of this, we suggest a novel, two-stage approach for DVC, which is highly flexible in that no normal speech of the patient is required. First, a powerful parallel sequence-to-sequence model converts the input dysarthric speech into a normal speech of a reference speaker as an intermediate product, and a nonparallel, frame-wise VC model realized with a variational autoencoder then converts the speaker identity of the reference speech back to that of the patient while assumed to be capable of preserving the enhanced quality. We investigate several design options. Experimental evaluation results demonstrate the potential of our approach to improving the quality of the dysarthric speech while maintaining the speaker identity.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sefik Emre Eskimez|AUTHOR Sefik Emre Eskimez]], [[Dimitrios Dimitriadis|AUTHOR Dimitrios Dimitriadis]], [[Kenichi Kumatani|AUTHOR Kenichi Kumatani]], [[Robert Gmyr|AUTHOR Robert Gmyr]]
</p><p class="cpabstractcardaffiliationlist">Microsoft, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1334–1338
</span></p></div>
<div class="cpabstractcardabstract"><p>In this work, we propose a variant of STARGAN for many-to-many voice conversion (VC) conditioned on the d-vectors for short-duration (2–15 seconds) speech. We make several modifications to the STARGAN training and employ new network architectures. We employ a transformer encoder in the discriminator network, and we apply the discriminator loss to the cycle consistency and identity samples in addition to the generated (fake) samples. Instead of classifying the samples as either real or fake, our discriminator tries to predict the categorical speaker class, where a fake class is added for the generated samples. Furthermore, we employ a reverse gradient layer after the generator’s encoder and use an auxiliary classifier to remove the speaker’s information from the encoded representation. We show that our method yields better results than the baseline method in objective and subjective evaluations in terms of voice conversion quality. Moreover, we provide an ablation study and show each component’s influence on speaker similarity.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Takeshi Koshizuka|AUTHOR Takeshi Koshizuka]], [[Hidefumi Ohmura|AUTHOR Hidefumi Ohmura]], [[Kouichi Katsurada|AUTHOR Kouichi Katsurada]]
</p><p class="cpabstractcardaffiliationlist">Tokyo University of Science, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1339–1343
</span></p></div>
<div class="cpabstractcardabstract"><p>Voice conversion (VC) is a technique that converts speaker-dependent non-linguistic information into that of another speaker, while retaining the linguistic information of the input speech. A typical VC system comprises two modules: an encoder module that removes speaker individuality from the input speech and a decoder module that incorporates another speaker’s individuality in synthesized speech. This paper proposes a training method for a vocoder-free any-to-many encoder-decoder VC model with limited data. Various pre-training techniques have been proposed to solve problems training to limited training data; some of these techniques employ the text-to-speech (TTS) task for pre-training. We pre-train the decoder module in the voice conversion task for growing our pre-training technique into continuously adding target speakers to the VC system. The experimental results show that good conversion performance can be achieved by conducting VC-based pre-training. We also confirmed that the rehearsal and pseudo-rehearsal methods can effectively fine-tune the model without degrading the conversion performance of the pre-trained target speakers.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Disong Wang|AUTHOR Disong Wang]]^^1^^
, [[Liqun Deng|AUTHOR Liqun Deng]]^^2^^
, [[Yu Ting Yeung|AUTHOR Yu Ting Yeung]]^^2^^
, [[Xiao Chen|AUTHOR Xiao Chen]]^^2^^
, [[Xunying Liu|AUTHOR Xunying Liu]]^^1^^
, [[Helen Meng|AUTHOR Helen Meng]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^CUHK, China; ^^2^^Huawei Technologies, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1344–1348
</span></p></div>
<div class="cpabstractcardabstract"><p>One-shot voice conversion (VC), which performs conversion across arbitrary speakers with only a single target-speaker utterance for reference, can be effectively achieved by speech representation disentanglement. Existing work generally ignores the correlation between different speech representations during training, which causes leakage of content information into the speaker representation and thus degrades VC performance. To alleviate this issue, we employ vector quantization (VQ) for content encoding and introduce mutual information (MI) as the correlation metric during training, to achieve proper disentanglement of content, speaker and pitch representations, by reducing their inter-dependencies in an unsupervised manner. Experimental results reflect the superiority of the proposed method in learning effective disentangled speech representations for retaining source linguistic content and intonation variations, while capturing target speaker characteristics. In doing so, the proposed approach achieves higher speech naturalness and speaker similarity than current state-of-the-art one-shot VC systems. Our code, pre-trained models and demo are publicly available.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yinghao Aaron Li|AUTHOR Yinghao Aaron Li]], [[Ali Zare|AUTHOR Ali Zare]], [[Nima Mesgarani|AUTHOR Nima Mesgarani]]
</p><p class="cpabstractcardaffiliationlist">Columbia University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1349–1353
</span></p></div>
<div class="cpabstractcardabstract"><p>We present an unsupervised non-parallel many-to-many voice conversion (VC) method using a generative adversarial network (GAN) called StarGAN v2. Using a combination of adversarial source classifier loss and perceptual loss, our model significantly outperforms previous VC models. Although our model is trained only with 20 English speakers, it generalizes to a variety of voice conversion tasks, such as any-to-many, cross-lingual, and singing conversion. Using a style encoder, our framework can also convert plain reading speech into stylistic speech, such as emotional and falsetto speech. Subjective and objective evaluation experiments on a non-parallel many-to-many voice conversion task revealed that our model produces natural sounding voices, close to the sound quality of state-of-the-art text-to-speech (TTS) based voice conversion methods without the need for text labels. Moreover, our model is completely convolutional and with a faster-than-real-time vocoder such as Parallel WaveGAN can perform real-time voice conversion.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Neeraj Kumar|AUTHOR Neeraj Kumar]]^^1^^
, [[Srishti Goel|AUTHOR Srishti Goel]]^^1^^
, [[Ankur Narang|AUTHOR Ankur Narang]]^^1^^
, [[Brejesh Lall|AUTHOR Brejesh Lall]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Hike, India; ^^2^^IIT Delhi, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1354–1358
<a href="./IS2021/MEDIA/0441" class="externallinkbutton" target="_blank">{{$:/causal/ZIP Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we present a novel zero-shot multi-speaker speech synthesis approach (ZSM-SS) that leverages the normalization architecture and speaker encoder with non-autoregressive multi-head attention driven encoder-decoder architecture. Given an input text and a reference speech sample of an unseen person, ZSM-SS can generate speech in that person’s style in a zero-shot manner. Additionally, we demonstrate how the affine parameters of normalization help in capturing the prosodic features such as energy and fundamental frequency in a disentangled fashion and can be used to generate morphed speech output. We demonstrate the efficacy of our proposed architecture on multi-speaker VCTK[1] and LibriTTS [2] datasets, using multiple quantitative metrics that measure generated speech distortion and MOS, along with speaker embedding analysis of the proposed speaker encoder model.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shoki Sakamoto|AUTHOR Shoki Sakamoto]]^^1^^
, [[Akira Taniguchi|AUTHOR Akira Taniguchi]]^^1^^
, [[Tadahiro Taniguchi|AUTHOR Tadahiro Taniguchi]]^^1^^
, [[Hirokazu Kameoka|AUTHOR Hirokazu Kameoka]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Ritsumeikan University, Japan; ^^2^^NTT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1359–1363
</span></p></div>
<div class="cpabstractcardabstract"><p>Preserving the linguistic content of input speech is essential during voice conversion (VC). The star generative adversarial network-based VC method (StarGAN-VC) is a recently developed method that allows non-parallel many-to-many VC. Although this method is powerful, it can fail to preserve the linguistic content of input speech when the number of available training samples is extremely small. To overcome this problem, we propose the use of automatic speech recognition to assist model training, to improve StarGAN-VC, especially in low-resource scenarios. Experimental results show that using our proposed method, StarGAN-VC can retain more linguistic information than vanilla StarGAN-VC.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xuexin Xu|AUTHOR Xuexin Xu]]^^1^^
, [[Liang Shi|AUTHOR Liang Shi]]^^1^^
, [[Jinhui Chen|AUTHOR Jinhui Chen]]^^2^^
, [[Xunquan Chen|AUTHOR Xunquan Chen]]^^3^^
, [[Jie Lian|AUTHOR Jie Lian]]^^1^^
, [[Pingyuan Lin|AUTHOR Pingyuan Lin]]^^1^^
, [[Zhihong Zhang|AUTHOR Zhihong Zhang]]^^1^^
, [[Edwin R. Hancock|AUTHOR Edwin R. Hancock]]^^4^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Xiamen University, China; ^^2^^Prefectural University of Hiroshima, Japan; ^^3^^Kobe University, Japan; ^^4^^University of York, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1364–1368
</span></p></div>
<div class="cpabstractcardabstract"><p>Arbitrary voice conversion, also referred to as zero-shot voice conversion, has recently attracted increased attention in the literature. Although disentangling the linguistic and style representations for acoustic features is an effective way to achieve zero-shot voice conversion, the problem of how to convert to a natural speaker style is challenging because of the intrinsic variabilities of speech and the difficulties of completely decoupling them. For this reason, in this paper, we propose a Two-Pathway Style Embedding Voice Conversion framework (TPSE-VC) for realistic and natural speech conversion. The novel feature of this method is to simultaneously embed sentence-level and phoneme-level style information. A novel attention mechanism is proposed to implement the implicit alignment for timbre style and phoneme content, further embedding a phoneme-level style representation. In addition, we consider embedding the complete set of time steps of audio style into a fixed-length vector to obtain the sentence-level style representation. Moreover, TPSEVC does not require any pre-trained models, and is only trained with non-parallel speech data. Experimental results demonstrate that the proposed TPSE-VC outperforms the state-of-the-art results on zero-shot voice conversion.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hermann Ney|AUTHOR Hermann Ney]]
</p><p class="cpabstractcardaffiliationlist">RWTH Aachen University, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} ////
</span></p></div>
<div class="cpabstractcardabstract"><p>When research on automatic speech recognition started, the statistical (or data-driven) approach was associated with methods like Bayes decision rule, hidden Markov models, Gaussian models and expectation-maximization algorithm. Later extensions included discriminative training and hybrid hidden Markov models using multi-layer perceptrons and recurrent neural networks. Some of the methods originally developed for speech recognition turned out to be seminal for other language processing tasks like machine translation, handwritten character recognition and sign language processing. Today’s research on speech and language processing is dominated by deep learning, which is typically identified with methods like attention modelling, sequence-to-sequence processing and end-to-end processing.
In this talk, I will present my personal view of the historical developments of research on speech and language processing. I will put particular emphasis on the framework of Bayes decision rule and on the question of how the various approaches developed fit into this framework.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Michael Pucher|AUTHOR Michael Pucher]]^^1^^
, [[Thomas Woltron|AUTHOR Thomas Woltron]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Austrian Academy of Sciences, Austria; ^^2^^FH Wiener Neustadt, Austria</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1–5
<a href="./IS2021/MEDIA/0473" class="externallinkbutton" target="_blank">{{$:/causal/ZIP Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>It is a common experience of most speakers that the playback of one’s own voice sounds strange. This can be mainly attributed to the missing bone-conducted speech signal that is not present in the playback signal. It was also shown that some phonemes have a high bone-conducted relative to air-conducted sound transmission, which means that the bone-conduction filter is phone-dependent. To achieve such a phone-dependent modeling we train different speaker dependent and speaker adaptive speech conversion systems using airborne and bone-conducted speech data from 8 speakers (5 male, 3 female), which allow for the conversion of airborne speech to bone-conducted speech. The systems are based on Long Short-Term Memory (LSTM) deep neural networks, where the speaker adaptive versions with speaker embedding can be used without bone-conduction signals from the target speaker. Additionally we also used models that apply a global filtering. The different models are then evaluated by an objective error metric and a subjective listening experiment, which show that the LSTM based models outperform the global filters.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Markéta Řezáčková|AUTHOR Markéta Řezáčková]], [[Jan Švec|AUTHOR Jan Švec]], [[Daniel Tihelka|AUTHOR Daniel Tihelka]]
</p><p class="cpabstractcardaffiliationlist">University of West Bohemia, Czechia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 6–10
</span></p></div>
<div class="cpabstractcardabstract"><p>Despite the increasing popularity of end-to-end text-to-speech (TTS) systems, the correct grapheme-to-phoneme (G2P) module is still a crucial part of those relying on a phonetic input. In this paper, we, therefore, introduce a T5G2P model, a Text-to-Text Transfer Transformer (T5) neural network model which is able to convert an input text sentence into a phoneme sequence with a high accuracy. The evaluation of our trained T5 model is carried out on English and Czech, since there are different specific properties of G2P, including homograph disambiguation, cross-word assimilation and irregular pronunciation of loanwords. The paper also contains an analysis of a homographs issue in English and offers another approach to Czech phonetic transcription using the detection of pronunciation exceptions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Olivier Perrotin|AUTHOR Olivier Perrotin]], [[Hussein El Amouri|AUTHOR Hussein El Amouri]], [[Gérard Bailly|AUTHOR Gérard Bailly]], [[Thomas Hueber|AUTHOR Thomas Hueber]]
</p><p class="cpabstractcardaffiliationlist">GIPSA-lab (UMR 5216), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 11–15
<a href="./IS2021/MEDIA/1547" class="externallinkbutton" target="_blank">{{$:/causal/ZIP Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>Neural vocoders are systematically evaluated on homogeneous train and test databases. This kind of evaluation is efficient to compare neural vocoders in their “comfort zone”, yet it hardly reveals their limits towards unseen data during training. To compare their extrapolation capabilities, we introduce a methodology that aims at quantifying the robustness of neural vocoders in synthesising unseen data, by precisely controlling the ranges of seen/unseen data in the training database. By focusing in this study on the pitch (F₀) parameter, our methodology involves a careful splitting of a dataset to control which F₀ values are seen/unseen during training, followed by both global (utterance) and local (frame) evaluation of vocoders. Comparison of four types of vocoders (autoregressive, sourcefilter, flows, GAN) displays a wide range of behaviour towards unseen input pitch values, including excellent extrapolation (WaveGlow); widely-spread F₀ errors (WaveRNN); and systematic generation of the training set median F₀ (LPCNet, Parallel WaveGAN). In contrast, fewer differences between vocoders were observed when using homogeneous train and test sets, thus demonstrating the potential and need for such evaluation to better discriminate the neural vocoders abilities to generate out-of-training-range data.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Phat Do|AUTHOR Phat Do]]^^1^^
, [[Matt Coler|AUTHOR Matt Coler]]^^1^^
, [[Jelske Dijkstra|AUTHOR Jelske Dijkstra]]^^1^^
, [[Esther Klabbers|AUTHOR Esther Klabbers]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Rijksuniversiteit Groningen, The Netherlands; ^^2^^ReadSpeaker, The Netherlands</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 16–20
</span></p></div>
<div class="cpabstractcardabstract"><p>We provide a systematic review of past studies that use multilingual data for text-to-speech (TTS) of low-resource languages (LRLs). We focus on the strategies used by these studies for incorporating multilingual data and how they affect output speech quality. To investigate the difference in output quality between corresponding monolingual and multilingual models, we propose a novel measure to compare this difference across the included studies and their various evaluation metrics. This measure, called the Multilingual Model Effect (MLME), is found to be affected by: acoustic model architecture, the difference ratio of target language data between corresponding multilingual and monolingual experiments, the balance ratio of target language data to total data, and the amount of target language data used. These findings can act as reference for data strategies in future experiments with multilingual TTS models for LRLs. Language family classification, despite being widely used, is not found to be an effective criterion for selecting source languages.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tanya Talkar|AUTHOR Tanya Talkar]]^^1^^
, [[Nancy Pearl Solomon|AUTHOR Nancy Pearl Solomon]]^^2^^
, [[Douglas S. Brungart|AUTHOR Douglas S. Brungart]]^^2^^
, [[Stefanie E. Kuchinsky|AUTHOR Stefanie E. Kuchinsky]]^^2^^
, [[Megan M. Eitel|AUTHOR Megan M. Eitel]]^^2^^
, [[Sara M. Lippa|AUTHOR Sara M. Lippa]]^^2^^
, [[Tracey A. Brickell|AUTHOR Tracey A. Brickell]]^^2^^
, [[Louis M. French|AUTHOR Louis M. French]]^^2^^
, [[Rael T. Lange|AUTHOR Rael T. Lange]]^^2^^
, [[Thomas F. Quatieri|AUTHOR Thomas F. Quatieri]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Harvard University, USA; ^^2^^Walter Reed National Military Medical Center, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 21–25
</span></p></div>
<div class="cpabstractcardabstract"><p>A traumatic brain injury (TBI) can lead to various long-term effects on memory, attention, and mood, as well as the occurrence of headaches, speech, and hearing problems. There is a need to better understand the long-term effects of a TBI for objective tracking of an individual’s recovery, which could be used to determine intervention trajectories. This study utilizes acoustic features derived from recordings of speech tasks completed by active-duty service members and veterans (SMVs) enrolled in the Defense and Veterans Brain Injury (DVBIC)/Traumatic Brain Injury Center of Excellence (TBICoE) 15-Year Longitudinal TBI Study. We hypothesize that the individuals diagnosed with moderate to severe TBI would demonstrate motor speech impairments through decreased coordination of the speech production subsystems as compared to individuals with no history of TBI. Speech motor coordination is measured through correlations of acoustic feature time series representing speech subsystems. Eigenspectra derived from these correlations are utilized in machine learning models to discriminate between the two groups. The fusion of correlation features derived from the recordings achieves an AUC of 0.78. This suggests that residual motor impairments from moderate to severe TBI could be detectable through objective measures of speech motor coordination.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[J.C. Vásquez-Correa|AUTHOR J.C. Vásquez-Correa]]^^1^^
, [[Julian Fritsch|AUTHOR Julian Fritsch]]^^2^^
, [[J.R. Orozco-Arroyave|AUTHOR J.R. Orozco-Arroyave]]^^1^^
, [[Elmar Nöth|AUTHOR Elmar Nöth]]^^1^^
, [[Mathew Magimai-Doss|AUTHOR Mathew Magimai-Doss]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^FAU Erlangen-Nürnberg, Germany; ^^2^^Idiap Research Institute, Switzerland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 26–30
</span></p></div>
<div class="cpabstractcardabstract"><p>Parkinson’s disease produces several motor symptoms, including different speech impairments that are known as hypokinetic dysarthria. Symptoms associated to dysarthria affect different dimensions of speech such as phonation, articulation, prosody, and intelligibility. Studies in the literature have mainly focused on the analysis of articulation and prosody because they seem to be the most prominent symptoms associated to dysarthria severity. However, phonation impairments also play a significant role to evaluate the global speech severity of Parkinson’s patients. This paper proposes an extensive comparison of different methods to automatically evaluate the severity of specific phonation impairments in Parkinson’s patients. The considered models include the computation of perturbation and glottal-based features, in addition to features extracted from a zero frequency filtered signals. We consider as well end-to-end models based on 1D CNNs, which are trained to learn features from the raw speech waveform, reconstructed glottal signals, and zero-frequency filtered signals. The results indicate that it is possible to automatically classify between speakers with low versus high phonation severity due to the presence of dysarthria and at the same time to evaluate the severity of the phonation impairments on a continuous scale, posed as a regression problem.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Khalid Daoudi|AUTHOR Khalid Daoudi]]^^1^^
, [[Biswajit Das|AUTHOR Biswajit Das]]^^1^^
, [[Solange Milhé de Saint Victor|AUTHOR Solange Milhé de Saint Victor]]^^2^^
, [[Alexandra Foubert-Samier|AUTHOR Alexandra Foubert-Samier]]^^2^^
, [[Anne Pavy-Le Traon|AUTHOR Anne Pavy-Le Traon]]^^3^^
, [[Olivier Rascol|AUTHOR Olivier Rascol]]^^3^^
, [[Wassilios G. Meissner|AUTHOR Wassilios G. Meissner]]^^2^^
, [[Virginie Woisard|AUTHOR Virginie Woisard]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Inria, France; ^^2^^CHU de Bordeaux, France; ^^3^^CHU de Toulouse, France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 31–35
</span></p></div>
<div class="cpabstractcardabstract"><p>Parkinson’s disease (PD) and the parkinsonian variant of Multiple System Atrophy (MSA-P) are two neurodegenerative diseases which share similar clinical features, particularly in early disease stages. The differential diagnosis can be thus very challenging. Dysarthria is known to be a frequent and early clinical feature of PD and MSA. It can be thus used as a vehicle to provide a vocal biomarker which could help in the differential diagnosis. In particular, distortion of consonants is known to be a frequent impairment in these diseases. The aim of this study is to investigate distinctive patterns in the distortion of voiced obstruents (plosives and fricatives). It is the first study which attempts to examine such distortions in the French language for the purpose of the differential diagnosis between PD and MSA-P (and among the very few studies if we consider all languages). We carry out a perceptual and objective analysis of voiced obstruents extracted from isolated pseudo-words initials. We first show that devoicing is a significant impairment which predominates in MSA-P. We then show that voice onset time (VOT) of voiced plosives (prevoicing duration) can be a complementary feature to improve the accuracy in discrimination between PD and MSA-P.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Pu Wang|AUTHOR Pu Wang]]^^1^^
, [[Bagher BabaAli|AUTHOR Bagher BabaAli]]^^2^^
, [[Hugo Van hamme|AUTHOR Hugo Van hamme]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^KU Leuven, Belgium; ^^2^^University of Tehran, Iran</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 36–40
</span></p></div>
<div class="cpabstractcardabstract"><p>End-to-end (E2E) spoken language understanding (SLU) systems avoid an intermediate textual representation by mapping speech directly into intents with slot values. This approach requires considerable domain-specific training data. In low-resource scenarios this is a major concern, e.g., in the present study dealing with SLU for dysarthric speech. Pretraining part of the SLU model for automatic speech recognition targets helps but no research has shown to which extent SLU on dysarthric speech benefits from knowledge transferred from other dysarthric speech tasks. This paper investigates the efficiency of pre-training strategies for SLU tasks on dysarthric speech. The designed SLU system consists of a TDNN acoustic model for feature encoding and a capsule network for intent and slot decoding. The acoustic model is pre-trained in two stages: initialization with a corpus of normal speech and finetuning on a mixture of dysarthric and normal speech. By introducing the intelligibility score as a metric of the impairment severity, this paper quantitatively analyzes the relation between generalization and pathology severity for dysarthric speech.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Rosanna Turrisi|AUTHOR Rosanna Turrisi]]^^1^^
, [[Arianna Braccia|AUTHOR Arianna Braccia]]^^2^^
, [[Marco Emanuele|AUTHOR Marco Emanuele]]^^1^^
, [[Simone Giulietti|AUTHOR Simone Giulietti]]^^2^^
, [[Maura Pugliatti|AUTHOR Maura Pugliatti]]^^2^^
, [[Mariachiara Sensi|AUTHOR Mariachiara Sensi]]^^2^^
, [[Luciano Fadiga|AUTHOR Luciano Fadiga]]^^1^^
, [[Leonardo Badino|AUTHOR Leonardo Badino]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^IIT, Italy; ^^2^^Università di Ferrara, Italy; ^^3^^PerVoice, Italy</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 41–45
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper introduces a new dysarthric speech command dataset in Italian, called EasyCall corpus. The dataset consists of 21386 audio recordings from 24 healthy and 31 dysarthric speakers, whose individual degree of speech impairment was assessed by neurologists through the Therapy Outcome Measure. The corpus aims at providing a resource for the development of ASR-based assistive technologies for patients with dysarthria. In particular, it may be exploited to develop a voice-controlled contact application for commercial smartphones, aiming at improving dysarthric patients’ ability to communicate with their family and caregivers. Before recording the dataset, participants were administered a survey to evaluate which commands are more likely to be employed by dysarthric individuals in a voice-controlled contact application. In addition, the dataset includes a list of non-commands (i.e., words near/inside commands or phonetically close to commands) that can be leveraged to build a more robust command recognition system. At present commercial ASR systems perform poorly on the EasyCall Corpus as we report in this paper. This result corroborates the need for dysarthric speech corpora for developing effective assistive technologies. To the best of our knowledge, this database represents the richest corpus of dysarthric speech to date.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xiaoyu Bie|AUTHOR Xiaoyu Bie]]^^1^^
, [[Laurent Girin|AUTHOR Laurent Girin]]^^2^^
, [[Simon Leglaive|AUTHOR Simon Leglaive]]^^3^^
, [[Thomas Hueber|AUTHOR Thomas Hueber]]^^2^^
, [[Xavier Alameda-Pineda|AUTHOR Xavier Alameda-Pineda]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^LJK (UMR 5224), France; ^^2^^GIPSA-lab (UMR 5216), France; ^^3^^IETR (UMR 6164), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 46–50
</span></p></div>
<div class="cpabstractcardabstract"><p>The Variational Autoencoder (VAE) is a powerful deep generative model that is now extensively used to represent high-dimensional complex data via a low-dimensional latent space learned in an unsupervised manner. In the original VAE model, input data vectors are processed independently. In recent years, a series of papers have presented different extensions of the VAE to process sequential data, that not only model the latent space, but also model the temporal dependencies within a sequence of data vectors and corresponding latent vectors, relying on recurrent neural networks. We recently performed a comprehensive review of those models and unified them into a general class called Dynamical Variational Autoencoders (DVAEs). In the present paper, we present the results of an experimental benchmark comparing six of those DVAE models on the speech analysis-resynthesis task, as an illustration of the high potential of DVAEs for speech modeling.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Metehan Yurt|AUTHOR Metehan Yurt]]^^1^^
, [[Pavan Kantharaju|AUTHOR Pavan Kantharaju]]^^1^^
, [[Sascha Disch|AUTHOR Sascha Disch]]^^1^^
, [[Andreas Niedermeier|AUTHOR Andreas Niedermeier]]^^1^^
, [[Alberto N. Escalante-B.|AUTHOR Alberto N. Escalante-B.]]^^2^^
, [[Veniamin I. Morgenshtern|AUTHOR Veniamin I. Morgenshtern]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Fraunhofer IIS, Germany; ^^2^^WS Audiology, Germany; ^^3^^FAU Erlangen-Nürnberg, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 51–55
</span></p></div>
<div class="cpabstractcardabstract"><p>Accurate phoneme detection and processing can enhance speech intelligibility in hearing aids and audio & speech codecs. As fricative phonemes have an important part of their energy concentrated in high frequency bands, frequency lowering algorithms are used in hearing aids to improve fricative intelligibility for people with high-frequency hearing loss. In traditional audio codecs, while processing speech in blocks, spectral smearing around fricative phoneme borders results in pre and post echo artifacts. Hence, detecting the fricative borders and adapting the processing accordingly could enhance the quality of speech. Until recently, phoneme detection and analysis were mostly done by extracting features specific to the class of phonemes. In this paper, we present a deep learning based fricative phoneme detection algorithm that exceeds the state-of-the-art fricative phoneme detection accuracy on the TIMIT speech corpus. Moreover, we compare our method to other approaches that employ classical signal processing for fricative detection and also evaluate it on the TIMIT files coded with AAC codec followed by bandwidth limitation. Reported results of our deep learning approach on original TIMIT files are reproducible and come with an easy to use code that could serve as a baseline for any future research on this topic.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[RaviShankar Prasad|AUTHOR RaviShankar Prasad]], [[Mathew Magimai-Doss|AUTHOR Mathew Magimai-Doss]]
</p><p class="cpabstractcardaffiliationlist">Idiap Research Institute, Switzerland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 56–60
</span></p></div>
<div class="cpabstractcardabstract"><p>Formants are major resonances in the vocal tract system. Identification of formants is important for study of speech. In the literature, formants are typically identified by first deriving formant frequency candidates (e.g., using linear prediction) and then applying a tracking mechanism. In this paper, we propose a simple tracking-free formant identification approach based on zero frequency filtering. More precisely, formants F1-F2 are identified by modifying the trend removal operation in zero frequency filtering and picking simply the dominant peak in the short-term discrete Fourier transform spectra. We demonstrate the potential of the approach by comparing it against state-of-the-art formant identification approaches on a typical speech data set (TIMIT-VTR) and an atypical speech data set (PC-GITA).</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yann Teytaut|AUTHOR Yann Teytaut]], [[Axel Roebel|AUTHOR Axel Roebel]]
</p><p class="cpabstractcardaffiliationlist">STMS (UMR 9912), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 61–65
</span></p></div>
<div class="cpabstractcardabstract"><p>Phoneme-to-audio alignment is the task of synchronizing voice recordings and their related phonetic transcripts. In this work, we introduce a new system to forced phonetic alignment with Recurrent Neural Networks (RNN). With the Connectionist Temporal Classification (CTC) loss as training objective, and an additional reconstruction cost, we learn to infer relevant per-frame phoneme probabilities from which alignment is derived. The core of the neural architecture is a context-aware attention mechanism between mel-spectrograms and side information. We investigate two contexts given by either phoneme sequences (model PHATT) or spectrograms themselves (model SPATT). Evaluations show that these models produce precise alignments for both speaking and singing voice. Best results are obtained with the model PHATT, which outperforms baseline reference with an average imprecision of 16.3ms and 29.8ms on speech and singing, respectively. The model SPATT also appears as an interesting alternative, capable of aligning longer audio files without requiring phoneme sequences on small audio segments.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Björn W. Schuller|AUTHOR Björn W. Schuller]]^^1^^
, [[Anton Batliner|AUTHOR Anton Batliner]]^^2^^
, [[Christian Bergler|AUTHOR Christian Bergler]]^^3^^
, [[Cecilia Mascolo|AUTHOR Cecilia Mascolo]]^^4^^
, [[Jing Han|AUTHOR Jing Han]]^^4^^
, [[Iulia Lefter|AUTHOR Iulia Lefter]]^^5^^
, [[Heysem Kaya|AUTHOR Heysem Kaya]]^^6^^
, [[Shahin Amiriparian|AUTHOR Shahin Amiriparian]]^^2^^
, [[Alice Baird|AUTHOR Alice Baird]]^^2^^
, [[Lukas Stappen|AUTHOR Lukas Stappen]]^^2^^
, [[Sandra Ottl|AUTHOR Sandra Ottl]]^^2^^
, [[Maurice Gerczuk|AUTHOR Maurice Gerczuk]]^^2^^
, [[Panagiotis Tzirakis|AUTHOR Panagiotis Tzirakis]]^^1^^
, [[Chloë Brown|AUTHOR Chloë Brown]]^^4^^
, [[Jagmohan Chauhan|AUTHOR Jagmohan Chauhan]]^^4^^
, [[Andreas Grammenos|AUTHOR Andreas Grammenos]]^^4^^
, [[Apinan Hasthanasombat|AUTHOR Apinan Hasthanasombat]]^^4^^
, [[Dimitris Spathis|AUTHOR Dimitris Spathis]]^^4^^
, [[Tong Xia|AUTHOR Tong Xia]]^^4^^
, [[Pietro Cicuta|AUTHOR Pietro Cicuta]]^^4^^
, [[Leon J.M. Rothkrantz|AUTHOR Leon J.M. Rothkrantz]]^^5^^
, [[Joeri A. Zwerts|AUTHOR Joeri A. Zwerts]]^^6^^
, [[Jelle Treep|AUTHOR Jelle Treep]]^^6^^
, [[Casper S. Kaandorp|AUTHOR Casper S. Kaandorp]]^^6^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Imperial College London, UK; ^^2^^Universität Augsburg, Germany; ^^3^^FAU Erlangen-Nürnberg, Germany; ^^4^^University of Cambridge, UK; ^^5^^Technische Universiteit Delft, The Netherlands; ^^6^^Universiteit Utrecht, The Netherlands</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 431–435
</span></p></div>
<div class="cpabstractcardabstract"><p>The INTERSPEECH 2021 Computational Paralinguistics Challenge addresses four different problems for the first time in a research competition under well-defined conditions: In the //COVID-19 Cough// and //COVID-19 Speech// Sub-Challenges, a binary classification on COVID-19 infection has to be made based on coughing sounds and speech; in the //Escalation// Sub-Challenge, a three-way assessment of the level of escalation in a dialogue is featured; and in the //Primates// Sub-Challenge, four species vs background need to be classified. We describe the Sub-Challenges, baseline feature extraction, and classifiers based on the ‘usual’ COMPARE and BoAW features as well as deep unsupervised representation learning using the AUDEEP toolkit, and deep feature extraction from pre-trained CNNs using the DEEP SPECTRUM toolkit; in addition, we add deep end-to-end sequential modelling, and partially linguistic analysis.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[José Vicente Egas-López|AUTHOR José Vicente Egas-López]]^^1^^
, [[Mercedes Vetráb|AUTHOR Mercedes Vetráb]]^^1^^
, [[László Tóth|AUTHOR László Tóth]]^^1^^
, [[Gábor Gosztolya|AUTHOR Gábor Gosztolya]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Szeged, Hungary; ^^2^^University of Szeged, Hungary</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 476–480
</span></p></div>
<div class="cpabstractcardabstract"><p>Computational paralinguistics is concerned with the automatic identification of non-verbal information in human speech. The Interspeech ComParE challenge features new paralinguistic tasks each year; this time, among others, a cross-corpus conflict escalation task and the identification of primates based solely on audio are the actual problems set. In our entry to ComParE 2021, we utilize x-vectors and Fisher vectors as features. To improve the robustness of the predictions, we also experiment with building an ensemble of classifiers from the x-vectors. Lastly, we exploit the fact that the Escalation Sub-Challenge is a conflict detection task, and incorporate the SSPNet Conflict Corpus in our training workflow. Using these approaches, at the time of writing, we had already surpassed the official Challenge baselines on both tasks, which demonstrates the efficiency of the employed techniques.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Oxana Verkholyak|AUTHOR Oxana Verkholyak]]^^1^^
, [[Denis Dresvyanskiy|AUTHOR Denis Dresvyanskiy]]^^2^^
, [[Anastasia Dvoynikova|AUTHOR Anastasia Dvoynikova]]^^1^^
, [[Denis Kotov|AUTHOR Denis Kotov]]^^2^^
, [[Elena Ryumina|AUTHOR Elena Ryumina]]^^1^^
, [[Alena Velichko|AUTHOR Alena Velichko]]^^1^^
, [[Danila Mamontov|AUTHOR Danila Mamontov]]^^2^^
, [[Wolfgang Minker|AUTHOR Wolfgang Minker]]^^2^^
, [[Alexey Karpov|AUTHOR Alexey Karpov]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^RAS, Russia; ^^2^^Universität Ulm, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 481–485
</span></p></div>
<div class="cpabstractcardabstract"><p>Conflict situations arise frequently in our daily life and often require timely response to resolve the issues. In order to automatically classify conflict (also referred to as escalation) speech utterances we propose ensemble learning as it improves prediction performance by combining several heterogeneous models that compensate for each other’s weaknesses. However, the effectiveness of the classification ensemble greatly depends on its constituents and their fusion strategy. This paper provides experimental evidence for effectiveness of different prediction-level fusion strategies and demonstrates the performance of each proposed ensemble on the Escalation Sub-Challenge (ESS) in the framework of the Computational Paralinguistics Challenge (ComParE-2021). The ensembles comprise various machine learning approaches based on acoustic and linguistic characteristics of speech. The training strategy is specifically designed to increase the generalization performance on the unseen data, while the diverse nature of ensemble candidates ensures high prediction power and accurate classification.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Dominik Schiller|AUTHOR Dominik Schiller]]^^1^^
, [[Silvan Mertes|AUTHOR Silvan Mertes]]^^1^^
, [[Pol van Rijn|AUTHOR Pol van Rijn]]^^2^^
, [[Elisabeth André|AUTHOR Elisabeth André]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universität Augsburg, Germany; ^^2^^MPI for Empirical Aesthetics, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 486–490
</span></p></div>
<div class="cpabstractcardabstract"><p>Modeling adequate features of speech prosody is one key factor to good performance in affective speech classification. However, the distinction between the prosody that is induced by ‘how’ something is said (i.e., affective prosody) and the prosody that is induced by ‘what’ is being said (i.e., linguistic prosody) is neglected in state-of-the-art feature extraction systems. This results in high variability of the calculated feature values for different sentences that are spoken with the same affective intent, which might negatively impact the performance of the classification. While this distinction between different prosody types is mostly neglected in affective speech recognition, it is explicitly modeled in expressive speech synthesis to create controlled prosodic variation. In this work, we use the expressive Text-To-Speech model Global Style Token Tacotron to extract features for a speech analysis task. We show that the learned prosodic representations outperform state-of-the-art feature extraction systems in the exemplary use case of Escalation Level Classification.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Rubén Solera-Ureña|AUTHOR Rubén Solera-Ureña]], [[Catarina Botelho|AUTHOR Catarina Botelho]], [[Francisco Teixeira|AUTHOR Francisco Teixeira]], [[Thomas Rolland|AUTHOR Thomas Rolland]], [[Alberto Abad|AUTHOR Alberto Abad]], [[Isabel Trancoso|AUTHOR Isabel Trancoso]]
</p><p class="cpabstractcardaffiliationlist">INESC-ID Lisboa, Portugal</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 436–440
</span></p></div>
<div class="cpabstractcardabstract"><p>In the last months, there has been an increasing interest in developing reliable, cost-effective, immediate and easy to use machine learning based tools that can help health care operators, institutions, companies, etc. to optimize their screening campaigns. In this line, several initiatives emerged aimed at the automatic detection of COVID-19 from speech, breathing and coughs, with inconclusive preliminary results. The ComParE 2021 COVID-19 Cough Sub-challenge provides researchers from all over the world a suitable test-bed for the evaluation and comparison of their work. In this paper, we present the INESC-ID contribution to the ComParE 2021 COVID-19 Cough Sub-challenge. We leverage transfer learning to develop a set of three expert classifiers based on deep cough representation extractors. A calibrated decision-level fusion system provides the final classification of coughs recordings as either COVID-19 positive or negative. Results show unweighted average recalls of 72.3% and 69.3% in the development and test sets, respectively. Overall, the experimental assessment shows the potential of this approach although much more research on extended respiratory sounds datasets is needed.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[P. Klumpp|AUTHOR P. Klumpp]]^^1^^
, [[T. Bocklet|AUTHOR T. Bocklet]]^^2^^
, [[T. Arias-Vergara|AUTHOR T. Arias-Vergara]]^^1^^
, [[J.C. Vásquez-Correa|AUTHOR J.C. Vásquez-Correa]]^^1^^
, [[P.A. Pérez-Toro|AUTHOR P.A. Pérez-Toro]]^^1^^
, [[S.P. Bayerl|AUTHOR S.P. Bayerl]]^^2^^
, [[J.R. Orozco-Arroyave|AUTHOR J.R. Orozco-Arroyave]]^^1^^
, [[Elmar Nöth|AUTHOR Elmar Nöth]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^FAU Erlangen-Nürnberg, Germany; ^^2^^TH Nürnberg, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 441–445
</span></p></div>
<div class="cpabstractcardabstract"><p>Against the background of the ongoing pandemic, this year’s Computational Paralinguistics Challenge featured a classification problem to detect Covid-19 from speech recordings. The presented approach is based on a phonetic analysis of speech samples, thus it enabled us not only to discriminate between Covid and non-Covid samples, but also to better understand how the condition influenced an individual’s speech signal.
Our deep acoustic model was trained with datasets collected exclusively from healthy speakers. It served as a tool for segmentation and feature extraction on the samples from the challenge dataset. Distinct patterns were found in the embeddings of phonetic classes that have their place of articulation deep inside the vocal tract. We observed profound differences in classification results for development and test splits, similar to the baseline method.
We concluded that, based on our phonetic findings, it was safe to assume that our classifier was able to reliably detect a pathological condition located in the respiratory tract. However, we found no evidence to claim that the system was able to discriminate between Covid-19 and other respiratory diseases.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Edresson Casanova|AUTHOR Edresson Casanova]]^^1^^
, [[Arnaldo Candido Jr.|AUTHOR Arnaldo Candido Jr.]]^^2^^
, [[Ricardo Corso Fernandes Jr.|AUTHOR Ricardo Corso Fernandes Jr.]]^^2^^
, [[Marcelo Finger|AUTHOR Marcelo Finger]]^^1^^
, [[Lucas Rafael Stefanel Gris|AUTHOR Lucas Rafael Stefanel Gris]]^^2^^
, [[Moacir Antonelli Ponti|AUTHOR Moacir Antonelli Ponti]]^^1^^
, [[Daniel Peixoto Pinto da Silva|AUTHOR Daniel Peixoto Pinto da Silva]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universidade de São Paulo, Brazil; ^^2^^Universidade Tecnológica Federal do Paraná, Brazil</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 446–450
</span></p></div>
<div class="cpabstractcardabstract"><p>In this work, we propose several techniques to address data scarceness in ComParE 2021 COVID-19 identification tasks for the application of deep models such as Convolutional Neural Networks. Data is initially preprocessed into spectrogram or MFCC-gram formats. After preprocessing, we combine three different data augmentation techniques to be applied in model training. Then we employ transfer learning techniques from pretrained audio neural networks. Those techniques are applied to several distinct neural architectures. For COVID-19 identification in speech segments, we obtained competitive results. On the other hand, in the identification task based on cough data, we succeeded in producing a noticeable improvement on existing baselines, reaching 75.9% unweighted average recall (UAR).</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Steffen Illium|AUTHOR Steffen Illium]], [[Robert Müller|AUTHOR Robert Müller]], [[Andreas Sedlmeier|AUTHOR Andreas Sedlmeier]], [[Claudia-Linnhoff Popien|AUTHOR Claudia-Linnhoff Popien]]
</p><p class="cpabstractcardaffiliationlist">LMU München, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 451–455
</span></p></div>
<div class="cpabstractcardabstract"><p>We apply the vision transformer, a deep machine learning model build around the attention mechanism, on mel-spectrogram representations of raw audio recordings. When adding mel-based data augmentation techniques and sample-weighting, we achieve comparable performance on both (PRS and CCS challenge) tasks of ComParE21, outperforming most single model baselines. We further introduce overlapping vertical patching and evaluate the influence of parameter configurations.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Thomas Pellegrini|AUTHOR Thomas Pellegrini]]
</p><p class="cpabstractcardaffiliationlist">IRIT (UMR 5505), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 456–460
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we report experiments in which we aim to automatically classify primate vocalizations according to four primate species of interest, plus a background category with forest sound events. We compare several standard deep neural networks architectures: standard deep convolutional neural networks (CNNs), MobileNets and ResNets. To tackle the small size of the training dataset, less than seven thousand audio files, the data augmentation techniques SpecAugment and MixUp proved to be very useful. Against the very unbalanced classes of the dataset, we used a balanced data sampler that showed to be efficient. An exponential moving average of the model weights allowed to get slight further gains. The best model was a standard 10-layer CNN, comprised of about five million parameters. It achieved a 93.6% Unweighted Average Recall (UAR) on the development set, and generalized well on the test set with a 92.5% UAR, outperforming an official baseline of 86.6%. We quantify the performance gains brought by the augmentations and training tricks, and report fusion and classification experiments based on embeddings that did not bring better results.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Robert Müller|AUTHOR Robert Müller]], [[Steffen Illium|AUTHOR Steffen Illium]], [[Claudia Linnhoff-Popien|AUTHOR Claudia Linnhoff-Popien]]
</p><p class="cpabstractcardaffiliationlist">LMU München, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 461–465
</span></p></div>
<div class="cpabstractcardabstract"><p>Wildlife monitoring is an essential part of most conservation efforts where one of the many building blocks is acoustic monitoring. Acoustic monitoring has the advantage of being non-invasive and applicable in areas of high vegetation. In this work, we present a deep and recurrent architecture for the classification of primate vocalizations that is based upon well proven modules such as bidirectional Long Short-Term Memory neural networks, pooling, normalized softmax and focal loss. Additionally, we apply Bayesian optimization to obtain a suitable set of hyperparameters. We test our approach on a recently published dataset of primate vocalizations that were recorded in an African wildlife sanctuary. Using an ensemble of the best five models found during hyperparameter optimization on the development set, we achieve a Unweighted Average Recall of 89.3% on the test set. Our approach outperforms the best baseline, an ensemble of various deep and shallow classifiers, which achieves a UAR of 87.5%.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Joeri A. Zwerts|AUTHOR Joeri A. Zwerts]], [[Jelle Treep|AUTHOR Jelle Treep]], [[Casper S. Kaandorp|AUTHOR Casper S. Kaandorp]], [[Floor Meewis|AUTHOR Floor Meewis]], [[Amparo C. Koot|AUTHOR Amparo C. Koot]], [[Heysem Kaya|AUTHOR Heysem Kaya]]
</p><p class="cpabstractcardaffiliationlist">Universiteit Utrecht, The Netherlands</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 466–470
</span></p></div>
<div class="cpabstractcardabstract"><p>Automated classification of animal vocalisations is a potentially powerful wildlife monitoring tool. Training robust classifiers requires sizable annotated datasets, which are not easily recorded in the wild. To circumvent this problem, we recorded four primate species under semi-natural conditions in a wildlife sanctuary in Cameroon with the objective to train a classifier capable of detecting species in the wild. Here, we introduce the collected dataset, describe our approach and initial results of classifier development. To increase the efficiency of the annotation process, we condensed the recordings with an energy/change based automatic vocalisation detection. Segmenting the annotated chunks into training, validation and test sets, initial results reveal up to 82% unweighted average recall test set performance in four-class primate species classification.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Georgios Rizos|AUTHOR Georgios Rizos]], [[Jenna Lawson|AUTHOR Jenna Lawson]], [[Zhuoda Han|AUTHOR Zhuoda Han]], [[Duncan Butler|AUTHOR Duncan Butler]], [[James Rosindell|AUTHOR James Rosindell]], [[Krystian Mikolajczyk|AUTHOR Krystian Mikolajczyk]], [[Cristina Banks-Leite|AUTHOR Cristina Banks-Leite]], [[Björn W. Schuller|AUTHOR Björn W. Schuller]]
</p><p class="cpabstractcardaffiliationlist">Imperial College London, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 471–475
</span></p></div>
<div class="cpabstractcardabstract"><p>We study deep bioacoustic event detection through multi-head attention based pooling, exemplified by wildlife monitoring. In the multiple instance learning framework, a core deep neural network learns a projection of the input acoustic signal into a sequence of embeddings, each representing a segment of the input. Sequence pooling is then required to aggregate the information present in the sequence such that we have a single clip-wise representation. We propose an improvement based on Squeeze-and-Excitation mechanisms upon a recently proposed audio tagging ResNet, and show that it performs significantly better than the baseline, as well as a collection of other recent audio models. We then further enhance our model, by performing an extensive comparative study of recent sequence pooling mechanisms, and achieve our best result using multi-head self-attention followed by concatenation of the head-specific pooled embeddings — better than prediction pooling methods, as well as compared to other recent sequence pooling tricks. We perform these experiments on a novel dataset of spider monkey whinny calls we introduce here, recorded in a rainforest in the South-Pacific coast of Costa Rica, with a promising outlook pertaining to minimally invasive wildlife monitoring.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Seong-Hu Kim|AUTHOR Seong-Hu Kim]], [[Yong-Hwa Park|AUTHOR Yong-Hwa Park]]
</p><p class="cpabstractcardaffiliationlist">KAIST, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 66–70
</span></p></div>
<div class="cpabstractcardabstract"><p>In text-independent speaker recognition, each speech is composed of different phonemes depending on spoken text. The conventional neural networks for speaker recognition are static models, so they do not reflect this phoneme-varying characteristic well. To tackle this limitation, we propose an adaptive convolutional neural network (ACNN) for text-independent speaker recognition. The utterance is divided along the time axis into short segments with small fluctuating phonemes. Frame-level features are extracted by applying input-dependent kernels adaptive to each segment. By applying time average pooling and linear layers, utterance-level embeddings extraction and speaker recognition are performed. Adaptive VGG-M using 0.356 seconds segmentation shows better speaker recognition performance than baseline models, with a Top-1 of 86.51% and an EER of 5.68%. It extracts more accurate frame-level embeddings for vowel and nasal phonemes compared to the conventional method without overfitting and large parameters. This framework for text-independent speaker recognition effectively utilizes phonemes and text-varying characteristic of speech.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jiajun Qi|AUTHOR Jiajun Qi]], [[Wu Guo|AUTHOR Wu Guo]], [[Bin Gu|AUTHOR Bin Gu]]
</p><p class="cpabstractcardaffiliationlist">USTC, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 71–75
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we propose a novel bidirectional multiscale feature aggregation (BMFA) network with attentional fusion modules for text-independent speaker verification. The feature maps from different stages of the backbone network are iteratively combined and refined in both a bottom-up and top-down manner. Furthermore, instead of simple concatenation or elementwise addition of feature maps from different stages, an attentional fusion module is designed to compute the fusion weights. Experiments are conducted on the NIST SRE16 and VoxCeleb1 datasets. The experimental results demonstrate the effectiveness of the bidirectional aggregation strategy and show that the proposed attentional fusion module can further improve the performance.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yu-Jia Zhang|AUTHOR Yu-Jia Zhang]]^^1^^
, [[Yih-Wen Wang|AUTHOR Yih-Wen Wang]]^^1^^
, [[Chia-Ping Chen|AUTHOR Chia-Ping Chen]]^^1^^
, [[Chung-Li Lu|AUTHOR Chung-Li Lu]]^^2^^
, [[Bo-Cheng Chan|AUTHOR Bo-Cheng Chan]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^National Sun Yat-sen University, Taiwan; ^^2^^Chunghwa Telecom Laboratories, Taiwan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 76–80
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we develop a system that integrates multiple ideas and techniques inspired by the convolutional block and feature aggregation methods. We begin with the state-of-the-art speaker-embedding model for speaker recognition, namely the model of Emphasized Channel Attention, Propagation, and Aggregation in Time Delay Neural Network, and then gradually experiment with the proposed network modules, including bottleneck residual blocks, attention mechanisms, and feature aggregation methods. In our final model, we replace the Res2Block with SC-Block and we use a hierarchical architecture for feature aggregation. We evaluate the performance of our model on the VoxCeleb1 test set and the 2020 VoxCeleb Speaker Recognition Challenge (VoxSRC20) validation set. The relative improvement of the proposed models over ECAPA-TDNN is 22.8% on VoxCeleb1 and 18.2% on VoxSRC20.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yanfeng Wu|AUTHOR Yanfeng Wu]], [[Junan Zhao|AUTHOR Junan Zhao]], [[Chenkai Guo|AUTHOR Chenkai Guo]], [[Jing Xu|AUTHOR Jing Xu]]
</p><p class="cpabstractcardaffiliationlist">Nankai University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 81–85
</span></p></div>
<div class="cpabstractcardabstract"><p>Deep Convolutional Neural Network (CNN) based speaker embeddings, such as r-vectors, have shown great success in text-independent speaker verification (TI-SV) task. However, previous deep CNN models usually use fixed-length samples for training and employ variable-length utterances for speaker embeddings, which generates a mismatch between training and embedding. To address this issue, we investigate the effect of employing variable-length training samples on CNN-based TI-SV systems and explore two approaches to improve the performance of deep CNN architectures on TI-SV through capturing variable-term contexts. Firstly, we present an improved selective kernel convolution which allows the networks to adaptively switch between short-term and long-term contexts based on variable-length utterances. Secondly, we propose a multi-scale statistics pooling method to aggregate multiple time-scale features from different layers of the networks. We build a novel ResNet34 based architecture with two proposed approaches. Experiments are conducted on the VoxCeleb datasets. The results demonstrate that the effect of using variable-length samples is diverse in different networks and the architecture with two proposed approaches achieves significant improvement over r-vectors baseline system.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tinglong Zhu|AUTHOR Tinglong Zhu]], [[Xiaoyi Qin|AUTHOR Xiaoyi Qin]], [[Ming Li|AUTHOR Ming Li]]
</p><p class="cpabstractcardaffiliationlist">Duke Kunshan University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 86–90
</span></p></div>
<div class="cpabstractcardabstract"><p>Although deep neural networks are successful for many tasks in the speech domain, the high computational and memory costs of deep neural networks make it difficult to directly deploy high-performance Neural Network systems on low-resource embedded devices. There are several mechanisms to reduce the size of the neural networks i.e. parameter pruning, parameter quantization, etc. This paper focuses on how to apply binary neural networks to the task of speaker verification. The proposed binarization of training parameters can largely maintain the performance while significantly reducing storage space requirements and computational costs. Experiment results show that, after binarizing the Convolutional Neural Network, the ResNet34-based network achieves an EER of around 5% on the ''Voxceleb1'' testing dataset and even outperforms the traditional real number network on the text-dependent dataset: ''Xiaole'' while having a 32× memory saving.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Youzhi Tu|AUTHOR Youzhi Tu]], [[Man-Wai Mak|AUTHOR Man-Wai Mak]]
</p><p class="cpabstractcardaffiliationlist">PolyU, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 91–95
</span></p></div>
<div class="cpabstractcardabstract"><p>Mutual information (MI) is useful in unsupervised and self-supervised learning. Maximizing the MI between the low-level features and the learned embeddings can preserve meaningful information in the embeddings, which can contribute to performance gains. This strategy is called deep InfoMax (DIM) in representation learning. In this paper, we follow the DIM framework so that the speaker embeddings can capture more information from the frame-level features. However, a straightforward implementation of DIM may pose a dimensionality imbalance problem because the dimensionality of the frame-level features is much larger than that of the speaker embeddings. This problem can lead to unreliable MI estimation and can even cause detrimental effects on speaker verification. To overcome this problem, we propose to squeeze the frame-level features before MI estimation through some global pooling methods. We call the proposed method squeeze-DIM. Although the squeeze operation inevitably introduces some information loss, we empirically show that the squeeze-DIM can achieve performance gains on both Voxceleb1 and VOiCES-19 tasks. This suggests that the squeeze operation facilitates the MI estimation and maximization in a balanced dimensional space, which helps learn more informative speaker embeddings.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ge Zhu|AUTHOR Ge Zhu]], [[Fei Jiang|AUTHOR Fei Jiang]], [[Zhiyao Duan|AUTHOR Zhiyao Duan]]
</p><p class="cpabstractcardaffiliationlist">University of Rochester, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 96–100
</span></p></div>
<div class="cpabstractcardabstract"><p>State-of-the-art text-independent speaker verification systems typically use cepstral features or filter bank energies as speech features. Recent studies attempted to extract speaker embeddings directly from raw waveforms and have shown competitive results. In this paper, we propose a novel multi-scale waveform encoder that uses three convolution branches with different time scales to compute speech features from the waveform. These features are then processed by squeeze-and-excitation blocks, a multi-level feature aggregator, and a time delayed neural network (TDNN) to compute speaker embedding. We show that the proposed embeddings outperform existing raw-waveform-based speaker embeddings on speaker verification by a large margin. A further analysis of the learned filters shows that the multi-scale encoder attends to different frequency bands at its different scales while resulting in a more flat overall frequency response than any of the single-scale counterparts.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yan Liu|AUTHOR Yan Liu]], [[Zheng Li|AUTHOR Zheng Li]], [[Lin Li|AUTHOR Lin Li]], [[Qingyang Hong|AUTHOR Qingyang Hong]]
</p><p class="cpabstractcardaffiliationlist">Xiamen University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 101–105
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper proposes a multi-task learning network with phoneme-aware and channel-wise attentive learning strategies for text-dependent Speaker Verification (SV). In the proposed structure, the frame-level multi-task learning along with the segment-level adversarial learning is adopted for speaker embedding extraction. The phoneme-aware attentive pooling is exploited on frame-level features in the main network for speaker classifier, with the corresponding posterior probability for the phoneme distribution in the auxiliary subnet. Further, the introduction of Squeeze and Excitation (SE-block) performs dynamic channel-wise feature recalibration, which improves the representational ability. The proposed method exploits speaker idiosyncrasies associated with pass-phrases, and is further improved by the phoneme-aware attentive pooling and SE-block from temporal and channel-wise aspects, respectively. The experiments conducted on RSR2015 Part 1 database confirm that the proposed system achieves outstanding results for text-dependent SV.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hongning Zhu|AUTHOR Hongning Zhu]]^^1^^
, [[Kong Aik Lee|AUTHOR Kong Aik Lee]]^^2^^
, [[Haizhou Li|AUTHOR Haizhou Li]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^NUS, Singapore; ^^2^^A*STAR, Singapore</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 106–110
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper proposes a serialized multi-layer multi-head attention for neural speaker embedding in text-independent speaker verification. In prior works, frame-level features from one layer are aggregated to form an utterance-level representation. Inspired by the Transformer network, our proposed method utilizes the hierarchical architecture of stacked self-attention mechanisms to derive refined features that are more correlated with speakers. Serialized attention mechanism contains a stack of self-attention modules to create fixed-dimensional representations of speakers. Instead of utilizing multi-head attention in parallel, the proposed serialized multi-layer multi-head attention is designed to aggregate and propagate attentive statistics from one layer to the next in a serialized manner. In addition, we employ an input-aware query for each utterance with the statistics pooling. With more layers stacked, the neural network can learn more discriminative speaker embeddings. Experiment results on VoxCeleb1 dataset and SITW dataset show that our proposed method outperforms other baseline methods, including x-vectors and other x-vectors + conventional attentive pooling approaches by 9.7% in EER and 8.1% in DCF10^^-2^^.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Cheng Gong|AUTHOR Cheng Gong]]^^1^^
, [[Longbiao Wang|AUTHOR Longbiao Wang]]^^1^^
, [[Ju Zhang|AUTHOR Ju Zhang]]^^2^^
, [[Shaotong Guo|AUTHOR Shaotong Guo]]^^1^^
, [[Yuguang Wang|AUTHOR Yuguang Wang]]^^2^^
, [[Jianwu Dang|AUTHOR Jianwu Dang]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Tianjin University, China; ^^2^^Huiyan Technology, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 111–115
</span></p></div>
<div class="cpabstractcardabstract"><p>The combination of the recently proposed LPCNet vocoder and a seq-to-seq acoustic model, i.e., Tacotron, has successfully achieved lightweight speech synthesis systems. However, the quality of synthesized speech is often unstable because the precision of the pitch parameters predicted by acoustic models is insufficient, especially for some tonal languages like Chinese and Japanese. In this paper, we propose an end-to-end speech synthesis system, TacoLPCNet, by conditioning LPCNet on Mel spectrogram predictions. First, we extend LPCNet for the Mel spectrogram instead of using explicit pitch information and pitch-related network. Furthermore, we optimize the system by model pruning, multi-frame inference, and increasing frame length, to enable it to meet the conditions required for real-time applications. The objective and subjective evaluation results for various languages show that the proposed system is more stable for tonal languages within the proposed optimization strategies. The experimental results also verify that our model improves synthesis runtime by 3.12 times than that of the baseline on a standard CPU while maintaining naturalness.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zhenhao Ge|AUTHOR Zhenhao Ge]], [[Lakshmish Kaushik|AUTHOR Lakshmish Kaushik]], [[Masanori Omote|AUTHOR Masanori Omote]], [[Saket Kumar|AUTHOR Saket Kumar]]
</p><p class="cpabstractcardaffiliationlist">Sony, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 156–160
</span></p></div>
<div class="cpabstractcardabstract"><p>In the model training with neural networks, although the model performance is always the first priority to optimize, training efficiency also plays an important role in model deployment. There are many ways to speed up training with minimal performance loss, such as training with more GPUs, or with mixed precisions, optimizing training parameters, or making features more compact but more representable. Since mini-batch training is now the go-to approach for many machine learning tasks, minimizing the zero-padding to incorporate samples of different lengths into one batch, is an alternative approach to save training time. Here we propose a batching strategy based on semi-sorted samples, with dynamic batch sizes and batch randomization. By replacing the random batching with the proposed batching strategies, it saves more than 40% training time without compromising performance in training seq2seq neural text-to-speech models based on the Tacotron framework. We also compare it with two other batching strategies and show it performs similarly in terms of saving time and maintaining performance, but with a simpler concept and a smoother tuning parameter to balance between zero-padding and randomness level.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Taejun Bak|AUTHOR Taejun Bak]], [[Jae-Sung Bae|AUTHOR Jae-Sung Bae]], [[Hanbin Bae|AUTHOR Hanbin Bae]], [[Young-Ik Kim|AUTHOR Young-Ik Kim]], [[Hoon-Young Cho|AUTHOR Hoon-Young Cho]]
</p><p class="cpabstractcardaffiliationlist">NCSOFT, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 116–120
</span></p></div>
<div class="cpabstractcardabstract"><p>Methods for modeling and controlling prosody with acoustic features have been proposed for neural text-to-speech (TTS) models. Prosodic speech can be generated by conditioning acoustic features. However, synthesized speech with a large pitch-shift scale suffers from audio quality degradation, and speaker characteristics deformation. To address this problem, we propose a feed-forward Transformer based TTS model that is designed based on the source-filter theory. This model, called //FastPitchFormant//, has a unique structure that handles text and acoustic features in parallel. With modeling each feature separately, the tendency that the model learns the relationship between two features can be mitigated. Owing to its structural characteristics, FastPitchFormant is robust and accurate for pitch control and generates prosodic speech preserving speaker characteristics. The experimental results show that proposed model outperforms the baseline FastPitch.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Taiki Nakamura|AUTHOR Taiki Nakamura]], [[Tomoki Koriyama|AUTHOR Tomoki Koriyama]], [[Hiroshi Saruwatari|AUTHOR Hiroshi Saruwatari]]
</p><p class="cpabstractcardaffiliationlist">University of Tokyo, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 121–125
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents a speech synthesis method based on deep Gaussian process (DGP) and sequence-to-sequence (Seq2Seq) learning toward high-quality end-to-end speech synthesis. Feed-forward and recurrent models using DGP are known to produce more natural synthetic speech than deep neural networks (DNNs) because of Bayesian learning and kernel regression. However, such DGP models consist of a pipeline architecture of independent models, acoustic and duration models, and require a high level of expertise in text processing. The proposed model is based on Seq2Seq learning, which enables a unified training of acoustic and duration models. The encoder and decoder layers are represented by Gaussian process regressions (GPRs) and the parameters are trained as a Bayesian model. We also propose a self-attention mechanism with Gaussian processes to effectively model character-level input in the encoder. The subjective evaluation results show that the proposed Seq2Seq-SA-DGP can synthesize more natural speech than DNNs with self-attention and recurrent structures. Besides, Seq2Seq-SA-DGP reduces the smoothing problems of recurrent structures and is effective when a simple input for an end-to-end system is given.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Naoto Kakegawa|AUTHOR Naoto Kakegawa]]^^1^^
, [[Sunao Hara|AUTHOR Sunao Hara]]^^1^^
, [[Masanobu Abe|AUTHOR Masanobu Abe]]^^1^^
, [[Yusuke Ijima|AUTHOR Yusuke Ijima]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Okayama University, Japan; ^^2^^NTT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 126–130
</span></p></div>
<div class="cpabstractcardabstract"><p>The biggest obstacle to develop end-to-end Japanese text-to-speech (TTS) systems is to estimate phonetic and prosodic information (PPI) from Japanese texts. The following are the reasons: (1) the Kanji characters of the Japanese writing system have multiple corresponding pronunciations, (2) there is no separation mark between words, and (3) an accent nucleus must be assigned at appropriate positions. In this paper, we propose to solve the problems by neural machine translation (NMT) on the basis of encoder-decoder models, and compare NMT models of recurrent neural networks and the Transformer architecture. The proposed model handles texts on token (character) basis, although conventional systems handle them on word basis. To ensure the potential of the proposed approach, NMT models are trained using pairs of sentences and their PPIs that are generated by a conventional Japanese TTS system from 5 million sentences. Evaluation experiments were performed using PPIs that are manually annotated for 5,142 sentences. The experimental results showed that the Transformer architecture has the best performance, with 98.0% accuracy for phonetic information estimation and 95.0% accuracy for PPI estimation. Judging from the results, NMT models are promising toward end-to-end Japanese TTS.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xudong Dai|AUTHOR Xudong Dai]], [[Cheng Gong|AUTHOR Cheng Gong]], [[Longbiao Wang|AUTHOR Longbiao Wang]], [[Kaili Zhang|AUTHOR Kaili Zhang]]
</p><p class="cpabstractcardaffiliationlist">Tianjin University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 131–135
</span></p></div>
<div class="cpabstractcardabstract"><p>Expressive neural text-to-speech (TTS) systems incorporate a style encoder to learn a latent embedding as the style information. However, this embedding process may encode redundant textual information. This phenomenon is called content leakage. Researchers have attempted to resolve this problem by adding an ASR or other auxiliary supervision loss functions. In this study, we propose an unsupervised method called the “information sieve” to reduce the effect of content leakage in prosody transfer. The rationale of this approach is that the style encoder can be forced to focus on style information rather than on textual information contained in the reference speech by a well-designed downsample-upsample filter, i.e., the extracted style embeddings can be downsampled at a certain interval and then upsampled by duplication. Furthermore, we used instance normalization in convolution layers to help the system learn a better latent style space. Objective metrics such as the significantly lower word error rate (WER) demonstrate the effectiveness of this model in mitigating content leakage. Listening tests indicate that the model retains its prosody transferability compared with the baseline models such as the original GST-Tacotron and ASR-guided Tacotron.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Qingyun Dou|AUTHOR Qingyun Dou]], [[Xixin Wu|AUTHOR Xixin Wu]], [[Moquan Wan|AUTHOR Moquan Wan]], [[Yiting Lu|AUTHOR Yiting Lu]], [[Mark J.F. Gales|AUTHOR Mark J.F. Gales]]
</p><p class="cpabstractcardaffiliationlist">University of Cambridge, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 136–140
</span></p></div>
<div class="cpabstractcardabstract"><p>Sequence-to-sequence (seq2seq) models have achieved state-of-the-art performance in a wide range of tasks including Neural Machine Translation (NMT) and Text-To-Speech (TTS). These models are usually trained with teacher forcing, where the reference back-history is used to predict the next token. This makes training efficient, but limits performance, because during inference the free-running back-history must be used. To address this problem, deliberation-based multi-pass seq2seq has been used in NMT. Here the output sequence is generated in multiple passes, each one conditioned on the initial input and the free-running output of the previous pass. This paper investigates, and compares, deliberation-based multi-pass seq2seq for TTS and NMT. For NMT the simplest form of multi-pass approaches, where the free-running first-pass output is combined with the initial input, improves performance. However, applying this scheme to TTS is challenging: the multi-pass model tends to converge to the standard single-pass model, ignoring the previous output. To tackle this issue, a guided attention loss is added, enabling the system to make more extensive use of the free-running output. Experimental results confirm the above analysis and demonstrate that the proposed TTS model outperforms a strong baseline.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Isaac Elias|AUTHOR Isaac Elias]]^^1^^
, [[Heiga Zen|AUTHOR Heiga Zen]]^^2^^
, [[Jonathan Shen|AUTHOR Jonathan Shen]]^^3^^
, [[Yu Zhang|AUTHOR Yu Zhang]]^^3^^
, [[Ye Jia|AUTHOR Ye Jia]]^^3^^
, [[R.J. Skerry-Ryan|AUTHOR R.J. Skerry-Ryan]]^^3^^
, [[Yonghui Wu|AUTHOR Yonghui Wu]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Google, Israel; ^^2^^Google, Japan; ^^3^^Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 141–145
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper introduces //Parallel Tacotron 2//, a non-autoregressive neural text-to-speech model with a fully differentiable duration model which does not require supervised duration signals. The duration model is based on a novel attention mechanism and an iterative reconstruction loss based on Soft Dynamic TimeWarping, this model can learn token-frame alignments as well as token durations automatically. Experimental results show that Parallel Tacotron 2 outperforms baselines in subjective naturalness in several diverse multi speaker evaluations.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Chunyang Wu|AUTHOR Chunyang Wu]], [[Zhiping Xiu|AUTHOR Zhiping Xiu]], [[Yangyang Shi|AUTHOR Yangyang Shi]], [[Ozlem Kalinli|AUTHOR Ozlem Kalinli]], [[Christian Fuegen|AUTHOR Christian Fuegen]], [[Thilo Koehler|AUTHOR Thilo Koehler]], [[Qing He|AUTHOR Qing He]]
</p><p class="cpabstractcardaffiliationlist">Facebook, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 146–150
</span></p></div>
<div class="cpabstractcardabstract"><p>Transformer models have shown promising results in neural speech synthesis due to their superior ability to model long-term dependencies compared to recurrent networks. The computation complexity of transformers increases quadratically with sequence length, making it impractical for many real-time applications. To address the complexity issue in speech synthesis domain, this paper proposes an efficient transformer-based acoustic model that is constant-speed regardless of input sequence length, making it ideal for streaming speech synthesis applications. The proposed model uses a transformer network that predicts the prosody features at phone rate and then an Emformer network to predict the frame-rate spectral features in a streaming manner. Both the transformer and Emformer in the proposed architecture use a self-attention mechanism that involves explicit long-term information, thus providing improved speech naturalness for long utterances. In our experiments, we use a WaveRNN neural vocoder that takes in the predicted spectral features and generates the final audio. The overall architecture achieves human-like speech quality both on short and long utterances while maintaining a low latency and low real-time factor. Our mean opinion score (MOS) evaluation shows that for short utterances, the proposed model achieves a MOS of 4.213 compared to ground-truth with MOS of 4.307; and for long utterances, it also produces high-quality speech with a MOS of 4.201 compared to ground-truth with MOS of 4.360.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ye Jia|AUTHOR Ye Jia]]^^1^^
, [[Heiga Zen|AUTHOR Heiga Zen]]^^2^^
, [[Jonathan Shen|AUTHOR Jonathan Shen]]^^1^^
, [[Yu Zhang|AUTHOR Yu Zhang]]^^1^^
, [[Yonghui Wu|AUTHOR Yonghui Wu]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Google, USA; ^^2^^Google, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 151–155
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper introduces //PnG BERT//, a new encoder model for neural TTS. This model is augmented from the original BERT model, by taking both phoneme and grapheme representations of text as input, as well as the word-level alignment between them. It can be pre-trained on a large text corpus in a self-supervised manner, and fine-tuned in a TTS task. Experimental results show that a neural TTS model using a pre-trained PnG BERT as its encoder yields more natural prosody and more accurate pronunciation than a baseline model using only phoneme input with no pre-training. Subjective side-by-side preference evaluations show that raters have no statistically significant preference between the speech synthesized using a PnG BERT and ground truth recordings from professional speakers.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yuhang Sun|AUTHOR Yuhang Sun]], [[Linju Yang|AUTHOR Linju Yang]], [[Huifeng Zhu|AUTHOR Huifeng Zhu]], [[Jie Hao|AUTHOR Jie Hao]]
</p><p class="cpabstractcardaffiliationlist">OPPO, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 161–165
<a href="./IS2021/MEDIA/0010" class="externallinkbutton" target="_blank">{{$:/causal/ZIP Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>The emergence of deep neural networks has made speech enhancement well developed. Most of the early models focused on estimating the magnitude of spectrum while ignoring the phase, this gives the evaluation result a certain upper limit. Some recent researches proposed deep complex network, which can handle complex inputs, and realize joint estimation of magnitude spectrum and phase spectrum by outputting real and imaginary parts respectively. The encoder-decoder structure in Deep Complex U-net (DCU) has been proven to be effective for complex-valued data. To further improve the performance, in this paper, we design a new network called Funnel Deep Complex U-net (FDCU), which could process magnitude information and phase information separately through one-encoder-two-decoders structure. Moreover, in order to achieve better training effect, we define negative stretched-SI-SNR as the loss function to avoid errors caused by the negative vector angle. Experimental results show that our FDCU model outperforms state-of-the-art approaches in all evaluation metrics.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Amin Edraki|AUTHOR Amin Edraki]]^^1^^
, [[Wai-Yip Chan|AUTHOR Wai-Yip Chan]]^^1^^
, [[Jesper Jensen|AUTHOR Jesper Jensen]]^^2^^
, [[Daniel Fogerty|AUTHOR Daniel Fogerty]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Queen’s University, Canada; ^^2^^Aalborg University, Denmark; ^^3^^University of Illinois at Urbana-Champaign, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 206–210
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose a monaural intrusive speech intelligibility prediction (SIP) algorithm called STGI based on detecting //glimpses// in short-time segments in a spectro-temporal modulation decomposition of the input speech signals. Unlike existing glimpse-based SIP methods, the application of STGI is not limited to additive uncorrelated noise; STGI can be employed in a broad range of degradation conditions. Our results show that STGI performs consistently well across 15 datasets covering degradation conditions including modulated noise, noise reduction processing, reverberation, near-end listening enhancement, checkerboard noise, and gated noise.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yuanhang Qiu|AUTHOR Yuanhang Qiu]], [[Ruili Wang|AUTHOR Ruili Wang]], [[Satwinder Singh|AUTHOR Satwinder Singh]], [[Zhizhong Ma|AUTHOR Zhizhong Ma]], [[Feng Hou|AUTHOR Feng Hou]]
</p><p class="cpabstractcardaffiliationlist">Massey University, New Zealand</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 211–215
</span></p></div>
<div class="cpabstractcardabstract"><p>For speech enhancement, deep complex network based methods have shown promising performance due to their effectiveness in dealing with complex-valued spectrums. Recent speech enhancement methods focus on further optimization of network structures and hyperparameters, however, ignore inherent speech characteristics (e.g., phonetic characteristics), which are important for networks to learn and reconstruct speech information. In this paper, we propose a novel self-supervised learning based phone-fortified (SSPF) method for speech enhancement. Our method explicitly imports phonetic characteristics into a deep complex convolutional network via a Contrastive Predictive Coding (CPC) model pre-trained with self-supervised learning. This operation can greatly improve speech representation learning and speech enhancement performance. Moreover, we also apply the self-attention mechanism to our model for learning long-range dependencies of a speech sequence, which further improves the performance of speech enhancement. The experimental results demonstrate that our SSPF method outperforms existing methods and achieves state-of-the-art performance in terms of speech quality and intelligibility.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Khandokar Md. Nayem|AUTHOR Khandokar Md. Nayem]], [[Donald S. Williamson|AUTHOR Donald S. Williamson]]
</p><p class="cpabstractcardaffiliationlist">Indiana University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 216–220
</span></p></div>
<div class="cpabstractcardabstract"><p>Objective measures of success, such as the perceptual evaluation of speech quality (PESQ), signal-to-distortion ratio (SDR), and short-time objective intelligibility (STOI), have recently been used to optimize deep-learning based speech enhancement algorithms, in an effort to incorporate perceptual constraints into the learning process. Optimizing with these measures, however, may be sub-optimal, since the objective scores do not always strongly correlate with a listener’s evaluation. This motivates the need for approaches that either are optimized with scores that are strongly correlated with human assessments or that use alternative strategies for incorporating perceptual constraints. In this work, we propose an attention-based approach that uses learned speech embedding vectors from a mean-opinion score (MOS) prediction model and a speech enhancement module to jointly enhance noisy speech. Our loss function is jointly optimized with signal approximation and MOS prediction loss terms. We train the model using real-world noisy speech data that has been captured in everyday environments. The results show that our proposed model significantly outperforms other approaches that are optimized with objective measures.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jianwei Zhang|AUTHOR Jianwei Zhang]], [[Suren Jayasuriya|AUTHOR Suren Jayasuriya]], [[Visar Berisha|AUTHOR Visar Berisha]]
</p><p class="cpabstractcardaffiliationlist">Arizona State University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 221–225
<a href="./IS2021/MEDIA/1889" class="externallinkbutton" target="_blank">{{$:/causal/ZIP Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>There are many deterministic mathematical operations (e.g. compression, clipping, downsampling) that degrade speech quality considerably. In this paper we introduce a neural network architecture, based on a modification of the DiffWave model, that aims to restore the original speech signal. DiffWave, a recently published diffusion-based vocoder, has shown state-of-the-art synthesized speech quality and relatively shorter waveform generation times, with only a small set of parameters. We replace the mel-spectrum upsampler in DiffWave with a deep CNN upsampler, which is trained to alter the degraded speech mel-spectrum to match that of the original speech. The model is trained using the original speech waveform, but conditioned on the degraded speech mel-spectrum. Post-training, only the degraded mel-spectrum is used as input and the model generates an estimate of the original speech. Our model results in improved speech quality (original DiffWave model as baseline) on several different experiments. These include improving the quality of speech degraded by LPC-10 compression, AMR-NB compression, and signal clipping. Compared to the original DiffWave architecture, our scheme achieves better performance on several objective perceptual metrics and in subjective comparisons. Improvements over baseline are further amplified in a out-of-corpus evaluation setting.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Qiquan Zhang|AUTHOR Qiquan Zhang]]^^1^^
, [[Qi Song|AUTHOR Qi Song]]^^2^^
, [[Aaron Nicolson|AUTHOR Aaron Nicolson]]^^3^^
, [[Tian Lan|AUTHOR Tian Lan]]^^2^^
, [[Haizhou Li|AUTHOR Haizhou Li]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^NUS, Singapore; ^^2^^Alibaba, China; ^^3^^CSIRO, Australia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 166–170
</span></p></div>
<div class="cpabstractcardabstract"><p>Despite much progress, most temporal convolutional networks (TCN) based speech enhancement models are mainly focused on modeling the long-term temporal contextual dependencies of speech frames, without taking into account the distribution information of speech signal in frequency dimension. In this study, we propose a frequency dimension adaptive attention (FAA) mechanism to improve TCNs, which guides the model selectively emphasize the frequency-wise features with important speech information and also improves the representation capability of network. Our extensive experimental investigation demonstrates that the proposed FAA mechanism is able to consistently provide significant improvements in terms of speech quality (PESQ), intelligibility (STOI) and three other composite metrics. More promisingly, it has better generalization ability to real-world noisy environment.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Changjie Pan|AUTHOR Changjie Pan]]^^1^^
, [[Feng Yang|AUTHOR Feng Yang]]^^2^^
, [[Fei Chen|AUTHOR Fei Chen]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^SUSTech, China; ^^2^^Shenzhen Second People’s Hospital, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 171–175
</span></p></div>
<div class="cpabstractcardabstract"><p>Many early studies reported the importance of vowels and vowel-consonant transitions to speech intelligibility. The present work assessed their perceptual impacts to the understanding of time-compressed sentences, which could be used to measure the temporal acuity during speech understanding. Mandarin sentences were edited to selectively preserve vowel centers or vowel-consonant transitional segments, and compress the rest regions with equipment time compression rates (TCRs) up to 3, including conditions only preserving vowel centers or vowel-consonant transitions. The processed stimuli were presented to normal-hearing listeners to recognize. Results showed that, consistent with the segmental contributions in understanding uncompressed speech, the vowel-only time-compressed stimuli were highly intelligible (i.e., intelligibility score >85%) at a TCR around 3, and vowel-consonant transitions carried important intelligibility information in understanding time-compressed sentences. The time-compression conditions in the present work provided higher intelligibility scores than their counterparties in understanding the PSOLA-processed time-compressed sentences with TCRs around 3. The findings in this work suggested that the design of time compression processing could be guided towards selectively preserving perceptually important speech segments (e.g., vowels) in the future.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ritujoy Biswas|AUTHOR Ritujoy Biswas]]^^1^^
, [[Karan Nathwani|AUTHOR Karan Nathwani]]^^1^^
, [[Vinayak Abrol|AUTHOR Vinayak Abrol]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^IIT Jammu, India; ^^2^^IIIT Delhi, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 176–180
</span></p></div>
<div class="cpabstractcardabstract"><p>In a recent work [1], a novel Delta Function-based Formant Shifting approach was proposed for speech intelligibility improvement. The underlying principle is to dynamically relocate the formants based on their occurrence in the spectrum away from the region of noise. The manner in which the formants are shifted is decided by the parameters of the Delta Function, the optimal values of which are evaluated using Comprehensive Learning Particle Swarm Optimization (CLPSO). Although effective, CLPSO is computationally expensive to the extent that it overshadows its merits in intelligibility improvement. As a solution to this, the current work aims to improve the Short-Time Objective Intelligibility (STOI) of (target) speech using a Delta Function that has been generated using a different (source) language. This transfer learning is based upon the relative positioning of the formant frequencies and pitch values of the source & target language datasets. The proposed approach is demonstrated and validated by subjecting it to experimentation with three different languages under variable noisy conditions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ayako Yamamoto|AUTHOR Ayako Yamamoto]]^^1^^
, [[Toshio Irino|AUTHOR Toshio Irino]]^^1^^
, [[Kenichi Arai|AUTHOR Kenichi Arai]]^^2^^
, [[Shoko Araki|AUTHOR Shoko Araki]]^^2^^
, [[Atsunori Ogawa|AUTHOR Atsunori Ogawa]]^^2^^
, [[Keisuke Kinoshita|AUTHOR Keisuke Kinoshita]]^^2^^
, [[Tomohiro Nakatani|AUTHOR Tomohiro Nakatani]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Wakayama University, Japan; ^^2^^NTT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 181–185
</span></p></div>
<div class="cpabstractcardabstract"><p>Many subjective experiments have been performed to develop objective speech intelligibility measures, but the novel coronavirus outbreak has made it difficult to conduct experiments in a laboratory. One solution is to perform remote testing using crowdsourcing; however, because we cannot control the listening conditions, it is unclear whether the results are entirely reliable. In this study, we compared the speech intelligibility scores obtained from remote and laboratory experiments. The results showed that the mean and standard deviation (SD) of the remote experiments’ speech reception threshold (SRT) were higher than those of the laboratory experiments. However, the variance in the SRTs across the speech-enhancement conditions revealed similarities, implying that remote testing results may be as useful as laboratory experiments to develop an objective measure. We also show that practice session scores are correlated with SRT values. This is a priori information before performing the main tests and would be useful for data screening to reduce the variability of the SRT distribution.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Wenzhe Liu|AUTHOR Wenzhe Liu]], [[Andong Li|AUTHOR Andong Li]], [[Yuxuan Ke|AUTHOR Yuxuan Ke]], [[Chengshi Zheng|AUTHOR Chengshi Zheng]], [[Xiaodong Li|AUTHOR Xiaodong Li]]
</p><p class="cpabstractcardaffiliationlist">CAS, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 186–190
</span></p></div>
<div class="cpabstractcardabstract"><p>Traditional spectral subtraction-type single channel speech enhancement (SE) algorithms often need to estimate interference components including noise and/or reverberation before subtracting them while deep neural network-based SE methods often aim to realize the end-to-end target mapping. In this paper, we show that both denoising and dereverberation can be unified into a common problem by introducing a two-stage paradigm, namely for interference components estimation and speech recovery. In the first stage, we propose to explicitly extract the magnitude of interference components, which serves as the prior information. In the second stage, with the guidance of this estimated magnitude prior, we can expect to better recover the target speech. In addition, we propose a transform module to facilitate the interaction between interference components and the desired speech modalities. Meanwhile, a temporal fusion module is designed to model long-term dependencies without ignoring short-term details. We conduct the experiments on the WSJ0-SI84 corpus and the results on both denoising and dereverberation tasks show that our approach outperforms previous advanced systems and achieves state-of-the-art performance in terms of many objective metrics.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Qiuqiang Kong|AUTHOR Qiuqiang Kong]], [[Haohe Liu|AUTHOR Haohe Liu]], [[Xingjian Du|AUTHOR Xingjian Du]], [[Li Chen|AUTHOR Li Chen]], [[Rui Xia|AUTHOR Rui Xia]], [[Yuxuan Wang|AUTHOR Yuxuan Wang]]
</p><p class="cpabstractcardaffiliationlist">ByteDance, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 191–195
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech enhancement is a task to improve the intelligibility and perceptual quality of degraded speech signals. Recently, neural network-based methods have been applied to speech enhancement. However, many neural network-based methods require users to collect clean speech and background noise for training, which can be time-consuming. In addition, speech enhancement systems trained on particular types of background noise may not generalize well to a wide range of noise. To tackle those problems, we propose a speech enhancement framework trained on weakly labelled data. We first apply a pretrained sound event detection system to detect anchor segments that contain sound events in audio clips. Then, we randomly mix two detected anchor segments as a mixture. We build a conditional source separation network using the mixture and a conditional vector as input. The conditional vector is obtained from the audio tagging predictions on the anchor segments. In inference, we input a noisy speech signal with the one-hot encoding of “Speech” as a condition to the trained system to predict enhanced speech. Our system achieves a PESQ of 2.28 and an SSNR of 8.75 dB on the VoiceBank-DEMAND dataset, outperforming the previous SEGAN system of 2.16 and 7.73 dB respectively.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tsun-An Hsieh|AUTHOR Tsun-An Hsieh]]^^1^^
, [[Cheng Yu|AUTHOR Cheng Yu]]^^1^^
, [[Szu-Wei Fu|AUTHOR Szu-Wei Fu]]^^1^^
, [[Xugang Lu|AUTHOR Xugang Lu]]^^2^^
, [[Yu Tsao|AUTHOR Yu Tsao]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Academia Sinica, Taiwan; ^^2^^NICT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 196–200
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech enhancement (SE) aims to improve speech quality and intelligibility, which are both related to a smooth transition in speech segments that may carry linguistic information, e.g. phones and syllables. In this study, we propose a novel phone-fortified perceptual loss (PFPL) that takes phonetic information into account for training SE models. To effectively incorporate the phonetic information, the PFPL is computed based on latent representations of the //wav2vec// model, a powerful self-supervised encoder that renders rich phonetic information. To more accurately measure the distribution distances of the latent representations, the PFPL adopts the Wasserstein distance as the distance measure. Our experimental results first reveal that the PFPL is more correlated with the perceptual evaluation metrics, as compared to signal-level losses. Moreover, the results showed that the PFPL can enable a deep complex U-Net SE model to achieve highly competitive performance in terms of standardized quality and intelligibility evaluations on the Voice Bank–DEMAND dataset.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Szu-Wei Fu|AUTHOR Szu-Wei Fu]]^^1^^
, [[Cheng Yu|AUTHOR Cheng Yu]]^^1^^
, [[Tsun-An Hsieh|AUTHOR Tsun-An Hsieh]]^^1^^
, [[Peter Plantinga|AUTHOR Peter Plantinga]]^^2^^
, [[Mirco Ravanelli|AUTHOR Mirco Ravanelli]]^^3^^
, [[Xugang Lu|AUTHOR Xugang Lu]]^^4^^
, [[Yu Tsao|AUTHOR Yu Tsao]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Academia Sinica, Taiwan; ^^2^^Ohio State University, USA; ^^3^^Mila, Canada; ^^4^^NICT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 201–205
</span></p></div>
<div class="cpabstractcardabstract"><p>The discrepancy between the cost function used for training a speech enhancement model and human auditory perception usually makes the quality of enhanced speech unsatisfactory. Objective evaluation metrics which consider human perception can hence serve as a bridge to reduce the gap. Our previously proposed MetricGAN was designed to optimize objective metrics by connecting the metric with a discriminator. Because only the scores of the target evaluation functions are needed during training, the metrics can even be non-differentiable. In this study, we propose a MetricGAN+ in which three training techniques incorporating domain-knowledge of speech processing are proposed. With these techniques, experimental results on the VoiceBank-DEMAND dataset show that MetricGAN+ can increase PESQ score by 0.3 compared to the previous MetricGAN and achieve state-of-the-art results (PESQ score = 3.15).</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hoang Long Nguyen|AUTHOR Hoang Long Nguyen]], [[Vincent Renkens|AUTHOR Vincent Renkens]], [[Joris Pelemans|AUTHOR Joris Pelemans]], [[Srividya Pranavi Potharaju|AUTHOR Srividya Pranavi Potharaju]], [[Anil Kumar Nalamalapu|AUTHOR Anil Kumar Nalamalapu]], [[Murat Akbacak|AUTHOR Murat Akbacak]]
</p><p class="cpabstractcardaffiliationlist">Apple, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 226–230
</span></p></div>
<div class="cpabstractcardabstract"><p>Recognition errors are common in human communication. Similar errors often lead to unwanted behaviour in dialogue systems or virtual assistants. In human communication, we can recover from them by repeating misrecognized words or phrases; however in human-machine communication this recovery mechanism is not available. In this paper, we attempt to bridge this gap and present a system that allows a user to correct speech recognition errors in a virtual assistant by repeating misunderstood words. When a user repeats part of the phrase the system rewrites the original query to incorporate the correction. This rewrite allows the virtual assistant to understand the original query successfully. We present an end-to-end 2-step attention pointer network that can generate the the rewritten query by merging together the incorrectly understood utterance with the correction follow-up. We evaluate the model on data collected for this task and compare the proposed model to a rule-based baseline and a standard pointer network. We show that rewriting the original query is an effective way to handle repetition-based recovery and that the proposed model outperforms the rule based baseline, reducing Word Error Rate by 19% relative at 2% False Alarm Rate on annotated data.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Nuo Chen|AUTHOR Nuo Chen]]^^1^^
, [[Chenyu You|AUTHOR Chenyu You]]^^2^^
, [[Yuexian Zou|AUTHOR Yuexian Zou]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Peking University, China; ^^2^^Yale University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 231–235
</span></p></div>
<div class="cpabstractcardabstract"><p>In spoken conversational question answering (SCQA), the answer to the corresponding question is generated by retrieving and then analyzing a fixed spoken document, including multi-part conversations. Most SCQA systems have considered only retrieving information from ordered utterances. However, the sequential order of dialogue is important to build a robust spoken conversational question answering system, and the changes of utterances order may severely result in low-quality and incoherent corpora. To this end, we introduce a self-supervised learning approach, including //incoherence discrimination, insertion detection//, and //question prediction//, to explicitly capture the coreference resolution and dialogue coherence among spoken documents. Specifically, we design a joint learning framework where the auxiliary self-supervised tasks can enable the pre-trained SCQA systems towards more coherent and meaningful spoken dialogue learning. We also utilize the proposed self-supervised learning tasks to capture intra-sentence coherence. Experimental results demonstrate that our proposed method provides more coherent, meaningful, and appropriate responses, yielding superior performance gains compared to the original pre-trained language models. Our method achieves state-of-the-art results on the Spoken-CoQA dataset.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ruolin Su|AUTHOR Ruolin Su]], [[Ting-Wei Wu|AUTHOR Ting-Wei Wu]], [[Biing-Hwang Juang|AUTHOR Biing-Hwang Juang]]
</p><p class="cpabstractcardaffiliationlist">Georgia Tech, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 236–240
</span></p></div>
<div class="cpabstractcardabstract"><p>As an essential component in task-oriented dialogue systems, dialogue state tracking (DST) aims to track human-machine interactions and generate state representations for managing the dialogue. Representations of dialogue states are dependent on the domain ontology and the user’s goals. In several task-oriented dialogues with a limited scope of objectives, dialogue states can be represented as a set of slot-value pairs. As the capabilities of dialogue systems expand to support increasing naturalness in communication, incorporating dialogue act processing into dialogue model design becomes essential. The lack of such consideration limits the scalability of dialogue state tracking models for dialogues having specific objectives and ontology. To address this issue, we formulate and incorporate dialogue acts, and leverage recent advances in machine reading comprehension to predict both categorical and non-categorical types of slots for multi-domain dialogue state tracking. Experimental results show that our models can improve the overall accuracy of dialogue state tracking on the MultiWOZ 2.1 dataset, and demonstrate that incorporating dialogue acts can guide dialogue state design for future task-oriented dialogue systems.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yuya Chiba|AUTHOR Yuya Chiba]]^^1^^
, [[Ryuichiro Higashinaka|AUTHOR Ryuichiro Higashinaka]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^NTT, Japan; ^^2^^Nagoya University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 241–245
</span></p></div>
<div class="cpabstractcardabstract"><p>In recent years, dialogue systems have been applied to daily living. Such systems should be able to associate conversations with dialogue situations, such as a place where a dialogue occurs and the relationship between participants. In this study, we propose a dialogue situation recognition method that understands the perspective of dialogue scenes. The target dialogue situations contain dialogue styles, places, activities, and relations between participants. We used the Corpus of Everyday Japanese Conversation (CEJC), which records natural everyday conversations in various situations for experiments. We experimentally verified the effectiveness of our proposed method using multimodal information for situation recognition.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yoshihiro Yamazaki|AUTHOR Yoshihiro Yamazaki]]^^1^^
, [[Yuya Chiba|AUTHOR Yuya Chiba]]^^2^^
, [[Takashi Nose|AUTHOR Takashi Nose]]^^1^^
, [[Akinori Ito|AUTHOR Akinori Ito]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Tohoku University, Japan; ^^2^^NTT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 246–250
</span></p></div>
<div class="cpabstractcardabstract"><p>Spoken dialogue systems have become widely used in daily life. Such a system must interact with the user socially to truly operate as a partner with humans. In studies of recent dialogue systems, neural response generation led to natural response generation. However, these studies have not considered the acoustic aspects of conversational phenomena, such as the adaptation of prosody. We propose a spoken-response generation model that extends a neural conversational model to deal with pitch control signals. Our proposed model is trained using multimodal dialogue between humans. The generated pitch control signals are input to a speech synthesis system to control the pitch of synthesized speech. Our experiment shows that the proposed system can generate synthesized speech with an appropriate F0 contour as an utterance in context compared to the output of a system without pitch control, although language generation remains an issue.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Weiyuan Xu|AUTHOR Weiyuan Xu]]^^1^^
, [[Peilin Zhou|AUTHOR Peilin Zhou]]^^1^^
, [[Chenyu You|AUTHOR Chenyu You]]^^2^^
, [[Yuexian Zou|AUTHOR Yuexian Zou]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Peking University, China; ^^2^^Yale University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 251–255
</span></p></div>
<div class="cpabstractcardabstract"><p>Few-shot intent detection is a problem that only a few annotated examples are available for unseen intents, and deep models could suffer from the overfitting problem because of scarce data. Existing state-of-the-art few-shot model, Prototypical Network (PN), mainly focus on computing the similarity between examples in a metric space by leveraging sentence-level instance representations. However, sentence-level representations may incorporate highly noisy signals from unrelated words which leads to performance degradation. In this paper, we propose Semantic Transportation Prototypical Network (STPN) to alleviate this issue. Different from the original PN, our approach takes word-level representation as input and uses a new distance metric to obtain better sample matching result. And we reformulate the few-shot classification task into an instance of optimal matching, in which the key word semantic information between examples are expected to be matched and the matching cost is treated as similarity. Specifically, we design Mutual-Semantic mechanism to generate word semantic information, which could reduce the unrelated word noise and enrich key word information. Then, Earth Mover’s Distance (EMD) is applied to find an optimal matching solution. Comprehensive experiments on two benchmark datasets are conducted to validate the effectiveness and generalization of our proposed model.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Li Tang|AUTHOR Li Tang]], [[Yuke Si|AUTHOR Yuke Si]], [[Longbiao Wang|AUTHOR Longbiao Wang]], [[Jianwu Dang|AUTHOR Jianwu Dang]]
</p><p class="cpabstractcardaffiliationlist">Tianjin University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 256–260
</span></p></div>
<div class="cpabstractcardabstract"><p>Traditional dialog policy learning methods train a generic dialog agent to address all situations. However, when the dialog agent encounters a complicated task that involves more than one domain, it becomes difficult to perform concordant actions due to the hybrid information in the multi-domain ontology. Inspired by a real-life scenario at a bank, there are always several specialized departments that deal with different businesses. In this paper, we propose Domain-Specific Multi-Agent Dialog Policy Learning (DSMADPL), in which the dialog system is composed of a set of agents where each agent represents a specialized skill in a particular domain. Every domain-specific agent is first pretrained with supervised learning using a dialog corpus, and then they are jointly improved with multi-agent reinforcement learning. When the dialog system interacts with the user, in each turn the system action is decided by the actions of relevant agents. Experiments conducted on the commonly used MultiWOZ dataset prove the effectiveness of the proposed method, in which dialog success rate increases from 55.0% for the traditional method to 67.2% for our method in multi-domain scenarios.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Haoyu Wang|AUTHOR Haoyu Wang]]^^1^^
, [[John Chen|AUTHOR John Chen]]^^2^^
, [[Majid Laali|AUTHOR Majid Laali]]^^3^^
, [[Kevin Durda|AUTHOR Kevin Durda]]^^3^^
, [[Jeff King|AUTHOR Jeff King]]^^3^^
, [[William Campbell|AUTHOR William Campbell]]^^1^^
, [[Yang Liu|AUTHOR Yang Liu]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Amazon, USA; ^^2^^University of Toronto, Canada; ^^3^^Amazon, Canada</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 261–265
</span></p></div>
<div class="cpabstractcardabstract"><p>Entity Retrieval (ER) in spoken dialog systems is a task that retrieves entities in a catalog for the entity mentions in user utterances. ER systems are susceptible to upstream errors, with Automatic Speech Recognition (ASR) errors being particularly troublesome. In this work, we propose a robust deep learning based ER system by leveraging ASR N-best hypotheses. Specifically, we evaluate different neural architectures to infuse ASR N-best through an attention mechanism. On 750 hours of audio data taken from live traffic, our best model achieves 11.07% relative error reduction while maintaining the same performance on rejecting out-of-domain ER requests.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shuai Zhang|AUTHOR Shuai Zhang]]^^1^^
, [[Jiangyan Yi|AUTHOR Jiangyan Yi]]^^2^^
, [[Zhengkun Tian|AUTHOR Zhengkun Tian]]^^3^^
, [[Ye Bai|AUTHOR Ye Bai]]^^3^^
, [[Jianhua Tao|AUTHOR Jianhua Tao]]^^3^^
, [[Xuefei Liu|AUTHOR Xuefei Liu]]^^2^^
, [[Zhengqi Wen|AUTHOR Zhengqi Wen]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^UCAS, China; ^^2^^CAS, China; ^^3^^UCAS, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 266–270
</span></p></div>
<div class="cpabstractcardabstract"><p>In this work, we propose a new end-to-end (E2E) spelling correction method for post-processing of code-switching automatic speech recognition (ASR). Existing E2E spelling correction models take the hypotheses of ASR as inputs and annotated text as the targets. Due to the powerful modeling capabilities of the E2E model, the training of the correction system is extremely prone to over-fitting. It usually requires sufficient data diversity for reliable training. Therefore, it is difficult to apply the E2E correction models to the code-switching ASR task because of the data shortage. In this paper, we introduce the acoustic features into the spelling correction model. Our method can alleviate the problem of over-fitting and has better performance. Meanwhile, because the acoustic features are encode-free, our proposed model can be applied to the ASR model without significantly increasing the computational cost. The experimental results on ASRU 2019 Mandarin-English Code-switching Challenge data set show that the proposed method achieves 11.14% relative error rate reduction compared with baseline.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tatiana Likhomanenko|AUTHOR Tatiana Likhomanenko]]^^1^^
, [[Qiantong Xu|AUTHOR Qiantong Xu]]^^1^^
, [[Vineel Pratap|AUTHOR Vineel Pratap]]^^1^^
, [[Paden Tomasello|AUTHOR Paden Tomasello]]^^1^^
, [[Jacob Kahn|AUTHOR Jacob Kahn]]^^1^^
, [[Gilad Avidov|AUTHOR Gilad Avidov]]^^1^^
, [[Ronan Collobert|AUTHOR Ronan Collobert]]^^1^^
, [[Gabriel Synnaeve|AUTHOR Gabriel Synnaeve]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Facebook, USA; ^^2^^Facebook, France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 311–315
</span></p></div>
<div class="cpabstractcardabstract"><p>Is pushing numbers on a single benchmark valuable in automatic speech recognition? Research results in acoustic modeling are typically evaluated based on performance on a single dataset. While the research community has coalesced around various benchmarks, we set out to understand generalization performance in acoustic modeling across datasets — in particular, if models trained on a single dataset transfer to other (possibly out-of-domain) datasets. Further, we demonstrate that when a large enough set of benchmarks is used, average word error rate (WER) performance over them provides a good proxy for performance on real-world data. Finally, we show that training a single acoustic model on the most widely-used datasets — combined — reaches competitive performance on both research and real-world benchmarks.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Max W.Y. Lam|AUTHOR Max W.Y. Lam]]^^1^^
, [[Jun Wang|AUTHOR Jun Wang]]^^1^^
, [[Chao Weng|AUTHOR Chao Weng]]^^1^^
, [[Dan Su|AUTHOR Dan Su]]^^1^^
, [[Dong Yu|AUTHOR Dong Yu]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Tencent, China; ^^2^^Tencent, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 316–320
</span></p></div>
<div class="cpabstractcardabstract"><p>End-to-end speech recognition generally uses hand-engineered acoustic features as input and excludes the feature extraction module from its joint optimization. To extract learnable and adaptive features and mitigate information loss, we propose a new encoder that adopts globally attentive locally recurrent (GALR) networks and directly takes raw waveform as input. We observe improved ASR performance and robustness by applying GALR on different window lengths to aggregate fine-grain temporal information into multi-scale acoustic features. Experiments are conducted on a benchmark dataset //AISHELL-2// and two large-scale Mandarin speech corpus of 5,000 hours and 21,000 hours. With faster speed and comparable model size, our proposed multi-scale GALR waveform encoder achieved consistent character error rate reductions (CERRs) from 7.9% to 28.1% relative over strong baselines, including Conformer and TDNN-Conformer. In particular, our approach demonstrated notable robustness than the traditional handcrafted features and outperformed the baseline MFCC-based TDNN-Conformer model by a 15.2% CERR on a music-mixed real-world speech test set.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kathleen Siminyu|AUTHOR Kathleen Siminyu]]^^1^^
, [[Xinjian Li|AUTHOR Xinjian Li]]^^2^^
, [[Antonios Anastasopoulos|AUTHOR Antonios Anastasopoulos]]^^3^^
, [[David R. Mortensen|AUTHOR David R. Mortensen]]^^2^^
, [[Michael R. Marlo|AUTHOR Michael R. Marlo]]^^4^^
, [[Graham Neubig|AUTHOR Graham Neubig]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Georgia Tech, USA; ^^2^^Carnegie Mellon University, USA; ^^3^^George Mason University, USA; ^^4^^Mizzou, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 271–275
</span></p></div>
<div class="cpabstractcardabstract"><p>Models pre-trained on multiple languages have shown significant promise for improving speech recognition, particularly for low-resource languages. In this work, we focus on phoneme recognition using Allosaurus, a method for multilingual recognition based on phonetic annotation, which incorporates phonological knowledge through a language-dependent allophone layer that associates a universal narrow phone-set with the phonemes that appear in each language. To evaluate in a challenging real-world scenario, we curate phone recognition datasets for Bukusu and Saamia, two varieties of the Luhya language cluster of western Kenya and eastern Uganda. To our knowledge, these datasets are the first of their kind. We carry out similar experiments on the dataset of an endangered Tangkhulic language, East Tusom, a Tibeto-Burman language variety spoken mostly in India. We explore both zero-shot and few-shot recognition by fine-tuning using datasets of varying sizes (10 to 1000 utterances). We find that fine-tuning of Allosaurus, even with just 100 utterances, leads to significant improvements in phone error rates.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Erfan Loweimi|AUTHOR Erfan Loweimi]]^^1^^
, [[Zoran Cvetkovic|AUTHOR Zoran Cvetkovic]]^^2^^
, [[Peter Bell|AUTHOR Peter Bell]]^^1^^
, [[Steve Renals|AUTHOR Steve Renals]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Edinburgh, UK; ^^2^^King’s College London, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 276–280
</span></p></div>
<div class="cpabstractcardabstract"><p>Source-filter modelling is among the fundamental techniques in speech processing with a wide range of applications. In acoustic modelling, features such as MFCC and PLP which parametrise the filter component are widely employed. In this paper, we investigate the efficacy of building acoustic models from the raw filter and source components. The raw magnitude spectrum, as the primary information stream, is decomposed into the excitation and vocal tract information streams via cepstral liftering. Then, acoustic models are built via multi-head CNNs which, among others, allow for processing each individual stream via a sequence of bespoke transforms and fusing them at an optimal level of abstraction. We discuss the possible advantages of such information factorisation and recombination, investigate the dynamics of these models and explore the optimal fusion level. Furthermore, we illustrate the CNN’s learned filters and provide some interpretation for the captured patterns. The proposed approach with optimal fusion scheme results in up to 14% and 7% relative WER reduction in WSJ and Aurora-4 tasks.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Masakiyo Fujimoto|AUTHOR Masakiyo Fujimoto]], [[Hisashi Kawai|AUTHOR Hisashi Kawai]]
</p><p class="cpabstractcardaffiliationlist">NICT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 281–285
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper addresses a noise-robust automatic speech recognition (ASR) method under the constraints of real-time, one-pass, and single-channel processing. Under such strong constraints, single-channel speech enhancement becomes a key technology because methods with multiple-passes or batch processing, such as acoustic model adaptation, are not suitable for use. However, single-channel speech enhancement often degrades ASR performance due to speech distortion. To overcome this problem, we propose a noise robust acoustic modeling method based on the stream-wise transformer model. The proposed method accepts multi-stream features obtained by multiple single-channel speech enhancement methods as input and selectively uses an appropriate feature stream according to the noise environment by paying attention to the noteworthy stream on the basis of multi-head attention. The proposed method considers the attention for the stream direction instead of the time series direction, and it is thus capable of real-time and low-latency processing. Comparative evaluations reveal that the proposed method successfully improves the accuracy of ASR in noisy environments and reduces the number of model parameters even under strong constraints.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Anton Ratnarajah|AUTHOR Anton Ratnarajah]], [[Zhenyu Tang|AUTHOR Zhenyu Tang]], [[Dinesh Manocha|AUTHOR Dinesh Manocha]]
</p><p class="cpabstractcardaffiliationlist">University of Maryland, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 286–290
</span></p></div>
<div class="cpabstractcardabstract"><p>We present a Generative Adversarial Network (GAN) based room impulse response generator (IR-GAN) for generating realistic synthetic room impulse responses (RIRs). IR-GAN extracts acoustic parameters from captured real-world RIRs and uses these parameters to generate new synthetic RIRs. We use these generated synthetic RIRs to improve far-field automatic speech recognition in new environments that are different from the ones used in training datasets. In particular, we augment the far-field speech training set by convolving our synthesized RIRs with a clean LibriSpeech dataset [1]. We evaluate the quality of our synthetic RIRs on the far-field LibriSpeech test set created using real-world RIRs from the BUT ReverbDB [2] and AIR [3] datasets. Our IR-GAN reports up to an 8.95% lower error rate than Geometric Acoustic Simulator (GAS) in far-field speech recognition benchmarks. We further improve the performance when we combine our synthetic RIRs with synthetic impulse responses generated using GAS. This combination can reduce the word error rate by up to 14.3% in far-field speech recognition benchmarks.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Junqi Chen|AUTHOR Junqi Chen]], [[Xiao-Lei Zhang|AUTHOR Xiao-Lei Zhang]]
</p><p class="cpabstractcardaffiliationlist">Northwestern Polytechnical University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 291–295
</span></p></div>
<div class="cpabstractcardabstract"><p>Recently, speech recognition with ad-hoc microphone arrays has received much attention. It is known that channel selection is an important problem of ad-hoc microphone arrays, however, this topic seems far from explored in speech recognition yet, particularly with a large-scale ad-hoc microphone array. To address this problem, we propose a //Scaling Sparsemax// algorithm for the channel selection problem of the speech recognition with large-scale ad-hoc microphone arrays. Specifically, we first replace the conventional Softmax operator in the stream attention mechanism of a multichannel end-to-end speech recognition system with Sparsemax, which conducts channel selection by forcing the channel weights of noisy channels to zero. Because Sparsemax punishes the weights of many channels to zero harshly, we propose Scaling Sparsemax which punishes the channels mildly by setting the weights of very noisy channels to zero only. Experimental results with ad-hoc microphone arrays of over 30 channels under the conformer speech recognition architecture show that the proposed Scaling Sparsemax yields a word error rate of over 30% lower than Softmax on simulation data sets, and over 20% lower on semi-real data sets, in test scenarios with both matched and mismatched channel numbers.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Feng-Ju Chang|AUTHOR Feng-Ju Chang]], [[Martin Radfar|AUTHOR Martin Radfar]], [[Athanasios Mouchtaris|AUTHOR Athanasios Mouchtaris]], [[Maurizio Omologo|AUTHOR Maurizio Omologo]]
</p><p class="cpabstractcardaffiliationlist">Amazon, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 296–300
</span></p></div>
<div class="cpabstractcardabstract"><p>Multi-channel inputs offer several advantages over single-channel, to improve the robustness of on-device speech recognition systems. Recent work on multi-channel transformer, has proposed a way to incorporate such inputs into end-to-end ASR for improved accuracy. However, this approach is characterized by a high computational complexity, which prevents it from being deployed in on-device systems. In this paper, we present a novel speech recognition model, //Multi-Channel Transformer Transducer (MCTT)//, which features end-to-end multi-channel training, low computation cost, and low latency so that it is suitable for streaming decoding in on-device speech recognition. In a far-field in-house dataset, our MCTT outperforms stagewise multi-channel models with transformer-transducer up to 6.01% relative WER improvement (WERR). In addition, MCTT outperforms the multi-channel transformer up to 11.62% WERR, and is 15.8 times faster in terms of inference speed. We further show that we can improve the computational cost of MCTT by constraining the future and previous context in attention computations.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Emiru Tsunoo|AUTHOR Emiru Tsunoo]]^^1^^
, [[Kentaro Shibata|AUTHOR Kentaro Shibata]]^^1^^
, [[Chaitanya Narisetty|AUTHOR Chaitanya Narisetty]]^^2^^
, [[Yosuke Kashiwagi|AUTHOR Yosuke Kashiwagi]]^^1^^
, [[Shinji Watanabe|AUTHOR Shinji Watanabe]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Sony, Japan; ^^2^^Carnegie Mellon University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 301–305
</span></p></div>
<div class="cpabstractcardabstract"><p>Although end-to-end automatic speech recognition (E2E ASR) has achieved great performance in tasks that have numerous paired data, it is still challenging to make E2E ASR robust against noisy and low-resource conditions. In this study, we investigated data augmentation methods for E2E ASR in distant-talk scenarios. E2E ASR models are trained on the series of CHiME challenge datasets, which are suitable tasks for studying robustness against noisy and spontaneous speech. We propose to use three augmentation methods and their combinations: 1) data augmentation using text-to-speech (TTS) data, 2) cycle-consistent generative adversarial network (Cycle-GAN) augmentation trained to map two different audio characteristics, the one of clean speech and of noisy recordings, to match the testing condition, and 3) pseudo-label augmentation provided by the pretrained ASR module for smoothing label distributions. Experimental results using the CHiME-6/CHiME-4 datasets show that each augmentation method individually improves the accuracy on top of the conventional SpecAugment; further improvements are obtained by combining these approaches. We achieved 4.3% word error rate (WER) reduction, which was more significant than that of the SpecAugment, when we combine all three augmentations for the CHiME-6 task.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Guodong Ma|AUTHOR Guodong Ma]]^^1^^
, [[Pengfei Hu|AUTHOR Pengfei Hu]]^^2^^
, [[Jian Kang|AUTHOR Jian Kang]]^^2^^
, [[Shen Huang|AUTHOR Shen Huang]]^^2^^
, [[Hao Huang|AUTHOR Hao Huang]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Xinjiang University, China; ^^2^^Tencent, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 306–310
</span></p></div>
<div class="cpabstractcardabstract"><p>In Uyghur speech, consonant and vowel reduction are often encountered, especially in spontaneous speech with high speech rate, which will cause a degradation of speech recognition performance. To solve this problem, we propose an effective phone mask training method for Conformer-based Uyghur end-to-end (E2E) speech recognition. The idea is to randomly mask off a certain percentage features of phones during model training, which simulates the above verbal phenomena and facilitates E2E model to learn more contextual information. According to experiments, the above issues can be greatly alleviated. In addition, deep investigations are carried out into different units in masking, which shows the effectiveness of our proposed masking unit. We also further study the masking method and optimize filling strategy of phone mask. Finally, compared with Conformer-based E2E baseline without mask training, our model demonstrates about 5.51% relative Word Error Rate (WER) reduction on reading speech and 12.92% on spontaneous speech, respectively. The above approach has also been verified on test-set of open-source data THUYG-20, which shows 20% relative improvements.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yuanbo Hou|AUTHOR Yuanbo Hou]]^^1^^
, [[Zhesong Yu|AUTHOR Zhesong Yu]]^^2^^
, [[Xia Liang|AUTHOR Xia Liang]]^^2^^
, [[Xingjian Du|AUTHOR Xingjian Du]]^^2^^
, [[Bilei Zhu|AUTHOR Bilei Zhu]]^^2^^
, [[Zejun Ma|AUTHOR Zejun Ma]]^^2^^
, [[Dick Botteldooren|AUTHOR Dick Botteldooren]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Ghent University, Belgium; ^^2^^ByteDance, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 321–325
</span></p></div>
<div class="cpabstractcardabstract"><p>Many previous audio-visual voice-related works focus on speech, ignoring the singing voice in the growing number of musical video streams on the Internet. For processing diverse musical video data, voice activity detection is a necessary step. This paper attempts to detect the speech and singing voices of target performers in musical video streams using audio-visual information. To integrate information of audio and visual modalities, a multi-branch network is proposed to learn audio and image representations, and the representations are fused by attention based on semantic similarity to shape the acoustic representations through the probability of anchor vocalization. Experiments show the proposed audio-visual multi-branch network far outperforms the audio-only model in challenging acoustic environments, indicating the cross-modal information fusion based on semantic correlation is sensible and successful.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Saurabhchand Bhati|AUTHOR Saurabhchand Bhati]], [[Jesús Villalba|AUTHOR Jesús Villalba]], [[Piotr Żelasko|AUTHOR Piotr Żelasko]], [[Laureano Moro-Velázquez|AUTHOR Laureano Moro-Velázquez]], [[Najim Dehak|AUTHOR Najim Dehak]]
</p><p class="cpabstractcardaffiliationlist">Johns Hopkins University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 366–370
</span></p></div>
<div class="cpabstractcardabstract"><p>Automatic detection of phoneme or word-like units is one of the core objectives in zero-resource speech processing. Recent attempts employ self-supervised training methods, such as contrastive predictive coding (CPC), where the next frame is predicted given past context. However, CPC only looks at the audio signal’s frame-level structure. We overcome this limitation with a segmental contrastive predictive coding (SCPC) framework that can model the signal structure at a higher level e.g. at the phoneme level. In this framework, a convolutional neural network learns frame-level representation from the raw waveform via noise-contrastive estimation (NCE). A differentiable boundary detector finds variable-length segments, which are then used to optimize a segment encoder via NCE to learn segment representations. The differentiable boundary detector allows us to train frame-level and segment-level encoders jointly. Typically, phoneme and word segmentation are treated as separate tasks. We unify them and experimentally show that our single model outperforms existing phoneme and word segmentation methods on TIMIT and Buckeye datasets. We analyze the impact of boundary threshold and when is the right time to include the segmental loss in the learning process.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xuenan Xu|AUTHOR Xuenan Xu]]^^1^^
, [[Heinrich Dinkel|AUTHOR Heinrich Dinkel]]^^2^^
, [[Mengyue Wu|AUTHOR Mengyue Wu]]^^1^^
, [[Kai Yu|AUTHOR Kai Yu]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^SJTU, China; ^^2^^Xiaomi, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 371–375
</span></p></div>
<div class="cpabstractcardabstract"><p>Voice activity detection (VAD) is an essential pre-processing component for speech-related tasks such as automatic speech recognition (ASR). Traditional VAD systems require strong frame-level supervision for training, inhibiting their performance in real-world test scenarios. Previously, the general-purpose VAD (GPVAD) framework has been proposed to enhance noise robustness significantly. However, GPVAD models are comparatively large and only work for offline evaluation. This work proposes the use of a knowledge distillation framework, where a (large, offline) teacher model provides frame-level supervision to a (light, online) student model. Our experiments verify that our proposed lightweight student models outperform GPVAD on all test sets, including clean, synthetic and real-world scenarios. Our smallest student model only uses 2.2% of the parameters and 15.9% duration cost of our teacher model for inference when evaluated on a Raspberry Pi.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ui-Hyun Kim|AUTHOR Ui-Hyun Kim]]
</p><p class="cpabstractcardaffiliationlist">Toshiba, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 326–330
</span></p></div>
<div class="cpabstractcardabstract"><p>Recent audio-visual voice activity detectors based on supervised learning require large amounts of labeled training data with manual mouth-region cropping in videos, and the performance is sensitive to a mismatch between the training and testing noise conditions. This paper introduces contrastive self-supervised learning for audio-visual voice activity detection as a possible solution to such problems. In addition, a novel self-supervised learning framework is proposed to improve overall training efficiency and testing performance on noise-corrupted datasets, as in real-world scenarios. This framework includes a branched audio encoder and a noise-tolerant loss function to cope with the uncertainty of speech and noise feature separation in a self-supervised manner. Experimental results, particularly under mismatched noise conditions, demonstrate the improved performance compared with a self-supervised learning baseline and a supervised learning framework.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hyun-Jin Park|AUTHOR Hyun-Jin Park]], [[Pai Zhu|AUTHOR Pai Zhu]], [[Ignacio Lopez Moreno|AUTHOR Ignacio Lopez Moreno]], [[Niranjan Subrahmanya|AUTHOR Niranjan Subrahmanya]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 331–335
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose self-training with noisy student-teacher approach for streaming keyword spotting, that can utilize large-scale unlabeled data and aggressive data augmentation. The proposed method applies aggressive data augmentation (spectral augmentation) on the input of both student and teacher and utilize unlabeled data at scale, which significantly boosts the accuracy of student against challenging conditions. Such aggressive augmentation usually degrades model performance when used with supervised training with hard-labeled data. Experiments show that aggressive spec augmentation on baseline supervised training method degrades accuracy, while the proposed self-training with noisy student-teacher training improves accuracy of some difficult-conditioned test sets by as much as 60%.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Osamu Ichikawa|AUTHOR Osamu Ichikawa]]^^1^^
, [[Kaito Nakano|AUTHOR Kaito Nakano]]^^1^^
, [[Takahiro Nakayama|AUTHOR Takahiro Nakayama]]^^2^^
, [[Hajime Shirouzu|AUTHOR Hajime Shirouzu]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Shiga University, Japan; ^^2^^University of Tokyo, Japan; ^^3^^NIER, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 336–340
</span></p></div>
<div class="cpabstractcardabstract"><p>Attempts are being made to visualize the learning process by attaching microphones to students participating in group works conducted in classrooms, and subsequently, their speech using an automatic speech recognition (ASR) system. However, the voices of nearby students frequently become mixed with the output speech data, even when using close-talk microphones with noise robustness. To resolve this challenge, in this paper, we propose using multi-channel voice activity detection (VAD) to determine the speech segments of a target speaker while also referencing the output speech from the microphones attached to the other speakers in the group. The conducted evaluation experiments using the actual speech of middle school students during group work lessons showed that our proposed method significantly improves the frame error rate (38.7%) compared to that of the conventional technology, single-channel VAD (49.5%). In our view, conventional approaches, such as distributed microphone arrays and deep learning, are somewhat dependent on the temporal stationarity of the speakers’ positions. However, the proposed method is essentially a VAD process and thus works robustly. It is the practical and proven solution in a real classroom environment.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hengshun Zhou|AUTHOR Hengshun Zhou]]^^1^^
, [[Jun Du|AUTHOR Jun Du]]^^1^^
, [[Hang Chen|AUTHOR Hang Chen]]^^1^^
, [[Zijun Jing|AUTHOR Zijun Jing]]^^2^^
, [[Shifu Xiong|AUTHOR Shifu Xiong]]^^2^^
, [[Chin-Hui Lee|AUTHOR Chin-Hui Lee]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^USTC, China; ^^2^^iFLYTEK, China; ^^3^^Georgia Tech, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 341–345
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose an information fusion approach to audio-visual voice activity detection (AV-VAD) based on cross-modal teacher-student learning leveraging on factorized bilinear pooling (FBP) and Kullback-Leibler (KL) regularization. First, we design an audio-visual network by using FBP fusion to fully utilize the interaction between audio and video modalities. Next, to transfer the rich information in audio-based VAD (A-VAD) model trained with a massive audio-only dataset to AV-VAD model built with relatively limited multi-modal data, a cross-modal teacher-student learning framework is then proposed based on cross entropy with regulated KL-divergence. Finally, evaluated on an in-house dataset recorded in realistic conditions using standard VAD metrics, the proposed approach yields consistent and significant improvements over other state-of-the-art techniques. Moreover, by applying our AV-VAD technique to an audio-visual Chinese speech recognition task, the character error rate is reduced by 24.15% and 8.66% from A-VAD and the baseline AV-VAD systems, respectively.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Naoki Makishima|AUTHOR Naoki Makishima]], [[Mana Ihori|AUTHOR Mana Ihori]], [[Tomohiro Tanaka|AUTHOR Tomohiro Tanaka]], [[Akihiko Takashima|AUTHOR Akihiko Takashima]], [[Shota Orihashi|AUTHOR Shota Orihashi]], [[Ryo Masumura|AUTHOR Ryo Masumura]]
</p><p class="cpabstractcardaffiliationlist">NTT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 346–350
</span></p></div>
<div class="cpabstractcardabstract"><p>We present a novel personalized voice activity detection (PVAD) learning method that does not require enrollment data during training. PVAD is a task to detect the speech segments of a specific target speaker at the frame level using enrollment speech of the target speaker. Since PVAD must learn speakers’ speech variations to clarify the boundary between speakers, studies on PVAD used large-scale datasets that contain many utterances for each speaker. However, the datasets to train a PVAD model are often limited because substantial cost is needed to prepare such a dataset. In addition, we cannot utilize the datasets used to train the standard VAD because they often lack speaker labels. To solve these problems, our key idea is to use one utterance as both a kind of enrollment speech and an input to the PVAD during training, which enables PVAD training without enrollment speech. In our proposed method, called enrollment-less training, we augment one utterance so as to create variability between the input and the enrollment speech while keeping the speaker identity, which avoids the mismatch between training and inference. Our experimental results demonstrate the efficacy of the method.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yuto Nonaka|AUTHOR Yuto Nonaka]]^^1^^
, [[Chee Siang Leow|AUTHOR Chee Siang Leow]]^^1^^
, [[Akio Kobayashi|AUTHOR Akio Kobayashi]]^^2^^
, [[Takehito Utsuro|AUTHOR Takehito Utsuro]]^^3^^
, [[Hiromitsu Nishizaki|AUTHOR Hiromitsu Nishizaki]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Yamanashi, Japan; ^^2^^NTUT, Japan; ^^3^^University of Tsukuba, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 351–355
</span></p></div>
<div class="cpabstractcardabstract"><p>When applying voice activity detection (VAD) to a noisy sound, in general, noise reduction (speech separation) and VAD are performed separately. In this case, the noise reduction may suppress the speech, and the VAD may not work well for the speech after the noise reduction. This study proposes a VAD model through the tandem connection of neural network-based noise separation and a VAD model. By training the two models simultaneously, the noise separation model is expected to be trained to consider the VAD results, and thus effective noise separation can be achieved. Moreover, the improved speech/noise separation model will improve the accuracy of the VAD model. In this research, we deal with real-live speeches from baseball games, which have a very poor signal-to-noise ratio. The VAD experiments showed that the VAD performance at the frame level achieved 4.2 points improvement in F1-score by tandemly connecting the speech/noise separation model and the VAD model.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Young D. Kwon|AUTHOR Young D. Kwon]], [[Jagmohan Chauhan|AUTHOR Jagmohan Chauhan]], [[Cecilia Mascolo|AUTHOR Cecilia Mascolo]]
</p><p class="cpabstractcardaffiliationlist">University of Cambridge, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 356–360
</span></p></div>
<div class="cpabstractcardabstract"><p>Various incremental learning (IL) approaches have been proposed to help deep learning models learn new tasks/classes continuously without forgetting what was learned previously (i.e., avoid catastrophic forgetting). With the growing number of deployed audio sensing applications that need to dynamically incorporate new tasks and changing input distribution from users, the ability of IL on-device becomes essential for both efficiency and user privacy.
However, prior works suffer from high computational costs and storage demands which hinders the deployment of IL on-device. In this work, to overcome these limitations, we develop an end-to-end and on-device IL framework, FastICARL, that incorporates an exemplar-based IL and quantization in the context of audio-based applications. We first employ k-nearest-neighbor to reduce the latency of IL. Then, we jointly utilize a quantization technique to decrease the storage requirements of IL. We implement FastICARL on two types of mobile devices and demonstrate that FastICARL remarkably decreases the IL time up to 78–92% and the storage requirements by 2–4 times without sacrificing its performance. FastICARL enables complete on-device IL, ensuring user privacy as the user data does not need to leave the device.</p></div>
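A rough sketch of the two ingredients named above, under one reading of the abstract rather than the released implementation: per-class exemplars chosen by proximity to the class mean embedding, then stored as int8 to reduce memory.

```python
import numpy as np

def build_exemplar_set(embeddings, m):
    """embeddings: (n, d) float features of one class; keep the m closest to the class mean."""
    mean = embeddings.mean(axis=0)
    order = np.argsort(np.linalg.norm(embeddings - mean, axis=1))
    exemplars = embeddings[order[:m]]
    scale = np.abs(exemplars).max() / 127.0 + 1e-12        # symmetric int8 quantisation
    return (exemplars / scale).round().astype(np.int8), scale

def dequantize(quantized, scale):
    return quantized.astype(np.float32) * scale            # restore approximate floats for replay
```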
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Bo Wei|AUTHOR Bo Wei]]^^1^^
, [[Meirong Yang|AUTHOR Meirong Yang]]^^1^^
, [[Tao Zhang|AUTHOR Tao Zhang]]^^1^^
, [[Xiao Tang|AUTHOR Xiao Tang]]^^1^^
, [[Xing Huang|AUTHOR Xing Huang]]^^1^^
, [[Kyuhong Kim|AUTHOR Kyuhong Kim]]^^2^^
, [[Jaeyun Lee|AUTHOR Jaeyun Lee]]^^2^^
, [[Kiho Cho|AUTHOR Kiho Cho]]^^2^^
, [[Sung-Un Park|AUTHOR Sung-Un Park]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Samsung, China; ^^2^^Samsung, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 361–365
</span></p></div>
<div class="cpabstractcardabstract"><p>Open-vocabulary keyword spotting (KWS) aims to detect arbitrary keywords from continuous speech, which allows users to define their personal keywords. In this paper, we propose a novel location guided end-to-end (E2E) keyword spotting system. Firstly, we predict endpoints of keyword in the entire speech based on attention mechanism. Secondly, we calculate the existence probability of keyword by fusing the located keyword speech segment and text with local attention. The results on Librispeech dataset and Google speech commands dataset show our proposed method significantly outperforms the baseline method and the latest small-footprint E2E KWS method.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Aurélie Chlébowski|AUTHOR Aurélie Chlébowski]], [[Nicolas Ballier|AUTHOR Nicolas Ballier]]
</p><p class="cpabstractcardaffiliationlist">CLILLAC-ARP (EA 3967), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 376–380
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper proposes to evaluate the method used in Chlébowski and Ballier [1] for the annotation of F₀ variations in nasal grunts. We discuss and test issues raised by this kind of approach exclusively based on visual inspection of the F₀ tracking in //Praat// [2]. Results tend to show that consistency in the annotation depends on acoustic features intrinsic to the grunts such as F₀ slope and duration that are sensitive to display settings. We nonetheless acknowledge the potential benefits of such a method for automation and implementation in IA and in this respect, we introduce //Prosogram// [3] as an alternative material-maker.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Rachel Soo|AUTHOR Rachel Soo]], [[Khia A. Johnson|AUTHOR Khia A. Johnson]], [[Molly Babel|AUTHOR Molly Babel]]
</p><p class="cpabstractcardaffiliationlist">University of British Columbia, Canada</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 421–425
</span></p></div>
<div class="cpabstractcardabstract"><p>In Cantonese and several other Chinese languages, /n/ is merging with /l/. The Cantonese merger appears categorical, with /n/ becoming /l/ word-initially. This project aims to describe the status of /n/ and /l/ in bilingual Cantonese and English speech to better understand individual differences at the interface of crosslinguistic influence and sound change. We examine bilingual speech using the SpiCE corpus, composed of speech from 34 early Cantonese-English bilinguals. Acoustic measures were collected on pre-vocalic nasal and lateral onsets in both languages. If bilinguals maintain separate representations for corresponding segments across languages, smaller differences between /n/ and /l/ are predicted in Cantonese compared to English. Measures of mid-frequency spectral tilt suggest that the /n/ and /l/ contrast is robustly maintained in English, but not Cantonese. The spacing of F2-F1 suggests small differences between Cantonese /n/ and /l/, and robust differences in English. While cross-language categories appear independent, substantial individual differences exist in the data. These data contribute to the understanding of the /n/ and /l/ merger in Cantonese and other Chinese languages, in addition to providing empirical and theoretical insights into crosslinguistic influence in early bilinguals.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Wendy Lalhminghlui|AUTHOR Wendy Lalhminghlui]], [[Priyankoo Sarmah|AUTHOR Priyankoo Sarmah]]
</p><p class="cpabstractcardaffiliationlist">IIT Guwahati, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 426–430
</span></p></div>
<div class="cpabstractcardabstract"><p>Mizo has voicing contrasts in nasals. This study investigates the acoustic properties of Mizo voiced and voiceless nasals using nasometric measurements. The dual channel data obtained for Mizo nasals is separated into oral and nasal channels and nasalance is calculated at every 10% of the duration of the nasals. Apart from that, the amount of voicing and duration of the nasals are also measured. The results show that nasalance is affected by the place of articulation of the nasals. Additionally, the voiceless nasals are found to be significantly longer than the voiced nasals.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Bruce Xiao Wang|AUTHOR Bruce Xiao Wang]], [[Vincent Hughes|AUTHOR Vincent Hughes]]
</p><p class="cpabstractcardaffiliationlist">University of York, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 381–385
</span></p></div>
<div class="cpabstractcardabstract"><p>In data-driven forensic voice comparison, sample size is an issue which can have substantial effects on system output. Numerous calibration methods have been developed and some have been proposed as solutions to sample size issues. In this paper, we test four calibration methods (i.e. logistic regression, regularised logistic regression, Bayesian model, ELUB) under different conditions of sampling variability and sample size. Training and test scores were simulated from skewed distributions derived from real experiments, increasing sample sizes from 20 to 100 speakers for both the training and test sets. For each sample size, the experiments were replicated 100 times to test the susceptibility of different calibration methods to sampling variability. The C,,llr,, mean and range across replications were used for evaluation. The Bayesian model and regularized logistic regression produced the most stable C,,llr,, values when the sample size is small (i.e. 20 speakers), although mean C,,llr,, is consistently lowest using logistic regression. The ELUB calibration method generally is the least preferred as it is the most sensitive to sample size and sampling variability (mean = 0.66, range = 0.21–0.59).</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Anne Bonneau|AUTHOR Anne Bonneau]]
</p><p class="cpabstractcardaffiliationlist">Loria (UMR 7503), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 386–390
</span></p></div>
<div class="cpabstractcardabstract"><p>Voicing assimilations inside groups of obstruents occur in opposite directions in French and German, where they are respectively regressive and progressive. The aim of the study is to investigate (1) whether non native speakers (here French learners of German) are apt to acquire subtle L2 specificities like assimilation direction, although they are not aware of their very existence, or (2) whether their productions depend essentially upon other factors, in particular consonant place of articulation. To that purpose, a corpus made up of groups of obstruents (/t/ followed by /z/, /v/ or /f/) embedded into sentences has been recorded by 16 French learners of German (beginners and advanced speakers). The consonants are separated by a word or a syllable boundary. Results, derived from the analysis of consonant periodicity and duration, do not stand for an acquisition of progressive assimilation, even by advanced speakers, and do not show differences between the productions of advanced speakers and beginners. On the contrary the boundary type and the consonant place of articulation play an important role in the presence or absence of voicing inside obstruent groups. The role of phonetic, universal mechanisms against linguistic specific rules is discussed to interpret the data.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Titas Chakraborty|AUTHOR Titas Chakraborty]]^^1^^
, [[Vaishali Patil|AUTHOR Vaishali Patil]]^^2^^
, [[Preeti Rao|AUTHOR Preeti Rao]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^IIT Bombay, India; ^^2^^IIIT Pune, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 391–395
</span></p></div>
<div class="cpabstractcardabstract"><p>The four-way distinction of plosives in terms of voicing and aspiration is rare in the world’s languages, but is an important characteristic of the Indo-Aryan language family. Both perception and production pose challenges to the language learner whose native tongue does not afford the specific distinctions. A study of the acoustic-phonetics of the sounds and their possible dependence on speaker characteristics, such as gender or native tongue, can inform methods for accurate feedback on the quality of the phones produced by a non-native learner. We present a system for the four-way classification of stops building on features previously proposed for aspiration detection in unvoiced and voiced plosives. Trained on an available dataset of Hindi speech by native speakers, the system works reliably on production data comprising Bangla words uttered by native Bangla and non-native (American English L1) speakers. The latter display a variety of articulation patterns for the given target contrasts, providing useful insights related to L1 influence on the voicing-aspiration production in word-initial CV contexts.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Saba Urooj|AUTHOR Saba Urooj]]^^1^^
, [[Benazir Mumtaz|AUTHOR Benazir Mumtaz]]^^2^^
, [[Sarmad Hussain|AUTHOR Sarmad Hussain]]^^1^^
, [[Ehsan ul Haq|AUTHOR Ehsan ul Haq]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^UET Lahore, Pakistan; ^^2^^Universität Konstanz, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 396–400
</span></p></div>
<div class="cpabstractcardabstract"><p>Emotional speech corpora exhibit differences in duration, intensity and fundamental frequency. We investigated acoustic as well as prosodic correlates of emotional speech in Urdu. We recorded a corpus of 23 sentences from four speakers of Urdu covering four emotional states. Main results show that: a) sadness exhibits lowest utterance rate, lowest intensity and narrow pitch range, b) anger exhibits highest utterance rate, highest intensity and wider pitch range, and c) happiness exhibits higher utterance rate and wider pitch range as compared to neutral and sadness; but no significant differences are found between the intensity and pitch range of anger and happiness. The analysis also shows differences in terms of pitch or phrase accents and boundary tones.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Nour Tamim|AUTHOR Nour Tamim]], [[Silke Hamann|AUTHOR Silke Hamann]]
</p><p class="cpabstractcardaffiliationlist">University of Amsterdam, The Netherlands</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 401–405
</span></p></div>
<div class="cpabstractcardabstract"><p>This study investigates the stop voicing contrast in Palestinian Arabic (PA) by examining Voice Onset Time (VOT) in both production and perception. An acoustic analysis of the recordings of 8 speakers showed that word-initial voiced stops in sentence context have an average VOT of -93 msec, and word-initial voiceless stops one of 29 msec. PA thus belongs, like most dialects of Arabic, to true voicing languages, i.e., languages with a contrast between voicing lead and short lag VOT.
We furthermore tested whether the phoneme /b/, without voiceless counterpart /p/ in PA, has similar VOT values to /d, d^^ʕ^^/, which have voiceless counterparts /t, t^^ʕ^^/. Similarly, we compared /k/, without counterpart /g/ in the PA dialect we investigated, to /t, t^^ʕ^^/. For /b/ we found very similar VOT values to /d, d^^ʕ^^/, while for /k/ we found a difference to /t, t^^ʕ^^/, attributable to a general tendency of velars to have longer VOT than denti-alveolars. We thus found no evidence for a less contrastive realization of unpaired plosives in PA.
In a categorization experiment of the denti-alveolar phoneme pairs with the same 8 speakers, VOT proved sufficient as a perceptual cue, though f0 of the following vowel also influenced the categorization.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Thomas Coy|AUTHOR Thomas Coy]], [[Vincent Hughes|AUTHOR Vincent Hughes]], [[Philip Harrison|AUTHOR Philip Harrison]], [[Amelia J. Gully|AUTHOR Amelia J. Gully]]
</p><p class="cpabstractcardaffiliationlist">University of York, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 406–410
</span></p></div>
<div class="cpabstractcardabstract"><p>There is a growing trend in the field of forensic speech science towards integrating the vanguard of speech technology with traditional linguistic methods in pursuit of both scalable (i.e. automatable) and accurate evidential methods. To this end, this paper investigates DeepFormants, a DNN formant estimator which its creators, Dissen and Keshet [1], claim constitutes an accurate tool ready for use by linguists. In the present paper, DeepFormants is integrated into semi-automatic speaker recognition systems using long-term formant distributions and compared against systems using traditional linear predictive coding. The readiness of the tool is assessed on overall speaker recognition performance, measured using equal error rates (EER) and the log LR cost functions (C,,llr,,). In high-quality conditions, DeepFormants outperforms the best performing LPC systems. Much poorer overall performance is found in channel mismatch conditions for DeepFormants, suggesting it is not adaptable to conditions it was not originally trained on. However, this is also true of LPC methods, raising questions over the validity of using formant analysis at all in such cases. A major benefit of DeepFormants over LPC is that the analyst does not need to specify settings. We discuss the implications of this with regard to results for individual speakers.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Michael Jessen|AUTHOR Michael Jessen]]
</p><p class="cpabstractcardaffiliationlist">Bundeskriminalamt, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 411–415
</span></p></div>
<div class="cpabstractcardabstract"><p>Forensic data from long-term formant analysis were used as input to the GMM-UBM approach, which is a way of deriving Likelihood Ratios. Tests were performed running 22 same-speaker comparisons and 462 different-speaker comparisons from a corpus of anonymized casework data involving telephone-intercepted speech. In a first series of tests, the number of Gaussian modules for GMM-modeling was increased from 1 to 32. In a second series of tests the duration of formant input in the compared files was reduced from 10 seconds to 5 and then to 2.5. All tests were performed both without and with the use of MAP adaptation. Results were evaluated in terms of overall performance characteristics EER and Cllr and in terms of score distributions visualized as Tippett plots. The main goal of the study was to compare the use and non-use of MAP and to look at the practical forensic implications of the difference. Results show that in terms of overall performance characteristics there is little difference between the selection and de-selection of MAP. Tippett plot patterns however reveal strong differences. Application of MAP allows for more symmetric same- and different-speaker distributions and shows more robustness against duration reductions, both of which are forensically important.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Justin J.H. Lo|AUTHOR Justin J.H. Lo]]
</p><p class="cpabstractcardaffiliationlist">University of York, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 416–420
</span></p></div>
<div class="cpabstractcardabstract"><p>This study considers issues of language- and speaker-specificity in long-term formant distributions (LTFDs) from phonetic and forensic perspectives and examines their potential value in cases of cross-language forensic voice comparison. Acoustic analysis of 60 male English–French bilinguals revealed systematic differences in LTFDs between the two languages, with higher LTF2–4 in French than in English. Cross-linguistic differences in the shapes of LTFDs were also found. These differences are argued to reflect not only vowel inventories of each language but also language-specific phonetic settings. At the same time, a high degree of within-speaker consistency was found across languages. Likelihood ratio based testing was carried out to examine the effect of language mismatch on the utility of LTFDs as speaker discriminants. Results showed that while the performance of LTFDs was worse in cross-language comparisons than in same-language comparisons, they were still capable of providing speaker-specific information. These findings demonstrate that, in spite of deteriorated performance, LTFDs are still potentially useful speaker discriminants in cases of language mismatch. These findings thus call for further empirical investigation into the use of linguistic-phonetic features in cross-language comparisons.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Heidi Christensen|AUTHOR Heidi Christensen]]
</p><p class="cpabstractcardaffiliationlist">University of Sheffield, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} ////
</span></p></div>
<div class="cpabstractcardabstract"><p>In the last decade we have seen how speech technologies for typical speech have matured and thus enabled the advancement of a multitude of services and technologies including voice-enabled conversational interfaces, dictation and successfully underpinning the use of state-of-the-art NLP techniques. This ever more pervasive offering allows for an often far more convenient and natural way of interacting with machines and systems. However it also represents an ever-growing gap experienced by people with atypical (dysarthric) voices: people with even just mild-to-moderate speech disorders cannot achieve satisfactory performance with current automatic speech recognition (ASR) systems and hence they are falling further and further behind in terms of their ability to use modern devices and interfaces. This talk will present the major challenges in porting mainstream ASR methodologies to work for atypical speech, discuss recent advances and present thoughts on where the research effort should be focusing to have real impact in this community of potential users. Being able to speak a query or dictate an email offers a lot of convenience to most of us but for this group of people can have significant implications on ability to fully take part in society and life quality.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Dominique Fohr|AUTHOR Dominique Fohr]], [[Irina Illina|AUTHOR Irina Illina]]
</p><p class="cpabstractcardaffiliationlist">Loria (UMR 7503), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1867–1871
</span></p></div>
<div class="cpabstractcardabstract"><p>This work aims to improve automatic speech recognition (ASR) by modeling long-term semantic relations. We propose to perform this through rescoring the ASR N-best hypotheses list. To achieve this, we propose two deep neural network (DNN) models and combine semantic, acoustic, and linguistic information. Our DNN rescoring models are aimed at selecting hypotheses that have better semantic consistency and therefore lower WER. We investigate a powerful representation as part of input features to our DNN model: dynamic contextual embeddings from Transformer-based BERT. Acoustic and linguistic features are also included. We perform experiments on the publicly available dataset TED-LIUM. We evaluate in clean and in noisy conditions, with n-gram and Recurrent Neural Network Language Model (RNNLM), more precisely Long Short-Term Memory (LSTM) model. The proposed rescoring approaches give significant WER improvements over the ASR system without rescoring models. Furthermore, the combination of rescoring methods based on BERT and GPT-2 scores achieves the best results.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Karel Beneš|AUTHOR Karel Beneš]], [[Lukáš Burget|AUTHOR Lukáš Burget]]
</p><p class="cpabstractcardaffiliationlist">Brno University of Technology, Czechia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1872–1876
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we explore several data augmentation strategies for training of language models for speech recognition. We compare augmentation based on global error statistics with one based on unigram statistics of ASR errors and with label-smoothing and its sampled variant. Additionally, we investigate the stability and the predictive power of perplexity estimated on augmented data. Despite being trivial, augmentation driven by global substitution, deletion and insertion rates achieves the best rescoring results. On the other hand, even though the associated perplexity measure is stable, it gives no better prediction of the final error rate than the vanilla one. Our best augmentation scheme increases the WER improvement from second-pass rescoring from 1.1% to 1.9% absolute on the CHiMe-6 challenge.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yingbo Gao|AUTHOR Yingbo Gao]], [[David Thulke|AUTHOR David Thulke]], [[Alexander Gerstenberger|AUTHOR Alexander Gerstenberger]], [[Khoa Viet Tran|AUTHOR Khoa Viet Tran]], [[Ralf Schlüter|AUTHOR Ralf Schlüter]], [[Hermann Ney|AUTHOR Hermann Ney]]
</p><p class="cpabstractcardaffiliationlist">RWTH Aachen University, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1877–1881
</span></p></div>
<div class="cpabstractcardabstract"><p>As the vocabulary size of modern word-based language models becomes ever larger, many sampling-based training criteria are proposed and investigated. The essence of these sampling methods is that the softmax-related traversal over the entire vocabulary can be simplified, giving speedups compared to the baseline. A problem we notice about the current landscape of such sampling methods is the lack of a systematic comparison and some myths about preferring one over another. In this work, we consider Monte Carlo sampling, importance sampling, a novel method we call compensated partial summation, and noise contrastive estimation. Linking back to the three traditional criteria, namely mean squared error, binary cross-entropy, and cross-entropy, we derive the theoretical solutions to the training problems. Contrary to some common belief, we show that all these sampling methods can perform equally well, as long as we correct for the intended class posterior probabilities. Experimental results in language modeling and automatic speech recognition on Switchboard and LibriSpeech support our claim, with all sampling-based methods showing similar perplexities and word error rates while giving the expected speedups.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Janne Pylkkönen|AUTHOR Janne Pylkkönen]], [[Antti Ukkonen|AUTHOR Antti Ukkonen]], [[Juho Kilpikoski|AUTHOR Juho Kilpikoski]], [[Samu Tamminen|AUTHOR Samu Tamminen]], [[Hannes Heikinheimo|AUTHOR Hannes Heikinheimo]]
</p><p class="cpabstractcardaffiliationlist">Speechly, Finland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1882–1886
</span></p></div>
<div class="cpabstractcardabstract"><p>Adaption of end-to-end speech recognition systems to new tasks is known to be challenging. A number of solutions have been proposed which apply external language models with various fusion methods, possibly with a combination of two-pass decoding. Also TTS systems have been used to generate adaptation data for the end-to-end models. In this paper we show that RNN-transducer models can be effectively adapted to new domains using only small amounts of textual data. By taking advantage of model’s inherent structure, where the prediction network is interpreted as a language model, we can apply fast adaptation to the model. Adapting the model avoids the need for complicated decoding time fusions and external language models. Using appropriate regularization, the prediction network can be adapted to new domains while still retaining good generalization capabilities. We show with multiple ASR evaluation tasks how this method can provide relative gains of 10–45% in target task WER. We also share insights how RNN-transducer prediction network performs as a language model.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Christopher Cieri|AUTHOR Christopher Cieri]], [[James Fiumara|AUTHOR James Fiumara]], [[Jonathan Wright|AUTHOR Jonathan Wright]]
</p><p class="cpabstractcardaffiliationlist">University of Pennsylvania, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1887–1891
</span></p></div>
<div class="cpabstractcardabstract"><p>We present a Game with a Purpose to elicit judgements of the language spoken in short audio clips of broadcast and conversational telephone speech, the resulting corpus and their potential use in research on language recognition and confusability.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Gianni Fenu|AUTHOR Gianni Fenu]]^^1^^
, [[Mirko Marras|AUTHOR Mirko Marras]]^^2^^
, [[Giacomo Medda|AUTHOR Giacomo Medda]]^^1^^
, [[Giacomo Meloni|AUTHOR Giacomo Meloni]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Università di Cagliari, Italy; ^^2^^EPFL, Switzerland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1892–1896
</span></p></div>
<div class="cpabstractcardabstract"><p>Speaker recognition systems are playing a key role in modern online applications. Though the susceptibility of these systems to discrimination according to group fairness metrics has been recently studied, their assessment has been mainly focused on the difference in equal error rate across groups, not accounting for other fairness criteria important in anti-discrimination policies, defined for demographic groups characterized by sensitive attributes. In this paper, we therefore study how existing group fairness metrics relate with the balancing settings of the training data set in speaker recognition. We conduct this analysis by operationalizing several definitions of fairness and monitoring them under varied data balancing settings. Experiments performed on three deep neural architectures, evaluated on a data set including gender/age-based groups, show that balancing group representation positively impacts on fairness and that the friction across security, usability, and fairness depends on the fairness metric and the recognition threshold.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Leying Zhang|AUTHOR Leying Zhang]], [[Zhengyang Chen|AUTHOR Zhengyang Chen]], [[Yanmin Qian|AUTHOR Yanmin Qian]]
</p><p class="cpabstractcardaffiliationlist">SJTU, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1897–1901
</span></p></div>
<div class="cpabstractcardabstract"><p>Voice and face are two important biometric characteristics that can be used for person identity verification. Previous works have proved the strong complementarity between audio and visual modalities in person verification tasks that multi-modality system can achieve significant performance improvement compared to single-modality system. However, due to the limitations in the real world, it is hard to access both audio and visual data at the same time. In this paper, we investigate several strategies to distill the knowledge from a multi-modality system and transfer it to the single-modality system in a teacher-student mode. We applied the knowledge distillation at three different levels: label level, embedding level, and distribution level. All the experiments are based on the VoxCeleb dataset. The results show that the visual single-modality system achieves 10% EER (equal error rate) improvement on the VoxCeleb1 evaluation set using our proposed knowledge distillation method. Besides, the improvement on the audio system is only reflected on part of the evaluation trials, and we give a detailed analysis for this phenomenon.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Paul-Gauthier Noé|AUTHOR Paul-Gauthier Noé]]^^1^^
, [[Mohammad Mohammadamini|AUTHOR Mohammad Mohammadamini]]^^1^^
, [[Driss Matrouf|AUTHOR Driss Matrouf]]^^1^^
, [[Titouan Parcollet|AUTHOR Titouan Parcollet]]^^1^^
, [[Andreas Nautsch|AUTHOR Andreas Nautsch]]^^2^^
, [[Jean-François Bonastre|AUTHOR Jean-François Bonastre]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^LIA (EA 4128), France; ^^2^^EURECOM, France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1902–1906
</span></p></div>
<div class="cpabstractcardabstract"><p>In speech technologies, speaker’s voice representation is used in many applications such as speech recognition, voice conversion, speech synthesis and, obviously, user authentication. Modern vocal representations of the speaker are based on neural embeddings. In addition to the targeted information, these representations usually contain sensitive information about the speaker, like the age, sex, physical state, education level or ethnicity. In order to allow the user to choose which information to protect, we introduce in this paper the concept of //attribute-driven privacy preservation// in speaker voice representation. It allows a person to hide one or more personal aspects to a potential malicious interceptor and to the application provider. As a first solution to this concept, we propose to use an adversarial autoencoding method that disentangles in the voice representation a given speaker attribute thus allowing its concealment. We focus here on the sex attribute for an Automatic Speaker Verification (ASV) task. Experiments carried out using the VoxCeleb datasets have shown that the proposed method enables the concealment of this attribute while preserving ASV ability.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Amrit Romana|AUTHOR Amrit Romana]]^^1^^
, [[John Bandon|AUTHOR John Bandon]]^^1^^
, [[Matthew Perez|AUTHOR Matthew Perez]]^^1^^
, [[Stephanie Gutierrez|AUTHOR Stephanie Gutierrez]]^^2^^
, [[Richard Richter|AUTHOR Richard Richter]]^^2^^
, [[Angela Roberts|AUTHOR Angela Roberts]]^^2^^
, [[Emily Mower Provost|AUTHOR Emily Mower Provost]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Michigan, USA; ^^2^^Northwestern University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1907–1911
</span></p></div>
<div class="cpabstractcardabstract"><p>Parkinson’s disease (PD) is a central nervous system disorder that causes motor impairment. Recent studies have found that people with PD also often suffer from cognitive impairment (CI). While a large body of work has shown that speech can be used to predict motor symptom severity in people with PD, much less has focused on cognitive symptom severity. Existing work has investigated if acoustic features, derived from speech, can be used to detect CI in people with PD. However, these acoustic features are general and are not targeted toward capturing CI. Speech errors and disfluencies provide additional insight into CI. In this study, we focus on read speech, which offers a controlled template from which we can detect errors and disfluencies, and we analyze how errors and disfluencies vary with CI. The novelty of this work is an automated pipeline, including transcription and error and disfluency detection, capable of predicting CI in people with PD. This will enable efficient analyses of how cognition modulates speech for people with PD, leading to scalable speech assessments of CI.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Robin Vaysse|AUTHOR Robin Vaysse]]^^1^^
, [[Jérôme Farinas|AUTHOR Jérôme Farinas]]^^1^^
, [[Corine Astésano|AUTHOR Corine Astésano]]^^2^^
, [[Régine André-Obrecht|AUTHOR Régine André-Obrecht]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^IRIT (UMR 5505), France; ^^2^^URI Octogone-Lordat (EA 4156), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1912–1916
</span></p></div>
<div class="cpabstractcardabstract"><p>The temporal dimension of speech acoustics is rarely taken into account in automatic models for Speech Intelligibility evaluation, although the rhythmic recurrence of phonemes, syllables and prosodic groups are allegedly good predictors of speech intelligibility. The present study aims at unravelling those automatic parameters that best account for the different levels of the speech signal’s rhythmic structure, and to evaluate their correlation with a perceptual intelligibility measure. The parameters are extracted from the Fourier Transform of the amplitude modulation of the signal (Envelope Modulation Spectrum) [1, 2]. A Lasso linear model for feature selection is first implemented to select the most relevant parameters, and a SVR regression analysis is run to reveal the best parameters’ combination. Our analyses of EMS, using data from the French corpora of cancer speech C2SI [3], show strong performances of the automatic prediction, with a correlation of 0.70 between our model and an intelligibility evaluation score by speech-pathologists. In particular, the highest correlation with speech intelligibility lies in the ratio between the energy in the low frequency band (0.5–4 Hz that represents slow rhythmic modulations indicative of prosodic groups) and in the higher one (4–10 Hz that represents fast rhythmic modulations like phonemes).</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jinzi Qi|AUTHOR Jinzi Qi]], [[Hugo Van hamme|AUTHOR Hugo Van hamme]]
</p><p class="cpabstractcardaffiliationlist">KU Leuven, Belgium</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1917–1921
</span></p></div>
<div class="cpabstractcardabstract"><p>Objective speech disorder classification for speakers with communication difficulty is desirable for diagnosis and administering therapy. With the current state of speech technology, it is evident to propose neural networks for this application. But neural network model training is hampered by a lack of labeled disordered speech data. In this research, we apply an extended version of Factorized Hierarchical Variational Auto-encoders (FHVAE) for representation learning on disordered speech. The FHVAE model extracts both content-related and sequence-related latent variables from speech data, and we utilize the extracted variables to explore how disorder type information is represented in the latent variables. For better classification performance, the latent variables are aggregated at the word and sentence level. We show that an extension of the FHVAE model succeeds in the better disentanglement of the content-related and sequence-related related representations, but both representations are still required for best results on disorder type classification.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Vikram C. Mathad|AUTHOR Vikram C. Mathad]]^^1^^
, [[Tristan J. Mahr|AUTHOR Tristan J. Mahr]]^^2^^
, [[Nancy Scherer|AUTHOR Nancy Scherer]]^^1^^
, [[Kathy Chapman|AUTHOR Kathy Chapman]]^^3^^
, [[Katherine C. Hustad|AUTHOR Katherine C. Hustad]]^^2^^
, [[Julie Liss|AUTHOR Julie Liss]]^^1^^
, [[Visar Berisha|AUTHOR Visar Berisha]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Arizona State University, USA; ^^2^^UW–Madison, USA; ^^3^^University of Utah, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1922–1926
</span></p></div>
<div class="cpabstractcardabstract"><p>Automatic evaluation of phone-level pronunciation scores typically involves two stages: (1) automatic phonetic segmentation via text-constrained phoneme alignment and (2) quantification of acoustic deviation for each phoneme-level relative to a database of correctly-pronounced speech. It’s clear that the second stage depends on the first. That is, if there is misalignment, the acoustic deviation will also be impacted. In this paper, we analyzed the impact of alignment error on a measure of goodness of pronunciation. We computed (1) automatic pronunciation scores using force-aligned samples, (2) the forced-alignment error rate, and (3) acoustic deviation using manually-aligned samples. We used a bivariate linear regression model to characterize the contributions of forced alignment errors and acoustic deviation on the automatic pronunciation scores. This was done across two different children speech databases, namely children with cleft lip/palate and typically developing children between the ages of 3–6 years. The analysis shows that, for speech from typically-developing children, most of the variation in the automatic pronunciation scores is explained by acoustic deviation, with the errors in forced alignment playing a relatively minor role. The forced alignment errors have a small but significant downstream impact on pronunciation assessment for children with cleft lip/palate.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Esaú Villatoro-Tello|AUTHOR Esaú Villatoro-Tello]]^^1^^
, [[S. Pavankumar Dubagunta|AUTHOR S. Pavankumar Dubagunta]]^^2^^
, [[Julian Fritsch|AUTHOR Julian Fritsch]]^^2^^
, [[Gabriela Ramírez-de-la-Rosa|AUTHOR Gabriela Ramírez-de-la-Rosa]]^^1^^
, [[Petr Motlicek|AUTHOR Petr Motlicek]]^^2^^
, [[Mathew Magimai-Doss|AUTHOR Mathew Magimai-Doss]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^UAM, Mexico; ^^2^^Idiap Research Institute, Switzerland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1927–1931
</span></p></div>
<div class="cpabstractcardabstract"><p>Mental disorders, e.g. depression and dementia, are categorized as priority conditions by the World Health Organization (WHO). When diagnosing, psychologists employ structured questionnaires/interviews and different cognitive tests. Although these procedures are accurate, there is an increasing need to develop digital mental health support technologies to alleviate the burden faced by professionals. In this paper, we propose a multi-modal approach for modeling the communication process of patients taking part in a clinical interview or a cognitive test. The language-based modality, inspired by the Lexical Availability (LA) theory from psycholinguistics, identifies the most //accessible// vocabulary of the interviewed subject and uses it as features in a classification process. The acoustic-based modality is processed by a Convolutional Neural Network (CNN) trained on speech signals that predominantly contain voice source characteristics. Finally, a late fusion technique based on majority voting assigns the final classification. Results show the complementarity of both modalities, reaching an overall macro-F1 of 84% and 90% for depression and Alzheimer’s dementia, respectively.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Amin Honarmandi Shandiz|AUTHOR Amin Honarmandi Shandiz]]^^1^^
, [[László Tóth|AUTHOR László Tóth]]^^1^^
, [[Gábor Gosztolya|AUTHOR Gábor Gosztolya]]^^2^^
, [[Alexandra Markó|AUTHOR Alexandra Markó]]^^3^^
, [[Tamás Gábor Csapó|AUTHOR Tamás Gábor Csapó]]^^4^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Szeged, Hungary; ^^2^^MTA-SZTE RGAI, Hungary; ^^3^^ELTE, Hungary; ^^4^^MTA-ELTE LingArt, Hungary</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1932–1936
</span></p></div>
<div class="cpabstractcardabstract"><p>Articulatory-to-acoustic mapping seeks to reconstruct speech from a recording of the articulatory movements, for example, an ultrasound video. Just like speech signals, these recordings not only represent the linguistic content but are also highly specific to the actual speaker. Hence, due to the lack of multi-speaker data sets, researchers have so far concentrated on speaker-dependent modeling. Here, we present multi-speaker experiments using the recently published TaL80 corpus. To model speaker characteristics, we adjusted the x-vector framework, popular in speech processing, to operate with ultrasound tongue videos. Next, we performed speaker recognition experiments using 50 speakers from the corpus. Then, we created speaker embedding vectors and evaluated them on the remaining speakers. Finally, we examined how the embedding vector influences the accuracy of our ultrasound-to-speech conversion network in a multi-speaker scenario. In the experiments we attained speaker recognition error rates below 3%, and we also found that the embedding vectors generalize well to unseen speakers. Our first attempt to apply them in a multi-speaker silent speech framework brought about a marginal reduction in the error rate of the spectral estimation step.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sungjae Cho|AUTHOR Sungjae Cho]]^^1^^
, [[Soo-Young Lee|AUTHOR Soo-Young Lee]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^KIST, Korea; ^^2^^KAIST, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2337–2338
</span></p></div>
<div class="cpabstractcardabstract"><p>We present a methodology for training our multi-speaker emotional text-to-speech synthesizer, which can express 7 different emotions for 10 speakers. All silences are removed from the audio samples prior to training, which speeds up learning. Curriculum learning is applied to train our model efficiently: the model is first trained with a large single-speaker neutral dataset, then with neutral speech from all speakers, and finally with datasets of emotional speech from all speakers. In each stage, training samples of each speaker-emotion pair have equal probability of appearing in mini-batches. Through this procedure, our model can synthesize speech for all targeted speakers and emotions. Our synthesized audio sets are available on our web page.</p></div>
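The equal-probability sampling of speaker-emotion pairs described above can be pictured with the minimal sketch below, which first draws a (speaker, emotion) pair uniformly and then draws an utterance from that pair. The dataset layout and identifiers are assumptions for illustration, not the authors’ pipeline.

```python
# Illustrative sketch: sample mini-batches so that every (speaker, emotion) pair
# is drawn with equal probability, regardless of how many utterances it has.
import random

# hypothetical index: (speaker, emotion) -> list of utterance ids
index = {
    ("spk01", "neutral"): ["u1", "u2", "u3"],
    ("spk01", "happy"):   ["u4"],
    ("spk02", "neutral"): ["u5", "u6"],
    ("spk02", "angry"):   ["u7", "u8", "u9", "u10"],
}

def sample_batch(index, batch_size, rng=random):
    pairs = list(index)
    batch = []
    for _ in range(batch_size):
        pair = rng.choice(pairs)               # uniform over speaker-emotion pairs
        batch.append(rng.choice(index[pair]))  # then uniform within the chosen pair
    return batch

print(sample_batch(index, 8))
```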
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Aleš Pražák|AUTHOR Aleš Pražák]]^^1^^
, [[Zdeněk Loose|AUTHOR Zdeněk Loose]]^^2^^
, [[Josef V. Psutka|AUTHOR Josef V. Psutka]]^^1^^
, [[Vlasta Radová|AUTHOR Vlasta Radová]]^^1^^
, [[Josef Psutka|AUTHOR Josef Psutka]]^^1^^
, [[Jan Švec|AUTHOR Jan Švec]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of West Bohemia, Czechia; ^^2^^SpeechTech, Czechia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2339–2340
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we describe our solution for live TV subtitling. The subtitling system uses the respeaking concept, with respeakers closely tied to the automatic speech recognition system. The ASR is specially tailored to the live subtitling task by using respeaker-specific acoustic models and TV-show-dependent language models. The ASR output stream can be modified online via keyboard shortcuts controlled by the respeaker. The whole subtitling service is used by Czech Television to provide high-quality subtitles of live shows for people with hearing impairments.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Stefan Fragner|AUTHOR Stefan Fragner]]^^1^^
, [[Tobias Topar|AUTHOR Tobias Topar]]^^1^^
, [[Maximilian Giller|AUTHOR Maximilian Giller]]^^1^^
, [[Lukas Pfeifenberger|AUTHOR Lukas Pfeifenberger]]^^2^^
, [[Franz Pernkopf|AUTHOR Franz Pernkopf]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Technische Universität Graz, Austria; ^^2^^Evolve, Austria</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2341–2342
</span></p></div>
<div class="cpabstractcardabstract"><p>Far-field speech recognition for applications such as home automation or smart assistants has to cope with moving speakers in reverberant environments. Simulating stationary or even moving speakers in realistic environments helps make speech processing technology more robust. This paper introduces an autonomous robot for recording a database of Room Impulse Responses (RIRs) at high spatial resolution, which supports the creation of realistic simulation environments. These RIRs can be exploited to generate multi-channel speech mixtures of static or moving speakers for various applications.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jonas Beskow|AUTHOR Jonas Beskow]], [[Charlie Caper|AUTHOR Charlie Caper]], [[Johan Ehrenfors|AUTHOR Johan Ehrenfors]], [[Nils Hagberg|AUTHOR Nils Hagberg]], [[Anne Jansen|AUTHOR Anne Jansen]], [[Chris Wood|AUTHOR Chris Wood]]
</p><p class="cpabstractcardaffiliationlist">Furhat Robotics, Sweden</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2343–2344
</span></p></div>
<div class="cpabstractcardabstract"><p>The Furhat robot is a social robot that uses facial projection technology to achieve a high degree of expressivity and flexibility. In this demonstration, we will present new features that take this facial expressiveness further. A new face engine for the robot is presented which not only drastically improves the visual fidelity of the face and the eyes, but also adds flexibility when it comes to designing new robotic characters as well as modifying existing ones. Most importantly, we will present a new toolset and workflow that allow users to record their own face motion and incorporate it into skills (i.e. custom robot applications) as gestures, prompts, or entire canned performances.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mónica Domínguez|AUTHOR Mónica Domínguez]], [[Juan Soler-Company|AUTHOR Juan Soler-Company]], [[Leo Wanner|AUTHOR Leo Wanner]]
</p><p class="cpabstractcardaffiliationlist">Universitat Pompeu Fabra, Spain</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2345–2346
</span></p></div>
<div class="cpabstractcardabstract"><p>Structuring speech into informative units is certainly a desirable feature in efficient human-machine communication. This paper introduces ThemePro 2.0, a toolkit that pre-processes long monologues into smaller cohesive units to be consumed by the text-to-speech module within a conversational agent. The methodology used is based upon the text’s discourse structure modelled as thematic progression patterns. As shown in the demonstration, thematic progression modelling captures the underlying information structure at the discourse level and is, therefore, instrumental for cohesive speech output in the TTS component.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sai Guruju|AUTHOR Sai Guruju]], [[Jithendra Vepa|AUTHOR Jithendra Vepa]]
</p><p class="cpabstractcardaffiliationlist">Observe.AI, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2347–2348
</span></p></div>
<div class="cpabstractcardabstract"><p>Call centers record and store customer-agent conversations for the purposes of coaching, quality assurance, and compliance with //Industry Regulations//. A good portion of these audio recordings contains sensitive information pertaining to customers’ financial or personal details. To ensure data security and compliance, and to reduce the risk of abuse or theft, it is important to identify such instances in audio recordings and mask these segments. To automate this process, we propose a cascaded system: first, Automatic Speech Recognition (ASR) generates a transcript and text-to-audio alignment information for an audio recording; then, entity extraction is performed on the generated transcripts to identify and locate sensitive information, and the corresponding sensitive segments are masked in the audio recordings using the alignment information. We introduce a novel system for selective masking of sensitive information in both audio and transcript.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Krishnachaitanya Gogineni|AUTHOR Krishnachaitanya Gogineni]], [[Tarun Reddy Yadama|AUTHOR Tarun Reddy Yadama]], [[Jithendra Vepa|AUTHOR Jithendra Vepa]]
</p><p class="cpabstractcardaffiliationlist">Observe.AI, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2349–2350
</span></p></div>
<div class="cpabstractcardabstract"><p>In a typical contact-center call, more than 35% of the call has neither the contact-center agent nor the customer speaking; we usually refer to such areas of the call as //conversational silences//. Conversational silences consist mostly of hold music, automatic recorded messages, or plain silence when the agent or customer is engaged in some off-call work. Most of these conversational silences negatively affect important KPIs for call centers: dead air affects customer satisfaction, long holds affect average call handling time, and so on. In this paper we showcase how Observe.AI helps contact centers identify agents who are breaching accepted levels of conversational silences by using an in-house audio segmenter paired with an NLP system that classifies the contexts around these //conversational silences//. This solution is provided by Observe.AI to hundreds of contact centers, who use it to improve their average call handling time and customer satisfaction scores.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Alexander Alenin|AUTHOR Alexander Alenin]], [[Anton Okhotnikov|AUTHOR Anton Okhotnikov]], [[Rostislav Makarov|AUTHOR Rostislav Makarov]], [[Nikita Torgashov|AUTHOR Nikita Torgashov]], [[Ilya Shigabeev|AUTHOR Ilya Shigabeev]], [[Konstantin Simonchik|AUTHOR Konstantin Simonchik]]
</p><p class="cpabstractcardaffiliationlist">ID R&D, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2297–2301
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper describes the ID R&D team submission to the text-independent task of the Short-duration Speaker Verification (SdSV) Challenge 2021. The top-performing system is a fusion of 9 Convolutional Neural Networks based on the ResNet architecture. Results of experiments on optimal NN architecture search are shown. We also present and investigate a subnetwork approach to solving auxiliary tasks such as gender or language detection. A verification score refinement step using quality measurements of a trial pair allowed us to further minimize the target metrics. A comparative analysis of all systems used in the fusion is provided on the VoxCeleb-1 test set and the SdSV-2021 development and evaluation sets. The final submission achieves ''0.69''% EER and ''0.0319'' minDCF on the challenge evaluation set.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jenthe Thienpondt|AUTHOR Jenthe Thienpondt]], [[Brecht Desplanques|AUTHOR Brecht Desplanques]], [[Kris Demuynck|AUTHOR Kris Demuynck]]
</p><p class="cpabstractcardaffiliationlist">Ghent University, Belgium</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2302–2306
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper describes the IDLab submission for the text-independent task of the Short-duration Speaker Verification Challenge 2021 (SdSVC-21). This speaker verification competition focuses on short-duration test recordings and cross-lingual trials, along with the constraint of limited availability of in-domain DeepMine Farsi training data. Currently, both Time Delay Neural Networks (TDNNs) and ResNets achieve state-of-the-art results in speaker verification. These architectures are structurally very different, and the construction of hybrid networks looks like a promising way forward. We introduce a 2D convolutional stem in a strong ECAPA-TDNN baseline to transfer some of the strong characteristics of a ResNet-based model to this hybrid CNN-TDNN architecture. Similarly, we incorporate absolute frequency positional encodings in an SE-ResNet34 architecture. These learnable feature map biases along the frequency axis offer this architecture a straightforward way to exploit frequency positional information. We also propose a frequency-wise variant of Squeeze-Excitation (SE) which better preserves frequency-specific information when rescaling the feature maps. Both modified architectures significantly outperform their corresponding baselines on the SdSVC-21 evaluation data and the original VoxCeleb1 test set. A four-system fusion containing the two improved architectures achieved third place in the final SdSVC-21 Task 2 ranking.</p></div>
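The frequency-wise Squeeze-Excitation idea can be pictured with the minimal PyTorch sketch below, which computes one scaling factor per frequency bin (pooling over channels and time) instead of one per channel. This is one plausible formulation under assumed feature-map shapes, not the paper’s exact implementation.

```python
# Illustrative sketch (not the paper's code): a frequency-wise Squeeze-Excitation
# block that rescales feature maps per frequency bin rather than per channel.
import torch
import torch.nn as nn

class FrequencyWiseSE(nn.Module):
    def __init__(self, num_freq_bins, reduction=4):
        super().__init__()
        hidden = max(num_freq_bins // reduction, 1)
        self.fc = nn.Sequential(
            nn.Linear(num_freq_bins, hidden),
            nn.ReLU(inplace=True),
            nn.Linear(hidden, num_freq_bins),
            nn.Sigmoid(),
        )

    def forward(self, x):                  # x: (batch, channels, freq, time)
        squeezed = x.mean(dim=(1, 3))      # average over channels and time -> (batch, freq)
        weights = self.fc(squeezed)        # per-frequency scaling factors in (0, 1)
        return x * weights[:, None, :, None]

x = torch.randn(2, 32, 80, 200)            # e.g. 80 mel-frequency bins (toy shapes)
print(FrequencyWiseSE(num_freq_bins=80)(x).shape)
```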
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Aleksei Gusev|AUTHOR Aleksei Gusev]], [[Alisa Vinogradova|AUTHOR Alisa Vinogradova]], [[Sergey Novoselov|AUTHOR Sergey Novoselov]], [[Sergei Astapov|AUTHOR Sergei Astapov]]
</p><p class="cpabstractcardaffiliationlist">ITMO University, Russia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2307–2311
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents speaker recognition (SR) systems for text-independent speaker verification under the cross-lingual (English vs. Persian) task (Task 2) of the Short-duration Speaker Verification Challenge (SdSVC) 2021.
We describe the applied ResNet-like and ECAPA-TDNN-like topology designs, as well as an analysis of multi-session scoring techniques benchmarked on the SdSVC challenge datasets. We review various modifications of the basic ResNet-like architecture and training strategies that allow us to improve the quality of speaker verification. We also introduce an alpha-query-expansion-based technique (αQE) for aggregating the enrollment embeddings at test time, which yields a 0.042 minDCF improvement, from 0.12 to 0.078, for the ECAPA-TDNN system compared to the embedding mean. Finally, we propose a trial-level distance-based non-parametric imposter/target detector (KrTC) used to filter out the worst enrollment samples at test time and further improve system performance.</p></div>
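As a rough picture of alpha-query-expansion-style aggregation, the sketch below weights each enrollment embedding by its cosine similarity to the mean embedding, raised to a power alpha, and then renormalizes. The exact formulation used in the paper may differ; this is an illustration under assumed shapes and parameters.

```python
# Illustrative sketch: alpha-query-expansion-style aggregation of enrollment
# embeddings, weighting each one by (similarity to the mean embedding) ** alpha.
import numpy as np

def alpha_qe(embeddings, alpha=3.0):
    """Aggregate L2-normalized enrollment embeddings with similarity^alpha weights."""
    emb = embeddings / np.linalg.norm(embeddings, axis=1, keepdims=True)
    query = emb.mean(axis=0)
    query /= np.linalg.norm(query)
    sims = emb @ query                            # cosine similarity to the mean embedding
    weights = np.clip(sims, 0.0, None) ** alpha   # emphasize the most consistent samples
    agg = (weights[:, None] * emb).sum(axis=0) / weights.sum()
    return agg / np.linalg.norm(agg)

enrollment = np.random.default_rng(0).normal(size=(5, 192))   # 5 toy enrollment embeddings
print(alpha_qe(enrollment).shape)
```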
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Woo Hyun Kang|AUTHOR Woo Hyun Kang]]^^1^^
, [[Nam Soo Kim|AUTHOR Nam Soo Kim]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^CRIM, Canada; ^^2^^Seoul National University, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2312–2316
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we describe our submitted systems for the Short-duration Speaker Verification (SdSV) Challenge 2021 Task 2. The challenge provides a difficult set of cross-lingual text-independent speaker verification trials. Our submissions employ ResNet-based embedding networks which are trained using various strategies exploiting both in-domain and out-of-domain datasets. The results show that the recently proposed joint factor embedding (JFE) scheme can enhance performance by disentangling the language-dependent information from the speaker embedding. However, upon analyzing the speaker embeddings, we found a clear discrepancy between the in-domain and out-of-domain datasets. Therefore, among our submitted systems, the best performance was achieved by pre-training the embedding system on the out-of-domain dataset and fine-tuning it with only the in-domain data, which resulted in a MinDCF of 0.142716 on the SdSV2021 evaluation set.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xiaoyi Qin|AUTHOR Xiaoyi Qin]]^^1^^
, [[Chao Wang|AUTHOR Chao Wang]]^^2^^
, [[Yong Ma|AUTHOR Yong Ma]]^^2^^
, [[Min Liu|AUTHOR Min Liu]]^^2^^
, [[Shilei Zhang|AUTHOR Shilei Zhang]]^^2^^
, [[Ming Li|AUTHOR Ming Li]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Wuhan University, China; ^^2^^China Mobile, China; ^^3^^Wuhan University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2317–2321
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we present our CRMI-DKU system description for the Short-duration Speaker Verification Challenge (SdSVC) 2021. We introduce the whole pipeline of our cross-lingual speaker verification system, including data preprocessing, training strategy, utterance-level speaker embedding extractor, domain-adaptation, and score calibration. We also propose methods to learn language-invariant features and perform domain adaptation to reduce the cross-lingual mismatch. In addition, we explore a semi-supervised method to utilize the unlabeled training data. The final submitted score level fusion system achieves 0.0476 minDCF and 0.98% EER on the evaluation set.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Peng Zhang|AUTHOR Peng Zhang]]^^1^^
, [[Peng Hu|AUTHOR Peng Hu]]^^2^^
, [[Xueliang Zhang|AUTHOR Xueliang Zhang]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Inner Mongolia University, China; ^^2^^Elevoc Technology, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2322–2326
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we present the IMU&Elevoc systems submitted to the Short-duration Speaker Verification Challenge (SdSVC) 2021. Our submissions cover both text-dependent speaker verification (Task 1) and text-independent speaker verification (Task 2). First, we investigate several frame-level feature extractor architectures based on ResNet, Res2Net, and TDNN. Then, we integrate a Squeeze-Excitation block and dimension cardinality to further improve the Res2Net-based backbone network. In particular, we probe an effective transfer learning strategy that overcomes the lack of Task 1 data and improves in-domain performance. A knowledge distillation method fusing multiple models is proposed to obtain a stronger single model. Experimental results on SdSVC 2021 show that our primary system yields 0.0500 MinDCF in Task 1 (ranked 4th) and 0.0448 MinDCF in Task 2 (ranked 6th).</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jie Yan|AUTHOR Jie Yan]], [[Shengyu Yao|AUTHOR Shengyu Yao]], [[Yiqian Pan|AUTHOR Yiqian Pan]], [[Wei Chen|AUTHOR Wei Chen]]
</p><p class="cpabstractcardaffiliationlist">Sogou, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2327–2331
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper we present our system for Task 2 of the Short-duration Speaker Verification (SdSV) Challenge 2021. This task focuses on benchmarking short-duration speaker recognition systems and analyzing varying degrees of phonetic variability. The main difficulty lies in the variance between cross-lingual trials, along with the limited in-domain Farsi training data. Based on the state-of-the-art ResNetSE speaker embedding network, we propose a novel network architecture with in-domain data fine-tuning and novel scoring methods, and achieve significant improvement over the ResNetSE baselines. Furthermore, score calibration on duration efficiently improves robustness. Finally, our system, a fusion of 10 subsystems, achieves satisfactory results, with a MinDCF of 0.0394 and an EER of 0.84% on the SdSVC evaluation set.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Bing Han|AUTHOR Bing Han]], [[Zhengyang Chen|AUTHOR Zhengyang Chen]], [[Zhikai Zhou|AUTHOR Zhikai Zhou]], [[Yanmin Qian|AUTHOR Yanmin Qian]]
</p><p class="cpabstractcardaffiliationlist">SJTU, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2332–2336
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents the SJTU system for both the text-dependent and text-independent tasks of the Short-duration Speaker Verification (SdSV) Challenge 2021. In this challenge, we explored different strong embedding extractors to extract robust speaker embeddings. For the text-independent task, language-dependent adaptive s-norm is explored to improve system performance under the cross-lingual verification condition. For the text-dependent task, we mainly focus on in-domain fine-tuning strategies based on a model pre-trained on large-scale out-of-domain data. In order to improve the distinction between different speakers uttering the same phrase, we propose several novel phrase-aware fine-tuning strategies and a phrase-aware neural PLDA. With such strategies, system performance is further improved. Finally, we fused the scores of the different systems; our fusion systems achieved 0.0473 in Task 1 (rank 3) and 0.0581 in Task 2 (rank 8) on the primary evaluation metric.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jatin Lamba|AUTHOR Jatin Lamba]], [[Abhishek|AUTHOR Abhishek]], [[Jayaprakash Akula|AUTHOR Jayaprakash Akula]], [[Rishabh Dabral|AUTHOR Rishabh Dabral]], [[Preethi Jyothi|AUTHOR Preethi Jyothi]], [[Ganesh Ramakrishnan|AUTHOR Ganesh Ramakrishnan]]
</p><p class="cpabstractcardaffiliationlist">IIT Bombay, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1937–1941
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we present a novel approach to the audio-visual video parsing (AVVP) task that demarcates events from a video separately for the audio and visual modalities. The proposed parsing approach simultaneously detects the temporal boundaries, in terms of start and end times, of such events. We show how AVVP can benefit from the following techniques geared towards effective cross-modal learning: (i) adversarial training and skip connections, (ii) global context-aware attention, and (iii) self-supervised pretraining using an audio-video grounding objective to obtain cross-modal audio-video representations. We present extensive experimental evaluations on the Look, Listen, and Parse (LLP) dataset and show that we outperform the state-of-the-art Hybrid Attention Network (HAN) on all five metrics proposed for AVVP. We also present several ablations to validate the effect of pretraining, global attention, and adversarial training.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Darren Cook|AUTHOR Darren Cook]]^^1^^
, [[Miri Zilka|AUTHOR Miri Zilka]]^^2^^
, [[Simon Maskell|AUTHOR Simon Maskell]]^^1^^
, [[Laurence Alison|AUTHOR Laurence Alison]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Liverpool, UK; ^^2^^University of Sussex, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1942–1946
<a href="./IS2021/MEDIA/2249" class="externallinkbutton" target="_blank">{{$:/causal/ZIP Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>Can an interviewer influence the cooperativeness of an interviewee? The role of an interviewer in actualising a successful interview is an active field of social psychological research. A large-scale analysis of interviews, however, typically involves time-consuming manual tasks and considerable human effort. Despite recent advances in computational fields, many automated methods continue to rely on manually labelled training data to establish ground truth. This reliance obscures explainability and hinders the mobility of analysis between applications. In this work, we introduce a cross-disciplinary approach to analysing interviewer efficacy. We suggest computational success measures as a transparent, automated, and reproducible alternative to pre-labelled data. We validate these measures in a small-scale study with human responders. To study the interviewer’s influence on the interviewee, we utilise features informed by social psychological theory to predict interview quality based on the interviewer’s linguistic behaviour. Our psychologically informed model significantly outperforms a bag-of-words model, demonstrating the strength of a cross-disciplinary approach toward the analysis of conversational data at scale.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jennifer Santoso|AUTHOR Jennifer Santoso]]^^1^^
, [[Takeshi Yamada|AUTHOR Takeshi Yamada]]^^1^^
, [[Shoji Makino|AUTHOR Shoji Makino]]^^1^^
, [[Kenkichi Ishizuka|AUTHOR Kenkichi Ishizuka]]^^2^^
, [[Takekatsu Hiramura|AUTHOR Takekatsu Hiramura]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Tsukuba, Japan; ^^2^^Revcomm, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1947–1951
</span></p></div>
<div class="cpabstractcardabstract"><p>Emotion recognition is essential for human behavior analysis and is possible through various inputs such as speech and images. However, in practical situations, such as call center analysis, the available information is limited to speech, which leads to the study of speech emotion recognition (SER). Considering the complexity of emotions, SER is a challenging task. Recently, automatic speech recognition (ASR) has played a role in obtaining text information from speech, and the combination of speech and ASR results has improved SER performance. However, ASR results are highly affected by speech recognition errors. Although there is a method to improve ASR performance on emotional speech, it requires fine-tuning of the ASR, which is costly. To mitigate the effect of ASR errors on SER, we propose combining a self-attention mechanism with a word-level confidence measure (CM), which indicates the reliability of ASR results, to reduce the importance of words with a high chance of error. Experimental results confirmed that this combination reduces the effect of incorrectly recognized words in ASR results, providing a better focus on the words that determine emotion recognition. Our proposed method outperformed state-of-the-art methods on the IEMOCAP dataset.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Alif Silpachai|AUTHOR Alif Silpachai]]^^1^^
, [[Ivana Rehman|AUTHOR Ivana Rehman]]^^1^^
, [[Taylor Anne Barriuso|AUTHOR Taylor Anne Barriuso]]^^1^^
, [[John Levis|AUTHOR John Levis]]^^1^^
, [[Evgeny Chukharev-Hudilainen|AUTHOR Evgeny Chukharev-Hudilainen]]^^1^^
, [[Guanlong Zhao|AUTHOR Guanlong Zhao]]^^2^^
, [[Ricardo Gutierrez-Osuna|AUTHOR Ricardo Gutierrez-Osuna]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Iowa State University, USA; ^^2^^Texas A&M University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1952–1956
</span></p></div>
<div class="cpabstractcardabstract"><p>Research suggests learners may improve their second language (L2) pronunciation by imitating voices with similar acoustic profiles. However, previously reported improvements have been in suprasegmentals (prosodic features such as intonation); it remains unclear whether voice similarity applies to L2 segmentals (consonants and vowels). To address this issue, this study investigates how voice similarity facilitates awareness of pronunciation errors, a necessary step in pronunciation improvement. In two experiments, advanced L2 learners identified their pronunciation errors by comparing their production to the production of a resynthesized model voice built from the learner’s own voice (Golden Speaker voice), or to an unfamiliar resynthesized voice of the same gender as the learner (Silver Speaker voice). In Experiment 1, L2 learners identified all syllables with vowel and consonant errors when comparing their production to the model voice, and their choices were compared to identifications by expert judges. In Experiment 2, learners were told how many errors the expert judges had identified before identifying the same number of errors. Results did not support facilitative effects of Golden Speaker voices in either experiment, but Experiment 2 resulted in higher identification percentages. A discussion of the challenges in self-identification of errors in relation to voice similarity is offered.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Alla Menshikova|AUTHOR Alla Menshikova]], [[Daniil Kocharov|AUTHOR Daniil Kocharov]], [[Tatiana Kachkovskaia|AUTHOR Tatiana Kachkovskaia]]
</p><p class="cpabstractcardaffiliationlist">Saint Petersburg State University, Russia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1957–1961
</span></p></div>
<div class="cpabstractcardabstract"><p>In dialogues, intra-speaker variability is often explained by the relationship between interlocutors. A person may speak differently with a friend and a stranger or depending on the interlocutor’s gender or age — in all these cases we expect speech entrainment, but the degree of entrainment may vary. In this research, we measured lexical entrainment in a series of dialogues, where each one of 20 “core” speakers talked to five different interlocutors: a sibling, a close friend, an unfamiliar person of the same gender and similar age, an unfamiliar person of the other gender and similar age, and an unfamiliar person of the same gender, greater age and higher job position. We hypothesized that the degree of speech entrainment systematically varies according to the type of interlocutor, across all the “core” speakers. The following measures of entrainment were used: parts of speech statistics, verb forms statistics, language style matching, and lexical density. Our data have shown that a person speaks very similarly to his/her sibling; dialogues with a friend or a same-gender stranger of similar age show fewer similarities; the least “common language” is observed in dialogues with a stranger of the opposite gender and with a stranger of greater age and higher job position.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shamila Nasreen|AUTHOR Shamila Nasreen]], [[Julian Hough|AUTHOR Julian Hough]], [[Matthew Purver|AUTHOR Matthew Purver]]
</p><p class="cpabstractcardaffiliationlist">Queen Mary University of London, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1962–1966
</span></p></div>
<div class="cpabstractcardabstract"><p>Alzheimer’s Disease (AD) is a form of dementia that manifests as cognitive decline, including in memory and language, and as changes in behavior. Speech data has proven valuable for inferring cognitive status, is used in many health assessment tasks, and can be easily elicited in natural settings. Much work focuses on analysis using linguistic features; here, we focus on non-linguistic features and their use in distinguishing AD patients from similar-age non-AD patients with other health conditions in the Carolinas Conversation Collection (CCC) dataset. We used two types of features: patterns of //interaction//, including pausing behaviour and floor control, and //acoustic// features, including pitch, amplitude, energy, and cepstral coefficients. Fusion of the two kinds of features, combined with feature selection, obtains very promising classification results: classification accuracy of 90% using standard models such as support vector machines and logistic regression. We also obtain promising results using interactional features alone (87% accuracy), which can be easily extracted from natural conversations in daily life and thus have the potential for future implementation as a non-invasive method for AD diagnosis and monitoring.</p></div>
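The feature-fusion, feature-selection, and standard-classifier setup described above maps naturally onto a short scikit-learn pipeline. The sketch below uses synthetic interactional and acoustic feature matrices; all dimensions, names, and labels are illustrative and are not the authors’ configuration or data.

```python
# Illustrative sketch: fuse interactional and acoustic features, apply feature
# selection, and classify with a standard linear model (toy data only).
import numpy as np
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler
from sklearn.feature_selection import SelectKBest, f_classif
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import cross_val_score

rng = np.random.default_rng(0)
interaction_feats = rng.normal(size=(60, 12))   # hypothetical pause/floor-control features
acoustic_feats = rng.normal(size=(60, 40))      # hypothetical pitch/energy/cepstral features
X = np.hstack([interaction_feats, acoustic_feats])   # feature-level fusion
y = rng.integers(0, 2, size=60)                 # toy AD vs. non-AD labels

clf = make_pipeline(
    StandardScaler(),
    SelectKBest(f_classif, k=20),               # keep the 20 most informative features
    LogisticRegression(max_iter=1000),
)
print(cross_val_score(clf, X, y, cv=5).mean())
```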
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hardik Kothare|AUTHOR Hardik Kothare]]^^1^^
, [[Vikram Ramanarayanan|AUTHOR Vikram Ramanarayanan]]^^1^^
, [[Oliver Roesler|AUTHOR Oliver Roesler]]^^1^^
, [[Michael Neumann|AUTHOR Michael Neumann]]^^1^^
, [[Jackson Liscombe|AUTHOR Jackson Liscombe]]^^1^^
, [[William Burke|AUTHOR William Burke]]^^1^^
, [[Andrew Cornish|AUTHOR Andrew Cornish]]^^1^^
, [[Doug Habberstad|AUTHOR Doug Habberstad]]^^1^^
, [[Alaa Sakallah|AUTHOR Alaa Sakallah]]^^2^^
, [[Sara Markuson|AUTHOR Sara Markuson]]^^2^^
, [[Seemran Kansara|AUTHOR Seemran Kansara]]^^2^^
, [[Afik Faerman|AUTHOR Afik Faerman]]^^2^^
, [[Yasmine Bensidi-Slimane|AUTHOR Yasmine Bensidi-Slimane]]^^2^^
, [[Laura Fry|AUTHOR Laura Fry]]^^2^^
, [[Saige Portera|AUTHOR Saige Portera]]^^2^^
, [[David Suendermann-Oeft|AUTHOR David Suendermann-Oeft]]^^1^^
, [[David Pautler|AUTHOR David Pautler]]^^1^^
, [[Carly Demopoulos|AUTHOR Carly Demopoulos]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Modality.AI, USA; ^^2^^University of California at San Francisco, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1967–1971
</span></p></div>
<div class="cpabstractcardabstract"><p>We explore the utility of an on-demand multimodal conversational platform in extracting speech and facial metrics in children with Autism Spectrum Disorder (ASD). We investigate the extent to which these metrics correlate with objective clinical measures, particularly as they pertain to the interplay between the affective, phonatory and motoric subsystems. 22 participants diagnosed with ASD engaged with a virtual agent in conversational affect production tasks designed to elicit facial and vocal affect. We found significant correlations between vocal pitch and loudness extracted by our platform during these tasks and accuracy in recognition of facial and vocal affect, assessed via the Diagnostic Analysis of Nonverbal Accuracy-2 (DANVA-2) neuropsychological task. We also found significant correlations between jaw kinematic metrics extracted using our platform and motor speed of the dominant hand assessed via a standardised neuropsychological finger tapping task. These findings offer preliminary evidence for the usefulness of these audiovisual analytic metrics and could help us better model the interplay between different physiological subsystems in individuals with ASD.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Carlos Toshinori Ishi|AUTHOR Carlos Toshinori Ishi]], [[Taiken Shintani|AUTHOR Taiken Shintani]]
</p><p class="cpabstractcardaffiliationlist">RIKEN, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1972–1976
</span></p></div>
<div class="cpabstractcardabstract"><p>The background of this study is the generation of natural gaze behaviors in human-robot multimodal interaction. For that purpose, we analyzed the gaze behaviors of multiple speakers in a dataset containing three-party conversations, in terms of the reasons/intentions behind their gaze events.
Analyses of the gaze reasons were conducted separately for gaze behaviors towards a dialogue partner and for gaze aversions (i.e., gazing away from a person’s face). Analysis of the eyeball movements during gaze aversions was also conducted. Different distributions of average durations and gaze direction patterns were observed depending on the gaze reason (e.g., in listening mode, in speaking mode, towards the dialogue partner’s reactions, in gaze aversions during thinking and remembering, and during the speaker’s own behaviors such as nodding and laughing).</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Suyoun Kim|AUTHOR Suyoun Kim]], [[Abhinav Arora|AUTHOR Abhinav Arora]], [[Duc Le|AUTHOR Duc Le]], [[Ching-Feng Yeh|AUTHOR Ching-Feng Yeh]], [[Christian Fuegen|AUTHOR Christian Fuegen]], [[Ozlem Kalinli|AUTHOR Ozlem Kalinli]], [[Michael L. Seltzer|AUTHOR Michael L. Seltzer]]
</p><p class="cpabstractcardaffiliationlist">Facebook, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1977–1981
</span></p></div>
<div class="cpabstractcardabstract"><p>Word Error Rate (WER) has been the predominant metric used to evaluate the performance of automatic speech recognition (ASR) systems. However, WER is sometimes not a good indicator for downstream Natural Language Understanding (NLU) tasks, such as intent recognition, slot filling, and semantic parsing in task-oriented dialog systems. This is because WER takes into consideration only literal correctness instead of semantic correctness, the latter of which is typically more important for these downstream tasks. In this study, we propose a novel Semantic Distance (SemDist) measure as an alternative evaluation metric for ASR systems to address this issue. We define SemDist as the distance between a reference and hypothesis pair in a sentence-level embedding space. To represent the reference and hypothesis as a sentence embedding, we exploit RoBERTa, a state-of-the-art pre-trained deep contextualized language model based on the transformer architecture. We demonstrate the effectiveness of our proposed metric on various downstream tasks, including intent recognition, semantic parsing, and named entity recognition.</p></div>
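A SemDist-style score can be sketched as the cosine distance between sentence embeddings of the reference and the hypothesis. The example below mean-pools roberta-base token states from Hugging Face Transformers; the paper’s exact pooling strategy and model checkpoint may differ, so treat this as an assumption-laden illustration rather than the authors’ metric.

```python
# Illustrative sketch: a SemDist-style score as the cosine distance between
# mean-pooled RoBERTa embeddings of a reference and an ASR hypothesis.
import torch
from transformers import AutoTokenizer, AutoModel

tokenizer = AutoTokenizer.from_pretrained("roberta-base")
model = AutoModel.from_pretrained("roberta-base")

def embed(sentence):
    inputs = tokenizer(sentence, return_tensors="pt")
    with torch.no_grad():
        hidden = model(**inputs).last_hidden_state      # (1, tokens, dim)
    return hidden.mean(dim=1).squeeze(0)                # mean pooling over tokens

def sem_dist(reference, hypothesis):
    ref, hyp = embed(reference), embed(hypothesis)
    return 1.0 - torch.nn.functional.cosine_similarity(ref, hyp, dim=0).item()

print(sem_dist("play some jazz music", "play some jobs music"))
```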
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Lingfeng Dai|AUTHOR Lingfeng Dai]], [[Qi Liu|AUTHOR Qi Liu]], [[Kai Yu|AUTHOR Kai Yu]]
</p><p class="cpabstractcardaffiliationlist">SJTU, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2022–2026
</span></p></div>
<div class="cpabstractcardabstract"><p>Language model rescoring, especially neural network language model (NNLM) rescoring, is widely used to improve the performance of a second-pass automatic speech recognition (ASR) system. The rescoring NNLM is usually trained separately from the ASR system, and the two training corpora are typically different, leading to a vocabulary mismatch problem that degrades ASR performance. Previous research focuses more on the language domain mismatch problem, while the vocabulary mismatch problem, which may also cause significant performance degradation, has not been well studied. This paper proposes a novel class-based NNLM framework to address the vocabulary mismatch problem for language model rescoring. Here, OOV words (words unknown to the rescoring NNLM, called OOV words for short) are assigned to well-trained classes of the NNLM and inherit the class probability. Experiments show that class-based NNLM rescoring can significantly reduce the performance degradation due to vocabulary mismatch.</p></div>
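The class-based treatment of OOV words can be pictured as factoring the word probability into a class probability from the NNLM and a within-class probability, with an OOV word inheriting the probability of the class it is assigned to. The toy sketch below uses hypothetical class assignments, probabilities, and a fallback within-class value; it illustrates only the factorization, not the proposed model.

```python
# Illustrative sketch: class-based probability where an OOV word inherits its
# assigned class's probability, P(w|h) ~ P(class(w)|h) * P(w|class(w)).
def class_based_prob(word, history, class_of, p_class_given_history, p_word_given_class,
                     oov_class="PROPER_NOUN", oov_within_class=1e-3):
    cls = class_of.get(word, oov_class)             # OOV words get assigned to a class
    p_cls = p_class_given_history(cls, history)     # class probability from the rescoring NNLM
    p_word = p_word_given_class.get((word, cls), oov_within_class)
    return p_cls * p_word

# toy distributions (hypothetical values, for illustration only)
class_of = {"play": "VERB", "music": "NOUN"}
p_word_given_class = {("play", "VERB"): 0.05, ("music", "NOUN"): 0.02}
p_class_given_history = lambda cls, hist: {"VERB": 0.3, "NOUN": 0.4, "PROPER_NOUN": 0.1}[cls]

print(class_based_prob("spotify", ("play",), class_of, p_class_given_history, p_word_given_class))
```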
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Gakuto Kurata|AUTHOR Gakuto Kurata]]^^1^^
, [[George Saon|AUTHOR George Saon]]^^2^^
, [[Brian Kingsbury|AUTHOR Brian Kingsbury]]^^2^^
, [[David Haws|AUTHOR David Haws]]^^2^^
, [[Zoltán Tüske|AUTHOR Zoltán Tüske]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^IBM, Japan; ^^2^^IBM, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2027–2031
</span></p></div>
<div class="cpabstractcardabstract"><p>Customization of automatic speech recognition (ASR) models using text data from a target domain is essential to deploying ASR in various domains. End-to-end (E2E) modeling for ASR has made remarkable progress, but the advantage of E2E modeling, where all neural network parameters are jointly optimized, is offset by the challenge of customizing such models. In conventional hybrid models, it is easy to directly modify a language model or a lexicon using text data, but this is not true for E2E models. One popular approach for customizing E2E models uses audio synthesized from the target domain text, but the acoustic mismatch between the synthesized and real audio can be problematic. We propose a method that avoids the negative effect of synthesized audio by (1) adding a mapping network before the encoder network to map the acoustic features of the synthesized audio to those of the source domain, (2) training the added mapping network using text and synthesized audio from the source domain while freezing all layers in the E2E model, (3) training the E2E model with text and synthesized audio from the target domain, and (4) removing the added mapping network when decoding real audio from the target domain. Experiments on customizing RNN Transducer and Conformer Transducer models demonstrate the advantage of the proposed method over encoder freezing, a popular customization method for E2E models.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mandana Saebi|AUTHOR Mandana Saebi]]^^1^^
, [[Ernest Pusateri|AUTHOR Ernest Pusateri]]^^2^^
, [[Aaksha Meghawat|AUTHOR Aaksha Meghawat]]^^2^^
, [[Christophe Van Gysel|AUTHOR Christophe Van Gysel]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Notre Dame, USA; ^^2^^Apple, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2032–2036
</span></p></div>
<div class="cpabstractcardabstract"><p>High-quality automatic speech recognition (ASR) is essential for virtual assistants (VAs) to work well. However, ASR often performs poorly on VA requests containing named entities. In this work, we start from the observation that many ASR errors on named entities are inconsistent with real-world knowledge. We extend previous discriminative n-gram language modeling approaches to incorporate real-world knowledge from a Knowledge Graph (KG), using features that capture entity type-entity and entity-entity relationships. We apply our model through an efficient lattice rescoring process, achieving relative sentence error rate reductions of more than 25% on some synthesized test sets covering less popular entities, with minimal degradation on a uniformly sampled VA test set.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mahdi Namazifar|AUTHOR Mahdi Namazifar]], [[John Malik|AUTHOR John Malik]], [[Li Erran Li|AUTHOR Li Erran Li]], [[Gokhan Tur|AUTHOR Gokhan Tur]], [[Dilek Hakkani Tür|AUTHOR Dilek Hakkani Tür]]
</p><p class="cpabstractcardaffiliationlist">Amazon, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2037–2041
</span></p></div>
<div class="cpabstractcardabstract"><p>Masked language models have revolutionized natural language processing systems in the past few years. Warped language models, a recently introduced generalization of masked language models, are trained to be more robust to the types of errors that appear in automatic or manual transcriptions of spoken language by exposing the model to those same error types during training. In this work we propose a novel approach that takes advantage of the robustness of warped language models to transcription noise in order to correct transcriptions of spoken language. We show that our proposed approach achieves up to a 10% reduction in the word error rates of both automatic and manual transcriptions of spoken language.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xiaoqiang Wang|AUTHOR Xiaoqiang Wang]]^^1^^
, [[Yanqing Liu|AUTHOR Yanqing Liu]]^^1^^
, [[Sheng Zhao|AUTHOR Sheng Zhao]]^^1^^
, [[Jinyu Li|AUTHOR Jinyu Li]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Microsoft, China; ^^2^^Microsoft, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1982–1986
</span></p></div>
<div class="cpabstractcardabstract"><p>It’s challenging to customize transducer-based automatic speech recognition (ASR) system with context information which is dynamic and unavailable during model training. In this work, we introduce a light-weight contextual spelling correction model to correct context-related recognition errors in transducer-based ASR systems. We incorporate the context information into the spelling correction model with a shared context encoder and use a filtering algorithm to handle large-size context lists. Experiments show that the model improves baseline ASR model performance with about 50% relative word error rate reduction, which also significantly outperforms the baseline method such as contextual LM biasing. The model also shows excellent performance for out-of-vocabulary terms not seen during training.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ning Shi|AUTHOR Ning Shi]]^^1^^
, [[Wei Wang|AUTHOR Wei Wang]]^^1^^
, [[Boxin Wang|AUTHOR Boxin Wang]]^^2^^
, [[Jinfeng Li|AUTHOR Jinfeng Li]]^^1^^
, [[Xiangyu Liu|AUTHOR Xiangyu Liu]]^^1^^
, [[Zhouhan Lin|AUTHOR Zhouhan Lin]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Alibaba, China; ^^2^^University of Illinois at Urbana-Champaign, USA; ^^3^^SJTU, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1987–1991
</span></p></div>
<div class="cpabstractcardabstract"><p>Punctuation restoration is an important post-processing step in automatic speech recognition. Among other kinds of external information, part-of-speech (POS) taggers provide informative tags, suggesting each input token’s syntactic role, which has been shown to be beneficial for the punctuation restoration task. In this work, we incorporate an external POS tagger and fuse its predicted labels into the existing language model to provide syntactic information. Besides, we propose sequence boundary sampling (SBS) to learn punctuation positions more efficiently as a sequence tagging task. Experimental results show that our methods can consistently obtain performance gains and achieve a new state-of-the-art on the common IWSLT benchmark. Further ablation studies illustrate that both large pre-trained language models and the external POS tagger take essential parts to improve the model’s performance.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Vasileios Papadourakis|AUTHOR Vasileios Papadourakis]], [[Markus Müller|AUTHOR Markus Müller]], [[Jing Liu|AUTHOR Jing Liu]], [[Athanasios Mouchtaris|AUTHOR Athanasios Mouchtaris]], [[Maurizio Omologo|AUTHOR Maurizio Omologo]]
</p><p class="cpabstractcardaffiliationlist">Amazon, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1992–1996
</span></p></div>
<div class="cpabstractcardabstract"><p>End-to-end automatic speech recognition systems map a sequence of acoustic features to text. In modern systems, text is encoded to grapheme subwords which are generated by methods designed for text processing tasks and therefore don’t model or take advantage of the statistics of the acoustic features. Here, we present a novel method for generating grapheme subwords that are derived from phoneme sequences, therefore capturing phonetical statistics. The phonetically induced subwords can be used for training and inference in any system that benefits from subwords, regardless of architecture and without the need of a pronunciation lexicon. We compare our method to other commonly used methods, which are based on text statistics or on text-phoneme correspondence and present experiments on CTC and RNN-T architectures, evaluating subword sets of different sizes. We find that our phonetically induced subwords can improve performance of RNN-T models with relative improvements of up to 15.21% compared to other subword methods.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Courtney Mansfield|AUTHOR Courtney Mansfield]], [[Sara Ng|AUTHOR Sara Ng]], [[Gina-Anne Levow|AUTHOR Gina-Anne Levow]], [[Richard A. Wright|AUTHOR Richard A. Wright]], [[Mari Ostendorf|AUTHOR Mari Ostendorf]]
</p><p class="cpabstractcardaffiliationlist">University of Washington, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1997–2001
</span></p></div>
<div class="cpabstractcardabstract"><p>A number of studies have compared human and machine transcription, showing that automatic speech recognition (ASR) is approaching human performance in some contexts. Most studies look at differences as measured by the standard speech recognition scoring criterion: word error rate (WER). This study looks at more fine-grained analysis of differences for conversational speech data where systems have reached human parity in terms of average WER, specifically insertions vs. deletions, word category, and word context characterized by linguistic surprisal. In contrast to ASR systems, humans are more likely to miss words than to misrecognize them, and they are much more likely to make errors in transcribing words associated primarily with conversational contexts (fillers, backchannels and discourse cue words). The differences are more pronounced for more informal contexts, i.e. conversations between family members. Although human transcribers may miss these words, conversational partners seem to use them in turntaking and processing disfluencies. Thus, ASR systems may need superhuman transcription performance for spoken language technology to achieve human-level conversation skills.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[W. Ronny Huang|AUTHOR W. Ronny Huang]], [[Tara N. Sainath|AUTHOR Tara N. Sainath]], [[Cal Peyser|AUTHOR Cal Peyser]], [[Shankar Kumar|AUTHOR Shankar Kumar]], [[David Rybach|AUTHOR David Rybach]], [[Trevor Strohman|AUTHOR Trevor Strohman]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2002–2006
</span></p></div>
<div class="cpabstractcardabstract"><p>We introduce Lookup-Table Language Models (LookupLM), a method for scaling up the size of RNN language models with only a constant increase in the floating point operations, by increasing the expressivity of the embedding table. In particular, we instantiate an (additional) embedding table which embeds the previous n-gram token sequence, rather than a single token. This allows the embedding table to be scaled up arbitrarily — with a commensurate increase in performance — without changing the token vocabulary. Since embeddings are sparsely retrieved from the table via a lookup; increasing the size of the table adds neither extra operations to each forward pass nor extra parameters that need to be stored on limited GPU/TPU memory. We explore scaling n-gram embedding tables up to nearly a billion parameters. When trained on a 3-billion sentence corpus, we find that LookupLM improves long tail log perplexity by 2.44 and long tail WER by 23.4% on a downstream speech recognition task over a standard RNN language model baseline, an improvement comparable to a scaling up the baseline by 6.2× the number of floating point operations.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jesús Andrés-Ferrer|AUTHOR Jesús Andrés-Ferrer]]^^1^^
, [[Dario Albesano|AUTHOR Dario Albesano]]^^2^^
, [[Puming Zhan|AUTHOR Puming Zhan]]^^3^^
, [[Paul Vozila|AUTHOR Paul Vozila]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Nuance Communications, Spain; ^^2^^Nuance Communications, Italy; ^^3^^Nuance Communications, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2007–2011
</span></p></div>
<div class="cpabstractcardabstract"><p>End-2-end (E2E) models have become increasingly popular in some ASR tasks because of their performance and advantages. These E2E models directly approximate the posterior distribution of tokens given the acoustic inputs. Consequently, the E2E systems implicitly define a language model (LM) over the output tokens, which makes the exploitation of independently trained language models less straightforward than in conventional ASR systems. This makes it difficult to dynamically adapt E2E ASR system to contextual profiles for better recognizing special words such as named entities. In this work, we propose a contextual density ratio approach for both training a contextual aware E2E model and adapting the language model to named entities. We apply the aforementioned technique to an E2E ASR system, which transcribes doctor and patient conversations, for better adapting the E2E system to the names in the conversations. Our proposed technique achieves a relative improvement of up to 46.5% on the names over an E2E baseline without degrading the overall recognition accuracy of the whole test set. Moreover, it also surpasses a contextual shallow fusion baseline by 22.1% relative.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Qiushi Huang|AUTHOR Qiushi Huang]]^^1^^
, [[Tom Ko|AUTHOR Tom Ko]]^^1^^
, [[H. Lilian Tang|AUTHOR H. Lilian Tang]]^^2^^
, [[Xubo Liu|AUTHOR Xubo Liu]]^^2^^
, [[Bo Wu|AUTHOR Bo Wu]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^SUSTech, China; ^^2^^University of Surrey, UK; ^^3^^MIT-IBM Watson AI Lab, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2012–2016
</span></p></div>
<div class="cpabstractcardabstract"><p>Punctuation is critical in understanding natural language text. Currently, most automatic speech recognition (ASR) systems do not generate punctuation, which affects the performance of downstream tasks, such as intent detection and slot filling. This gives rise to the need for punctuation restoration. Recent work in punctuation restoration heavily utilizes pre-trained language models without considering data imbalance when predicting punctuation classes. In this work, we address this problem by proposing a token-level supervised contrastive learning method that aims at maximizing the distance of representation of different punctuation marks in the embedding space. The result shows that training with token-level supervised contrastive learning obtains up to 3.2% absolute F₁ improvement on the test set.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yun Zhao|AUTHOR Yun Zhao]], [[Xuerui Yang|AUTHOR Xuerui Yang]], [[Jinchao Wang|AUTHOR Jinchao Wang]], [[Yongyu Gao|AUTHOR Yongyu Gao]], [[Chao Yan|AUTHOR Chao Yan]], [[Yuanfu Zhou|AUTHOR Yuanfu Zhou]]
</p><p class="cpabstractcardaffiliationlist">Cloudwalk Technology, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2017–2021
</span></p></div>
<div class="cpabstractcardabstract"><p>Although automatic speech recognition (ASR) systems achieved significantly improvements in recent years, spoken language recognition error occurs which can be easily spotted by human beings. Various language modeling techniques have been developed on post recognition tasks like semantic correction. In this paper, we propose a Transformer based semantic correction method with pretrained BART initialization, Experiments on 10000 hours Mandarin speech dataset show that character error rate (CER) can be effectively reduced by 21.7% relatively compared to our baseline ASR system. Expert evaluation demonstrates that actual improvement of our model surpasses what CER indicates.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yangyang Shi|AUTHOR Yangyang Shi]], [[Varun Nagaraja|AUTHOR Varun Nagaraja]], [[Chunyang Wu|AUTHOR Chunyang Wu]], [[Jay Mahadeokar|AUTHOR Jay Mahadeokar]], [[Duc Le|AUTHOR Duc Le]], [[Rohit Prabhavalkar|AUTHOR Rohit Prabhavalkar]], [[Alex Xiao|AUTHOR Alex Xiao]], [[Ching-Feng Yeh|AUTHOR Ching-Feng Yeh]], [[Julian Chan|AUTHOR Julian Chan]], [[Christian Fuegen|AUTHOR Christian Fuegen]], [[Ozlem Kalinli|AUTHOR Ozlem Kalinli]], [[Michael L. Seltzer|AUTHOR Michael L. Seltzer]]
</p><p class="cpabstractcardaffiliationlist">Facebook, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2042–2046
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose a dynamic encoder transducer (DET) for on-device speech recognition. One DET model scales to multiple devices with different computation capacities without retraining or finetuning. To trading off accuracy and latency, DET assigns different encoders to decode different parts of an utterance. We apply and compare the layer dropout and the collaborative learning for DET training. The layer dropout method that randomly drops out encoder layers in the training phase, can do on-demand layer dropout in decoding. Collaborative learning jointly trains multiple encoders with different depths in one single model. Experiment results on Librispeech and in-house data show that DET provides a flexible accuracy and latency trade-off. Results on Librispeech show that the full-size encoder in DET relatively reduces the word error rate of the same size baseline by over 8%. The lightweight encoder in DET trained with collaborative learning reduces the model size by 25% but still gets similar WER as the full-size baseline. DET gets similar accuracy as a baseline model with better latency on a large in-house data set by assigning a lightweight encoder for the beginning part of one utterance and a full-size encoder for the rest.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Binghuai Lin|AUTHOR Binghuai Lin]], [[Liyuan Wang|AUTHOR Liyuan Wang]]
</p><p class="cpabstractcardaffiliationlist">Tencent, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2087–2091
</span></p></div>
<div class="cpabstractcardabstract"><p>Most spoken language assessment systems rely on the text features extracted from the automatic speech recognition (ASR) transcripts and thus depend heavily on the accuracy of the ASR systems. Automatic speech scoring tasks such as reading aloud and spontaneous speech are commonly provided with the prompts in advance to guide test takers’ answers, which contain information that should be included in the answers (e.g., listening passage, and sample response). Utilizing these texts to improve ASR performance is of great importance for these tasks. In this paper, we develop an end-to-end (E2E) ASR system incorporating contextual information provided by prompts. Specifically, we add an extra prompt encoder to a transformer-based E2E ASR system. To fuse the probabilities of the ASR output and the prompts dynamically, we train a soft gate based on the pointer network with carefully constructed prompt training corpus. We experiment the proposed method with data collected from English speaking proficiency tests recorded by Chinese teenagers from 16 to 18 years old. The results show the improved performance of speech recognition with a nearly 50% drop in word error rate (WER) utilizing prompts. Furthermore, the proposed network performs well in rare word recognition such as locations and personal names.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shigeki Karita|AUTHOR Shigeki Karita]], [[Yotaro Kubo|AUTHOR Yotaro Kubo]], [[Michiel Adriaan Unico Bacchiani|AUTHOR Michiel Adriaan Unico Bacchiani]], [[Llion Jones|AUTHOR Llion Jones]]
</p><p class="cpabstractcardaffiliationlist">Google, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2092–2096
</span></p></div>
<div class="cpabstractcardabstract"><p>End-to-end (E2E) modeling is advantageous for automatic speech recognition (ASR) especially for Japanese since word-based tokenization of Japanese is not trivial, and E2E modeling is able to model character sequences directly. This paper focuses on the latest E2E modeling techniques, and investigates their performances on character-based Japanese ASR by conducting comparative experiments. The results are analyzed and discussed in order to understand the relative advantages of long short-term memory (LSTM), and Conformer models in combination with connectionist temporal classification, transducer, and attention-based loss functions. Furthermore, the paper investigates on effectivity of the recent training techniques such as data augmentation (SpecAugment), variational noise injection, and exponential moving average. The best configuration found in the paper achieved the state-of-the-art character error rates of 4.1%, 3.2%, and 3.5% for Corpus of Spontaneous Japanese (CSJ) eval1, eval2, and eval3 tasks, respectively. The system is also shown to be computationally efficient thanks to the efficiency of Conformer transducers.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Takaaki Hori|AUTHOR Takaaki Hori]], [[Niko Moritz|AUTHOR Niko Moritz]], [[Chiori Hori|AUTHOR Chiori Hori]], [[Jonathan Le Roux|AUTHOR Jonathan Le Roux]]
</p><p class="cpabstractcardaffiliationlist">MERL, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2097–2101
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper addresses end-to-end automatic speech recognition (ASR) for long audio recordings such as lecture and conversational speeches. Most end-to-end ASR models are designed to recognize independent utterances, but contextual information (e.g., speaker or topic) over multiple utterances is known to be useful for ASR. In our prior work, we proposed a context-expanded Transformer that accepts multiple consecutive utterances at the same time and predicts an output sequence for the last utterance, achieving 5–15% relative error reduction from utterance-based baselines in lecture and conversational ASR benchmarks. Although the results have shown remarkable performance gain, there is still potential to further improve the model architecture and the decoding process. In this paper, we extend our prior work by (1) introducing the Conformer architecture to further improve the accuracy, (2) accelerating the decoding process with a novel activation recycling technique, and (3) enabling streaming decoding with triggered attention. We demonstrate that the extended Transformer provides state-of-the-art end-to-end ASR performance, obtaining a 17.3% character error rate for the HKUST dataset and 12.0%/6.3% word error rates for the Switchboard-300 Eval2000 CallHome/Switchboard test sets. The new decoding method reduces decoding time by more than 50% and further enables streaming ASR with limited accuracy degradation. </p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Md. Akmal Haidar|AUTHOR Md. Akmal Haidar]], [[Chao Xing|AUTHOR Chao Xing]], [[Mehdi Rezagholizadeh|AUTHOR Mehdi Rezagholizadeh]]
</p><p class="cpabstractcardaffiliationlist">Huawei Technologies, Canada</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2102–2106
</span></p></div>
<div class="cpabstractcardabstract"><p>Reducing the input sequence length of speech features to alleviate the complexity of alignment between speech features and text transcript by sub-sampling approaches is an important way to get better results in end-to-end (E2E) automatic speech recognition (ASR) systems. This issue is more important in Transformer-based ASR, because the self-attention mechanism in Transformers has O(n²) order of complexity in both training and inference. In this paper, we propose a Transformer-based ASR model with the time-reduction layer, in which we incorporate time-reduction layer inside transformer encoder layers in addition to traditional sub-sampling methods to input features that further reduce the frame-rate. This can help in reducing the computational cost of the self-attention process for training and inference with performance improvement. Moreover, we introduce a fine-tuning approach for pre-trained ASR models using self-knowledge distillation (S-KD) which further improves the performance of our ASR model. Experiments on LibriSpeech datasets show that our proposed methods outperform all other Transformer-based ASR systems. Furthermore, with language model (LM) fusion, we achieve new state-of-the-art word error rate (WER) results for Transformer-based ASR models with just 30 million parameters trained without any external data.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jay Mahadeokar|AUTHOR Jay Mahadeokar]], [[Yangyang Shi|AUTHOR Yangyang Shi]], [[Yuan Shangguan|AUTHOR Yuan Shangguan]], [[Chunyang Wu|AUTHOR Chunyang Wu]], [[Alex Xiao|AUTHOR Alex Xiao]], [[Hang Su|AUTHOR Hang Su]], [[Duc Le|AUTHOR Duc Le]], [[Ozlem Kalinli|AUTHOR Ozlem Kalinli]], [[Christian Fuegen|AUTHOR Christian Fuegen]], [[Michael L. Seltzer|AUTHOR Michael L. Seltzer]]
</p><p class="cpabstractcardaffiliationlist">Facebook, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2107–2111
</span></p></div>
<div class="cpabstractcardabstract"><p>Often, the storage and computational constraints of embedded devices demand that a single on-device ASR model serve multiple use-cases / domains. In this paper, we propose a //Flexible Transducer// (FlexiT) for on-device automatic speech recognition to flexibly deal with multiple use-cases / domains with different accuracy and latency requirements. Specifically, using a single compact model, FlexiT provides a fast response for //voice commands//, and accurate transcription but with more latency for //dictation//. In order to achieve flexible and better accuracy and latency trade-offs, the following techniques are used. Firstly, we propose using domain-specific altering of segment size for Emformer encoder that enables FlexiT to achieve flexible decoding. Secondly, we use Alignment Restricted RNNT loss to achieve flexible fine-grained control on token emission latency for different domains. Finally, we add a domain indicator vector as an additional input to the FlexiT model. Using the combination of techniques, we show that a single model can be used to improve WERs and real time factor for dictation scenarios while maintaining optimal latency for voice commands use-cases.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shiqi Zhang|AUTHOR Shiqi Zhang]]^^1^^
, [[Yan Liu|AUTHOR Yan Liu]]^^2^^
, [[Deyi Xiong|AUTHOR Deyi Xiong]]^^2^^
, [[Pei Zhang|AUTHOR Pei Zhang]]^^1^^
, [[Boxing Chen|AUTHOR Boxing Chen]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Alibaba, China; ^^2^^Tianjin University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2047–2051
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we investigate multi-domain neural machine translation (NMT) that translates sentences of different domains in a single model. To this end, we propose a domain-aware self-attention mechanism that jointly learns domain representations with the single NMT model. The learned domain representations are integrated into both the encoder and decoder. We further propose two different domain representation learning approaches: 1) word-level unsupervised learning via a domain attention network and 2) guided learning with an auxiliary loss. The two learning approaches allow our multi-domain NMT to work in different settings as to whether the domain information is available or not. Experiments on both Chinese-English and English-French demonstrate that our multi-domain model outperforms a strong baseline built on the Transformer and other previous multi-domain NMT approaches. Further analyses show that our model is able to learn domain clusters even without prior knowledge about the domain structure.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Albert Zeyer|AUTHOR Albert Zeyer]], [[André Merboldt|AUTHOR André Merboldt]], [[Wilfried Michel|AUTHOR Wilfried Michel]], [[Ralf Schlüter|AUTHOR Ralf Schlüter]], [[Hermann Ney|AUTHOR Hermann Ney]]
</p><p class="cpabstractcardaffiliationlist">RWTH Aachen University, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2052–2056
</span></p></div>
<div class="cpabstractcardabstract"><p>We present our transducer model on Librispeech. We study variants to include an external language model (LM) with shallow fusion and subtract an estimated internal LM. This is justified by a Bayesian interpretation where the transducer model prior is given by the estimated internal LM. The subtraction of the internal LM gives us over 14% relative improvement over normal shallow fusion. Our transducer has a separate probability distribution for the non-blank labels which allows for easier combination with the external LM, and easier estimation of the internal LM. We additionally take care of including the end-of-sentence (EOS) probability of the external LM in the last blank probability which further improves the performance. All our code and setups are published.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sepand Mavandadi|AUTHOR Sepand Mavandadi]], [[Tara N. Sainath|AUTHOR Tara N. Sainath]], [[Ke Hu|AUTHOR Ke Hu]], [[Zelin Wu|AUTHOR Zelin Wu]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2057–2061
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose a new two-pass E2E speech recognition model that improves ASR performance by training on a combination of paired data and unpaired text data. Previously, the joint acoustic and text decoder (JATD) has shown promising results through the use of text data during model training and the recently introduced deliberation architecture has reduced recognition errors by leveraging first-pass decoding results. Our method, dubbed Deliberation-JATD, combines the spelling correcting abilities of deliberation with JATD’s use of unpaired text data to further improve performance. The proposed model produces substantial gains across multiple test sets, especially those focused on rare words, where it reduces word error rate (WER) by between 12% and 22.5% relative. This is done without increasing model size or requiring multi-stage training, making Deliberation-JATD an efficient candidate for on-device applications.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zoltán Tüske|AUTHOR Zoltán Tüske]], [[George Saon|AUTHOR George Saon]], [[Brian Kingsbury|AUTHOR Brian Kingsbury]]
</p><p class="cpabstractcardaffiliationlist">IBM, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2062–2066
</span></p></div>
<div class="cpabstractcardabstract"><p>In our previous work we demonstrated that a single headed attention encoder-decoder model is able to reach state-of-the-art results in conversational speech recognition. In this paper, we further improve the results for both Switchboard 300 and 2000. Through use of an improved optimizer, speaker vector embeddings, and alternative speech representations we reduce the recognition errors of our LSTM system on Switchboard-300 by 4% relative. Compensation of the decoder model with the probability ratio approach allows more efficient integration of an external language model, and we report 5.9% and 11.5% WER on the SWB and CHM parts of Hub5’00 with very simple LSTM models. Our study also considers the recently proposed conformer, and more advanced self-attention based language models. Overall, the conformer shows similar performance to the LSTM; nevertheless, their combination and decoding with an improved LM reaches a new record on Switchboard-300, 5.0% and 10.0% WER on SWB and CHM. Our findings are also confirmed on Switchboard-2000, and a new state of the art is reported, practically reaching the limit of the benchmark.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Keyu An|AUTHOR Keyu An]], [[Yi Zhang|AUTHOR Yi Zhang]], [[Zhijian Ou|AUTHOR Zhijian Ou]]
</p><p class="cpabstractcardaffiliationlist">Tsinghua University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2067–2071
</span></p></div>
<div class="cpabstractcardabstract"><p>Time Delay Neural Networks (TDNNs) are widely used in both DNN-HMM based hybrid speech recognition systems and recent end-to-end systems. Nevertheless, the receptive fields of TDNNs are limited and fixed, which is not desirable for tasks like speech recognition, where the temporal dynamics of speech are varied and affected by many factors. In this paper, we propose to use deformable TDNNs for adaptive temporal dynamics modeling in end-to-end speech recognition. Inspired by deformable ConvNets, deformable TDNNs augment the temporal sampling locations with additional offsets and learn the offsets automatically based on the ASR criterion, without additional supervision. Experiments show that deformable TDNNs obtain state-of-the-art results on WSJ benchmarks (1.42%/3.45% WER on WSJ eval92/dev93 respectively), outperforming standard TDNNs significantly. Furthermore, we propose the latency control mechanism for deformable TDNNs, which enables deformable TDNNs to do streaming ASR without accuracy degradation.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Chengdong Liang|AUTHOR Chengdong Liang]], [[Menglong Xu|AUTHOR Menglong Xu]], [[Xiao-Lei Zhang|AUTHOR Xiao-Lei Zhang]]
</p><p class="cpabstractcardaffiliationlist">Northwestern Polytechnical University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2072–2076
</span></p></div>
<div class="cpabstractcardabstract"><p>Self-attention (SA), which encodes vector sequences according to their pairwise similarity, is widely used in speech recognition due to its strong context modeling ability. However, when applied to long sequence data, its accuracy is reduced. This is caused by the fact that its weighted average operator may lead to the dispersion of the attention distribution, which results in the relationship between adjacent signals ignored. To address this issue, in this paper, we introduce relative-position-awareness self-attention (RPSA). It not only maintains the global-range dependency modeling ability of self-attention, but also improves the localness modeling ability. Because the local window length of the original RPSA is fixed and sensitive to different test data, here we propose Gaussian-based self-attention (GSA) whose window length is learnable and adaptive to the test data automatically. We further generalize GSA to a new residual Gaussian self-attention (resGSA) for the performance improvement. We apply RPSA, GSA, and resGSA to Transformer-based speech recognition respectively. Experimental results on the AISHELL-1 Mandarin speech recognition corpus demonstrate the effectiveness of the proposed methods. For example, the resGSA-Transformer achieves a character error rate (CER) of 5.86% on the test set, which is relative 7.8% lower than that of the SA-Transformer. Although the performance of the proposed resGSA-Transformer is only slightly better than that of the RPSA-Transformer, it does not have to tune the window length manually.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zhao You|AUTHOR Zhao You]]^^1^^
, [[Shulin Feng|AUTHOR Shulin Feng]]^^1^^
, [[Dan Su|AUTHOR Dan Su]]^^1^^
, [[Dong Yu|AUTHOR Dong Yu]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Tencent, China; ^^2^^Tencent, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2077–2081
</span></p></div>
<div class="cpabstractcardabstract"><p>Recently, Mixture of Experts (MoE) based Transformer has shown promising results in many domains. This is largely due to the following advantages of this architecture: firstly, MoE based Transformer can increase model capacity without computational cost increasing both at training and inference time. Besides, MoE based Transformer is a dynamic network which can adapt to the varying complexity of input instances in real-world applications. In this work, we explore the MoE based model for speech recognition, named SpeechMoE. To further control the sparsity of router activation and improve the diversity of gate values, we propose a sparsity L1 loss and a mean importance loss respectively. In addition, a new router architecture is used in SpeechMoE which can simultaneously utilize the information from a shared embedding network and the hierarchical representation of different MoE layers. Experimental results show that SpeechMoE can achieve lower character error rate (CER) with comparable computation cost than traditional static networks, providing 7.0%~23.0% relative CER improvements on four evaluation datasets.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Chi-Hang Leong|AUTHOR Chi-Hang Leong]], [[Yu-Han Huang|AUTHOR Yu-Han Huang]], [[Jen-Tzung Chien|AUTHOR Jen-Tzung Chien]]
</p><p class="cpabstractcardaffiliationlist">NYCU, Taiwan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2082–2086
</span></p></div>
<div class="cpabstractcardabstract"><p>Traditionally, transformer with connectionist temporal classification (CTC) was developed for offline speech recognition where the transcription was generated after the whole utterance has been spoken. However, it is crucial to carry out online transcription of speech signal for many applications including live broadcasting and meeting. This paper presents an online transformer for real-time speech recognition where online transcription is generated chunk by chuck. In particular, an online compressive transformer (OCT) is proposed for end-to-end speech recognition. This OCT aims to generate immediate transcription for each audio chunk while the comparable performance with offline speech recognition can be still achieved. In the implementation, OCT tightly combines with both CTC and recurrent neural network transducer by minimizing their losses for training. In addition, this OCT systematically merges with compressive memory to reduce potential performance degradation due to online processing. This degradation is caused by online transcription which is generated by the chunks without history information. The experiments on speech recognition show that OCT does not only obtain comparable performance with offline transformer, but also work faster than the baseline model.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Przemyslaw Falkowski-Gilski|AUTHOR Przemyslaw Falkowski-Gilski]]
</p><p class="cpabstractcardaffiliationlist">Gdansk University of Technology, Poland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2112–2116
</span></p></div>
<div class="cpabstractcardabstract"><p>The user perceived quality is a mixture of factors, including the background of an individual. The process of auditory perception is discussed in a wide variety of fields, ranging from engineering to medicine. Many studies examine the difference between musicians and non-musicians. Since musical training develops musical hearing and other various auditory capabilities, similar enhancements should be observable in case of bilingual people. This paper examines the difference in perceived speech signal quality between students from monolingual and bilingual classes. The subjective study was carried out on a group of 30 people, with 15 individuals in each class, aged 16–18 years old, considering three languages: English, German, and Polish. Results of this study may aid researchers as well as professionals active in the field of auditory perception, hearing loss related with ageing, and of course evaluation of networks and services.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Rongliang Liu|AUTHOR Rongliang Liu]], [[Nengheng Zheng|AUTHOR Nengheng Zheng]], [[Xi Chen|AUTHOR Xi Chen]]
</p><p class="cpabstractcardaffiliationlist">Shenzhen University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2157–2161
</span></p></div>
<div class="cpabstractcardabstract"><p>Direction of arrival (DOA) estimation is a key front-end technology for many speech-based intelligent systems. Deep neural networks-based DOA systems have recently demonstrated better performances than conventional ones. However, most of the existing networks use only one specific acoustical feature as input, limiting their noise-robustness. This paper proposes an attention-based feature fusion approach for DOA estimation. Two classical DOA estimation approaches, i.e., the least mean square-based adaptive filtering and the generalized cross-correlation, are adopted, and the respective features are served as input to the networks. Network with attention mechanism is built to learn the optimal weighting scheme, which can take advantage of the two features’ complementary contributions in DOA estimation. Simulation and real test results show that the proposed method could use the complementary DOA information in different features and improve estimation accuracy under acoustic conditions with both noise and reverberation.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shoufeng Lin|AUTHOR Shoufeng Lin]]^^1^^
, [[Zhaojie Luo|AUTHOR Zhaojie Luo]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Curtin University, Australia; ^^2^^Osaka University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2162–2166
</span></p></div>
<div class="cpabstractcardabstract"><p>In the speech signal processing area, far-field speaker localization using only the audio modality has been a fundamental but challenging problem, especially in presence of reverberation and a varying number of moving speakers. Many existing methods use speech onsets as reliable directional cues against reverberation and interference. However, signal processing can be computationally costly especially in time domain. In this paper, we present a computationally efficient implementation of the recently proposed Onset-Multichannel Cross Correlation Coefficient (MCCC) method. Instead of scanning the entire spatial grid, reverse mapping and linear interpolation are used. The proposed algorithm with better efficiency is referred to as the Onset-MCC in this paper. Performance of the Onset-MCC is studied over various reverberant and noisy scenarios. To further suppress outliers and address miss-detections, as well as for the adaptive tracking of a varying number of moving speakers, we present an adaptive implementation of the generalized labeled multi-Bernoulli (GLMB) filter. As shown in studied cases, the proposed system demonstrates reliable and accurate location estimates in far-field (T,,60,, = 1s), and is applicable to tracking an unknown and time-varying number of moving speakers.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Vivek Sivaraman Narayanaswamy|AUTHOR Vivek Sivaraman Narayanaswamy]]^^1^^
, [[Jayaraman J. Thiagarajan|AUTHOR Jayaraman J. Thiagarajan]]^^2^^
, [[Andreas Spanias|AUTHOR Andreas Spanias]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Arizona State University, USA; ^^2^^LLNL, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2167–2171
</span></p></div>
<div class="cpabstractcardabstract"><p>Unsupervised deep learning methods for solving audio restoration problems extensively rely on carefully tailored neural architectures that carry strong inductive biases for defining priors in the time or spectral domain. In this context, lot of recent success has been achieved with sophisticated convolutional network constructions that recover audio signals in the spectral domain. However, in practice, audio priors require careful engineering of the convolutional kernels to be effective at solving ill-posed restoration tasks, while also being easy to train. To this end, in this paper, we propose a new U-Net based prior that does not impact either the network complexity or convergence behavior of existing convolutional architectures, yet leads to significantly improved restoration. In particular, we advocate the use of carefully designed dilation schedules and dense connections in the U-Net architecture to obtain powerful audio priors. Using empirical studies on standard benchmarks and a variety of ill-posed restoration tasks, such as audio denoising, in-painting and source separation, we demonstrate that our proposed approach consistently outperforms widely adopted audio prior architectures.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Weiguang Chen|AUTHOR Weiguang Chen]]^^1^^
, [[Cheng Xue|AUTHOR Cheng Xue]]^^2^^
, [[Xionghu Zhong|AUTHOR Xionghu Zhong]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Hunan University, China; ^^2^^Hunan University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2172–2176
</span></p></div>
<div class="cpabstractcardabstract"><p>Existing direction-of-arrival (DOA) estimation methods usually assume that signals are received by an array of omnidirectional microphones. The performance can be seriously degraded due to heavy reverberation and noise. In this paper, DOA estimation using an array with directional microphones is considered. As the signal response varies over different DOAs, the magnitude information as well as the phase information can be employed to estimate the DOA. We first introduce the spherically isotropic noise field using directional microphones. The Cramér-Rao Lower Bound (CRLB) for DOA estimation is then derived and compared with that using omnidirectional microphones under different signal-to-reverberation ratio (SRR) environments. In addition, we extend existing steered response power (SRP), minimum variance distortionless response (MVDR) and multiple signal classification (MUSIC) estimators for the DOA estimation using directional microphone arrays. Both CRLB Analysis and DOA estimation show that better DOA estimation performance can be achieved by using a directional microphone array.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Christopher Schymura|AUTHOR Christopher Schymura]]^^1^^
, [[Benedikt Bönninghoff|AUTHOR Benedikt Bönninghoff]]^^1^^
, [[Tsubasa Ochiai|AUTHOR Tsubasa Ochiai]]^^2^^
, [[Marc Delcroix|AUTHOR Marc Delcroix]]^^2^^
, [[Keisuke Kinoshita|AUTHOR Keisuke Kinoshita]]^^2^^
, [[Tomohiro Nakatani|AUTHOR Tomohiro Nakatani]]^^2^^
, [[Shoko Araki|AUTHOR Shoko Araki]]^^2^^
, [[Dorothea Kolossa|AUTHOR Dorothea Kolossa]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Ruhr-Universität Bochum, Germany; ^^2^^NTT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2117–2121
</span></p></div>
<div class="cpabstractcardabstract"><p>Sound event localization aims at estimating the positions of sound sources in the environment with respect to an acoustic receiver (e.g. a microphone array). Recent advances in this domain most prominently focused on utilizing deep recurrent neural networks. Inspired by the success of transformer architectures as a suitable alternative to classical recurrent neural networks, this paper introduces a novel transformer-based sound event localization framework, where temporal dependencies in the received multi-channel audio signals are captured via self-attention mechanisms. Additionally, the estimated sound event positions are represented as multivariate Gaussian variables, yielding an additional notion of uncertainty, which many previously proposed deep learning-based systems designed for this application do not provide. The framework is evaluated on three publicly available multi-source sound event localization datasets and compared against state-of-the-art methods in terms of localization error and event detection accuracy. It outperforms all competing systems on all datasets with statistical significant differences in performance.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Masahito Togami|AUTHOR Masahito Togami]], [[Robin Scheibler|AUTHOR Robin Scheibler]]
</p><p class="cpabstractcardaffiliationlist">LINE, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2122–2126
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose a sound source localization technique that estimates a speech source location without precise grid searching. The source location is estimated in a parameter optimization manner to minimize the steered-response power (SRP) function with the near-field assumption. Because there is no closed-form solution for the SRP function, we introduce an auxiliary function of the SRP function based on the majorization-minimization (MM) algorithm. Parameters are updated iteratively to minimize the auxiliary function with alternate execution of time-difference-of-arrival (TDOA) estimation and range-difference (RD) based localization. When TDOA estimation and RD-based localization are performed in a cascade manner, the estimation accuracy of the source location is strongly affected by the estimation accuracy of the TDOA. On contrary, the proposed method corrects the estimated TDOA by referring to the estimated source location in the previous iteration. Thus, it is expected for the proposed method to be robust against TDOA estimation error which occurs under reverberant environments. Experimental results show that the proposed method outperforms conventional techniques under a reverberant environment.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Gabriel Mittag|AUTHOR Gabriel Mittag]], [[Babak Naderi|AUTHOR Babak Naderi]], [[Assmaa Chehadi|AUTHOR Assmaa Chehadi]], [[Sebastian Möller|AUTHOR Sebastian Möller]]
</p><p class="cpabstractcardaffiliationlist">Technische Universität Berlin, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2127–2131
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we present an update to the NISQA speech quality prediction model that is focused on distortions that occur in communication networks. In contrast to the previous version, the model is trained end-to-end and the time-dependency modelling and time-pooling is achieved through a Self-Attention mechanism. Besides overall speech quality, the model also predicts the four speech quality dimensions //Noisiness//, //Coloration//, //Discontinuity//, and //Loudness//, and in this way gives more insight into the cause of a quality degradation. Furthermore, new datasets with over 13,000 speech files were created for training and validation of the model. The model was finally tested on a new, live-talking test dataset that contains recordings of real telephone calls. Overall, NISQA was trained and evaluated on 81 datasets from different sources and showed to provide reliable predictions also for unknown speech samples. The code, model weights, and datasets are open-sourced.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Babak Naderi|AUTHOR Babak Naderi]]^^1^^
, [[Ross Cutler|AUTHOR Ross Cutler]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Technische Universität Berlin, Germany; ^^2^^Microsoft, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2132–2136
</span></p></div>
<div class="cpabstractcardabstract"><p>The quality of the speech communication systems, which include noise suppression algorithms, are typically evaluated in laboratory experiments according to the ITU-T Rec. P.835, in which participants rate background noise, speech signal, and overall quality separately. This paper introduces an open-source toolkit for conducting subjective quality evaluation of noise suppressed speech in crowdsourcing. We followed the ITU-T Rec. P.835, and P.808 and highly automate the process to prevent moderator’s error. To assess the validity of our evaluation method, we compared the Mean Opinion Scores (MOS), calculated using ratings collected with our implementation and the MOS values from a standard laboratory experiment conducted according to the ITU-T Rec P.835. Results show a high validity in all three scales, namely background noise, speech signal and overall quality (average Pearson Correlation Coefficient (PCC) = 0.961). Results of a round-robin test (N=5) showed that our implementation is also a highly reproducible evaluation method (PCC=0.99). Finally, we used our implementation in the INTERSPEECH 2021 Deep Noise Suppression Challenge [1] as the primary evaluation metric, which demonstrates it is practical to use at scale. The results are analyzed to determine why the overall performance was the best in terms of background noise and speech quality.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jianhua Geng|AUTHOR Jianhua Geng]], [[Sifan Wang|AUTHOR Sifan Wang]], [[Juan Li|AUTHOR Juan Li]], [[JingWei Li|AUTHOR JingWei Li]], [[Xin Lou|AUTHOR Xin Lou]]
</p><p class="cpabstractcardaffiliationlist">ShanghaiTech University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2137–2141
</span></p></div>
<div class="cpabstractcardabstract"><p>In the context of multi-source direction of arrival (DOA) estimation using a single acoustic vector sensor (AVS), the received signal is usually a mixture of noise, reverberation and source signals. The identification of the time-frequency (TF) bins that are dominated by the source signals can significantly improve the robustness of the DOA estimation. In this paper, a TF bin selection based DOA estimation pipeline is proposed. The proposed pipeline mainly involves three key steps: key frame identification, TF bin selection and DOA extraction. We identify the key frames by frame-wisely examining the effective rank. Subsequently, the geometric medians of the selected key frames are extracted to alleviate the impact of extreme outliers. The simulation results show that the accuracy and the robustness of the proposed pipeline outperform the state-of-the-art (SOTA) techniques.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Meng Yu|AUTHOR Meng Yu]], [[Chunlei Zhang|AUTHOR Chunlei Zhang]], [[Yong Xu|AUTHOR Yong Xu]], [[Shi-Xiong Zhang|AUTHOR Shi-Xiong Zhang]], [[Dong Yu|AUTHOR Dong Yu]]
</p><p class="cpabstractcardaffiliationlist">Tencent, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2142–2146
</span></p></div>
<div class="cpabstractcardabstract"><p>The objective speech quality assessment is usually conducted by comparing received speech signal with its clean reference, while human beings are capable of evaluating the speech quality without any reference, such as in the mean opinion score (MOS) tests. Non-intrusive speech quality assessment has attracted much attention recently due to the lack of access to clean reference signals for objective evaluations in real scenarios. In this paper, we propose a novel non-intrusive speech quality measurement model, MetricNet, which leverages label distribution learning and joint speech reconstruction learning to achieve significantly improved performance compared to the existing non-intrusive speech quality measurement models. We demonstrate that the proposed approach yields promisingly high correlation to the intrusive objective evaluation of speech quality on clean, noisy and processed speech data.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Andrea Toma|AUTHOR Andrea Toma]], [[Daniele Salvati|AUTHOR Daniele Salvati]], [[Carlo Drioli|AUTHOR Carlo Drioli]], [[Gian Luca Foresti|AUTHOR Gian Luca Foresti]]
</p><p class="cpabstractcardaffiliationlist">Università di Udine, Italy</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2147–2151
</span></p></div>
<div class="cpabstractcardabstract"><p>A novel speaker localization algorithm from micro aerial vehicles (MAVs) is investigated. It introduces a joint direction of arrival (DOA) and distance prediction method based on processing and fusion of the multi-channel speech data with radio frequency (RF) measurements of the received signal strength. Possible applications include unmanned aerial vehicles (UAVs)-based reconnaissance and surveillance against intrusions and search and rescue in hostile environments. A 3-stages convolutional neural network (CNN) with a fusion layer is proposed to perform this task with the objective of augmenting the source localization from multi-channel speech signals. Two parallel CNNs process the speech and RF data, and the regression network produces predictions of the angle and distance from the source after the fusion layer. To show the performance and effectiveness of this RF-assisted method, the experimental scenario and datasets are presented and experiments are then discussed along with the results that have been obtained.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Katsutoshi Itoyama|AUTHOR Katsutoshi Itoyama]]^^1^^
, [[Yoshiya Morimoto|AUTHOR Yoshiya Morimoto]]^^1^^
, [[Shungo Masaki|AUTHOR Shungo Masaki]]^^1^^
, [[Ryosuke Kojima|AUTHOR Ryosuke Kojima]]^^2^^
, [[Kenji Nishida|AUTHOR Kenji Nishida]]^^1^^
, [[Kazuhiro Nakadai|AUTHOR Kazuhiro Nakadai]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Tokyo Tech, Japan; ^^2^^Kyoto University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2152–2156
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper addresses the properties and effectiveness of the von Mises-Bernoulli deep neural network (vM-B DNN), a neural network capable of learning periodic information, in sound source localization. The phase, which is periodic information, is an important cue in sound source localization, but typical neural network cannot handle periodic input values properly. The vM-B DNN has been theoretically revealed to be able to handle periodic input values and its effectiveness has been shown in a simple case study of sound source localization using artificial sinusoids, but it was not in the case of speech signals. We conducted both numerical simulation and actual environment experiments. We compared a sound source localization method using vM-B DNN with those using ordinary neural networks, and showed that the vM-B DNN outperforms other methods under various conditions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jaeseong You|AUTHOR Jaeseong You]], [[Dalhyun Kim|AUTHOR Dalhyun Kim]], [[Gyuhyeon Nam|AUTHOR Gyuhyeon Nam]], [[Geumbyeol Hwang|AUTHOR Geumbyeol Hwang]], [[Gyeongsu Chae|AUTHOR Gyeongsu Chae]]
</p><p class="cpabstractcardaffiliationlist">MoneyBrain, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2177–2181
</span></p></div>
<div class="cpabstractcardabstract"><p>Several of the latest GAN-based vocoders show remarkable achievements, outperforming autoregressive and flow-based competitors in both qualitative and quantitative measures while synthesizing orders of magnitude faster. In this work, we hypothesize that the common factor underlying their success is the multi-resolution discriminating framework, not the minute details in architecture, loss function, or training strategy. We experimentally test the hypothesis by evaluating six different generators paired with one shared multi-resolution discriminating framework. For all evaluative measures with respect to text-to-speech syntheses and for all perceptual metrics, their performances are not distinguishable from one another, which supports our hypothesis.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zhengxi Liu|AUTHOR Zhengxi Liu]]^^1^^
, [[Yanmin Qian|AUTHOR Yanmin Qian]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Sun Yat-sen University, China; ^^2^^SJTU, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2222–2226
</span></p></div>
<div class="cpabstractcardabstract"><p>Recent studies have shown that neural vocoders based on generative adversarial network (GAN) can generate audios with high quality. While GAN based neural vocoders have shown to be computationally much more efficient than those based on autoregressive predictions, the real-time generation of the highest quality audio on CPU is still a very challenging task. One major computation of all GAN-based neural vocoders comes from the stacked upsampling layers, which were designed to match the length of the waveform’s length of output and temporal resolution. Meanwhile, the computational complexity of upsampling networks is closely correlated with the numbers of samples generated for each window. To reduce the computation of upsampling layers, we propose a new GAN based neural vocoder called Basis-MelGAN where the raw audio samples are decomposed with a learned basis and their associated weights. As the prediction targets of Basis-MelGAN are the weight values associated with each learned basis instead of the raw audio samples, the upsampling layers in Basis-MelGAN can be designed with much simpler networks. Compared with other GAN based neural vocoders, the proposed Basis-MelGAN could produce comparable high-quality audio but significantly reduced computational complexity from HiFi-GAN V1’s 17.74 GFLOPs to 7.95 GFLOPs.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Min-Jae Hwang|AUTHOR Min-Jae Hwang]]^^1^^
, [[Ryuichi Yamamoto|AUTHOR Ryuichi Yamamoto]]^^2^^
, [[Eunwoo Song|AUTHOR Eunwoo Song]]^^3^^
, [[Jae-Min Kim|AUTHOR Jae-Min Kim]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Search Solutions, Korea; ^^2^^LINE, Japan; ^^3^^Naver, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2227–2231
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper proposes a multi-band harmonic-plus-noise (HN) Parallel WaveGAN (PWG) vocoder. To generate a high-fidelity speech signal, it is important to well-reflect the harmonic-noise characteristics of the speech waveform in the time-frequency domain. However, it is difficult for the conventional PWG model to accurately match this condition, as its single generator inefficiently represents the complicated nature of harmonic-noise structures. In the proposed method, the HN WaveNet models are employed to overcome this limitation, which enable the separate generation of the harmonic and noise components of speech signals from the pitch-dependent sine wave and Gaussian noise sources, respectively. Then, the energy ratios between harmonic and noise components in multiple frequency bands (i.e., subband harmonicities) are predicted by an additional harmonicity estimator. Weighted by the estimated harmonicities, the gain of harmonic and noise components in each subband is adjusted, and finally mixed together to compose the full-band speech signal. Subjective evaluation results showed that the proposed method significantly improved the perceptual quality of the synthesized speech.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jian Cong|AUTHOR Jian Cong]]^^1^^
, [[Shan Yang|AUTHOR Shan Yang]]^^2^^
, [[Lei Xie|AUTHOR Lei Xie]]^^1^^
, [[Dan Su|AUTHOR Dan Su]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Northwestern Polytechnical University, China; ^^2^^Tencent, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2182–2186
</span></p></div>
<div class="cpabstractcardabstract"><p>Current two-stage TTS framework typically integrates an acoustic model with a vocoder — the acoustic model predicts a low resolution intermediate representation such as Mel-spectrum while the vocoder generates waveform from the intermediate representation. Although the intermediate representation is served as a bridge, there still exists critical mismatch between the acoustic model and the vocoder as they are commonly separately learned and work on different distributions of representation, leading to inevitable artifacts in the synthesized speech. In this work, different from using pre-designed intermediate representation in most previous studies, we propose to use VAE combining with GAN to learn a latent representation directly from speech and then utilize a flow-based acoustic model to model the distribution of the latent representation from text. In this way, the mismatch problem is migrated as the two stages work on the same distribution. Results demonstrate that the flow-based acoustic model can exactly model the distribution of our learned speech representation and the proposed TTS framework, namely Glow-WaveGAN, can produce high fidelity speech outperforming the state-of-the-art GAN-based model.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Reo Yoneyama|AUTHOR Reo Yoneyama]], [[Yi-Chiao Wu|AUTHOR Yi-Chiao Wu]], [[Tomoki Toda|AUTHOR Tomoki Toda]]
</p><p class="cpabstractcardaffiliationlist">Nagoya University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2187–2191
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose a unified approach to data-driven source-filter modeling using a single neural network for developing a neural vocoder capable of generating high-quality synthetic speech waveforms while retaining flexibility of the source-filter model to control their voice characteristics. Our proposed network called unified source-filter generative adversarial networks (uSFGAN) is developed by factorizing quasi-periodic parallel WaveGAN (QPPWG), one of the neural vocoders based on a single neural network, into a source excitation generation network and a vocal tract resonance filtering network by additionally implementing a regularization loss. Moreover, inspired by neural source filter (NSF), only a sinusoidal waveform is additionally used as the simplest clue to generate a periodic source excitation waveform while minimizing the effect of approximations in the source filter model. The experimental results demonstrate that uSFGAN outperforms conventional neural vocoders, such as QPPWG and NSF in both speech quality and pitch controllability.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kazuki Mizuta|AUTHOR Kazuki Mizuta]], [[Tomoki Koriyama|AUTHOR Tomoki Koriyama]], [[Hiroshi Saruwatari|AUTHOR Hiroshi Saruwatari]]
</p><p class="cpabstractcardaffiliationlist">University of Tokyo, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2192–2196
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper proposes Harmonic WaveGAN, a GAN-based waveform generation model that focuses on the harmonic structure of a speech waveform. Our proposed model uses two discriminators to capture characteristics of a speech waveform in a time domain and in a frequency domain, respectively. In one of them, a harmonic structure discriminator, a 2-D convolution layer called “harmonic convolution” is inserted to model a harmonic structure of a speech waveform. Although harmonic convolution has been shown to perform well in audio restoration tasks, this convolution layer has not yet been fully explored in the field of speech synthesis. Therefore, we seek to improve the perceptual quality of speech samples synthesized by the waveform generation model and investigate the usefulness of harmonic convolution in the field of speech synthesis. Mean opinion score tests showed that the Harmonic WaveGAN can synthesize more natural speech than conventional Parallel WaveGAN. We also showed that a spectrogram of a speech waveform showed a clearer harmonic structure when synthesized by our model than a speech waveform synthesized by the original Parallel WaveGAN.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ji-Hoon Kim|AUTHOR Ji-Hoon Kim]], [[Sang-Hoon Lee|AUTHOR Sang-Hoon Lee]], [[Ji-Hyun Lee|AUTHOR Ji-Hyun Lee]], [[Seong-Whan Lee|AUTHOR Seong-Whan Lee]]
</p><p class="cpabstractcardaffiliationlist">Korea University, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2197–2201
</span></p></div>
<div class="cpabstractcardabstract"><p>Although recent works on neural vocoder have improved the quality of synthesized audio, there still exists a gap between generated and ground-truth audio in frequency space. This difference leads to spectral artifacts such as hissing noise or reverberation, and thus degrades the sample quality. In this paper, we propose Fre-GAN which achieves frequency-consistent audio synthesis with highly improved generation quality. Specifically, we first present resolution-connected generator and resolution-wise discriminators, which help learn various scales of spectral distributions over multiple frequency bands. Additionally, to reproduce high-frequency components accurately, we leverage discrete wavelet transform in the discriminators. From our experiments, Fre-GAN achieves high-fidelity waveform generation with a gap of only 0.03 MOS compared to ground-truth audio while outperforming standard models in quality.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jinhyeok Yang|AUTHOR Jinhyeok Yang]], [[Jae-Sung Bae|AUTHOR Jae-Sung Bae]], [[Taejun Bak|AUTHOR Taejun Bak]], [[Young-Ik Kim|AUTHOR Young-Ik Kim]], [[Hoon-Young Cho|AUTHOR Hoon-Young Cho]]
</p><p class="cpabstractcardaffiliationlist">NCSOFT, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2202–2206
</span></p></div>
<div class="cpabstractcardabstract"><p>Recent advances in neural multi-speaker text-to-speech (TTS) models have enabled the generation of reasonably good speech quality with a single model and made it possible to synthesize the speech of a speaker with limited training data. Fine-tuning to the target speaker data with the multi-speaker model can achieve better quality, however, there still exists a gap compared to the real speech sample and the model depends on the speaker. In this work, we propose GANSpeech, which is a high-fidelity multi-speaker TTS model that adopts the adversarial training method to a non-autoregressive multi-speaker TTS model. In addition, we propose simple but efficient automatic scaling methods for feature matching loss used in adversarial training. In the subjective listening tests, GANSpeech significantly outperformed the baseline multi-speaker FastSpeech and FastSpeech2 models, and showed a better MOS score than the speaker-specific fine-tuned FastSpeech2.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Won Jang|AUTHOR Won Jang]], [[Dan Lim|AUTHOR Dan Lim]], [[Jaesam Yoon|AUTHOR Jaesam Yoon]], [[Bongwan Kim|AUTHOR Bongwan Kim]], [[Juntae Kim|AUTHOR Juntae Kim]]
</p><p class="cpabstractcardaffiliationlist">Kakao, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2207–2211
</span></p></div>
<div class="cpabstractcardabstract"><p>Most neural vocoders employ band-limited mel-spectrograms to generate waveforms. If full-band spectral features are used as the input, the vocoder can be provided with as much acoustic information as possible. However, in some models employing full-band mel-spectrograms, an over-smoothing problem occurs as part of which non-sharp spectrograms are generated. To address this problem, we propose UnivNet, a neural vocoder that synthesizes high-fidelity waveforms in real time. Inspired by works in the field of voice activity detection, we added a multi-resolution spectrogram discriminator that employs multiple linear spectrogram magnitudes computed using various parameter sets. Using full-band mel-spectrograms as input, we expect to generate high-resolution signals by adding a discriminator that employs spectrograms of multiple resolutions as the input. In an evaluation on a dataset containing information on hundreds of speakers, UnivNet obtained the best objective and subjective results among competing models for both seen and unseen speakers. These results, including the best subjective score for text-to-speech, demonstrate the potential for fast adaptation to new speakers without a need for training from scratch.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mohammed Salah Al-Radhi|AUTHOR Mohammed Salah Al-Radhi]]^^1^^
, [[Tamás Gábor Csapó|AUTHOR Tamás Gábor Csapó]]^^2^^
, [[Csaba Zainkó|AUTHOR Csaba Zainkó]]^^1^^
, [[Géza Németh|AUTHOR Géza Németh]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^BME, Hungary; ^^2^^BME, Hungary</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2212–2216
</span></p></div>
<div class="cpabstractcardabstract"><p>To date, various speech technology systems have adopted the vocoder approach, a method for synthesizing speech waveform that shows a major role in the performance of statistical parametric speech synthesis. However, conventional source-filter systems (i.e., STRAIGHT) and sinusoidal models (i.e., MagPhase) tend to produce over-smoothed spectra, which often result in muffled and buzzy synthesized text-to-speech (TTS). WaveNet, one of the best models that nearly resembles the human voice, has to generate a waveform in a time-consuming sequential manner with an extremely complex structure of its neural networks. WaveNet needs large quantities of voice data before accurate predictions can be obtained. In order to motivate a new, alternative approach to these issues, we present an updated synthesizer, which is a simple signal model to train and easy to generate waveforms, using Continuous Wavelet Transform (CWT) to characterize and decompose speech features. CWT provides time and frequency resolutions different from those of the short-time Fourier transform. It can also retain the fine spectral envelope and achieve high controllability of the structure closer to human auditory scales. We confirmed through experiments that our speech synthesis system was able to provide natural-sounding synthetic speech and outperformed the state-of-the-art WaveNet vocoder.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Patrick Lumban Tobing|AUTHOR Patrick Lumban Tobing]], [[Tomoki Toda|AUTHOR Tomoki Toda]]
</p><p class="cpabstractcardaffiliationlist">Nagoya University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2217–2221
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents a novel high-fidelity and low-latency universal neural vocoder framework based on multiband WaveRNN with data-driven linear prediction for discrete waveform modeling (MWDLP). MWDLP employs a coarse-fine bit WaveRNN architecture for 10-bit mu-law waveform modeling. A sparse gated recurrent unit with a relatively large size of hidden units is utilized, while the multiband modeling is deployed to achieve real-time low-latency usage. A novel technique for data-driven linear prediction (LP) with discrete waveform modeling is proposed, where the LP coefficients are estimated in a data-driven manner. Moreover, a novel loss function using short-time Fourier transform (STFT) for discrete waveform modeling with Gumbel approximation is also proposed. The experimental results demonstrate that the proposed MWDLP framework generates high-fidelity synthetic speech for seen and unseen speakers and/or language on 300 speakers training data including clean and noisy/reverberant conditions, where the number of training utterances is limited to 60 per speaker, while allowing for real-time low-latency processing using a single core of ~2.1–2.7 GHz CPU with ~0.57–0.64 real-time factor including input/output and feature extraction.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Junkun Chen|AUTHOR Junkun Chen]]^^1^^
, [[Mingbo Ma|AUTHOR Mingbo Ma]]^^2^^
, [[Renjie Zheng|AUTHOR Renjie Zheng]]^^2^^
, [[Liang Huang|AUTHOR Liang Huang]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Oregon State University, USA; ^^2^^Baidu, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2232–2236
</span></p></div>
<div class="cpabstractcardabstract"><p>End-to-end Speech-to-text Translation (E2E-ST), which directly translates source language speech to target language text, is widely useful in practice, but traditional cascaded approaches (ASR+MT) often suffer from error propagation in the pipeline. On the other hand, existing end-to-end solutions heavily depend on the source language transcriptions for pre-training or multi-task training with Automatic Speech Recognition (ASR). We instead propose a simple technique to learn a robust speech encoder in a self-supervised fashion only on the speech side, which can utilize speech data without transcription. This technique termed Spectrogram Reconstruction (SpecRec), learns better speech representation via recovering the missing speech frames and provides an alternative solution to improving E2E-ST. We conduct our experiments over 8 different translation directions. In the setting without using any transcriptions, our technique achieves an average improvement of +1.1 BLEU. SpecRec also improves the translation accuracy with +0.7 BLEU over the baseline in speech translation with ASR multitask training setting.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Alejandro Pérez-González-de-Martos|AUTHOR Alejandro Pérez-González-de-Martos]], [[Javier Iranzo-Sánchez|AUTHOR Javier Iranzo-Sánchez]], [[Adrià Giménez Pastor|AUTHOR Adrià Giménez Pastor]], [[Javier Jorge|AUTHOR Javier Jorge]], [[Joan-Albert Silvestre-Cerdà|AUTHOR Joan-Albert Silvestre-Cerdà]], [[Jorge Civera|AUTHOR Jorge Civera]], [[Albert Sanchis|AUTHOR Albert Sanchis]], [[Alfons Juan|AUTHOR Alfons Juan]]
</p><p class="cpabstractcardaffiliationlist">Universitat Politècnica de València, Spain</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2277–2281
</span></p></div>
<div class="cpabstractcardabstract"><p>Automatic speech-to-speech translation (S2S) is one of the most challenging speech and language processing tasks, especially when considering its application to real-time settings. Recent advances on streaming Automatic Speech Recognition (ASR), simultaneous Machine Translation (MT) and incremental neural Text-To-Speech (TTS) make it possible to develop real-time cascade S2S systems with greatly improved accuracy. On the way to simultaneous machine interpretation, a state-of-the-art cascade streaming S2S system is described and empirically assessed in the simultaneous interpretation of European Parliament debates. We pay particular attention to the TTS component, particularly in terms of speech naturalness under a variety of response-time settings, as well as in terms of speaker similarity for its cross-lingual voice cloning capabilities.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Giuseppe Martucci|AUTHOR Giuseppe Martucci]]^^1^^
, [[Mauro Cettolo|AUTHOR Mauro Cettolo]]^^2^^
, [[Matteo Negri|AUTHOR Matteo Negri]]^^2^^
, [[Marco Turchi|AUTHOR Marco Turchi]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Università di Trento, Italy; ^^2^^FBK, Italy</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2282–2286
</span></p></div>
<div class="cpabstractcardabstract"><p>Error propagation from automatic speech recognition (ASR) to machine translation (MT) is a critical issue for the (still) dominant //cascade// approach to speech translation. To robustify MT to ill-formed inputs, we propose a technique to artificially corrupt clean transcripts so as to emulate noisy automatic transcripts. Our //Lexical Noise// model relies on estimating from ASR data: i) the probability distribution of the possible edit operations applicable to each word, and ii) the probability distribution of possible lexical substitutes for that word. Corrupted data generated from these probabilities are paired with their original clean counterpart for MT adaptation via fine-tuning. Contrastive experiments on three language pairs led to three main findings. First, on noisy transcripts, the adapted models outperform MT systems fine-tuned on synthetic data corrupted with previous noising techniques, approaching the upper bound performance obtained by fine-tuning on real ASR data. Second, the increased robustness does not come at the cost of performance drops on clean test data. Third, and crucial from the application standpoint, our approach is domain/ASR-independent: noising patterns learned from a given ASR system in a certain domain can be successfully applied to robustify MT to errors made by other ASR systems in a different domain.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Piyush Vyas|AUTHOR Piyush Vyas]], [[Anastasia Kuznetsova|AUTHOR Anastasia Kuznetsova]], [[Donald S. Williamson|AUTHOR Donald S. Williamson]]
</p><p class="cpabstractcardaffiliationlist">Indiana University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2287–2291
</span></p></div>
<div class="cpabstractcardabstract"><p>Transformer-based encoder-decoder architectures have recently shown promising results in end-to-end speech translation. However, the content-based attention mechanism employed by the Transformer was designed for text sequences and can only encode global inductive bias, that alone is not sufficient for learning good representations from speech signals. In this work, we address this by putting architectural constraints on the Transformer to allow encoding of both local and global inductive biases. This is accomplished by replacing the Transformer encoder with a Conformer encoder that, in contrast to the Transformer encoder, employs convolution in addition to self-attention and feed-forward. As a result, the new model named Conformer-Transformer has an encoder that captures both local feature correlations and long-range dependencies from speech signals. Experiments on seven non-English to English language directions show that the Conformer-Transformer, compared to strong Transformer-based baselines, achieves up to 3.54 BLEU score improvements with a pre-trained encoder and up to 10.53 BLEU score improvements when trained from scratch.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tejaswini Ananthanarayana|AUTHOR Tejaswini Ananthanarayana]], [[Lipisha Chaudhary|AUTHOR Lipisha Chaudhary]], [[Ifeoma Nwogu|AUTHOR Ifeoma Nwogu]]
</p><p class="cpabstractcardaffiliationlist">Rochester Institute of Technology, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2292–2296
</span></p></div>
<div class="cpabstractcardabstract"><p>Sign language translation without transcription has only recently started to gain attention. In our work, we focus on improving the state-of-the-art translation by introducing a multi-feature fusion architecture with enhanced input features. As sign language is challenging to segment, we obtain the input features by extracting overlapping scaled segments across the video and obtaining their 3D CNN representations. We exploit the attention mechanism in the fusion architecture by initially learning dependencies between different frames of the same video and later fusing them to learn the relations between different features from the same video. In addition to 3D CNN features, we also analyze pose-based features.
Our robust methodology outperforms the state-of-the-art sign language translation model by achieving higher BLEU 3 – BLEU 4 scores and also outperforms the state-of-the-art sequence attention models by achieving a 43.54% increase in BLEU 4 score. We conclude that the combined effects of feature scaling and feature fusion make our model more robust in predicting longer n-grams which are crucial in continuous sign language translation.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Colin Cherry|AUTHOR Colin Cherry]], [[Naveen Arivazhagan|AUTHOR Naveen Arivazhagan]], [[Dirk Padfield|AUTHOR Dirk Padfield]], [[Maxim Krikun|AUTHOR Maxim Krikun]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2237–2241
</span></p></div>
<div class="cpabstractcardabstract"><p>Automatic subtitle translation is an important technology to make video content available across language barriers. Subtitle translation complicates the normal translation problem by adding the challenge of how to format the system output into subtitles. We propose a simple technique that treats subtitle translation as standard sentence translation plus alignment driven markup transfer, which enables us to reliably maintain timing and formatting information from the source subtitles. We also introduce two metrics to measure the quality of subtitle boundaries: a Timed BLEU that penalizes mistimed tokens with respect to a reference subtitle sequence, and a measure of how much Timed BLEU is lost due to suboptimal subtitle boundary placement. In experiments on TED and YouTube subtitles, we show that we are able to achieve much better translation quality than a baseline that translates each subtitle independently, while coming very close to optimal subtitle boundary placement.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Changhan Wang|AUTHOR Changhan Wang]], [[Anne Wu|AUTHOR Anne Wu]], [[Juan Pino|AUTHOR Juan Pino]], [[Alexei Baevski|AUTHOR Alexei Baevski]], [[Michael Auli|AUTHOR Michael Auli]], [[Alexis Conneau|AUTHOR Alexis Conneau]]
</p><p class="cpabstractcardaffiliationlist">Facebook, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2242–2246
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we improve speech translation (ST) through effectively leveraging large quantities of unlabeled speech and text data in different and complementary ways. We explore both pretraining and self-training by using the large Libri-Light speech audio corpus and language modeling with CommonCrawl. Our experiments improve over the previous state of the art by 2.8 BLEU on average on all four considered CoVoST 2 language pairs via a simple recipe of combining wav2vec 2.0 pretraining, a single iteration of self-training and decoding with a language model. Different from existing work, our approach does not leverage any other supervision than ST data. Code and models are publicly released.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Changhan Wang|AUTHOR Changhan Wang]], [[Anne Wu|AUTHOR Anne Wu]], [[Jiatao Gu|AUTHOR Jiatao Gu]], [[Juan Pino|AUTHOR Juan Pino]]
</p><p class="cpabstractcardaffiliationlist">Facebook, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2247–2251
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech translation (ST) is an increasingly popular topic of research, partly due to the development of benchmark datasets. Nevertheless, current datasets cover a limited number of languages. With the aim to foster research into massive multilingual ST and ST for low resource languages, we release CoVoST 2, a large-scale multilingual ST corpus covering translations from 21 languages into English and from English into 15 languages. This represents the largest open dataset available to date for volume and language coverage. Data checks provide evidence about the data quality. We provide extensive speech recognition (ASR), machine translation (MT) and ST baselines. We demonstrate the value of CoVoST 2 for multilingual ST research by leveraging it in 4 investigations: simplify multilingual training by removing ASR pretraining, study multilingual model scaling properties and investigate zero-shot and transfer learning capabilities of models trained on CoVoST 2.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yao-Fei Cheng|AUTHOR Yao-Fei Cheng]], [[Hung-Shin Lee|AUTHOR Hung-Shin Lee]], [[Hsin-Min Wang|AUTHOR Hsin-Min Wang]]
</p><p class="cpabstractcardaffiliationlist">Academia Sinica, Taiwan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2252–2256
</span></p></div>
<div class="cpabstractcardabstract"><p>The end-to-end architecture has made promising progress in speech translation (ST). However, the ST task is still challenging under low-resource conditions. Most ST models have shown unsatisfactory results, especially in the absence of word information from the source speech utterance. In this study, we survey methods to improve ST performance without using source transcription, and propose a learning framework that utilizes a language-independent universal phone recognizer. The framework is based on an attention-based sequence-to-sequence model, where the encoder generates the phonetic embeddings and phone-aware acoustic representations, and the decoder controls the fusion of the two embedding streams to produce the target token sequence. In addition to investigating different fusion strategies, we explore the specific usage of byte pair encoding (BPE), which compresses a phone sequence into a syllable-like segmented sequence. Due to the conversion of symbols, a segmented sequence represents not only pronunciation but also language-dependent information lacking in phones. Experiments conducted on the Fisher Spanish-English and Taigi-Mandarin drama corpora show that our method outperforms the conformer-based baseline, and the performance is close to that of the existing best method using source transcription.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Johanes Effendi|AUTHOR Johanes Effendi]], [[Sakriani Sakti|AUTHOR Sakriani Sakti]], [[Satoshi Nakamura|AUTHOR Satoshi Nakamura]]
</p><p class="cpabstractcardaffiliationlist">NAIST, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2257–2261
</span></p></div>
<div class="cpabstractcardabstract"><p>Despite the successful development of automatic speech recognition (ASR) systems for several of the world’s major languages, they require a tremendous amount of parallel speech-text data. Unfortunately, for many other languages, such resources are usually unavailable. This study addresses the speech-to-text mapping problem given only a collection of visually connected non-parallel speech-text data. We call this “mapping” since the system attempts to learn the semantic association between speech and text instead of recognizing the speech with the exact word-by-word transcription. Here, we propose utilizing our novel cyclic partially-aligned Transformer with two-fold mechanisms. First, we train a Transformer-based vector-quantized variational autoencoder (VQ-VAE) to produce a discrete speech representation in a self-supervised manner. Then, we use a Transformer-based sequence-to-sequence model inside a chain mechanism to map from unknown untranscribed speech utterances into a semantically equivalent text. Because this is not strictly recognizing speech, we focus on evaluating the semantic equivalence of the generated text hypothesis. Our evaluation shows that our proposed method is also effective for a multispeaker natural speech dataset and can also be applied for a cross-lingual application.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hirotaka Tokuyama|AUTHOR Hirotaka Tokuyama]], [[Sakriani Sakti|AUTHOR Sakriani Sakti]], [[Katsuhito Sudoh|AUTHOR Katsuhito Sudoh]], [[Satoshi Nakamura|AUTHOR Satoshi Nakamura]]
</p><p class="cpabstractcardaffiliationlist">NAIST, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2262–2266
</span></p></div>
<div class="cpabstractcardabstract"><p>In spoken communication, a speaker may convey their message in words (linguistic cues) with supplemental information (paralinguistic cues) such as emotion and emphasis. Transforming all spoken information into a written or verbal form is not trivial, especially if the transformation has to be done across languages. Most existing speech-to-text translation systems focus only on translating linguistic information while ignoring paralinguistic information. A few recent studies that proposed paralinguistic translation used a machine translation with hidden Markov model (HMM)-based automatic speech recognition (ASR) and text-to-speech (TTS) that were complicated and suboptimal. Furthermore, paralinguistic information was kept in the acoustic form. Here, we focused on transcribing paralinguistic acoustic cues of emphasis in the target language text. Specifically, we constructed cascade and direct neural Transformer-based speech-to-text translation, and we investigated various methods of expressing emphasis information in the written form of the target language. We performed our experiments on a Japanese-to-English linguistic and paralinguistic speech-to-text translation framework. The results revealed that our proposed method can translate both linguistic and paralinguistic information while keeping the performance as in standard linguistic translation.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Rong Ye|AUTHOR Rong Ye]], [[Mingxuan Wang|AUTHOR Mingxuan Wang]], [[Lei Li|AUTHOR Lei Li]]
</p><p class="cpabstractcardaffiliationlist">ByteDance, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2267–2271
</span></p></div>
<div class="cpabstractcardabstract"><p>End-to-end speech translation models have become a new trend in research due to their potential of reducing error propagation. However, these models still suffer from the challenge of data scarcity. How to effectively use unlabeled or other parallel corpora from machine translation is promising but still an open problem. In this paper, we propose ''Cross S''peech-''T''ext ''Net''work (''XSTNet''), an end-to-end model for speech-to-text translation. XSTNet takes both speech and text as input and outputs both transcription and translation text. The model benefits from its three key design aspects: a self-supervised pre-trained sub-network as the audio encoder, a multi-task training objective to exploit additional parallel bilingual text, and a progressive training procedure. We evaluate the performance of XSTNet and baselines on the MuST-C En-X and LibriSpeech En-Fr datasets. In particular, XSTNet achieves state-of-the-art results on all language directions with an average BLEU of 28.8, outperforming the previous best method by 3.2 BLEU. Code, models, cases, and more detailed analysis are publicly available.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yuka Ko|AUTHOR Yuka Ko]], [[Katsuhito Sudoh|AUTHOR Katsuhito Sudoh]], [[Sakriani Sakti|AUTHOR Sakriani Sakti]], [[Satoshi Nakamura|AUTHOR Satoshi Nakamura]]
</p><p class="cpabstractcardaffiliationlist">NAIST, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2272–2276
</span></p></div>
<div class="cpabstractcardabstract"><p>End-to-end speech translation (ST) translates source language speech directly into target language without an intermediate automatic speech recognition (ASR) output, as in a cascading approach. End-to-end ST has the advantage of avoiding error propagation from the intermediate ASR results, but its performance still lags behind the cascading approach. A recent effort to increase performance is multi-task learning using an auxiliary task of ASR. However, previous multi-task learning for end-to-end ST using cross entropy (CE) loss in ASR-task targets one-hot references and does not consider ASR confusion. In this study, we propose a novel end-to-end ST training method using ASR loss against ASR posterior distributions given by a pre-trained model, which we call ASR posterior-based loss. The proposed method is expected to consider possible ASR confusion due to competing hypotheses with similar pronunciations. The proposed method demonstrated better BLEU results in our Fisher Spanish-to-English translation experiments than the baseline with standard CE loss with label smoothing.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Desh Raj|AUTHOR Desh Raj]], [[Sanjeev Khudanpur|AUTHOR Sanjeev Khudanpur]]
</p><p class="cpabstractcardaffiliationlist">Johns Hopkins University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2351–2355
</span></p></div>
<div class="cpabstractcardabstract"><p>We recently proposed DOVER-Lap, a method for combining overlap-aware speaker diarization system outputs. DOVER-Lap improved upon its predecessor DOVER by using a label mapping method based on globally-informed greedy search. In this paper, we analyze this label mapping in the framework of a maximum orthogonal graph partitioning problem, and present three inferences. First, we show that DOVER-Lap label mapping is exponential in the input size, which poses a challenge when combining a large number of hypotheses. We then revisit the DOVER label mapping algorithm and propose a modification which performs similar to DOVER-Lap while being computationally tractable. We also derive an approximation bound for the algorithm in terms of the maximum number of hypotheses speakers. Finally, we describe a randomized local search algorithm which provides a near-optimal (1-ε)-approximate solution to the problem with high probability. We empirically demonstrate the effectiveness of our methods on the AMI meeting corpus. Our code is publicly available.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hemlata Tak|AUTHOR Hemlata Tak]]^^1^^
, [[Jee-weon Jung|AUTHOR Jee-weon Jung]]^^2^^
, [[Jose Patino|AUTHOR Jose Patino]]^^1^^
, [[Massimiliano Todisco|AUTHOR Massimiliano Todisco]]^^1^^
, [[Nicholas Evans|AUTHOR Nicholas Evans]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^EURECOM, France; ^^2^^Naver, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2356–2360
</span></p></div>
<div class="cpabstractcardabstract"><p>The cues needed to detect spoofing attacks against automatic speaker verification are often located in specific spectral sub-bands or temporal segments. Previous works show the potential to learn these using either spectral or temporal self-attention mechanisms but not the relationships between neighbouring sub-bands or segments. This paper reports our use of graph attention networks (GATs) to model these relationships and to improve spoofing detection performance. GATs leverage a self-attention mechanism over graph structured data to model the data manifold and the relationships between nodes. Our graph is constructed from representations produced by a ResNet. Nodes in the graph represent information either in specific sub-bands or temporal segments. Experiments performed on the ASVspoof 2019 logical access database show that our GAT-based model with temporal attention outperforms all of our baseline single systems. Furthermore, GAT-based systems are complementary to a set of existing systems. The fusion of GAT-based models with more conventional countermeasures delivers a 47% relative improvement in performance compared to the best performing single GAT system.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Victoria Mingote|AUTHOR Victoria Mingote]], [[Antonio Miguel|AUTHOR Antonio Miguel]], [[Alfonso Ortega|AUTHOR Alfonso Ortega]], [[Eduardo Lleida|AUTHOR Eduardo Lleida]]
</p><p class="cpabstractcardaffiliationlist">Universidad de Zaragoza, Spain</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2361–2365
</span></p></div>
<div class="cpabstractcardabstract"><p>Many recent studies in Speaker Verification (SV) have been focused on the design of the most appropriate training loss function, which plays an important role to improve the recognition ability of the systems. However, the verification loss functions created often do not take into account the performance measures which are used for the final system evaluation. For this reason, this paper presents an alternative approach to optimize the parameters of a neural network using a loss function based on the log-likelihood-ratio cost function (CLLR). This function is an application-independent metric that measures the cost of soft detection decisions over all the operating points. Thus, prior or relevance cost parameters assumptions are not employed to obtain it. Moreover, this metric has a differentiable expression, so no approximation is needed to use it as the objective loss to train a neural network. CLLR function as optimization loss was tested on the RSR2015-Part II database for text-dependent speaker verification, providing competitive results without using score normalization and outperforming other similar loss functions as Cross-Entropy combined with Ring Loss, as well as our previous loss function based on an approximation of the Detection Cost Function (DCF).</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Junyi Peng|AUTHOR Junyi Peng]]^^1^^
, [[Xiaoyang Qu|AUTHOR Xiaoyang Qu]]^^1^^
, [[Rongzhi Gu|AUTHOR Rongzhi Gu]]^^2^^
, [[Jianzong Wang|AUTHOR Jianzong Wang]]^^1^^
, [[Jing Xiao|AUTHOR Jing Xiao]]^^1^^
, [[Lukáš Burget|AUTHOR Lukáš Burget]]^^3^^
, [[Jan Černocký|AUTHOR Jan Černocký]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Ping An Technology, China; ^^2^^Peking University, China; ^^3^^Brno University of Technology, Czechia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2366–2370
</span></p></div>
<div class="cpabstractcardabstract"><p>The widely used magnitude spectrum based features have shown their superiority in the field of speech processing. In contrast, the importance of phase spectrum is always ignored. This is because the patterns hidden in phase cannot be intuitively modelled and interpreted, due to phase wrapping phenomenon. In this paper, we explore novel phase spectrum based features, named Learnable Group Delay (LearnGD), to capture useful information in speech signals. Specifically, firstly, the negative of the spectral derivative of the phase spectrum, called group delay (GD), is used to unwrap the phase. Then, to suppress the spiky nature of GD, which is caused by its roots close to the unit circle in the Z domain, a carefully designed light convolutional smoothing layer is employed to reconstruct the GD. Finally, an exponential hyper-parameter is introduced to reconstruct GD features to restore the spectrum range and generate LearnGD features. For performance evaluation, speaker verification experiments are conducted on the VoxCeleb2 corpus. Compared to the traditional acoustic feature derived from the magnitude spectrum, the proposed phase-based features reach a 27.8% relative improvement in terms of EER. Furthermore, experimental results on TIMIT phoneme recognition task also demonstrate the effectiveness of our proposed phase-based features.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ha Nguyen|AUTHOR Ha Nguyen]]^^1^^
, [[Yannick Estève|AUTHOR Yannick Estève]]^^2^^
, [[Laurent Besacier|AUTHOR Laurent Besacier]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^LIG (UMR 5217), France; ^^2^^LIA (EA 4128), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2371–2375
</span></p></div>
<div class="cpabstractcardabstract"><p>Boosted by the simultaneous translation shared task at IWSLT 2020, promising end-to-end online speech translation approaches were recently proposed. They consist in incrementally encoding a speech input (in a source language) and decoding the corresponding text (in a target language) with the best possible trade-off between latency and translation quality. This paper investigates two key aspects of end-to-end simultaneous speech translation: (a) how to encode efficiently the continuous speech flow, and (b) how to segment the speech flow in order to alternate optimally between reading (R: encoding input) and writing (W: decoding output) operations. We extend our previously proposed end-to-end online decoding strategy and show that while replacing BLSTM by ULSTM encoding degrades performance in offline mode, it actually improves both efficiency and performance in online mode. We also measure the impact of different methods to segment the speech signal (using fixed interval boundaries, oracle word boundaries or randomly set boundaries) and show that our best end-to-end online decoding strategy is surprisingly the one that alternates R/W operations on fixed size blocks on our English-German speech translation setup.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Dominik Macháček|AUTHOR Dominik Macháček]], [[Matúš Žilinec|AUTHOR Matúš Žilinec]], [[Ondřej Bojar|AUTHOR Ondřej Bojar]]
</p><p class="cpabstractcardaffiliationlist">Charles University, Czechia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2376–2380
</span></p></div>
<div class="cpabstractcardabstract"><p>Interpreters facilitate multi-lingual meetings but the affordable set of languages is often smaller than what is needed. Automatic simultaneous speech translation can extend the set of provided languages. We investigate if such an automatic system should rather follow the original speaker, or an interpreter to achieve better translation quality at the cost of increased delay.
To answer the question, we release Europarl Simultaneous Interpreting Corpus (ESIC), 10 hours of recordings and transcripts of European Parliament speeches in English, with simultaneous interpreting into Czech and German. We evaluate quality and latency of speaker-based and interpreter-based spoken translation systems from English to Czech. We study the differences in implicit simplification and summarization of the human interpreter compared to a machine translation system trained to shorten the output to some extent. Finally, we perform human evaluation to measure information loss of each of these approaches.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Baptiste Pouthier|AUTHOR Baptiste Pouthier]]^^1^^
, [[Laurent Pilati|AUTHOR Laurent Pilati]]^^1^^
, [[Leela K. Gudupudi|AUTHOR Leela K. Gudupudi]]^^1^^
, [[Charles Bouveyron|AUTHOR Charles Bouveyron]]^^2^^
, [[Frederic Precioso|AUTHOR Frederic Precioso]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^NXP Semiconductors, France; ^^2^^I3S (UMR 7271), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2381–2385
</span></p></div>
<div class="cpabstractcardabstract"><p>It is now well established from a variety of studies that there is a significant benefit from combining video and audio data in detecting active speakers. However, either of the modalities can potentially mislead audiovisual fusion by inducing unreliable or deceptive information. This paper outlines active speaker detection as a multi-objective learning problem to leverage best of each modalities using a novel self-attention, uncertainty-based multimodal fusion scheme. Results obtained show that the proposed multi-objective learning architecture outperforms traditional approaches in improving both mAP and AUC scores. We further demonstrate that our fusion strategy surpasses, in active speaker detection, other modality fusion methods reported in various disciplines. We finally show that the proposed method significantly improves the state-of-the-art on the AVA-ActiveSpeaker dataset.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sarenne Wallbridge|AUTHOR Sarenne Wallbridge]], [[Peter Bell|AUTHOR Peter Bell]], [[Catherine Lai|AUTHOR Catherine Lai]]
</p><p class="cpabstractcardaffiliationlist">University of Edinburgh, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2386–2390
</span></p></div>
<div class="cpabstractcardabstract"><p>People convey information extremely effectively through spoken interaction using multiple channels of information transmission: the lexical channel of //what// is said, and the non-lexical channel of //how// it is said. We propose studying human perception of spoken communication as a means to better understand how information is encoded across these channels, focusing on the question //What characteristics of communicative context affect listener’s expectations of speech?//. To investigate this, we present a novel behavioural task testing whether listeners can discriminate between the true utterance in a dialogue and utterances sampled from other contexts with the same lexical content. We characterize how perception — and subsequent discriminative capability — is affected by different degrees of additional contextual information across both the lexical and non-lexical channel of speech. Results demonstrate that people can effectively discriminate between different prosodic realisations, that non-lexical context is informative, and that this channel provides more salient information than the lexical channel, highlighting the importance of the non-lexical channel in spoken interaction.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Thilo Michael|AUTHOR Thilo Michael]], [[Gabriel Mittag|AUTHOR Gabriel Mittag]], [[Andreas Bütow|AUTHOR Andreas Bütow]], [[Sebastian Möller|AUTHOR Sebastian Möller]]
</p><p class="cpabstractcardaffiliationlist">Technische Universität Berlin, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2391–2395
</span></p></div>
<div class="cpabstractcardabstract"><p>Quality engineering of speech communication services in the full speech transmission band (0–20,000 Hz) is facilitated by the fullband E-model, a planning tool that predicts overall quality on the basis of parameters describing the setting of the service. We presented a first version of this model at Interspeech 2019, which has since then been standardized by the International Telecommunication Union in ITU-T Rec. G.107.2. Whereas that model was limited to predict the effects of speech codecs, random packet loss, and transmission delay, more realistic settings such as ambient background noise, bursty packet loss, as well as interactive conversational degradations could not be predicted. Based on the results of two new listening-only and conversational tests, we present an approach to extend the E-model to better predict these effects in the present paper. The results show that background noise effects at both sending and receiving side can be predicted well, whereas bursty packet loss predictions still have some limitations which result from the available database. Finally, approaches from conversational analysis help to better predict the effects of delay on conversational quality.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Christian Bergler|AUTHOR Christian Bergler]]^^1^^
, [[Manuel Schmitt|AUTHOR Manuel Schmitt]]^^1^^
, [[Andreas Maier|AUTHOR Andreas Maier]]^^1^^
, [[Helena Symonds|AUTHOR Helena Symonds]]^^2^^
, [[Paul Spong|AUTHOR Paul Spong]]^^2^^
, [[Steven R. Ness|AUTHOR Steven R. Ness]]^^3^^
, [[George Tzanetakis|AUTHOR George Tzanetakis]]^^3^^
, [[Elmar Nöth|AUTHOR Elmar Nöth]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^FAU Erlangen-Nürnberg, Germany; ^^2^^OrcaLab, Canada; ^^3^^University of Victoria, Canada</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2396–2400
</span></p></div>
<div class="cpabstractcardabstract"><p>Identification of animal-specific vocalization patterns is an imperative requirement to decode animal communication. In bioacoustics, passive acoustic recording setups are increasingly deployed to acquire large-scale datasets. Previous knowledge about established animal-specific call types is usually present due to historically conducted research. However, time- and human-resource constraints, combined with a lack of available machine-based approaches, only allow manual analysis of comparatively small data corpora and strongly distort the actual data representation and information value. Such data limitations cause restrictions in terms of identifying existing population-, group-, and individual-specific call types, sub-categories, as well as unseen vocalization patterns. Thus, machine learning forms the basis for animal-specific call type recognition, to facilitate more profound insights into communication. The current study is the first fusing task-specific neural networks to develop a fully automated, multi-stage, deep-learning-based framework, entitled ORCA-SLANG, performing semi-supervised call type identification in one of the largest animal-specific bioacoustic archives — the Orchive. Orca/noise segmentation, denoising, and subsequent feature learning provide robust representations for semi-supervised clustering/classification. This results in a machine-annotated call type data repository containing 235,369 unique calls.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Wim Boes|AUTHOR Wim Boes]], [[Hugo Van hamme|AUTHOR Hugo Van hamme]]
</p><p class="cpabstractcardaffiliationlist">KU Leuven, Belgium</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2401–2405
</span></p></div>
<div class="cpabstractcardabstract"><p>We study the merit of transfer learning for two sound recognition problems, i.e., audio tagging and sound event detection. Employing feature fusion, we adapt a baseline system utilizing only spectral acoustic inputs to also make use of pretrained auditory and visual features, extracted from networks built for different tasks and trained with external data.
We perform experiments with these modified models on an audiovisual multi-label data set, of which the training partition contains a large number of unlabeled samples and a smaller number of clips with weak annotations, indicating the clip-level presence of 10 sound categories without specifying the temporal boundaries of the active auditory events.
For clip-based audio tagging, this transfer learning method grants marked improvements. Addition of the visual modality on top of audio also proves to be advantageous in this context.
When it comes to generating transcriptions of audio recordings, the benefit of pretrained features depends on the requested temporal resolution: for coarse-grained sound event detection, their utility remains notable. But when more fine-grained predictions are required, performance gains are strongly reduced due to a mismatch between the problem at hand and the goals of the models from which the pretrained vectors were obtained.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Natalia Nessler|AUTHOR Natalia Nessler]]^^1^^
, [[Milos Cernak|AUTHOR Milos Cernak]]^^2^^
, [[Paolo Prandoni|AUTHOR Paolo Prandoni]]^^1^^
, [[Pablo Mainar|AUTHOR Pablo Mainar]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^EPFL, Switzerland; ^^2^^Logitech, Switzerland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2406–2410
</span></p></div>
<div class="cpabstractcardabstract"><p>In communication systems, it is crucial to estimate the perceived quality of audio and speech. The industrial standards for many years have been PESQ, 3QUEST, and POLQA, which are intrusive methods. This restricts the possibilities of using these metrics in real-world conditions, where we might not have access to the clean reference signal. In this work, we develop a new non-intrusive metric based on crowd-sourced data. We build a new speech dataset by combining publicly available speech, noises, and reverberations. Then we follow the ITU P.808 recommendation to label the dataset with mean opinion scores (MOS). Finally, we train a deep neural network to estimate the MOS from the speech data in a non-intrusive way. We propose two novelties in our work. First, we explore transfer learning by pre-training a model using a larger set of POLQA scores and finetuning with the smaller (and thus cheaper) human-labeled set. Secondly, we perform a subject-specific scaling in the MOS scores to adjust for their different subjective scales. Our model yields better accuracy than PESQ, POLQA, and other non-intrusive methods when evaluated on the independent VCTK test set. We also report misleading POLQA scores for reverberant speech.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Andreea-Maria Oncescu|AUTHOR Andreea-Maria Oncescu]]^^1^^
, [[A. Sophia Koepke|AUTHOR A. Sophia Koepke]]^^2^^
, [[João F. Henriques|AUTHOR João F. Henriques]]^^1^^
, [[Zeynep Akata|AUTHOR Zeynep Akata]]^^2^^
, [[Samuel Albanie|AUTHOR Samuel Albanie]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Oxford, UK; ^^2^^Universität Tübingen, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2411–2415
</span></p></div>
<div class="cpabstractcardabstract"><p>We consider the task of retrieving audio using free-form natural language queries. To study this problem, which has received limited attention in the existing literature, we introduce challenging new benchmarks for text-based audio retrieval using text annotations sourced from the AUDIOCAPS and CLOTHO datasets. We then employ these benchmarks to establish baselines for cross-modal audio retrieval, where we demonstrate the benefits of pre-training on diverse audio tasks. We hope that our benchmarks will inspire further research into cross-modal text-based audio retrieval with free-form text queries.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Chandan K.A. Reddy|AUTHOR Chandan K.A. Reddy]]^^1^^
, [[Harishchandra Dubey|AUTHOR Harishchandra Dubey]]^^1^^
, [[Kazuhito Koishida|AUTHOR Kazuhito Koishida]]^^1^^
, [[Arun Nair|AUTHOR Arun Nair]]^^2^^
, [[Vishak Gopal|AUTHOR Vishak Gopal]]^^1^^
, [[Ross Cutler|AUTHOR Ross Cutler]]^^1^^
, [[Sebastian Braun|AUTHOR Sebastian Braun]]^^1^^
, [[Hannes Gamper|AUTHOR Hannes Gamper]]^^1^^
, [[Robert Aichner|AUTHOR Robert Aichner]]^^1^^
, [[Sriram Srinivasan|AUTHOR Sriram Srinivasan]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Microsoft, USA; ^^2^^Johns Hopkins University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2796–2800
</span></p></div>
<div class="cpabstractcardabstract"><p>The Deep Noise Suppression (DNS) challenge was designed to unify the research efforts in the area of noise suppression targeted for human perception. We recently organized a DNS challenge special session at INTERSPEECH 2020 and ICASSP 2021. We open-sourced training and test datasets for the wideband scenario along with a subjective evaluation framework based on ITU-T standard P.808, which was used to evaluate participants of the challenge. Many researchers from academia and industry made significant contributions to push the field forward, yet even the best noise suppressor was far from achieving superior speech quality in challenging scenarios. In this version of the challenge organized at INTERSPEECH 2021, we expanded our training and test datasets to accommodate fullband scenarios and challenging test conditions. We used ITU-T P.835 to evaluate the challenge winners as it gives additional information about the quality of processed speech and residual noise. The two tracks in this challenge focused on real-time denoising for (i) wideband, and (ii) fullband scenarios. We also made available a reliable non-intrusive objective speech quality metric for wideband called DNSMOS for the participants to use during their development phase.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Andong Li|AUTHOR Andong Li]], [[Wenzhe Liu|AUTHOR Wenzhe Liu]], [[Xiaoxue Luo|AUTHOR Xiaoxue Luo]], [[Guochen Yu|AUTHOR Guochen Yu]], [[Chengshi Zheng|AUTHOR Chengshi Zheng]], [[Xiaodong Li|AUTHOR Xiaodong Li]]
</p><p class="cpabstractcardaffiliationlist">CAS, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2801–2805
</span></p></div>
<div class="cpabstractcardabstract"><p>Background noise and room reverberation are regarded as two major factors to degrade the subjective speech quality. In this paper, we propose an integrated framework to address simultaneous denoising and dereverberation under complicated scenario environments. It adopts a chain optimization strategy and designs four sub-stages accordingly. In the first two stages, we decouple the multi-task learning w.r.t. complex spectrum into magnitude and phase, and only implement noise and reverberation removal in the magnitude domain. Based on the estimated priors above, we further polish the spectrum in the third stage, where both magnitude and phase information are explicitly repaired with the residual learning. Due to the data mismatch and nonlinear effect of DNNs, the residual noise often exists in the DNN-processed spectrum. To resolve the problem, we adopt a light-weight algorithm as the post-processing module to capture and suppress the residual noise in the non-active regions. In the Interspeech 2021 Deep Noise Suppression (DNS) Challenge, our submitted system ranked top-1 for the real-time track in terms of Mean Opinion Score (MOS) with ITU-T P.835 framework.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ziyi Xu|AUTHOR Ziyi Xu]], [[Maximilian Strake|AUTHOR Maximilian Strake]], [[Tim Fingscheidt|AUTHOR Tim Fingscheidt]]
</p><p class="cpabstractcardaffiliationlist">Technische Universität Braunschweig, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2806–2810
</span></p></div>
<div class="cpabstractcardabstract"><p>Data-driven speech enhancement employing deep neural networks (DNNs) can provide state-of-the-art performance even in the presence of non-stationary noise. During the training process, most of the speech enhancement neural networks are trained in a fully supervised way with losses requiring noisy speech to be synthesized by clean speech and additive noise. However, in a real implementation, only the noisy speech mixture is available, which leads to the question, how such data could be advantageously employed in training. In this work, we propose an end-to-end non-intrusive PESQNet DNN which estimates perceptual evaluation of speech quality (PESQ) scores, allowing a reference-free loss for real data. As a further novelty, we combine the PESQNet loss with denoising and dereverberation loss terms, and train a complex mask-based fully convolutional recurrent neural network (FCRN) in a “weakly” supervised way, each training cycle employing some synthetic data, some real data, and again synthetic data to keep the PESQNet up-to-date. In a subjective listening test, our proposed framework outperforms the Interspeech 2021 Deep Noise Suppression (DNS) Challenge baseline overall by 0.09 MOS points and in particular by 0.45 background noise MOS points.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xiaohuai Le|AUTHOR Xiaohuai Le]], [[Hongsheng Chen|AUTHOR Hongsheng Chen]], [[Kai Chen|AUTHOR Kai Chen]], [[Jing Lu|AUTHOR Jing Lu]]
</p><p class="cpabstractcardaffiliationlist">Nanjing University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2811–2815
</span></p></div>
<div class="cpabstractcardabstract"><p>The dual-path RNN (DPRNN) was proposed to more effectively model extremely long sequences for speech separation in the time domain. By splitting long sequences to smaller chunks and applying intra-chunk and inter-chunk RNNs, the DPRNN reached promising performance in speech separation with a limited model size. In this paper, we combine the DPRNN module with Convolution Recurrent Network (CRN) and design a model called Dual-Path Convolution Recurrent Network (DPCRN) for speech enhancement in the time-frequency domain. We replace the RNNs in the CRN with DPRNN modules, where the intra-chunk RNNs are used to model the spectrum pattern in a single frame and the inter-chunk RNNs are used to model the dependence between consecutive frames. With only 0.8M parameters, the submitted DPCRN model achieves an overall mean opinion score (MOS) of 3.57 in the wide band scenario track of the Interspeech 2021 Deep Noise Suppression (DNS) challenge. Evaluations on some other test sets also show the efficacy of our model.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shubo Lv|AUTHOR Shubo Lv]], [[Yanxin Hu|AUTHOR Yanxin Hu]], [[Shimin Zhang|AUTHOR Shimin Zhang]], [[Lei Xie|AUTHOR Lei Xie]]
</p><p class="cpabstractcardaffiliationlist">Northwestern Polytechnical University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2816–2820
</span></p></div>
<div class="cpabstractcardabstract"><p>Deep complex convolution recurrent network (DCCRN), which extends CRN with complex structure, has achieved superior performance in MOS evaluation in Interspeech 2020 deep noise suppression challenge (DNS2020). This paper further extends DCCRN with the following significant revisions. We first extend the model to sub-band processing where the bands are split and merged by learnable neural network filters instead of engineered FIR filters, leading to a faster noise suppressor trained in an end-to-end manner. Then the LSTM is further substituted with a complex TF-LSTM to better model temporal dependencies along both time and frequency axes. Moreover, instead of simply concatenating the output of each encoder layer to the input of the corresponding decoder layer, we use convolution blocks to first aggregate essential information from the encoder output before feeding it to the decoder layers. We specifically formulate the decoder with an extra //a priori// SNR estimation module to maintain good speech quality while removing noise. Finally a post-processing module is adopted to further suppress the unnatural residual noise. The new model, named DCCRN+, has surpassed the original DCCRN as well as several competitive models in terms of PESQ and DNSMOS, and has achieved superior performance in the new Interspeech 2021 DNS challenge.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kanghao Zhang|AUTHOR Kanghao Zhang]], [[Shulin He|AUTHOR Shulin He]], [[Hao Li|AUTHOR Hao Li]], [[Xueliang Zhang|AUTHOR Xueliang Zhang]]
</p><p class="cpabstractcardaffiliationlist">Inner Mongolia University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2821–2825
</span></p></div>
<div class="cpabstractcardabstract"><p>In real acoustic environment, speech enhancement is an arduous task to improve the quality and intelligibility of speech interfered by background noise and reverberation. Over the past years, deep learning has shown great potential on speech enhancement. In this paper, we propose a novel real-time framework called DBNet which is a dual-branch structure with alternate interconnection. Each branch incorporates an encoder-decoder architecture with skip connections. The two branches are responsible for spectrum and waveform modeling, respectively. A bridge layer is adopted to exchange information between the two branches. Systematic evaluation and comparison show that the proposed system substantially outperforms related algorithms under very challenging environments. And in INTERSPEECH 2021 Deep Noise Suppression (DNS) challenge, the proposed system ranks the top 8 in real-time track 1 in terms of the Mean Opinion Score (MOS) of the ITU-T P.835 framework.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xu Zhang|AUTHOR Xu Zhang]]^^1^^
, [[Xinlei Ren|AUTHOR Xinlei Ren]]^^1^^
, [[Xiguang Zheng|AUTHOR Xiguang Zheng]]^^1^^
, [[Lianwu Chen|AUTHOR Lianwu Chen]]^^2^^
, [[Chen Zhang|AUTHOR Chen Zhang]]^^1^^
, [[Liang Guo|AUTHOR Liang Guo]]^^1^^
, [[Bing Yu|AUTHOR Bing Yu]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Kuaishou Technology, China; ^^2^^Kuaishou Technology, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2826–2830
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech enhancement approaches based on deep neural network have outperformed the traditional signal processing methods. This paper presents a low-delay speech enhancement method that employs a new perceptually motivated training target and loss function. The proposed approach can achieve similar speech enhancement performance compared to the state-of-the-art approaches, but with significantly less latency and computational complexities. Judged by the MOS tests conducted by the INTERSPEECH 2021 Deep Noise Suppression Challenge organizer, the proposed method is ranked the 2^^nd^^ place for Background Noise MOS, and the 6^^th^^ place for overall MOS.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Koen Oostermeijer|AUTHOR Koen Oostermeijer]], [[Qing Wang|AUTHOR Qing Wang]], [[Jun Du|AUTHOR Jun Du]]
</p><p class="cpabstractcardaffiliationlist">USTC, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2831–2835
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we describe a novel speech enhancement transformer architecture. The model uses local causal self-attention, which makes it lightweight and therefore particularly well-suited for real-time speech enhancement in computation resource-limited environments. In addition, we provide several ablation studies that focus on different parts of the model and the loss function to figure out which modifications yield best improvements. Using this knowledge, we propose a final version of our architecture, that we sent in to the INTERSPEECH 2021 DNS Challenge, where it achieved competitive results, despite using only 2% of the maximally allowed computation. Furthermore, we performed experiments to compare it with with LSTM and CNN models, that had 127% and 257% more parameters, respectively. Despite this difference in model size, we achieved significant improvements on the considered speech quality and intelligibility measures.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Manuel Giollo|AUTHOR Manuel Giollo]]^^1^^
, [[Deniz Gunceler|AUTHOR Deniz Gunceler]]^^2^^
, [[Yulan Liu|AUTHOR Yulan Liu]]^^3^^
, [[Daniel Willett|AUTHOR Daniel Willett]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Amazon, Italy; ^^2^^Amazon, Germany; ^^3^^Amazon, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2416–2420
</span></p></div>
<div class="cpabstractcardabstract"><p>Bootstrapping speech recognition on limited data resources has been an area of active research for long. The recent transition to all-neural models and end-to-end (E2E) training brought along particular challenges as these models are known to be data hungry, but also came with opportunities around language-agnostic representations derived from multilingual data as well as shared word-piece output representations across languages that share script and roots. We investigate here the effectiveness of different strategies to bootstrap an RNN-Transducer (RNN-T) based automatic speech recognition (ASR) system in the low resource regime, while exploiting the abundant resources available in other languages as well as the synthetic audio from a text-to-speech (TTS) engine. Our experiments demonstrate that transfer learning from a multilingual model, using a post-ASR text-to-text mapping and synthetic audio deliver additive improvements, allowing us to bootstrap a model for a new language with a fraction of the data that would otherwise be needed. The best system achieved a 46% relative word error rate (WER) reduction compared to the monolingual baseline, among which 25% relative WER improvement is attributed to the post-ASR text-to-text mappings and the TTS synthetic data.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xinjian Li|AUTHOR Xinjian Li]], [[Juncheng Li|AUTHOR Juncheng Li]], [[Florian Metze|AUTHOR Florian Metze]], [[Alan W. Black|AUTHOR Alan W. Black]]
</p><p class="cpabstractcardaffiliationlist">Carnegie Mellon University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2461–2465
</span></p></div>
<div class="cpabstractcardabstract"><p>There is growing interest in building phone recognition systems for low-resource languages as the majority of languages do not have any writing systems. Phone recognition systems proposed so far typically derive their phone inventory from the training languages, therefore the derived inventory could only cover a limited number of phones existing in the world. It fails to recognize unseen phones in low-resource or zero-resource languages. In this work, we tackle this problem with a hierarchical model, in which we explicitly model three different entities in a hierarchical manner: phoneme, phone, and phonological articulatory attributes. In particular, we decompose phones into articulatory attributes and compute the phone embedding from the attribute embedding. The model would first predict the distribution over the phones using their embeddings, next, the language-independent phones are aggregated to the language-dependent phonemes and then optimized by the CTC loss. This compositional approach enables us to recognize phones even they do not appear in the training set. We evaluate our model on 47 unseen languages and find the proposed model outperforms baselines by 13.1% PER.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shammur Absar Chowdhury|AUTHOR Shammur Absar Chowdhury]], [[Amir Hussein|AUTHOR Amir Hussein]], [[Ahmed Abdelali|AUTHOR Ahmed Abdelali]], [[Ahmed Ali|AUTHOR Ahmed Ali]]
</p><p class="cpabstractcardaffiliationlist">HBKU, Qatar</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2466–2470
</span></p></div>
<div class="cpabstractcardabstract"><p>With the advent of globalization, there is an increasing demand for multilingual automatic speech recognition (ASR), handling language and dialectal variation of spoken content. Recent studies show its efficacy over monolingual systems. In this study, we design a large multilingual end-to-end ASR using self-attention based conformer architecture. We trained the system using Arabic (Ar), English (En) and French (Fr) languages. We evaluate the system performance handling: (i) monolingual (Ar, En and Fr); (ii) multi-dialectal (Modern Standard Arabic, along with dialectal variation such as Egyptian and Moroccan); (iii) code-switching — cross-lingual (Ar-En/Fr) and dialectal (MSA-Egyptian dialect) test cases, and compare with current state-of-the-art systems. Furthermore, we investigate the influence of different embedding/character representations including character vs word-piece; shared vs distinct input symbol per language. Our findings demonstrate the strength of such a model by outperforming state-of-the-art monolingual dialectal Arabic and code-switching Arabic ASR.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Brian Yan|AUTHOR Brian Yan]], [[Siddharth Dalmia|AUTHOR Siddharth Dalmia]], [[David R. Mortensen|AUTHOR David R. Mortensen]], [[Florian Metze|AUTHOR Florian Metze]], [[Shinji Watanabe|AUTHOR Shinji Watanabe]]
</p><p class="cpabstractcardaffiliationlist">Carnegie Mellon University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2471–2475
</span></p></div>
<div class="cpabstractcardabstract"><p>Building language-universal speech recognition systems entails producing phonological units of spoken sound that can be shared across languages. While speech annotations at the language-specific phoneme or surface levels are readily available, annotations at a universal phone level are relatively rare and difficult to produce. In this work, we present a general framework to derive phone-level supervision from only phonemic transcriptions and phone-to-phoneme mappings with //learnable// weights represented using weighted finite-state transducers, which we call //differentiable allophone graphs//. By training multilingually, we build a universal phone-based speech recognition model with interpretable probabilistic phone-to-phoneme mappings for each language. These phone-based systems with learned allophone graphs can be used by linguists to document new languages, build phone-based lexicons that capture rich pronunciation variations, and re-evaluate the allophone mappings of seen language. We demonstrate the aforementioned benefits of our proposed framework with a system trained on 7 diverse languages.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ngoc-Quan Pham|AUTHOR Ngoc-Quan Pham]], [[Tuan-Nam Nguyen|AUTHOR Tuan-Nam Nguyen]], [[Sebastian Stüker|AUTHOR Sebastian Stüker]], [[Alex Waibel|AUTHOR Alex Waibel]]
</p><p class="cpabstractcardaffiliationlist">KIT, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2421–2425
</span></p></div>
<div class="cpabstractcardabstract"><p>End-to-end multilingual speech recognition involves using a single model training on a compositional speech corpus including many languages, resulting in a single neural network to handle transcribing different languages. Due to the fact that each language in the training data has different characteristics, the shared network may struggle to optimize for all various languages simultaneously. In this paper we propose a novel multilingual architecture that targets the core operation in neural networks: linear transformation functions. The key idea of the method is to assign fast weight matrices for each language by decomposing each weight matrix into a shared component and a language dependent component. The latter is then factorized into vectors using rank-1 assumptions to reduce the number of parameters per language. This efficient factorization scheme is proved to be effective in two multilingual settings with 7 and 27 languages, reducing the word error rates by 26% and 27% rel. for two popular architectures LSTM and Transformer, respectively.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Alexis Conneau|AUTHOR Alexis Conneau]], [[Alexei Baevski|AUTHOR Alexei Baevski]], [[Ronan Collobert|AUTHOR Ronan Collobert]], [[Abdelrahman Mohamed|AUTHOR Abdelrahman Mohamed]], [[Michael Auli|AUTHOR Michael Auli]]
</p><p class="cpabstractcardaffiliationlist">Facebook, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2426–2430
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents XLSR which learns cross-lingual speech representations by pretraining a single model from the raw waveform of speech in multiple languages. We build on wav2vec 2.0 which is trained by solving a contrastive task over masked latent speech representations and jointly learns a quantization of the latents shared across languages. The resulting model is fine-tuned on labeled data and experiments show that cross-lingual pretraining significantly outperforms monolingual pretraining. On the CommonVoice benchmark, XLSR shows a relative phoneme error rate reduction of 72% compared to the best known results. On BABEL, our approach improves word error rate by 16% relative compared to a comparable system. Our approach enables a single multilingual speech recognition model which is competitive to strong individual models. We hope to catalyze research in low-resource speech understanding by releasing XLSR-53, a large model pretrained in 53 languages.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tomoaki Hayakawa|AUTHOR Tomoaki Hayakawa]]^^1^^
, [[Chee Siang Leow|AUTHOR Chee Siang Leow]]^^1^^
, [[Akio Kobayashi|AUTHOR Akio Kobayashi]]^^2^^
, [[Takehito Utsuro|AUTHOR Takehito Utsuro]]^^3^^
, [[Hiromitsu Nishizaki|AUTHOR Hiromitsu Nishizaki]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Yamanashi, Japan; ^^2^^NTUT, Japan; ^^3^^University of Tsukuba, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2431–2435
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper proposes a method to improve the performance of multilingual automatic speech recognition (ASR) systems through language- and speaker-independent feature transformation in a framework of end-to-end (E2E) ASR. Specifically, we propose a multi-task training method that combines a language recognizer and a speaker recognizer with an E2E ASR system based on connectionist temporal classification (CTC) loss functions. We introduce the language and speaker recognition sub-tasks into the E2E ASR network and introduce a gradient reversal layer (GRL) for each sub-task to achieve language and speaker-independent feature transformation. The evaluation results of the proposed method in the multilingual ASR system in six sorts of languages show that the proposed method achieves higher accuracy than the ASR models for each language by introducing multi-tasking and GRL.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Krishna D. N.|AUTHOR Krishna D. N.]], [[Pinyi Wang|AUTHOR Pinyi Wang]], [[Bruno Bozza|AUTHOR Bruno Bozza]]
</p><p class="cpabstractcardaffiliationlist">Freshworks, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2436–2440
</span></p></div>
<div class="cpabstractcardabstract"><p>Recently, self-supervised pre-training has shown significant improvements in many areas of machine learning, including speech and NLP. The self-supervised models are trained on a large amount of unlabelled data to learn higher-level representations for downstream tasks. In this work, we investigate the effectiveness of many self-supervised pre-trained models for the low-resource speech recognition task. We adopt pre-trained wav2vec2.0 [1] models for the speech recognition task for three Indian languages Telugu, Tamil, and Gujarati. We examine both English and multilingual pre-trained models. Our experiments show that fine-tuning the multilingual pre-trained model obtains an average relative reduction in WER of 2.88% compared to the previous state-of-the-art supervised method. We carefully analyze the generalization capability of multilingual pre-trained models for both seen and unseen languages. We also show that fine-tuning with only 25% of the training data gives competitive WER to the previous best methods.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mari Ganesh Kumar|AUTHOR Mari Ganesh Kumar]], [[Jom Kuriakose|AUTHOR Jom Kuriakose]], [[Anand Thyagachandran|AUTHOR Anand Thyagachandran]], [[Arun Kumar A.|AUTHOR Arun Kumar A.]], [[Ashish Seth|AUTHOR Ashish Seth]], [[Lodagala V.S.V. Durga Prasad|AUTHOR Lodagala V.S.V. Durga Prasad]], [[Saish Jaiswal|AUTHOR Saish Jaiswal]], [[Anusha Prakash|AUTHOR Anusha Prakash]], [[Hema A. Murthy|AUTHOR Hema A. Murthy]]
</p><p class="cpabstractcardaffiliationlist">IIT Madras, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2441–2445
</span></p></div>
<div class="cpabstractcardabstract"><p>India is home to multiple languages, and training automatic speech recognition (ASR) systems is challenging. Over time, each language has adopted words from other languages, such as English, leading to code-mixing. Most Indian languages also have their own unique scripts, which poses a major limitation in training multilingual and code-switching ASR systems.
Inspired by results in text-to-speech synthesis, in this paper, we use an in-house rule-based phoneme-level common label set (CLS) representation to train multilingual and code-switching ASR for Indian languages. We propose two end-to-end (E2E) ASR systems. In the first system, the E2E model is trained on the CLS representation, and we use a novel data-driven backend to recover the native language script. In the second system, we propose a modification to the E2E model, wherein the CLS representation and the native language characters are used simultaneously for training. We show our results on the multilingual and code-switching (MUCS) ASR challenge 2021. Our best results achieve ~6% and 5% improvement in word error rate over the baseline system for the multilingual and code-switching tasks, respectively, on the challenge development data.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Anuj Diwan|AUTHOR Anuj Diwan]]^^1^^
, [[Rakesh Vaideeswaran|AUTHOR Rakesh Vaideeswaran]]^^2^^
, [[Sanket Shah|AUTHOR Sanket Shah]]^^3^^
, [[Ankita Singh|AUTHOR Ankita Singh]]^^1^^
, [[Srinivasa Raghavan|AUTHOR Srinivasa Raghavan]]^^4^^
, [[Shreya Khare|AUTHOR Shreya Khare]]^^5^^
, [[Vinit Unni|AUTHOR Vinit Unni]]^^1^^
, [[Saurabh Vyas|AUTHOR Saurabh Vyas]]^^4^^
, [[Akash Rajpuria|AUTHOR Akash Rajpuria]]^^4^^
, [[Chiranjeevi Yarra|AUTHOR Chiranjeevi Yarra]]^^6^^
, [[Ashish Mittal|AUTHOR Ashish Mittal]]^^5^^
, [[Prasanta Kumar Ghosh|AUTHOR Prasanta Kumar Ghosh]]^^2^^
, [[Preethi Jyothi|AUTHOR Preethi Jyothi]]^^1^^
, [[Kalika Bali|AUTHOR Kalika Bali]]^^3^^
, [[Vivek Seshadri|AUTHOR Vivek Seshadri]]^^3^^
, [[Sunayana Sitaram|AUTHOR Sunayana Sitaram]]^^3^^
, [[Samarth Bharadwaj|AUTHOR Samarth Bharadwaj]]^^5^^
, [[Jai Nanavati|AUTHOR Jai Nanavati]]^^4^^
, [[Raoul Nanavati|AUTHOR Raoul Nanavati]]^^4^^
, [[Karthik Sankaranarayanan|AUTHOR Karthik Sankaranarayanan]]^^5^^
</p><p class="cpabstractcardaffiliationlist">^^1^^IIT Bombay, India; ^^2^^Indian Institute of Science, India; ^^3^^Microsoft, India; ^^4^^Navana Tech, India; ^^5^^IBM, India; ^^6^^IIIT Hyderabad, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2446–2450
<a href="./IS2021/MEDIA/1339" class="externallinkbutton" target="_blank">{{$:/causal/ZIP Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>Recently, there is an increasing interest in multilingual automatic speech recognition (ASR) where a speech recognition system caters to multiple low resource languages by taking advantage of low amounts of labelled corpora in multiple languages. With multilingualism becoming common in today’s world, there has been increasing interest in code-switching ASR as well. In code-switching, multiple languages are freely interchanged within a single sentence or between sentences. The success of low-resource multilingual and code-switching (MUCS) ASR often depends on the variety of languages in terms of their acoustics, linguistic characteristics as well as the amount of data available and how these are carefully considered in building the ASR system. In this MUCS 2021 challenge, we would like to focus on building MUCS ASR systems through two different subtasks related to a total of seven Indian languages, namely Hindi, Marathi, Odia, Tamil, Telugu, Gujarati and Bengali. For this purpose, we provide a total of ~600 hours of transcribed speech data, comprising train and test sets, in these languages, including two code-switched language pairs, Hindi-English and Bengali-English. We also provide baseline recipes for both the subtasks with 30.73% and 32.45% word error rate on the MUCS test sets, respectively.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Genta Indra Winata|AUTHOR Genta Indra Winata]]^^1^^
, [[Guangsen Wang|AUTHOR Guangsen Wang]]^^2^^
, [[Caiming Xiong|AUTHOR Caiming Xiong]]^^3^^
, [[Steven Hoi|AUTHOR Steven Hoi]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^HKUST, China; ^^2^^Salesforce, Singapore; ^^3^^Salesforce, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2451–2455
</span></p></div>
<div class="cpabstractcardabstract"><p>One crucial challenge of real-world multilingual speech recognition is the long-tailed distribution problem, where some resource-rich languages like English have abundant training data, but a long tail of low-resource languages have varying amounts of limited training data. To overcome the long-tail problem, in this paper, we propose Adapt-and-Adjust (A2), a transformer-based multi-task learning framework for end-to-end multilingual speech recognition. The A2 framework overcomes the long-tail problem via three techniques: (1) exploiting a pretrained multilingual language model to improve the performance of low-resource languages; (2) proposing dual adapters consisting of both language-specific and language-agnostic adaptation with minimal additional parameters; and (3) overcoming the class imbalance, either by imposing class priors in the loss during training or adjusting the logits of the softmax output during inference. Extensive experiments on the CommonVoice corpus show that A2 significantly outperforms conventional approaches.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hardik Sailor|AUTHOR Hardik Sailor]], [[Kiran Praveen T.|AUTHOR Kiran Praveen T.]], [[Vikas Agrawal|AUTHOR Vikas Agrawal]], [[Abhinav Jain|AUTHOR Abhinav Jain]], [[Abhishek Pandey|AUTHOR Abhishek Pandey]]
</p><p class="cpabstractcardaffiliationlist">Samsung, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2456–2460
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper describes SRI-B’s end-to-end Automated Speech Recognition (ASR) system proposed for the subtask-1 on multilingual ASR challenges for Indian languages. Our end-to-end (E2E) ASR model is based on the transformer architecture trained by jointly minimizing Connectionist Temporal Classification (CTC) & Cross-Entropy (CE) losses. A conventional multilingual model which is trained by pooling data from multiple languages helps in terms of generalization, but it comes at the expense of performance degradation compared to their monolingual counterparts. In our experiments, a multilingual model is trained by conditioning the input features using a language-specific embedding vector. These language-specific embedding vectors are obtained by training a language classifier using an attention-based transformer architecture, and then considering its bottleneck features as language identification (LID) embeddings. We further adapt the multilingual system with language specific data to reduce the degradation on specific languages. We propose a novel hypothesis elimination strategy based on LID scores and length-normalized probabilities that optimally select the model from the pool of available models. The experimental results show that the proposed multilingual training and hypothesis elimination strategy gives an average 3.02% of relative word error recognition (WER) improvement for the blind set over the challenge hybrid ASR baseline system.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Vincent P. Martin|AUTHOR Vincent P. Martin]]^^1^^
, [[Jean-Luc Rouas|AUTHOR Jean-Luc Rouas]]^^1^^
, [[Florian Boyer|AUTHOR Florian Boyer]]^^1^^
, [[Pierre Philip|AUTHOR Pierre Philip]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^LaBRI (UMR 5800), France; ^^2^^SANPSY (USR 3413), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2476–2480
</span></p></div>
<div class="cpabstractcardabstract"><p>Chronic sleepiness, and specifically Excessive Daytime Sleepiness (EDS), impacts everyday life and increases the risks of accidents. Compared with traditional measures (EEG), the detection of objective EDS through voice benefits from its ease to be implemented in ecological conditions and to be sober in terms of data processing and costs. Contrary to previous works focusing on short-term sleepiness estimation, this study focuses on long-term sleepiness detection through voice. Using the Multiple Sleep Latency Test corpus, this study introduces new features based on Automatic Speech Recognition systems errors, in an attempt to replace hand-labeled reading mistakes features. We also introduce a selection feature pipeline inspired by clinical validation practices allowing ASR features to perform on par with the state-of-the-art systems on short-term sleepiness detection through voice (73.2% of UAR). Moreover, we give insights on the decision process during classification and the specificity of the system regarding the threshold delimiting the two sleepiness classes, Sleepy and Non-Sleepy.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hector A. Cordourier Maruri|AUTHOR Hector A. Cordourier Maruri]]^^1^^
, [[Sinem Aslan|AUTHOR Sinem Aslan]]^^2^^
, [[Georg Stemmer|AUTHOR Georg Stemmer]]^^3^^
, [[Nese Alyuz|AUTHOR Nese Alyuz]]^^2^^
, [[Lama Nachman|AUTHOR Lama Nachman]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Intel, Mexico; ^^2^^Intel, USA; ^^3^^Intel, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2521–2525
</span></p></div>
<div class="cpabstractcardabstract"><p>People participating in remote meetings in open spaces might choose to speak with a restrained voice due to concerns around privacy or disturbing others. These contextual voice changes might impact the quality of communications. To investigate how people adjust their voices in certain situations, we performed an exploratory data collection study with 41 participants in 18 simulated remote meetings. A scenario was provided to the participants to naturally trigger contextual voice changes. We collected multi-modal data from the participants including in-situ labels for the voice quality. We implemented content analysis, t-test, and linear regression to analyze the multi-modal data. Results showed that the participants primarily preferred to use soft voice over whispered voice to avoid being overheard during the meetings. Speaking softly was often sufficient to successfully conceal private conversations, while using whispered voice had only a negative impact on the intelligibility. Overall, we found that participants perceived soft voice as less pleasant to listen to than normal voice during meetings and discovered factors related to speaker demographics and meeting context that impacted the concealing behavior (soft or whispered). For our future research, we will expand to different scenarios and consider the impact of audio feedback on voice concealing.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Nadee Seneviratne|AUTHOR Nadee Seneviratne]], [[Carol Espy-Wilson|AUTHOR Carol Espy-Wilson]]
</p><p class="cpabstractcardaffiliationlist">University of Maryland at College Park, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2526–2530
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech based depression classification has gained immense popularity over the recent years. However, most of the classification studies have focused on binary classification to distinguish depressed subjects from non-depressed subjects. In this paper, we formulate the depression classification task as a severity level classification problem to provide more granularity to the classification outcomes. We use articulatory coordination features (ACFs) developed to capture the changes of neuromotor coordination that happens as a result of psychomotor slowing, a necessary feature of Major Depressive Disorder. The ACFs derived from the vocal tract variables (TVs) are used to train a dilated Convolutional Neural Network based depression classification model to obtain segment-level predictions. Then, we propose a Recurrent Neural Network based approach to obtain session-level predictions from segment-level predictions. We show that strengths of the segment-wise classifier are amplified when a session-wise classifier is trained on embeddings obtained from it. The model trained on ACFs derived from TVs show relative improvement of 27.47% in Unweighted Average Recall (UAR) at the session-level classification task, compared to the ACFs derived from Mel Frequency Cepstral Coefficients (MFCCs).</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jon Gillick|AUTHOR Jon Gillick]], [[Wesley Deng|AUTHOR Wesley Deng]], [[Kimiko Ryokai|AUTHOR Kimiko Ryokai]], [[David Bamman|AUTHOR David Bamman]]
</p><p class="cpabstractcardaffiliationlist">University of California at Berkeley, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2481–2485
</span></p></div>
<div class="cpabstractcardabstract"><p>We investigate the problem of automatically identifying and extracting laughter from audio files in noisy environments. We conduct an empirical evaluation of several machine learning models using audio data of varying sound quality, finding that while previously published methods work relatively well in controlled environments, performance drops precipitously in real-world settings with background noise. In the process, we contribute a new dataset of laughter annotations on top of the existing AudioSet corpus, with precise segmentations for the start and end points of each laugh, and we present a new approach to laughter detection that performs comparatively well in uncontrolled environments. We discuss the utility of our approach as well as the importance of understanding the variability of model performance in a range of real-world testing environments.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mizuki Nagano|AUTHOR Mizuki Nagano]], [[Yusuke Ijima|AUTHOR Yusuke Ijima]], [[Sadao Hiroya|AUTHOR Sadao Hiroya]]
</p><p class="cpabstractcardaffiliationlist">NTT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2486–2490
</span></p></div>
<div class="cpabstractcardabstract"><p>The characteristics of a speaker’s voice can affect the perceived impression or behavior of the listener. Previous studies of consumer behavior have shown that this can be well explained by the emotion-mediated behavior model. However, few studies of the emotion-mediated behavior model have used advertising speech. In this paper, we examine whether the stimulus-organism-response theory using emotional state can explain willingness to buy from advertising speech stimulus. The subjects listened to speech with modified speech features (mean F0, speech rate, spectral tilt, or standard deviation of F0) and rated their willingness to buy the products advertised in the speech and their own perceived emotions (pleasure, arousal, dominance). We found that the emotions partially mediate the influence of speech features on the willingness to buy. These results will be useful for developing a method of speech synthesis to increase people’s willingness to buy.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Huda Alsofyani|AUTHOR Huda Alsofyani]], [[Alessandro Vinciarelli|AUTHOR Alessandro Vinciarelli]]
</p><p class="cpabstractcardaffiliationlist">University of Glasgow, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2491–2495
</span></p></div>
<div class="cpabstractcardabstract"><p>In Attachment Theory, children that have a positive perception of their parents are said to be secure, while the others are said to be insecure. Once adult, unless identified and supported early enough, insecure children have higher chances to experience major issues (e.g., suicidal tendencies and antisocial behavior). For this reason, this article proposes a speech-based automatic approach for the recognition of attachment in school-age children. The experiments are based on stacked RNNs and have involved 104 children of age between 5 and 9. The accuracy is up to 68.9% (F1 59.6%), meaning that the approach makes the right decision two times out of three, on average. To the best of our knowledge, this is the first work aimed at inferring attachment from speech in school-age children.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Nujud Aloshban|AUTHOR Nujud Aloshban]]^^1^^
, [[Anna Esposito|AUTHOR Anna Esposito]]^^2^^
, [[Alessandro Vinciarelli|AUTHOR Alessandro Vinciarelli]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Glasgow, UK; ^^2^^Università della Campania “Luigi Vanvitelli”, Italy</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2496–2500
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech-based depression detection has attracted significant attention over the last years. A debated problem is whether it is better to use language (what people say), paralanguage (how they say it) or a combination of the two. This article addresses the question through the analysis of a Gated Multimodal Unit trained to weight modalities according to how effectively they account for the condition of a speaker (depressed or non-depressed). The experiments involved 29 individuals diagnosed with depression and 30 non-depressed participants. Besides an accuracy of 83.0% (F1 score 80.0%), the results show that the Gated Multimodal Unit tends to give more weight to paralanguage. However, the relative contribution of language tends to be higher, to a statistically significant extent, in the case of non-depressed speakers.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Aniruddha Tammewar|AUTHOR Aniruddha Tammewar]], [[Alessandra Cervone|AUTHOR Alessandra Cervone]], [[Giuseppe Riccardi|AUTHOR Giuseppe Riccardi]]
</p><p class="cpabstractcardaffiliationlist">Università di Trento, Italy</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2501–2505
</span></p></div>
<div class="cpabstractcardabstract"><p>Personal Narratives (PN) — recollections of facts, events, and thoughts from one’s own experience — are often used in everyday conversations. So far, PNs have mainly been explored for tasks such as valence prediction or emotion classification (e.g. //happy, sad//). However, these tasks might overlook more fine-grained information that could prove to be relevant for understanding PNs. In this work, we propose a novel task for Narrative Understanding: Emotion Carrier Recognition (ECR). Emotion carriers, the text fragments that carry the emotions of the narrator (e.g. //loss of a grandpa, high school reunion//), provide a fine-grained description of the emotion state. We explore the task of ECR in a corpus of PNs manually annotated with emotion carriers and investigate different machine learning models for the task. We propose evaluation strategies for ECR including metrics that can be appropriate for different tasks.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Scott Condron|AUTHOR Scott Condron]], [[Georgia Clarke|AUTHOR Georgia Clarke]], [[Anita Klementiev|AUTHOR Anita Klementiev]], [[Daniela Morse-Kopp|AUTHOR Daniela Morse-Kopp]], [[Jack Parry|AUTHOR Jack Parry]], [[Dimitri Palaz|AUTHOR Dimitri Palaz]]
</p><p class="cpabstractcardaffiliationlist">Speech Graphics, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2506–2510
</span></p></div>
<div class="cpabstractcardabstract"><p>Non-verbal vocalisations (NVVs) such as laughter are an important part of communication in social interactions and carry important information about a speaker’s state or intention. There remains no clear definition of NVVs and there is no clearly defined protocol for transcribing or detecting NVVs. As such, the standard approach has been to focus on detecting a single NVV such as laughter and map all other NVVs to an “other” class. In this paper we hypothesise that for this task such an approach hurts performance, and that giving more information by using more classes is beneficial. To address this, we present studies using sequence-to-sequence deep neural networks where we include multiple NVV classes rather than mapping them to “other” and allow more than one label per sample. We show that this approach yields better performance than the standard approach on NVV detection. We also evaluate the same model on laughter detection using frame-based and utterance-based metrics and show that the proposed approach yields state-of-the-art performance on the ICSI corpus.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Cong Cai|AUTHOR Cong Cai]], [[Mingyue Niu|AUTHOR Mingyue Niu]], [[Bin Liu|AUTHOR Bin Liu]], [[Jianhua Tao|AUTHOR Jianhua Tao]], [[Xuefei Liu|AUTHOR Xuefei Liu]]
</p><p class="cpabstractcardaffiliationlist">CAS, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2511–2515
</span></p></div>
<div class="cpabstractcardabstract"><p>Depression is a psychiatric disorder and has many adverse effects on our society. Some studies have shown that speech signals are closely related to emotion and stress, and many speech-based automatic depression detection methods have been proposed. However, previous work is based on spectrogram or hand-crafted features, which may lose some useful information related to depression patterns. And there is no evidence that the filter bank designed from perceptual evidence is optimal for depression detection. In order to learn the more discriminative feature representation related to depression, we propose an end-to-end time-domain channel attention network (TDCA-Net) for depression detection. The TDCA-Net directly models time-domain speech signals based on dilated convolution block, which can increase the receptive field exponentially and aggregate multiscale contextual information associated with depression. Besides, we employ the efficient channel attention (ECA) module to model dependencies of channels and improve the sensitivity of the model to information related to depression. Experimental results on the AVEC2013 and the AVEC2014 datasets illustrate the effectiveness of our method.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Catarina Botelho|AUTHOR Catarina Botelho]]^^1^^
, [[Alberto Abad|AUTHOR Alberto Abad]]^^1^^
, [[Tanja Schultz|AUTHOR Tanja Schultz]]^^2^^
, [[Isabel Trancoso|AUTHOR Isabel Trancoso]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^INESC-ID Lisboa, Portugal; ^^2^^Universität Bremen, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2516–2520
</span></p></div>
<div class="cpabstractcardabstract"><p>Obstructive sleep apnea (OSA) affects almost one billion people worldwide and limits peoples’ quality of life substantially. Furthermore, it is responsible for significant morbidity and mortality associated with hypertension, cardiovascular diseases, work and traffic accidents. Thus, the early detection of OSA can save lives. In our previous work we used speech as biomarker for automatic OSA detection. More recently, we leveraged the fact that OSA patients have anatomical and functional abnormalities of the upper airway and an altered craniofacial morphology, and therefore explore information from facial images for OSA detection. In this work, we propose to combine speech and facial image information to detect OSA from YouTube vlogs. This in-the-wild data poses an inexpensive alternative to standard data collected for medical applications, which is often scarce, imbalanced and costly to acquire. Besides speech and facial images, we propose to include //visual speech// as a third modality, inspired by the emerging field of silent computational paralinguistics. We hypothesize that embeddings trained from lip reading integrate information on the craniofacial structure, on speech articulation and breathing patterns, thus containing relevant cues for OSA detection. Fusion of the three modalities achieves an accuracy of 82.5% at the speaker level.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ho-Gyeong Kim|AUTHOR Ho-Gyeong Kim]]^^1^^
, [[Min-Joong Lee|AUTHOR Min-Joong Lee]]^^1^^
, [[Hoshik Lee|AUTHOR Hoshik Lee]]^^1^^
, [[Tae Gyoon Kang|AUTHOR Tae Gyoon Kang]]^^1^^
, [[Jihyun Lee|AUTHOR Jihyun Lee]]^^1^^
, [[Eunho Yang|AUTHOR Eunho Yang]]^^2^^
, [[Sung Ju Hwang|AUTHOR Sung Ju Hwang]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Samsung, Korea; ^^2^^KAIST, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2531–2535
</span></p></div>
<div class="cpabstractcardabstract"><p>Knowledge Distillation basically matches predictive distributions of student and teacher networks to improve performance in an environment with model capacity and/or data constraints. However, it is well known that predictive distribution of neural networks not only tends to be overly confident, but also cannot directly model various factors properly that contribute to uncertainty. Recently, deep learning studies based on uncertainty have been successful in various fields, especially in several computer vision tasks. The prediction probability can implicitly show the information about how confident the network is, however, we can explicitly utilize confidence of the output by modeling the uncertainty of the network. In this paper, we propose a novel knowledge distillation method for automatic speech recognition that directly models and transfers the uncertainty inherent in data observation such as speaker variations or confusing pronunciations. Moreover, we investigate an effect of transferring knowledge more effectively using multiple teachers learned from various domains. Evaluated on WSJ which is the standard benchmark dataset with limited instances, the proposed knowledge distillation method achieves significant improvements over student baseline models.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jasha Droppo|AUTHOR Jasha Droppo]], [[Oguz Elibol|AUTHOR Oguz Elibol]]
</p><p class="cpabstractcardaffiliationlist">Amazon, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2576–2580
</span></p></div>
<div class="cpabstractcardabstract"><p>There is a recent trend in machine learning to increase model quality by growing models to sizes previously thought to be unreasonable. Recent work has shown that autoregressive generative models with cross-entropy objective functions exhibit smooth power-law relationships, or scaling laws, that predict model quality from model size, training set size, and the available compute budget. These scaling laws allow one to choose nearly optimal hyper-parameters given constraints on available training data, model parameter count, or training computation budget. In this paper, we demonstrate that acoustic models trained with an auto-predictive coding loss behave as if they are subject to similar scaling laws. We extend previous work to jointly predict loss due to model size, to training set size, and to the inherent “irreducible loss” of the task. We find that the scaling laws accurately match model performance over two orders of magnitude in both model size and training set size, and make predictions about the limits of model performance.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jayadev Billa|AUTHOR Jayadev Billa]]
</p><p class="cpabstractcardaffiliationlist">University of Southern California, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2581–2585
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper investigates approaches to improving automatic speech recognition (ASR) performance in a target language using resources in other languages. In particular, we assume that we have untranscribed speech in a different language and a well trained ASR system in yet another language. Concretely, we structure this as a multi-task problem, where the primary task is acoustic model training in the target language, and the secondary task is also acoustic model training but using a synthetic data set. The synthetic data set consists of pseudo transcripts generated by decoding the untranscribed speech using a well trained ASR model. We compare and contrast this with using labeled data sets, i.e. matched audio and human-generated transcripts, and show that our approach compares favorably. In most cases, we see performance improvements, and in some cases, depending on the selection of languages and nature of speech data, performance exceeds that of systems using labeled data sets as the secondary task. When extended to larger sets of data, we show that the mismatched data approach performs similarly to in-language semi-supervised training (SST) when the secondary task pseudo transcripts are generated by ASR models trained on large diverse data sets.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Andrea Fasoli|AUTHOR Andrea Fasoli]], [[Chia-Yu Chen|AUTHOR Chia-Yu Chen]], [[Mauricio Serrano|AUTHOR Mauricio Serrano]], [[Xiao Sun|AUTHOR Xiao Sun]], [[Naigang Wang|AUTHOR Naigang Wang]], [[Swagath Venkataramani|AUTHOR Swagath Venkataramani]], [[George Saon|AUTHOR George Saon]], [[Xiaodong Cui|AUTHOR Xiaodong Cui]], [[Brian Kingsbury|AUTHOR Brian Kingsbury]], [[Wei Zhang|AUTHOR Wei Zhang]], [[Zoltán Tüske|AUTHOR Zoltán Tüske]], [[Kailash Gopalakrishnan|AUTHOR Kailash Gopalakrishnan]]
</p><p class="cpabstractcardaffiliationlist">IBM, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2586–2590
</span></p></div>
<div class="cpabstractcardabstract"><p>We investigate the impact of aggressive low-precision representations of weights and activations in two families of large LSTM-based architectures for Automatic Speech Recognition (ASR): hybrid Deep Bidirectional LSTM - Hidden Markov Models (DBLSTM-HMMs) and Recurrent Neural Network - Transducers (RNN-Ts). Using a 4-bit integer representation, a naïve quantization approach applied to the LSTM portion of these models results in significant Word Error Rate (WER) degradation. On the other hand, we show that minimal accuracy loss is achievable with an appropriate choice of quantizers and initializations. In particular, we customize quantization schemes depending on the local properties of the network, improving recognition performance while limiting computational time. We demonstrate our solution on the Switchboard (SWB) and CallHome (CH) test sets of the NIST Hub5-2000 evaluation. DBLSTM-HMMs trained with 300 or 2000 hours of SWB data achieves <0.5% and <1% average WER degradation, respectively. On the more challenging RNN-T models, our quantization strategy limits degradation in 4-bit inference to 1.3%.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ryo Masumura|AUTHOR Ryo Masumura]], [[Daiki Okamura|AUTHOR Daiki Okamura]], [[Naoki Makishima|AUTHOR Naoki Makishima]], [[Mana Ihori|AUTHOR Mana Ihori]], [[Akihiko Takashima|AUTHOR Akihiko Takashima]], [[Tomohiro Tanaka|AUTHOR Tomohiro Tanaka]], [[Shota Orihashi|AUTHOR Shota Orihashi]]
</p><p class="cpabstractcardaffiliationlist">NTT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2591–2595
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we present a novel modeling method for single-channel multi-talker overlapped automatic speech recognition (ASR) systems. Fully neural network based end-to-end models have dramatically improved the performance of multi-taker overlapped ASR tasks. One promising approach for end-to-end modeling is autoregressive modeling with serialized output training in which transcriptions of multiple speakers are recursively generated one after another. This enables us to naturally capture relationships between speakers. However, the conventional modeling method cannot explicitly take into account the speaker attributes of individual utterances such as gender and age information. In fact, the performance deteriorates when each speaker is the same gender or is close in age. To address this problem, we propose unified autoregressive modeling for joint end-to-end multi-talker overlapped ASR and speaker attribute estimation. Our key idea is to handle gender and age estimation tasks within the unified autoregressive modeling. In the proposed method, transformer-based autoregressive model recursively generates not only textual tokens but also attribute tokens of each speaker. This enables us to effectively utilize speaker attributes for improving multi-talker overlapped ASR. Experiments on Japanese multi-talker overlapped ASR tasks demonstrate the effectiveness of the proposed method.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zhong Meng|AUTHOR Zhong Meng]]^^1^^
, [[Yu Wu|AUTHOR Yu Wu]]^^2^^
, [[Naoyuki Kanda|AUTHOR Naoyuki Kanda]]^^1^^
, [[Liang Lu|AUTHOR Liang Lu]]^^1^^
, [[Xie Chen|AUTHOR Xie Chen]]^^1^^
, [[Guoli Ye|AUTHOR Guoli Ye]]^^1^^
, [[Eric Sun|AUTHOR Eric Sun]]^^1^^
, [[Jinyu Li|AUTHOR Jinyu Li]]^^1^^
, [[Yifan Gong|AUTHOR Yifan Gong]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Microsoft, USA; ^^2^^Microsoft, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2596–2600
</span></p></div>
<div class="cpabstractcardabstract"><p>Integrating external language models (LMs) into end-to-end (E2E) models remains a challenging task for domain-adaptive speech recognition. Recently, internal language model estimation (ILME)-based LM fusion has shown significant word error rate (WER) reduction from Shallow Fusion by subtracting a weighted internal LM score from an interpolation of E2E model and external LM scores during beam search. However, on different test sets, the optimal LM interpolation weights vary over a wide range and have to be tuned extensively on well-matched validation sets. In this work, we perform LM fusion in the minimum WER (MWER) training of an E2E model to obviate the need for LM weights tuning during inference. Besides MWER training with Shallow Fusion (MWER-SF), we propose a novel MWER training with ILME (MWER-ILME) where the ILME-based fusion is conducted to generate N-best hypotheses and their posteriors. Additional gradient is induced when internal LM is engaged in MWER-ILME loss computation. During inference, LM weights pre-determined in MWER training enable robust LM integrations on test sets from different domains. Experimented with 30K-hour trained transformer transducers, MWER-ILME achieves on average 8.8% and 5.8% relative WER reductions from MWER and MWER-SF training, respectively, on 6 different test sets.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Dongcheng Jiang|AUTHOR Dongcheng Jiang]], [[Chao Zhang|AUTHOR Chao Zhang]], [[Philip C. Woodland|AUTHOR Philip C. Woodland]]
</p><p class="cpabstractcardaffiliationlist">University of Cambridge, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2601–2605
</span></p></div>
<div class="cpabstractcardabstract"><p>Frame selection in automatic speech recognition (ASR) systems can potentially improve the trade-off between speed and accuracy relative to fixed low frame rate methods. In this paper, a sequence training approach based on minimum error and reinforcement learning is proposed for a hybrid ASR system to operate at a variable frame rate, and uses a frame selection controller to predict the number of frames to skip before taking the next inference action. The controller is integrated into the acoustic model in a multi-task training framework as an additional regression task and the controller output can be used for distribution characterisation during reinforcement learning exploration. The reinforcement learning objective minimises a combined measure of the phone error and average frame rate. ASR experiments using British English multi-genre broadcast (MGB3) data show that the proposed approach achieved a smaller frame rate than using a fixed 1/3 low frame rate method and was able to reduce the word error rate relative to both fixed low frame rate and full frame rate systems.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jonathan Macoskey|AUTHOR Jonathan Macoskey]], [[Grant P. Strimel|AUTHOR Grant P. Strimel]], [[Ariya Rastrow|AUTHOR Ariya Rastrow]]
</p><p class="cpabstractcardaffiliationlist">Amazon, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2536–2540
</span></p></div>
<div class="cpabstractcardabstract"><p>As more speech processing applications execute locally on edge devices, a set of resource constraints must be considered. In this work we address one of these constraints, namely over-the-network data budgets for transferring models from server to device. We present neural update approaches for release of subsequent speech model generations abiding by a data budget. We detail two architecture-agnostic methods which learn compact representations for transmission to devices. We experimentally validate our techniques with results on two tasks (automatic speech recognition and spoken language understanding) on open source data sets by demonstrating when applied in succession, our budgeted updates outperform comparable model compression baselines by significant margins.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shucong Zhang|AUTHOR Shucong Zhang]], [[Erfan Loweimi|AUTHOR Erfan Loweimi]], [[Peter Bell|AUTHOR Peter Bell]], [[Steve Renals|AUTHOR Steve Renals]]
</p><p class="cpabstractcardaffiliationlist">University of Edinburgh, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2541–2545
</span></p></div>
<div class="cpabstractcardabstract"><p>Recently, Transformer based models have shown competitive automatic speech recognition (ASR) performance. One key factor in the success of these models is the multi-head attention mechanism. However, for trained models, we have previously observed that many attention matrices are close to diagonal, indicating the redundancy of the corresponding attention heads. We have also found that some architectures with reduced numbers of attention heads have better performance. Since the search for the best structure is time prohibitive, we propose to randomly remove attention heads during training and keep all attention heads at test time, thus the final model is an ensemble of models with different architectures. The proposed method also forces each head independently learn the most useful patterns. We apply the proposed method to train Transformer based and Convolution-augmented Transformer (Conformer) based ASR models. Our method gives consistent performance gains over strong baselines on the Wall Street Journal, AISHELL, Switchboard and AMI datasets. To the best of our knowledge, we have achieved state-of-the-art end-to-end Transformer based model performance on Switchboard and AMI.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jiabin Xue|AUTHOR Jiabin Xue]], [[Tieran Zheng|AUTHOR Tieran Zheng]], [[Jiqing Han|AUTHOR Jiqing Han]]
</p><p class="cpabstractcardaffiliationlist">Harbin Institute of Technology, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2546–2550
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper regards multilingual automatic speech recognition model training as a multi-objective problem because learning different languages may conflict, necessitating a trade-off. Most previous works on multilingual ASR model training mainly used data sampling to balance the performance of multiple languages but ignore the conflicts between different languages, resulting in an imbalance in multiple languages. The language-specific parameters of the multilingual ASR model are updated by the single language gradients while the update of the shared parameter is jointly determined by the gradient of every language on its shared parameter, namely shared gradient. Therefore, we propose a model-agnostic fast adaptive (MAFA) multi-objective balancing algorithm to balance multiple languages by avoiding the mutual interferences between their shared gradients. In the algorithm, based on the decrease in the training loss, we dynamically normalize the shared gradient magnitudes representing the speed of learning to balance the learning speed. To evenly learn multiple languages, the language with the worst performance is selected, and a balancing gradient nearest to the normalized gradient of the selected language and positively correlated with other normalized ones is obtained to eliminate the mutual interferences. The model trained by MAFA outperforms the baseline model on the Common Voice corpus.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Heng-Jui Chang|AUTHOR Heng-Jui Chang]], [[Hung-yi Lee|AUTHOR Hung-yi Lee]], [[Lin-shan Lee|AUTHOR Lin-shan Lee]]
</p><p class="cpabstractcardaffiliationlist">National Taiwan University, Taiwan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2551–2555
</span></p></div>
<div class="cpabstractcardabstract"><p>Automatic speech recognition (ASR) technologies today are primarily optimized for given datasets; thus, any changes in the application environment (e.g., acoustic conditions or topic domains) may inevitably degrade the performance. We can collect new data describing the new environment and fine-tune the system, but this naturally leads to higher error rates for the earlier datasets, referred to as catastrophic forgetting. The concept of lifelong learning (LLL) aiming to enable a machine to sequentially learn new tasks from new datasets describing the changing real world without forgetting the previously learned knowledge is thus brought to attention. This paper reports, to our knowledge, the first effort to extensively consider and analyze the use of various approaches of LLL in end-to-end (E2E) ASR, including proposing novel methods in saving data for past domains to mitigate the catastrophic forgetting problem. An overall relative reduction of 28.7% in WER was achieved compared to the fine-tuning baseline when sequentially learning on three very different benchmark corpora. This can be the first step toward the highly desired ASR technologies capable of synchronizing with the continuously changing real world.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Isabel Leal|AUTHOR Isabel Leal]], [[Neeraj Gaur|AUTHOR Neeraj Gaur]], [[Parisa Haghani|AUTHOR Parisa Haghani]], [[Brian Farris|AUTHOR Brian Farris]], [[Pedro J. Moreno|AUTHOR Pedro J. Moreno]], [[Manasa Prasad|AUTHOR Manasa Prasad]], [[Bhuvana Ramabhadran|AUTHOR Bhuvana Ramabhadran]], [[Yun Zhu|AUTHOR Yun Zhu]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2556–2560
</span></p></div>
<div class="cpabstractcardabstract"><p>With a large population of the world speaking more than one language, multilingual automatic speech recognition (ASR) has gained popularity in the recent years. While lower resource languages can benefit from quality improvements in a multilingual ASR system, including unrelated or higher resource languages in the mix often results in performance degradation. In this paper, we propose distilling from multiple teachers, with each language using its best teacher during training, to tackle this problem. We introduce //self-adaptive// distillation, a novel technique for automatic weighting of the distillation loss that uses the student/ teachers confidences. We analyze the effectiveness of the proposed techniques on two real world use-cases and show that the performance of the multilingual ASR models can be improved by up to 11.5% without any increase in model capacity. Furthermore, we show that when our methods are combined with increase in model capacity, we can achieve quality gains of up to 20.7%.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hainan Xu|AUTHOR Hainan Xu]], [[Kartik Audhkhasi|AUTHOR Kartik Audhkhasi]], [[Yinghui Huang|AUTHOR Yinghui Huang]], [[Jesse Emond|AUTHOR Jesse Emond]], [[Bhuvana Ramabhadran|AUTHOR Bhuvana Ramabhadran]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2561–2565
</span></p></div>
<div class="cpabstractcardabstract"><p>This work focuses on improving subword segmentation algorithms for end-to-end speech recognition models, and makes two major contributions. Firstly, we propose a novel word segmentation algorithm. The algorithm uses the same vocabulary generated by a regular wordpiece model, is easily extensible and supports a variety of regularization techniques in the segmentation space, and outperforms the regular wordpiece model. Secondly, we propose a number of novel regularization methods that introduce randomness into the tokenization algorithm, which bring further improvements in speech recognition accuracy, with relative gains up to 8.4% compared to the original wordpiece model. We analyze the methods and show that our proposed methods are equivalent to a sophisticated form of label smoothing, which performs smoothing based on the prefix structures of subword units. A noteworthy discovery from this work is that creating artificial misspellings in words results in the best performance among all the methods, which could inspire future research for strategies in this area.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Peidong Wang|AUTHOR Peidong Wang]], [[Tara N. Sainath|AUTHOR Tara N. Sainath]], [[Ron J. Weiss|AUTHOR Ron J. Weiss]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2566–2570
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose a multitask training method for attention-based end-to-end speech recognition models. We regularize the decoder in a listen, attend, and spell model by multitask training it on both audio-text and text-only data. Trained on the 100-hour subset of LibriSpeech, the proposed method, without requiring an additional language model, leads to an 11% relative performance improvement over the baseline and approaches the performance of language model shallow fusion on the test-clean evaluation set. We observe a similar trend on the whole 960-hour LibriSpeech training set. Analyses of different types of errors and sample output sentences demonstrate that the proposed method can incorporate language level information, suggesting its effectiveness in real-world applications.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xianzhao Chen|AUTHOR Xianzhao Chen]]^^1^^
, [[Hao Ni|AUTHOR Hao Ni]]^^2^^
, [[Yi He|AUTHOR Yi He]]^^2^^
, [[Kang Wang|AUTHOR Kang Wang]]^^2^^
, [[Zejun Ma|AUTHOR Zejun Ma]]^^2^^
, [[Zongxia Xie|AUTHOR Zongxia Xie]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Tianjin University, China; ^^2^^ByteDance, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2571–2575
</span></p></div>
<div class="cpabstractcardabstract"><p>Word timings, which mark the start and end times of each word in ASR results, play an important part in many applications, such as computer assisted language learning. To date, end-to-end (E2E) systems outperform conventional DNN-HMM hybrid systems in ASR accuracy but have challenges to obtain accurate word timings. In this paper, we propose a two-pass method to estimate word timings under an E2E-based LAS modeling framework, which is completely free of using the DNN-HMM ASR system. Specifically, we first employ the LAS system to obtain word-piece transcripts of the input audio, we then compute forced-alignments with a frame-level-based word-piece classifier. In order to make the classifier yield accurate word-piece timing results, we propose a novel objective function to learn the classifier, utilizing the spike timings of the connectionist temporal classification (CTC) model. On Librispeech data, our E2E-based LAS system achieves 2.8%/7.0% WERs, while its word timing (start/end) accuracy are 99.0%/95.3% and 98.6%/93.7% on test-clean and test-other two test sets respectively. Compared with a DNN-HMM hybrid ASR system (here, TDNN), the LAS system is better in ASR performance, and the generated word timings are close to what the TDNN ASR system presents.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Constantijn Kaland|AUTHOR Constantijn Kaland]]^^1^^
, [[Matthew Gordon|AUTHOR Matthew Gordon]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universität zu Köln, Germany; ^^2^^University of California at Santa Barbara, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2606–2610
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper reports a perception experiment on Papuan Malay, an Eastern Indonesian language for which phrase prosody is largely underresearched. While phrase-final f0 movements are the most prominent ones in this language, it remains to be seen to what extent they signal phrase boundaries (demarcating) or whether they contribute to the prosodic prominence of words in that position (highlighting). Crucially, it is unclear whether these functions can actually be teased apart. In an attempt to investigate this issue, a word identification experiment was carried out using manipulated and original f0 word contours in phrase-medial and phrase-final positions. Results indicate that Papuan Malay listeners recognize words faster in phrase-final position, although the shape of the f0 movement did not significantly affect response latencies. The outcomes are discussed in a typological perspective, with particular attention to Trade Malay languages.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Christer Gobl|AUTHOR Christer Gobl]]
</p><p class="cpabstractcardaffiliationlist">Trinity College Dublin, Ireland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2651–2655
</span></p></div>
<div class="cpabstractcardabstract"><p>Many of the commonly used voice source models are based on piecewise elementary functions defined in the time domain. The discrete-time implementation of such models generally causes aliasing distortion, which make them less useful for certain applications. This paper presents a method which eliminates this distortion. The key component of the proposed method is the frequency domain description of the source model. By deploying the Laplace transform and phasor arithmetic, closed-form expressions of the source model spectrum can be derived. This facilitates the calculation of the spectrum directly from the model parameters, which in turn makes it possible to obtain the ideal discrete spectrum of the model given the sampling frequency used. This discrete spectrum is entirely free of aliasing distortion, and the inverse discrete Fourier transform is used to compute the sampled glottal flow pulse. The proposed method was applied to the widely used LF model, and the complete Laplace transform of the model is presented. Also included are closed-form expressions of the amplitude spectrum and the phase spectrum for the calculation of the LF model spectrum.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Michael Wagner|AUTHOR Michael Wagner]], [[Alvaro Iturralde Zurita|AUTHOR Alvaro Iturralde Zurita]], [[Sijia Zhang|AUTHOR Sijia Zhang]]
</p><p class="cpabstractcardaffiliationlist">McGill University, Canada</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2656–2660
</span></p></div>
<div class="cpabstractcardabstract"><p>Humans appear to be wired to perceive acoustic events rhythmically. English speakers, for example, tend to perceive alternating short and long sounds as a series of binary groups with a final beat (iambs), and alternating soft and loud sounds as a series of trochees. This generalization, often called the ‘Iambic-trochaic Law’ (ITL), although viewed as an auditory universal by some, has been argued to be shaped by language experience. Earlier work on the ITL had a crucial limitation, in that it did not tease apart the percepts of grouping and prominence, which the notions of iamb and trochee inherently confound. We explore how intensity and duration relate to percepts of prominence and grouping in six languages (English, French, German, Japanese, Mandarin, and Spanish). The results show that the ITL is not universal, and that cue interpretation is shaped by language experience. However, there are also invariances: Duration appears relatively robust across languages as a cue to prominence (longer syllables are perceived as stressed), and intensity for grouping (louder syllables are perceived as initial). The results show the beginnings of a rhythmic typology based on how the dimensions of grouping and prominence are cued.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Benazir Mumtaz|AUTHOR Benazir Mumtaz]], [[Massimiliano Canzi|AUTHOR Massimiliano Canzi]], [[Miriam Butt|AUTHOR Miriam Butt]]
</p><p class="cpabstractcardaffiliationlist">Universität Konstanz, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2661–2665
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper studies the prosody of case clitics in Urdu, for which various different claims exist in the literature. We conducted a production experiment and controlled for effects potentially arising from the phonetics of the case clitics, the syntactic function they express and clausal position. We find that case clitics are incorporated into the prosodic phrase of the noun and that they become part of the overall LH contour found on accentual phrases in Urdu/Hindi. We also find some differences across case type and position which we tie to information structural effects.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Brynhildur Stefansdottir|AUTHOR Brynhildur Stefansdottir]], [[Francesco Burroni|AUTHOR Francesco Burroni]], [[Sam Tilsen|AUTHOR Sam Tilsen]]
</p><p class="cpabstractcardaffiliationlist">Cornell University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2666–2670
</span></p></div>
<div class="cpabstractcardabstract"><p>Icelandic voiced fricatives frequently reduce in connected speech. However, systematic investigations of the phenomenon from acoustic and articulatory perspectives are lacking. To further the understanding of this lenition process, we present electromagnetic articulography and acoustic data from four speakers concerning the intervocalic realization of the dental and velar fricatives. The results show that lenition is mostly gradient, but some speakers and places of articulation exhibit two distinct modes suggesting a categorical distinction. Moreover, in some tokens, the fricative constriction is absent from the articulatory trajectories. Finally, the relation between lenition and speech rate, style, and stress is also subject to speaker- and gesture-specific effects. We conclude by evaluating how our findings challenge the common assumptions, made in the literature, that lenition is a change in gestural target or a perceptually driven phenomenon.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Khia A. Johnson|AUTHOR Khia A. Johnson]]
</p><p class="cpabstractcardaffiliationlist">University of British Columbia, Canada</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2671–2675
</span></p></div>
<div class="cpabstractcardabstract"><p>While crosslinguistic influence is widespread in bilingual speech production, it is less clear which aspects of representation are shared across languages, if any. Most prior work examines phonetically distinct yet phonologically similar sounds, for which phonetic convergence suggests a cross-language link within individuals [1]. Convergence is harder to assess when sounds are already similar, as with English and Cantonese initial long-lag stops. Here, the articulatory uniformity framework [2, 3, 4] is leveraged to assess whether bilinguals share an underlying laryngeal feature across languages, and describe the nature of cross-language links. Using the SpiCE corpus of spontaneous Cantonese-English bilingual speech [5], this paper asks whether Cantonese-English bilinguals exhibit uniform voice-onset time for long-lag stops within and across languages. Results indicate moderate patterns of uniformity within-language — replicating prior work [2, 6] — and weaker patterns across languages. The analysis, however, raises many questions, as correlations were generally lower compared to prior work, and talkers did not adhere to expected ordinal relationships by place of articulation. Talkers also retained clear differences for /t/ and /k/, despite expectations of similarity. Yet at the same time, more of the overall variation seems to derive from individual-specific differences. While many questions remain, the uniformity framework shows promise.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Anna Bothe Jespersen|AUTHOR Anna Bothe Jespersen]]^^1^^
, [[Pavel Šturm|AUTHOR Pavel Šturm]]^^2^^
, [[Míša Hejná|AUTHOR Míša Hejná]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Aarhus University, Denmark; ^^2^^Charles University, Czechia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2611–2615
</span></p></div>
<div class="cpabstractcardabstract"><p>Most of our knowledge of Danish f0 variation and intonation is based on the work of Grønnum and colleagues, who developed an a-phonological model in which a series of repeated “default” contours are superpositioned onto an overarching f0 slope. The current paper tests a range of predictions stemming from this model, most importantly the adequacy of analysing f0 modulations as a string of repeated contours differing in range but not in shape. To facilitate comparison with earlier work in the area, our material is based on read speech, 45 speakers of Jutland Danish participated in the experiment. Analyses of f0 in sentences of differing complexity supplied little evidence in favour of the existence of default contours. Instead, our acoustic data revealed an array of f0 shapes associated with various prosodic anchor points, which are influenced in both range and shape by positional context and the presence or absence of focus.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Adrien Méli|AUTHOR Adrien Méli]]^^1^^
, [[Nicolas Ballier|AUTHOR Nicolas Ballier]]^^1^^
, [[Achille Falaise|AUTHOR Achille Falaise]]^^2^^
, [[Alice Henderson|AUTHOR Alice Henderson]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^CLILLAC-ARP (EA 3967), France; ^^2^^LLF (UMR 7110), France; ^^3^^Lidilem (EA 609), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2616–2620
</span></p></div>
<div class="cpabstractcardabstract"><p>This article describes an experiment in paratone detection based on a spoken corpus of English for Academic Purposes (EAP) recently automatically re-annotated with prosodic information. The Momel and INTSINT annotations were carried out using SPPAS. The EIIDA corpus was chosen as it offered long uninterrupted stretches of speech of academic presentations. We describe the clustering method adopted for automatic detection, contrasting a supervised and an unsupervised method of paratone boundary detection. We showcase the relevance of the annotation scheme followed for this corpus and contribute to the investigation of the phonostyle of lecture delivery. We discuss the relevance of clustering methods applied to the labels of the pitch targets for the analysis of paratones.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Branislav Gerazov|AUTHOR Branislav Gerazov]]^^1^^
, [[Michael Wagner|AUTHOR Michael Wagner]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^UKiM, Macedonia; ^^2^^McGill University, Canada</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2621–2625
</span></p></div>
<div class="cpabstractcardabstract"><p>The labelling of speech corpora is a laborious and time-consuming process. The ProsoBeast Annotation Tool seeks to ease and accelerate this process by providing an interactive 2D representation of the prosodic landscape of the data, in which contours are distributed based on their similarity. This interactive map allows the user to inspect and label the utterances. The tool integrates several state-of-the-art methods for dimensionality reduction and feature embedding, including variational autoencoders. The user can use these to find a good representation for their data. In addition, as most of these methods are stochastic, each can be used to generate an unlimited number of different prosodic maps. The web app then allows the user to seamlessly switch between these alternative representations in the annotation process. Experiments with a sample prosodically rich dataset have shown that the tool manages to find good representations of varied data and is helpful both for annotation and label correction. The tool is released as free software for use by the community.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Trang Tran|AUTHOR Trang Tran]]^^1^^
, [[Mari Ostendorf|AUTHOR Mari Ostendorf]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Southern California, USA; ^^2^^University of Washington, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2626–2630
</span></p></div>
<div class="cpabstractcardabstract"><p>This work explores constituency parsing on automatically recognized transcripts of conversational speech. The neural parser is based on a sentence encoder that leverages word vectors contextualized with prosodic features, jointly learning prosodic feature extraction with parsing. We assess the utility of the prosody in parsing on imperfect transcripts, i.e. transcripts with automatic speech recognition (ASR) errors, by applying the parser in an N-best reranking framework. In experiments on Switchboard, we obtain 13–15% of the oracle N-best gain relative to parsing the 1-best ASR output, with insignificant impact on word recognition error rate. Prosody provides a significant part of the gain, and analyses suggest that it leads to more grammatical utterances via recovering function words.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Roger Cheng-yen Liu|AUTHOR Roger Cheng-yen Liu]], [[Feng-fan Hsieh|AUTHOR Feng-fan Hsieh]], [[Yueh-chin Chang|AUTHOR Yueh-chin Chang]]
</p><p class="cpabstractcardaffiliationlist">National Tsing Hua University, Taiwan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2631–2635
</span></p></div>
<div class="cpabstractcardabstract"><p>This article is an acoustic study on the two types of neutral tone in Taiwanese Southern Min (TSM). Recording materials included a set of verb-clitic constructions with different preceding tones and clitics. Pitch contours in different conditions were compared using Smoothing Spline ANOVA. Our results confirmed that Type 1 neutral tone (NT1) has a low pitch target and that Type 2 neutral tone (NT2) is contextually dependent. Whether NT1 or NT2 is chosen has been treated as the lexical idiosyncrasy of the clitics in question, with idiolectal and dialectal variations. However, we found in this study that the onsets have a bearing on determining the type of neutral tone: the more sonorous the onset, the more possible it is for the clitic to be in NT2. In sum, the two distinct types of neutral tones in TSM not only are unusual among the neutral tones in Sinitic languages, but they also offer novel data for the consonant-tone interaction.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mária Gósy|AUTHOR Mária Gósy]]^^1^^
, [[Kálmán Abari|AUTHOR Kálmán Abari]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^ELKH, Hungary; ^^2^^University of Debrecen, Hungary</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2636–2640
</span></p></div>
<div class="cpabstractcardabstract"><p>The mental lexicon comprises the representations of various words either in a morphologically decomposed form, or in a conceptually non-decomposed form. The durations of mono-morphemic and multimorphemic words are assumed to contain information on the routes of their lexical access.
The durations of Hungarian nouns of various lengths produced spontaneously by 10 young and 10 elderly speakers (with an age difference of 55 years between the groups) were measured. Findings showed significant differences depending on the words’ complexity and on age. The nouns both with and without suffixes were significantly longer in old than in young speakers. The age-related durational differences were more pronounced in monomorphemic than in multimorphemic nouns. As the number of syllables in the nouns increased, old speakers produced increasingly longer simple nouns (stems) than young speakers did.
We suggest that multimorphemic nouns are accessed decompositionally in spontaneous utterances when the stem activation is followed by the activation of the suffixes. The specific storage and the corresponding lexical access of the morphemes explain the longer durations of the inflected nouns.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ho-hsien Pan|AUTHOR Ho-hsien Pan]], [[Shao-ren Lyu|AUTHOR Shao-ren Lyu]]
</p><p class="cpabstractcardaffiliationlist">NYCU, Taiwan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2641–2645
</span></p></div>
<div class="cpabstractcardabstract"><p>The multifaced changes of Taiwan Min Nan (TMN) checked sandhi tones, S3 and S5 were investigated as well as the checked base tones, B3 and B5. Simultaneous EGG data, CQ_H and acoustic data, including duration, f0 offset at 80% vowel interval, and spectral tilt H1^^*^^-A3^^*^^ from forty male and female speakers above 40 and under 30 years of age were analyzed. Though different measures progress at different paces, in general, as the coda stops [p, t, k, ʔ] from full stop closure, to energy damping and finally to complete deletion, vowel duration lengthening, f0 offset lowering, and more modal phonation were observed. Gender effects were found on f0 offset and CQ_H offset. The pace of progress is more advanced for base tone B5 with glottal coda stops. After coda deletion, the contexts conditioning the anticipatory co-articulation were removed and vowel and tone characteristics were modified to be similar to those found in open syllables.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Moritz Jakob|AUTHOR Moritz Jakob]]^^1^^
, [[Bettina Braun|AUTHOR Bettina Braun]]^^1^^
, [[Katharina Zahner-Ritter|AUTHOR Katharina Zahner-Ritter]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universität Konstanz, Germany; ^^2^^Universität Trier, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2646–2650
</span></p></div>
<div class="cpabstractcardabstract"><p>Various studies on the perception of vocally expressed emotions have shown that recognition rates are higher if speaker and listener belong to the same cultural or linguistic group. This so-called //in-group advantage// is commonly attributed to prosodic differences in the expression of emotion across groups. Evidence comes mostly from using cross-linguistic and/or cross-cultural study designs. Previous research suggests that varieties of German differ in their use of prosody and can be discriminated based on prosodic features alone. In this paper, we tested whether emotion recognition rates differ across varieties of German: Listeners from three dialectal areas (Hamburg, Vienna, Zurich) identified emotions on semantically neutral sentences (choosing between anger, happiness, relief, surprise or “other”), spoken by actors from the three regions. Correctness rates show that emotions are recognized better if speakers and listeners are native speakers of the same variety. However, further analyses suggest that the in-group advantage does not surface consistently across individual emotions. To explain these results, the prosodic realization of the sentences was tested for interactions between emotion and variety. Here, intensity seemed to differ most across varieties and emotions. Importantly, we show that the in-group advantage extends from cultural groups to dialectal groups of a language.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Aswin Sivaraman|AUTHOR Aswin Sivaraman]], [[Sunwoo Kim|AUTHOR Sunwoo Kim]], [[Minje Kim|AUTHOR Minje Kim]]
</p><p class="cpabstractcardaffiliationlist">Indiana University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2676–2680
</span></p></div>
<div class="cpabstractcardabstract"><p>Training personalized speech enhancement models is innately a no-shot learning problem due to privacy constraints and limited access to noise-free speech from the target user. If there is an abundance of unlabeled noisy speech from the test-time user, one may train a personalized speech enhancement model using self-supervised learning. One straightforward approach to model personalization is to use the target speaker’s noisy recordings as pseudo-sources. Then, a pseudo denoising model learns to remove injected training noises and recover the pseudo-sources. However, this approach is volatile as it depends on the quality of the pseudo-sources, which may be too noisy. To remedy this, we propose a data purification step that refines the self-supervised approach. We first train an SNR predictor model to estimate the frame-by-frame SNR of the pseudo-sources. Then, we convert the predictor’s estimates into weights that adjust the pseudo-sources’ frame-by-frame contribution towards training the personalized model. We empirically show that the proposed data purification step improves the usability of the speaker-specific noisy data in the context of personalized speech enhancement. Our approach may be seen as privacy-preserving as it does not rely on any clean speech recordings or speaker embeddings.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Feng Dang|AUTHOR Feng Dang]], [[Pengyuan Zhang|AUTHOR Pengyuan Zhang]], [[Hangting Chen|AUTHOR Hangting Chen]]
</p><p class="cpabstractcardaffiliationlist">CAS, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2721–2725
</span></p></div>
<div class="cpabstractcardabstract"><p>Complex-domain models have achieved promising results for speech enhancement (SE) tasks. Some complex-domain models consider only time-frequency (T-F) domain constraints and do not take advantage of the information at the time-domain waveform level. Some complex-domain models consider only time-domain constraints and do not take into account T-F domain constraints that have rich harmonic structure information. Indeed some complex-domain models consider both time-domain and T-F domain constraints but only use the simple mean square loss as time-frequency-domain constraints. This paper proposes a complex-domain-based speech enhancement method that integrates time-domain constraints and T-F domain constraints into a unified framework using a Generative Adversarial Network (GAN). The proposed framework captures information at the time-domain waveform level features while paying attention to the harmonic structure by time-domain and T-F domain constraints. We conducted experiments on the Voice Bank + DEMAND dataset to evaluate the proposed method. Experimental results show that the proposed method improves the PESQ score by 0.09 and the STOI score by 1% over the strong baseline deep complex convolution recurrent network (DCCRN) and outperforms the state-of-the-art GAN-based SE systems.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xudong Zhang|AUTHOR Xudong Zhang]]^^1^^
, [[Liang Zhao|AUTHOR Liang Zhao]]^^2^^
, [[Feng Gu|AUTHOR Feng Gu]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^CUNY Graduate Center, USA; ^^2^^CUNY Lehman College, USA; ^^3^^CUNY CSI, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2726–2730
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech enhancement is one of the effective approaches in improving speech quality. Neural network models have been widely used in speech enhancement, such as recurrent neural networks (RNNs), long short-term memory networks (LSTMs), and generative adversarial networks (GANs). However, some of them either handle the speech noise removal tasks in the spectral domain or lack the waveform recovery capability. As a result, the enhanced speeches still include noisy signals. In this study, we propose a topology-enhanced GAN model to tackle noisy speeches in an end-to-end structure. We use the topology features of speech waves as additional constraints and modify the objective function of the GAN by adding a penalty term. The penalty term is a Wasserstein distance of topology features measuring the difference between the generated speech and the corresponding clean speech. We evaluate the proposed speech-enhanced model on the public speech data set with 56 speakers and 20 different types of noisy conditions. The experimental results indicate that the topology features improve the performance of GANs on speech enhancement in metrics of PESQ, CBAK, COVL, and SSNR.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Suliang Bu|AUTHOR Suliang Bu]]^^1^^
, [[Yunxin Zhao|AUTHOR Yunxin Zhao]]^^1^^
, [[Shaojun Wang|AUTHOR Shaojun Wang]]^^2^^
, [[Mei Han|AUTHOR Mei Han]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Mizzou, USA; ^^2^^PAII, USA; ^^3^^PAII, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2731–2735
</span></p></div>
<div class="cpabstractcardabstract"><p>Time-frequency (TF) masks are widely used in speech enhancement (SE). However, accurately estimating TF masks from noisy speech remains a challenge to both statistical or neural network approaches. Statistical model-based mask estimation usually depends on a good parameter initialization, while NN-based mask estimation relies on setting proper and stable learning targets. To address these issues, we propose a novel approach to extracting TF speech structures from clean speech data, and partition a noisy speech spectrogram into mutually exclusive regions of core speech, core noise, and transition. Using such region targets derived from clean speech, we train bidirectional LSTM to learn region prediction from noisy speech, which is easier to do than mask prediction. The predicted regions can further be used in place of masks in beamforming, or integrated with statistical and NN based mask estimation to constrain mask values and model parameter updates. Our experimental results on ASR (CHiME-3) and SE (CHiME-3 and LibriSpeech) have demonstrated the effectiveness of our approach of learning speech region structure to improve TF masks.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Eesung Kim|AUTHOR Eesung Kim]], [[Hyeji Seo|AUTHOR Hyeji Seo]]
</p><p class="cpabstractcardaffiliationlist">Kakao, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2736–2740
</span></p></div>
<div class="cpabstractcardabstract"><p>Convolution-augmented transformer (conformer) has recently shown competitive results in speech-domain applications, such as automatic speech recognition, continuous speech separation, and sound event detection. Conformer can capture both the short and long-term temporal sequence information by attending to the whole sequence at once with multi-head self-attention and convolutional neural network. However, the effectiveness of conformer in speech enhancement has not been demonstrated. In this paper, we propose an end-to-end speech enhancement architecture (SE-Conformer), incorporating a convolutional encoder–decoder and conformer, designed to be directly applied to the time-domain signal. We performed evaluations on both the VoiceBank-DEMAND Corpus (VCTK) and Librispeech datasets in terms of objective speech quality metrics. The experimental results show that the proposed model outperforms other competitive baselines in speech enhancement performance.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Mark R. Saddler|AUTHOR Mark R. Saddler]]^^1^^
, [[Andrew Francl|AUTHOR Andrew Francl]]^^1^^
, [[Jenelle Feather|AUTHOR Jenelle Feather]]^^1^^
, [[Kaizhi Qian|AUTHOR Kaizhi Qian]]^^2^^
, [[Yang Zhang|AUTHOR Yang Zhang]]^^2^^
, [[Josh H. McDermott|AUTHOR Josh H. McDermott]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^MIT, USA; ^^2^^MIT-IBM Watson AI Lab, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2681–2685
<a href="./IS2021/MEDIA/1973" class="externallinkbutton" target="_blank">{{$:/causal/ZIP Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>Contemporary speech enhancement predominantly relies on audio transforms that are trained to reconstruct a clean speech waveform. The development of high-performing neural network sound recognition systems has raised the possibility of using deep feature representations as ‘perceptual’ losses with which to train denoising systems. We explored their utility by first training deep neural networks to classify either spoken words or environmental sounds from audio. We then trained an audio transform to map noisy speech to an audio waveform that minimized the difference in the deep feature representations between the output audio and the corresponding clean audio. The resulting transforms removed noise substantially better than baseline methods trained to reconstruct clean waveforms, and also outperformed previous methods using deep feature losses. However, a similar benefit was obtained simply by using losses derived from the filter bank inputs to the deep networks. The results show that deep features can guide speech enhancement, but suggest that they do not yet outperform simple alternatives that do not involve learned features.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sefik Emre Eskimez|AUTHOR Sefik Emre Eskimez]], [[Xiaofei Wang|AUTHOR Xiaofei Wang]], [[Min Tang|AUTHOR Min Tang]], [[Hemin Yang|AUTHOR Hemin Yang]], [[Zirun Zhu|AUTHOR Zirun Zhu]], [[Zhuo Chen|AUTHOR Zhuo Chen]], [[Huaming Wang|AUTHOR Huaming Wang]], [[Takuya Yoshioka|AUTHOR Takuya Yoshioka]]
</p><p class="cpabstractcardaffiliationlist">Microsoft, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2686–2690
</span></p></div>
<div class="cpabstractcardabstract"><p>With the surge of online meetings, it has become more critical than ever to provide high-quality speech audio and live captioning under various noise conditions. However, most monaural speech enhancement (SE) models introduce processing artifacts and thus degrade the performance of downstream tasks, including automatic speech recognition (ASR). This paper proposes a multi-task training framework to make the SE models unharmful to ASR. Because most ASR training samples do not have corresponding clean signal references, we alternately perform two model update steps called SE-step and ASR-step. The SE-step uses clean and noisy signal pairs and a signal-based loss function. The ASR-step applies a pre-trained ASR model to training signals enhanced with the SE model. A cross-entropy loss between the ASR output and reference transcriptions is calculated to update the SE model parameters. Experimental results with realistic large-scale settings using ASR models trained on 75,000-hour data show that the proposed framework improves the word error rate for the SE output by 11.82% with little compromise in the SE quality. Performance analysis is also carried out by changing the ASR model, the data used for the ASR-step, and the schedule of the two update steps.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xinmeng Xu|AUTHOR Xinmeng Xu]], [[Yang Wang|AUTHOR Yang Wang]], [[Dongxiang Xu|AUTHOR Dongxiang Xu]], [[Yiyuan Peng|AUTHOR Yiyuan Peng]], [[Cong Zhang|AUTHOR Cong Zhang]], [[Jie Jia|AUTHOR Jie Jia]], [[Binbin Chen|AUTHOR Binbin Chen]]
</p><p class="cpabstractcardaffiliationlist">vivo, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2691–2695
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech enhancement is a fundamental way to separate and generate clean speech from adverse environment where the received speech is seriously corrupted by noise. This paper applies a novel progressive network for speech enhancement by using multi-stage structure, where each stage contains a channel attention block followed by dilated encoder-decoder convolutional network with gated linear units. In addition, each stage generates a prediction that is refined by a supervised attention block. What is more, a fusion block is inserted between original inputs and outputs of previous stage. Multi-stage architecture is introduced to sequentially invoke multiple deep-learning networks, and its key ingredient is the information exchange between different stages. Thus, a more flexible and robust outputs can be generated. Experimental results show that the proposed architecture obtains consistently better performance than recent state-of-the-art models in terms of both PESQ and STOI scores.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Oscar Chang|AUTHOR Oscar Chang]]^^1^^
, [[Dung N. Tran|AUTHOR Dung N. Tran]]^^2^^
, [[Kazuhito Koishida|AUTHOR Kazuhito Koishida]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Columbia University, USA; ^^2^^Microsoft, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2696–2700
</span></p></div>
<div class="cpabstractcardabstract"><p>Generalization remains a major problem in supervised learning of single-channel speech enhancement. In this work, we propose //learnable loss mixup (LLM)//, a simple and effortless training diagram, to improve the generalization of deep learning-based speech enhancement models. //Loss mixup//, of which //learnable loss mixup// is a special variant, optimizes a mixture of the loss functions of random sample pairs to train a model on virtual training data constructed from these pairs of samples. In //learnable loss mixup//, by conditioning on the mixed data, the loss functions are mixed using a non-linear mixing function automatically learned via neural parameterization. Our experimental results on the VCTK benchmark show that //learnable loss mixup// achieves 3.26 PESQ, outperforming the state-of-the-art.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xiao-Qi Zhang|AUTHOR Xiao-Qi Zhang]]^^1^^
, [[Jun Du|AUTHOR Jun Du]]^^1^^
, [[Li Chai|AUTHOR Li Chai]]^^1^^
, [[Chin-Hui Lee|AUTHOR Chin-Hui Lee]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^USTC, China; ^^2^^Georgia Tech, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2701–2705
</span></p></div>
<div class="cpabstractcardabstract"><p>A maximum likelihood (ML) approach to characterizing regression errors in each target layer of SNR progressive learning (PL) using long short-term memory (LSTM) networks is proposed to improve performances of speech enhancement at low SNR levels. Each LSTM layer is guided to learn an intermediate target with a specific SNR gain. In contrast to using previously proposed minimum squared error criterion (MMSE-PL-LSTM) which leads to an un-even distribution and a broad dynamic range of the prediction errors, we model the errors with a generalized Gaussian distribution (GGD) at all intermediate layers in the newly proposed ML-PL-LSTM framework. The shape factors in GGD can be automatically updated when training the LSTM networks in a layer-wise manner to estimate the network parameters progressively. Tested on the CHiME-4 simulation set for speech enhancement in unseen noise conditions, the proposed ML-PL-LSTM approach outperforms MMSE-PL-LSTM in terms of both PESQ and STOI measures. Furthermore, when evaluated on the CHiME-4 real test set for speech recognition, using ML-enhanced speech also results in less word error rates than those obtained with MMSE-enhanced speech.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Vikas Agrawal|AUTHOR Vikas Agrawal]]^^1^^
, [[Shashi Kumar|AUTHOR Shashi Kumar]]^^1^^
, [[Shakti P. Rath|AUTHOR Shakti P. Rath]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Samsung, India; ^^2^^Reverie Language Technologies, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2706–2710
</span></p></div>
<div class="cpabstractcardabstract"><p>Whispering is the natural choice of communication when one wants to interact quietly and privately. Due to vast differences in acoustic characteristics of whisper and natural speech, there is drastic degradation in the performance of whisper speech when decoded by the Automatic Speech Recognition (ASR) system trained on neutral speech. Recently, to handle this mismatched train and test scenario Denoising Autoencoders (DA) are used which gives some improvement. To improve over DA performance we propose another method to map speech from whisper domain to neutral speech domain via Joint Variational Auto-Encoder (JVAE). The proposed method requires time-aligned parallel data which is not available, so we developed an algorithm to convert parallel data to time-aligned parallel data. JVAE jointly learns the characteristics of whisper and neutral speech in a common latent space which significantly improves whisper recognition accuracy and outperforms traditional autoencoder based techniques. We benchmarked our method against two baselines, first being ASR trained on neutral speech and tested on whisper dataset and second being whisper test set mapped using DA and tested on same neutral ASR. We achieved an absolute improvement of 22.31% in Word Error Rate (WER) over the first baseline and an absolute 5.52% improvement over DA.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Lukas Lee|AUTHOR Lukas Lee]], [[Youna Ji|AUTHOR Youna Ji]], [[Minjae Lee|AUTHOR Minjae Lee]], [[Min-Seok Choi|AUTHOR Min-Seok Choi]]
</p><p class="cpabstractcardaffiliationlist">Naver, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2711–2715
</span></p></div>
<div class="cpabstractcardabstract"><p>As the importance of speech enhancement for real-world application increases, the compactness of the model is also becoming a crucial study. In this paper, we present compression techniques to reduce the model size and applied them to the state-of-the-art real-time speech enhancement system. We successfully reduce the model size by actively applying channel pruning while maintaining performance. In particular, we propose a method to prune more channels of convolutional neural networks (CNN) by utilizing gated linear unit (GLU) activation. In addition, lower-bit-quantization is applied to reduce model size, while minimizing performance degradation caused by quantization. We show the performance of our proposed model on a mobile device where computing resources are limited. In particular, it is implemented to enable streaming, and speech enhancement works in real-time.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Madhav Mahesh Kashyap|AUTHOR Madhav Mahesh Kashyap]], [[Anuj Tambwekar|AUTHOR Anuj Tambwekar]], [[Krishnamoorthy Manohara|AUTHOR Krishnamoorthy Manohara]], [[S. Natarajan|AUTHOR S. Natarajan]]
</p><p class="cpabstractcardaffiliationlist">PES University, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2716–2720
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper tackles the problem of the heavy dependence of clean speech data required by deep learning based audio-denoising methods by showing that it is possible to train deep speech denoising networks using only noisy speech samples. Conventional wisdom dictates that in order to achieve good speech denoising performance, there is a requirement for a large quantity of both noisy speech samples and perfectly clean speech samples, resulting in a need for expensive audio recording equipment and extremely controlled soundproof recording studios. These requirements pose significant challenges in data collection, especially in economically disadvantaged regions and for low resource languages. This work shows that speech denoising deep neural networks can be successfully trained utilizing only noisy training audio. Furthermore it is revealed that such training regimes achieve superior denoising performance over conventional training regimes utilizing clean training audio targets, in cases involving complex noise distributions and low Signal-to-Noise ratios (high noise environments). This is demonstrated through experiments studying the efficacy of our proposed approach over both real-world noises and synthetic noises using the 20 layered Deep Complex U-Net architecture.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Thananchai Kongthaworn|AUTHOR Thananchai Kongthaworn]], [[Burin Naowarat|AUTHOR Burin Naowarat]], [[Ekapol Chuangsuwanich|AUTHOR Ekapol Chuangsuwanich]]
</p><p class="cpabstractcardaffiliationlist">Chulalongkorn University, Thailand</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2741–2745
</span></p></div>
<div class="cpabstractcardabstract"><p>One of the main problems in the development of text-to-speech (TTS) systems is its reliance on subjective measures, typically the Mean Opinion Score (MOS). MOS requires a large number of people to reliably rate each utterance, making the development process slow and expensive. Recent research on speech quality assessment tends to focus on training models to estimate MOS, which requires a large number of training data, something that might not be available in low-resource languages. We propose an objective assessment metric based on the DTW distance using the spectrogram and the high-level features from an Automatic Speech Recognition (ASR) model to cover both acoustic and linguistic information. Experiments on Thai TTS and the Blizzard Challenge datasets show that our method outperformed other baselines in both utterance- and system-level by a large margin in terms of correlation coefficients. Our metric also outperformed the best baseline by 9.58% when used in head-to-head utterance-level comparisons. Ablation studies suggest that the middle layers of the ASR model are most suitable for TTS evaluation when used in conjunction with spectral features.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Saida Mussakhojayeva|AUTHOR Saida Mussakhojayeva]], [[Aigerim Janaliyeva|AUTHOR Aigerim Janaliyeva]], [[Almas Mirzakhmetov|AUTHOR Almas Mirzakhmetov]], [[Yerbolat Khassanov|AUTHOR Yerbolat Khassanov]], [[Huseyin Atakan Varol|AUTHOR Huseyin Atakan Varol]]
</p><p class="cpabstractcardaffiliationlist">Nazarbayev University, Kazakhstan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2786–2790
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper introduces a high-quality open-source speech synthesis dataset for Kazakh, a low-resource language spoken by over 13 million people worldwide. The dataset consists of about 93 hours of transcribed audio recordings spoken by two professional speakers (female and male). It is the first publicly available large-scale dataset developed to promote Kazakh text-to-speech (TTS) applications in both academia and industry. In this paper, we share our experience by describing the dataset development procedures and faced challenges, and discuss important future directions. To demonstrate the reliability of our dataset, we built baseline end-to-end TTS models and evaluated them using the subjective mean opinion score (MOS) measure. Evaluation results show that the best TTS models trained on our dataset achieve MOS above 4 for both speakers, which makes them applicable for practical use. The dataset, training recipe, and pretrained TTS models are freely available.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jason Taylor|AUTHOR Jason Taylor]], [[Korin Richmond|AUTHOR Korin Richmond]]
</p><p class="cpabstractcardaffiliationlist">University of Edinburgh, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2791–2795
</span></p></div>
<div class="cpabstractcardabstract"><p>Automatic speech recognition (ASR) is increasingly used to evaluate the intelligibility of text-to-speech synthesis (TTS). ASR is less costly than traditional listening tests, but questions remain about its reliability. We re-evaluate the Blizzard Challenge’s intelligibility tasks in English since 2011 using ASR. Re-analysing transcriptions collected by paid in-lab participants, online volunteers and Amazon Mechanical Turkers (the latter used only in 2011), we compare their word error rates (WERs) and statistically-significant system-groupings with those generated by an open-source, Transformer-based ASR model. This ASR model consistently decodes test stimuli with more reliable WERs than the Blizzard Challenge’s (mostly non-native) speech experts and online volunteers. The model also groups systems according to statistical significance similarly to the paid in-lab participants. Using surplus semantically unpredictable sentences (SUS) submitted every year to the challenge, we investigate how confidence intervals in ASR WERs change as the number of transcribed stimuli increases. We plot the Frobenius norm of pairwise significance matrices with increasing stimuli. We find that finer groupings of systems are detected as confidence intervals narrow. The number of stimuli where p-values start to converge ranges from 400–800 stimuli. We conclude that, with enough stimuli, ASR can be more reliable than humans.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Cassia Valentini-Botinhao|AUTHOR Cassia Valentini-Botinhao]]^^1^^
, [[Simon King|AUTHOR Simon King]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Edinburgh, UK; ^^2^^University of Edinburgh, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2746–2750
</span></p></div>
<div class="cpabstractcardabstract"><p>Sequence-to-sequence speech synthesis models are notorious for gross errors such as skipping and repetition, commonly associated with failures in the attention mechanism. While a lot has been done to improve attention and decrease errors, this paper focuses instead on automatic error detection and analysis. We evaluated three objective metrics against error detection scores collected by human listening. All metrics were derived from the synthesised attention matrix alone and do not require a reference signal, relying on the expectation that errors occur when attention is dispersed or insufficient. Using one of this metrics as an analysis tool, we observed that gross errors are more likely to occur in longer sentences and in sentences with punctuation marks that indicate pause or break. We also found that mechanisms such as forcibly incremented attention have the potential for decreasing gross errors but to the detriment of naturalness. The results of the error detection evaluation revealed that two of the evaluated metrics were able to detect errors with a relatively high success rate, obtaining F-scores of up to 0.89 and 0.96.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Rohola Zandie|AUTHOR Rohola Zandie]]^^1^^
, [[Mohammad H. Mahoor|AUTHOR Mohammad H. Mahoor]]^^1^^
, [[Julia Madsen|AUTHOR Julia Madsen]]^^2^^
, [[Eshrat S. Emamian|AUTHOR Eshrat S. Emamian]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Denver, USA; ^^2^^DreamFace Technologies, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2751–2755
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper introduces //RyanSpeech//, a new speech corpus for research on automated text-to-speech (TTS) systems. Publicly available TTS corpora are often noisy, recorded with multiple speakers, or lack quality male speech data. In order to meet the need for a high quality, publicly available male speech corpus within the field of speech recognition, we have designed and created //RyanSpeech// which contains textual materials from real-world conversational settings. These materials contain over 10 hours of a professional male voice actor’s speech recorded at 44.1 kHz. This corpus’s design and pipeline make //RyanSpeech// ideal for developing TTS systems in real-world applications. To provide a baseline for future research, protocols, and benchmarks, we trained 4 state-of-the-art speech models and a vocoder on //RyanSpeech//. The results show 3.36 in mean opinion scores (MOS) in our best model. We have made both the corpus and trained models for public use.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yao Shi|AUTHOR Yao Shi]]^^1^^
, [[Hui Bu|AUTHOR Hui Bu]]^^2^^
, [[Xin Xu|AUTHOR Xin Xu]]^^2^^
, [[Shaoji Zhang|AUTHOR Shaoji Zhang]]^^2^^
, [[Ming Li|AUTHOR Ming Li]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Wuhan University, China; ^^2^^Beijing Shell Shell Technology, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2756–2760
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we present AISHELL-3, a large-scale multi-speaker Mandarin speech corpus which could be used to train multi-speaker Text-To-Speech (TTS) systems. The corpus contains roughly 85 hours of emotion-neutral recordings spanning across 218 native Chinese mandarin speakers. Their auxiliary attributes such as gender, age group and native accents are explicitly marked and provided in the corpus. Moreover, transcripts in Chinese character-level and pinyin-level are provided along with the recordings. We also present some data processing strategies and techniques which match with the characteristics of the presented corpus and conduct experiments on multiple speech-synthesis systems to assess the quality of the generated speech samples, showing promising results. The corpus is available online under Apache v2.0 license.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Nicholas Eng|AUTHOR Nicholas Eng]], [[C.T. Justine Hui|AUTHOR C.T. Justine Hui]], [[Yusuke Hioka|AUTHOR Yusuke Hioka]], [[Catherine I. Watson|AUTHOR Catherine I. Watson]]
</p><p class="cpabstractcardaffiliationlist">University of Auckland, New Zealand</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2761–2765
</span></p></div>
<div class="cpabstractcardabstract"><p>This study investigates the use of speech enhancement techniques in creating text-to-speech voices with degraded or noisy speech. A number of synthetic voices were created using speech that was first degraded by different noise types at various signal-to-noise ratios (SNRs), then enhanced through four speech enhancement algorithms: Subspace, Wiener filter, SEGAN and a DNN-based method. Subjective listening tests show that the quality of the synthetic voices produced by subspace and the DNN-based method enhanced speech outperforms the quality of the voices created using Wiener filter or SEGAN enhanced speech at low SNRs, and speech enhanced by the subspace method results in higher quality synthetic speech at higher SNRs.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Chenye Cui|AUTHOR Chenye Cui]]^^1^^
, [[Yi Ren|AUTHOR Yi Ren]]^^1^^
, [[Jinglin Liu|AUTHOR Jinglin Liu]]^^1^^
, [[Feiyang Chen|AUTHOR Feiyang Chen]]^^1^^
, [[Rongjie Huang|AUTHOR Rongjie Huang]]^^1^^
, [[Ming Lei|AUTHOR Ming Lei]]^^2^^
, [[Zhou Zhao|AUTHOR Zhou Zhao]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Zhejiang University, China; ^^2^^Alibaba, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2766–2770
</span></p></div>
<div class="cpabstractcardabstract"><p>Recently, there has been an increasing interest in neural speech synthesis. While the deep neural network achieves the state-of-the-art result in text-to-speech (TTS) tasks, how to generate a more emotional and more expressive speech is becoming a new challenge to researchers due to the scarcity of high-quality emotion speech dataset and the lack of advanced emotional TTS model. In this paper, we first briefly introduce and publicly release a Mandarin emotion speech dataset including 9,724 samples with audio files and its emotion human-labeled annotation. After that, we propose a simple but efficient architecture for emotional speech synthesis called EMSpeech. Unlike those models which need additional reference audio as input, our model could predict emotion labels just from the input text and generate more expressive speech conditioned on the emotion embedding. In the experiment phase, we first validate the effectiveness of our dataset by an emotion classification task. Then we train our model on the proposed dataset and conduct a series of subjective evaluations. Finally, by showing a comparable performance in the emotional speech synthesis task, we successfully demonstrate the ability of the proposed model.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sai Sirisha Rallabandi|AUTHOR Sai Sirisha Rallabandi]], [[Abhinav Bharadwaj|AUTHOR Abhinav Bharadwaj]], [[Babak Naderi|AUTHOR Babak Naderi]], [[Sebastian Möller|AUTHOR Sebastian Möller]]
</p><p class="cpabstractcardaffiliationlist">Technische Universität Berlin, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2771–2775
</span></p></div>
<div class="cpabstractcardabstract"><p>With the improved computational abilities, the usage of chatbots and conversational agents has become more prevalent. Therefore, it is essential that these agents exhibit certain social speaker characteristics in the generated speech. In this paper, we study the perception of such speaker characteristics in two commercial Text-to-Speech (TTS) systems, Amazon Polly and Google TTS. We carried out a 15-item semantic differential scaling test. The factor analysis provided us with three underlying dimensions that can be perceived from synthetic speech, warmth, competence, and extraversion. Our results show that we can perceive both interpersonal relationships and also personality traits from synthetic voices. Additionally, we observed that the female participants perceived male voices to be more responsible, energetic, relaxed, and enthusiastic. In comparison, male participants found female voices to be more reliable, accessible, and confident. A discussion on the comparison of our results with that of the studies on natural speech is also provided.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Evelina Bakhturina|AUTHOR Evelina Bakhturina]], [[Vitaly Lavrukhin|AUTHOR Vitaly Lavrukhin]], [[Boris Ginsburg|AUTHOR Boris Ginsburg]], [[Yang Zhang|AUTHOR Yang Zhang]]
</p><p class="cpabstractcardaffiliationlist">NVIDIA, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2776–2780
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper introduces a new multi-speaker English dataset for training text-to-speech models. The dataset is based on LibriVox audiobooks and Project Gutenberg texts, both in the public domain. The new dataset contains about 292 hours of speech from 10 speakers with at least 17 hours per speaker sampled at 44.1 kHz. To select speech samples with high quality, we considered audio recordings with a signal bandwidth of at least 13 kHz and a signal-to-noise ratio (SNR) of at least 32 dB. The dataset is publicly released.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Wei-Cheng Tseng|AUTHOR Wei-Cheng Tseng]], [[Chien-yu Huang|AUTHOR Chien-yu Huang]], [[Wei-Tsung Kao|AUTHOR Wei-Tsung Kao]], [[Yist Y. Lin|AUTHOR Yist Y. Lin]], [[Hung-yi Lee|AUTHOR Hung-yi Lee]]
</p><p class="cpabstractcardaffiliationlist">National Taiwan University, Taiwan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 2781–2785
</span></p></div>
<div class="cpabstractcardabstract"><p>Speech quality assessment has been a critical issue in speech processing for decades. Existing automatic evaluations usually require clean references or parallel ground truth data, which is infeasible when the amount of data soars. Subjective tests, on the other hand, do not need any additional clean or parallel data and correlates better to human perception. However, such a test is expensive and time-consuming because crowd work is necessary. It thus becomes highly desired to develop an automatic evaluation approach that correlates well with human perception while not requiring ground truth data. In this paper, we use self-supervised pre-trained models for MOS prediction. We show their representations can distinguish between clean and noisy audios. Then, we fine-tune these pre-trained models followed by simple linear layers in an end-to-end manner. The experiment results showed that our framework outperforms the two previous state-of-the-art models by a significant improvement on Voice Conversion Challenge 2018 and achieves comparable or superior performance on Voice Conversion Challenge 2016. We also conducted an ablation study to further investigate how each module benefits the task. The experiment results are implemented and reproducible with publicly available toolkits.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Pascale Fung|AUTHOR Pascale Fung]]
</p><p class="cpabstractcardaffiliationlist">HKUST, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} ////
</span></p></div>
<div class="cpabstractcardabstract"><p>Conversational AI (ConvAI) systems have applications ranging from personal assistance, health assistance to customer services. They have been in place since the first call centre agent went live in the late 1990s. More recently, smart speakers and smartphones are powered with conversational AI with similar architecture as those from the 90s. On the other hand, research on ConvAI systems has made leaps and bounds in recent years with sequence-to-sequence, generation-based models. Thanks to the advent of large scale pre-trained language models, state-of-the-art ConvAI systems can generate surprisingly human-like responses to user queries in open domain conversations, known as chit-chat. However, these generation based ConvAI systems are difficult to control and can lead to inappropriate, biased and sometimes even toxic responses. In addition, unlike previous modular conversational AI systems, it is also challenging to incorporate external knowledge into these models for task-oriented dialog scenarios such as personal assistance and customer services, and to maintain consistency.
With great power comes great responsibility. We must address the many ethical and technical challenges of generation based conversational AI systems to control for bias and safety, consistency, style, knowledge incorporation, etc. In this talk, I will introduce state-of-the-art generation based conversational AI approaches, and will point out remaining challenges of conversational AI and possible directions for future research, including how to mitigate inappropriate responses. I will also present some ethical guidelines that conversational AI systems can follow.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Nikolay Karpov|AUTHOR Nikolay Karpov]], [[Alexander Denisenko|AUTHOR Alexander Denisenko]], [[Fedor Minkin|AUTHOR Fedor Minkin]]
</p><p class="cpabstractcardaffiliationlist">SberBank, Russia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1419–1423
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper introduces a novel Russian speech dataset called Golos, a large corpus suitable for speech research. The dataset mainly consists of recorded audio files manually annotated on the crowd-sourcing platform. The total duration of the audio is about 1240 hours. We have made the corpus freely available to download, along with the acoustic model with CTC loss prepared on this corpus. Additionally, transfer learning was applied to improve the performance of the acoustic model. In order to evaluate the quality of the dataset with the beam-search algorithm, we have built a 3-gram language model on the open Common Crawl dataset. The total word error rate (WER) metrics turned out to be about 3.3% and 11.5%.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Samik Sadhu|AUTHOR Samik Sadhu]], [[Hynek Hermansky|AUTHOR Hynek Hermansky]]
</p><p class="cpabstractcardaffiliationlist">Johns Hopkins University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1424–1428
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose a technique to compute spectrograms using Frequency Domain Linear Prediction (FDLP) that uses all-pole models to fit the squared Hilbert envelope of speech in different frequency sub-bands. The spectrogram of a complete speech utterance is computed by overlap-add of contiguous all-pole model responses. A long context window of 1.5 seconds allows us to capture the low frequency temporal modulations of speech in the spectrogram. For an end-to-end automatic speech recognition task, the FDLP spectrogram performs on par with the standard mel spectrogram features for clean read speech training and test data. For more realistic speech data with train-test domain mismatches or reverberations, FDLP spectrogram shows up to 25% and 22% relative WER improvements over mel spectrogram respectively.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ragheb Al-Ghezi|AUTHOR Ragheb Al-Ghezi]]^^1^^
, [[Yaroslav Getman|AUTHOR Yaroslav Getman]]^^1^^
, [[Aku Rouhe|AUTHOR Aku Rouhe]]^^1^^
, [[Raili Hildén|AUTHOR Raili Hildén]]^^2^^
, [[Mikko Kurimo|AUTHOR Mikko Kurimo]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Aalto University, Finland; ^^2^^University of Helsinki, Finland</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1429–1433
</span></p></div>
<div class="cpabstractcardabstract"><p>Unlike traditional (hybrid) Automatic Speech Recognition (ASR), end-to-end ASR systems simplify the training procedure by directly mapping acoustic features to sequences of graphemes or characters, thereby eliminating the need for specialized acoustic, language, or pronunciation models. However, one drawback of end-to-end ASR systems is that they require more training data than conventional ASR systems to achieve similar word error rate (WER). This makes it difficult to develop ASR systems for tasks where transcribed target data is limited such as developing ASR for Second Language (L2) speakers of Swedish. Nonetheless, recent advancements in self-supervised acoustic learning, manifested in wav2vec models [1, 2, 3], leverage the available untranscribed speech data to provide compact acoustic representation that can achieve low WER when incorporated in end-to-end systems. To this end, we experiment with several monolingual and cross-lingual self-supervised acoustic models to develop end-to-end ASR system for L2 Swedish. Even though our test is very small, it indicates that these systems are competitive in performance with traditional ASR pipeline. Our best model seems to reduce the WER by 7% relative to our traditional ASR baseline trained on the same target data.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Patrick K. O’Neill|AUTHOR Patrick K. O’Neill]]^^1^^
, [[Vitaly Lavrukhin|AUTHOR Vitaly Lavrukhin]]^^2^^
, [[Somshubra Majumdar|AUTHOR Somshubra Majumdar]]^^2^^
, [[Vahid Noroozi|AUTHOR Vahid Noroozi]]^^2^^
, [[Yuekai Zhang|AUTHOR Yuekai Zhang]]^^3^^
, [[Oleksii Kuchaiev|AUTHOR Oleksii Kuchaiev]]^^2^^
, [[Jagadeesh Balam|AUTHOR Jagadeesh Balam]]^^2^^
, [[Yuliya Dovzhenko|AUTHOR Yuliya Dovzhenko]]^^1^^
, [[Keenan Freyberg|AUTHOR Keenan Freyberg]]^^1^^
, [[Michael D. Shulman|AUTHOR Michael D. Shulman]]^^1^^
, [[Boris Ginsburg|AUTHOR Boris Ginsburg]]^^2^^
, [[Shinji Watanabe|AUTHOR Shinji Watanabe]]^^3^^
, [[Georg Kucsko|AUTHOR Georg Kucsko]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Kensho Technologies, USA; ^^2^^NVIDIA, USA; ^^3^^Johns Hopkins University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1434–1438
</span></p></div>
<div class="cpabstractcardabstract"><p>In the English speech-to-text (STT) machine learning task, acoustic models are conventionally trained on uncased Latin characters, and any necessary orthography (such as capitalization, punctuation, and denormalization of non-standard words) is imputed by separate post-processing models. This adds complexity and limits performance, as many formatting tasks benefit from semantic information present in the acoustic signal but absent in transcription. Here we propose a new STT task: end-to-end neural transcription with fully formatted text for target labels. We present baseline Conformer-based models trained on a corpus of 5,000 hours of professionally transcribed earnings calls, achieving a CER of 1.7. As a contribution to the STT research community, we release the corpus free for non-commercial use.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Solène Evain|AUTHOR Solène Evain]]^^1^^
, [[Ha Nguyen|AUTHOR Ha Nguyen]]^^1^^
, [[Hang Le|AUTHOR Hang Le]]^^1^^
, [[Marcely Zanon Boito|AUTHOR Marcely Zanon Boito]]^^1^^
, [[Salima Mdhaffar|AUTHOR Salima Mdhaffar]]^^2^^
, [[Sina Alisamir|AUTHOR Sina Alisamir]]^^1^^
, [[Ziyi Tong|AUTHOR Ziyi Tong]]^^1^^
, [[Natalia Tomashenko|AUTHOR Natalia Tomashenko]]^^2^^
, [[Marco Dinarelli|AUTHOR Marco Dinarelli]]^^1^^
, [[Titouan Parcollet|AUTHOR Titouan Parcollet]]^^2^^
, [[Alexandre Allauzen|AUTHOR Alexandre Allauzen]]^^3^^
, [[Yannick Estève|AUTHOR Yannick Estève]]^^2^^
, [[Benjamin Lecouteux|AUTHOR Benjamin Lecouteux]]^^1^^
, [[François Portet|AUTHOR François Portet]]^^1^^
, [[Solange Rossato|AUTHOR Solange Rossato]]^^1^^
, [[Fabien Ringeval|AUTHOR Fabien Ringeval]]^^1^^
, [[Didier Schwab|AUTHOR Didier Schwab]]^^1^^
, [[Laurent Besacier|AUTHOR Laurent Besacier]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^LIG (UMR 5217), France; ^^2^^LIA (EA 4128), France; ^^3^^LAMSADE (UMR 7243), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1439–1443
</span></p></div>
<div class="cpabstractcardabstract"><p>Self-Supervised Learning (SSL) using huge unlabeled data has been successfully explored for image and natural language processing. Recent works also investigated SSL from speech. They were notably successful to improve performance on downstream tasks such as automatic speech recognition (ASR). While these works suggest it is possible to reduce dependence on labeled data for building efficient speech systems, their evaluation was mostly made on ASR and using multiple and heterogeneous experimental settings (most of them for English). This questions the objective comparison of SSL approaches and the evaluation of their impact on building speech systems. In this paper, we propose //LeBenchmark//: a reproducible framework for assessing SSL from speech. It not only includes ASR (high and low resource) tasks but also spoken language understanding, speech translation and emotion recognition. We also focus on speech technologies in a language different than English: French. SSL models of different sizes are trained from carefully sourced and documented datasets. Experiments show that SSL is beneficial for most but not all tasks which confirms the need for exhaustive and reliable benchmarks to evaluate its real impact. //LeBenchmark// is shared with the scientific community for reproducible research in SSL from speech.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Pavel Šturm|AUTHOR Pavel Šturm]], [[Radek Skarnitzl|AUTHOR Radek Skarnitzl]], [[Tomáš Nechanský|AUTHOR Tomáš Nechanský]]
</p><p class="cpabstractcardaffiliationlist">Charles University, Czechia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1444–1448
<a href="./IS2021/MEDIA/0130" class="externallinkbutton" target="_blank">{{$:/causal/ZIP Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>The study of phonetic accommodation in various communicative situations is still relatively limited. This paper examines accommodation in spontaneous conversations of eight pairs of Czech young male speakers in two communicative conditions: unconstrained face-to-face conversation and goal-oriented interaction via mobile telephone. Articulation rate and measures of f0 level, range and variability were measured in 40 prosodic phrases per speaker in each condition. Analyses of LME models did not reveal a significant global effect of time throughout the interaction on the distance between speakers (convergence) in any of the examined parameters, or that of preceding phrase value on the subsequent turn-initial value (synchrony). However, more consistent patterns were observed when speaker pairs were examined separately, revealing substantial individual variation on the one hand and non-linear effects on the other. This shows that aggregate analyses can be misleading in the study of phonetic accommodation and that speakers dynamically employ different strategies throughout natural conversations.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Josiane Riverin-Coutlée|AUTHOR Josiane Riverin-Coutlée]], [[Conceição Cunha|AUTHOR Conceição Cunha]], [[Enkeleida Kapia|AUTHOR Enkeleida Kapia]], [[Jonathan Harrington|AUTHOR Jonathan Harrington]]
</p><p class="cpabstractcardaffiliationlist">LMU München, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1449–1453
</span></p></div>
<div class="cpabstractcardabstract"><p>This apparent and real time study analyses how dialect features in the speech of children and adults are differently affected depending on whether they live in homogeneous or heterogeneous speech communities. The general hypotheses are that speakers in such high contact settings as heterogeneous urban centers are more prone to innovation than speakers in homogeneous tightly-knit communities, and that children accelerate leveling, especially through schooling and socialization. This study is of Gheg Albanian, a dialect spoken in and around the capital Tirana. Two features were investigated: rounding of /a/ and vowel length contrasts. Two groups of adults and children were compared: one from Tirana and one from a nearby village. Additionally, the children were recorded twice over a period of 12 months and were compared longitudinally. The results showed that length contrasts were still present in both communities and age groups. Rounding of /a/ was lost in the city, but undergoing change in the village, with differences measured in apparent time, but also in child speech within the 12-month span. Our study further raises the issue of combining both apparent and real time data within the same design.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Margaret Zellers|AUTHOR Margaret Zellers]]^^1^^
, [[Alena Witzlack-Makarevich|AUTHOR Alena Witzlack-Makarevich]]^^2^^
, [[Lilja Saeboe|AUTHOR Lilja Saeboe]]^^3^^
, [[Saudah Namyalo|AUTHOR Saudah Namyalo]]^^4^^
</p><p class="cpabstractcardaffiliationlist">^^1^^CAU, Germany; ^^2^^Hebrew University of Jerusalem, Israel; ^^3^^University of Oxford, UK; ^^4^^Makerere University, Uganda</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1454–1458
</span></p></div>
<div class="cpabstractcardabstract"><p>Liquid consonants — rhotics and laterals — have been shown to demonstrate unique distributional patterns cross-linguistically. It is also claimed that rhotics are more difficult to distinguish from one another phonetically than laterals, and that rhotics are less flexible than laterals when it comes to participation in consonant clusters and coarticulatory patterns. We investigate the phonetic realization of the rhotic and lateral phonemes in a Bantu language, Ruruuli. The acoustic space used for rhotics and laterals in this language is extremely similar, although the density peaks in terms of formant values are different. Formant values as well as formant ratios can be reliably used to distinguish between rhotics and laterals. In common with many other languages, an asymmetry between laterals and rhotics is found in Ruruuli, with laterals being more positionally constrained than rhotics. The overlap in acoustic space between rhotics and laterals may cast doubt on the status or stability of the phonological contrast between rhotics and laterals in this language.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kubra Bodur|AUTHOR Kubra Bodur]], [[Sweeney Branje|AUTHOR Sweeney Branje]], [[Morgane Peirolo|AUTHOR Morgane Peirolo]], [[Ingrid Tiscareno|AUTHOR Ingrid Tiscareno]], [[James S. German|AUTHOR James S. German]]
</p><p class="cpabstractcardaffiliationlist">LPL (UMR 7309), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1459–1463
</span></p></div>
<div class="cpabstractcardabstract"><p>Studies have shown that cross-linguistically, consonants at the left edge of higher-level prosodic boundaries tend to be more forcefully articulated than those at lower-level boundaries, a phenomenon known as //domain-initial strengthening//. This study tests whether similar effects occur in Turkish, using the Autosegmental-Metrical model proposed by Ipek & Jun [1, 2] as the basis for assessing boundary strength. Productions of /t/ and /d/ were elicited in four domain-initial prosodic positions corresponding to progressively higher-level boundaries: syllable, word, intermediate phrase, and Intonational Phrase. A fifth position, nuclear word, was included in order to better situate it within the prosodic hierarchy. Acoustic correlates of articulatory strength were measured, including closure duration for /d/ and /t/, as well as voice onset time and burst energy for /t/. Our results show that closure duration increases cumulatively from syllable to intermediate phrase, while voice onset time and burst energy are not influenced by boundary strength. These findings provide corroborating evidence for Ipek & Jun’s model, particularly for the distinction between word and intermediate phrase boundaries. Additionally, articulatory strength at the left edge of the nuclear word patterned closely with word-initial position, supporting the view that the nuclear word is not associated with a distinct phrasing domain.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Katerina Zmolikova|AUTHOR Katerina Zmolikova]]^^1^^
, [[Marc Delcroix|AUTHOR Marc Delcroix]]^^2^^
, [[Desh Raj|AUTHOR Desh Raj]]^^3^^
, [[Shinji Watanabe|AUTHOR Shinji Watanabe]]^^3^^
, [[Jan Černocký|AUTHOR Jan Černocký]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Brno University of Technology, Czechia; ^^2^^NTT, Japan; ^^3^^Johns Hopkins University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1464–1468
</span></p></div>
<div class="cpabstractcardabstract"><p>Automatic speech recognition systems deteriorate in presence of overlapped speech. A popular approach to alleviate this is target speech extraction. The extraction system is usually trained with a loss function measuring the discrepancy between the estimated and the reference target speech. This often leads to distortions to the target signal which is detrimental to the recognition accuracy. Additionally, it is necessary to have the strong supervision provided by parallel data consisting of speech mixtures and single-speaker signals. We propose an auxiliary loss function for retraining the target speech extraction. It is composed of two parts: first, a speaker identity loss, forcing the estimated speech to have correct speaker characteristics, and second, a mixture consistency loss, making the extracted sources sum back to the original mixture. The only supervision required for the proposed loss is speaker characteristics obtained from several segments spoken by the target speaker. Such weak supervision makes the loss suitable for adapting the system directly on real recordings. We show that the proposed loss yields signals more suitable for speech recognition and further, we can gain additional improvements by adaptation to target data. Overall, we can reduce the word error rate on LibriCSS dataset from 27.4% to 24.0%.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Marvin Borsdorf|AUTHOR Marvin Borsdorf]]^^1^^
, [[Chenglin Xu|AUTHOR Chenglin Xu]]^^2^^
, [[Haizhou Li|AUTHOR Haizhou Li]]^^2^^
, [[Tanja Schultz|AUTHOR Tanja Schultz]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universität Bremen, Germany; ^^2^^NUS, Singapore</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1469–1473
</span></p></div>
<div class="cpabstractcardabstract"><p>Speaker extraction has been studied mostly for the scenarios where a target speaker is present in a two or more talkers mixture. Such scenarios do not adequately reflect everyday conversations. For example, a target speaker can be the only active talker, be quiet for a while, or leave the conversation, that means the target speaker is absent from the mixture. Traditional speaker extraction models fail in these scenarios. We propose a novel speaker extraction approach to handle speech mixtures with one or two talkers in which the target speaker can either be present or absent. First, we formulate four speaker extraction conditions to cover the typical scenarios of everyday conversations with one and two talkers. Second, we introduce a joint training scheme with one unified loss function that works for all four conditions. We show that only a small amount of data is required to adapt the model to work well in the four conditions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Lukas Mateju|AUTHOR Lukas Mateju]], [[Frantisek Kynych|AUTHOR Frantisek Kynych]], [[Petr Cerva|AUTHOR Petr Cerva]], [[Jindrich Zdansky|AUTHOR Jindrich Zdansky]], [[Jiri Malek|AUTHOR Jiri Malek]]
</p><p class="cpabstractcardaffiliationlist">Technical University of Liberec, Czechia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1474–1478
</span></p></div>
<div class="cpabstractcardabstract"><p>A new approach to speech activity detection (SAD) is presented in this work. It allows us to reduce the complexity and computation demands, namely in services that process streaming speech, where a SAD module usually forms the first block of the data pipeline (e.g., in a platform for 24/7 broadcast transcription). Our approach utilizes x-vectors as input features so that, within the subsequent pipeline stages, these embedding instances can also directly be employed for speaker diarization and recognition. The x-vectors are extracted by feed-forward sequential memory network (FSMN), allowing for modeling long-time dependencies; they thus form an input into a computationally undemanding binary classifier, whose output is smoothed by a decoder. Evaluation is performed on the standardized QUT-NOISE-TIMIT dataset as well as on broadcast data with large portions of music and background noise. The former data allows for comparison with other existing approaches. The latter shows the performance in terms of word error rate (WER) and reduction in real-time factor (RTF) of the transcription process.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Daniele Salvati|AUTHOR Daniele Salvati]], [[Carlo Drioli|AUTHOR Carlo Drioli]], [[Gian Luca Foresti|AUTHOR Gian Luca Foresti]]
</p><p class="cpabstractcardaffiliationlist">Università di Udine, Italy</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1479–1483
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose a time delay estimation (TDE) method for speaker localization based on parametrized generalized cross-correlation phase transform (PGCC-PHAT) functions and convolutional neural networks (CNNs). The PGCC-PHAT is used to build a feature matrix, which gives TDE information of two microphone signals with different normalization levels in the cross-correlation functions. The feature matrix is processed by a CNN, composed by several convolutional layers and fully connected layers and by a regression output for the directly estimation of the time difference of arrival (TDOA). Simulations in noisy and reverberant adverse conditions show that the proposed method improves the TDOA estimation performance if compared to the GCC-PHAT.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Midia Yousefi|AUTHOR Midia Yousefi]], [[John H.L. Hansen|AUTHOR John H.L. Hansen]]
</p><p class="cpabstractcardaffiliationlist">University of Texas at Dallas, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1484–1488
</span></p></div>
<div class="cpabstractcardabstract"><p>Most current speech technology systems are designed to operate well even in the presence of multiple active speakers. However, most solutions assume that the number of co-current speakers is known. Unfortunately, this information might not always be available in real-world applications. In this study, we propose a real-time, single-channel attention-guided Convolutional Neural Network (CNN) to estimate the number of active speakers in overlapping speech. The proposed system extracts higher-level information from the speech spectral content using a CNN model. Next, the attention mechanism summarizes the extracted information into a compact feature vector without losing critical information. Finally, the active speakers are classified using a fully connected network. Experiments on simulated overlapping speech using WSJ corpus show that the attention solution is shown to improve the performance by almost 3% absolute over conventional temporal average pooling. The proposed Attention-guided CNN achieves 76.15% for both Weighted Accuracy and average Recall, and 75.80% Precision on speech segments as short as 20 frames (i.e., 200 ms). All the classification metrics exceed 92% for the attention-guided model in offline scenarios where the input signal is more than 100 frames long (i.e., 1s).</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xinlei Ren|AUTHOR Xinlei Ren]], [[Xu Zhang|AUTHOR Xu Zhang]], [[Lianwu Chen|AUTHOR Lianwu Chen]], [[Xiguang Zheng|AUTHOR Xiguang Zheng]], [[Chen Zhang|AUTHOR Chen Zhang]], [[Liang Guo|AUTHOR Liang Guo]], [[Bing Yu|AUTHOR Bing Yu]]
</p><p class="cpabstractcardaffiliationlist">Kuaishou Technology, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1832–1836
</span></p></div>
<div class="cpabstractcardabstract"><p>People are meeting through video conferencing more often. While single channel speech enhancement techniques are useful for the individual participants, the speech quality will be significantly degraded in large meeting rooms where the far-field and reverberate conditions are introduced. Approaches based on microphone array signal processing are proposed to explore the inter-channel correlation among the individual microphone channels. In this work, a new causal U-net based multiple-in-multiple-out structure is proposed for real-time multi-channel speech enhancement. The proposed method incorporates the traditional beamforming structure with the multi-channel causal U-net by explicitly adding a beamforming operation at the end of the neural beamformer. The proposed method has entered the INTERSPEECH Far-field Multi-Channel Speech Enhancement Challenge for Video Conferencing. With 1.97M model parameters and 0.25 real-time factor on Intel Core i7 (2.6GHz) CPU, the proposed method has outperforms the baseline system of this challenge on PESQ, Si-SNR and STOI metrics.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Rui Zhu|AUTHOR Rui Zhu]]^^1^^
, [[Feiran Yang|AUTHOR Feiran Yang]]^^2^^
, [[Yuepeng Li|AUTHOR Yuepeng Li]]^^1^^
, [[Shidong Shang|AUTHOR Shidong Shang]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Tencent, China; ^^2^^CAS, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1837–1841
</span></p></div>
<div class="cpabstractcardabstract"><p>The rapid development of online video conferencing systems has caused renewed attention to the multi-channel recording and playback systems. Stereophonic acoustic echo cancellation (SAEC) is the key issue of this systems. This paper proposes an optimally designed partitioned-block frequency-domain Kalman filter (PBFDKF) algorithm for SAEC. We establish the frequency-domain observation equation using the overlap-and-save method and we use the first-order Markov model to describe the state equation. The exact PBFDKF algorithm is derived under the umbrella of Kalman filter theory and two fast implementations are then presented to reduce the complexity. The proposed algorithm is equivalent to the dual-channel partitioned-block frequency-domain gradient-based algorithm with optimum step-size control, and hence it exhibits very good convergence performance and is found to be robust to near-end interference without a double-talk detector. Extensive experiments in different SAEC conditions confirm the effectiveness of the proposed algorithm.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Taihui Wang|AUTHOR Taihui Wang]]^^1^^
, [[Feiran Yang|AUTHOR Feiran Yang]]^^1^^
, [[Rui Zhu|AUTHOR Rui Zhu]]^^2^^
, [[Jun Yang|AUTHOR Jun Yang]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^CAS, China; ^^2^^Tencent, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1842–1846
</span></p></div>
<div class="cpabstractcardabstract"><p>Online independent vector analysis (IVA) based on auxiliary technology is effective to separate audio source in real time. However, the separated signal may contain residual interference noise because the source model of IVA lacks flexibility and cannot treat the specific harmonic structures of sources. This paper presents a real-time IVA method where the amplitude spectrum of separated signal is modeled by semi-supervised nonnegative matrix factorization (SSNMF). Using the pre-trained basis matrix which contains source structures, we can extract the target source from the separated signal in real time. The advantage of the proposed method is that the extracted source can provide a more accurate variance than the separated signal and hence the proposed method can obtain a better separation performance than the oracle IVA. Experimental results in speech denoising task show the effectiveness and the robustness of the proposed method with different types of noise.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jiangyu Han|AUTHOR Jiangyu Han]]^^1^^
, [[Wei Rao|AUTHOR Wei Rao]]^^2^^
, [[Yannan Wang|AUTHOR Yannan Wang]]^^2^^
, [[Yanhua Long|AUTHOR Yanhua Long]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Shanghai Normal University, China; ^^2^^Tencent, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1847–1851
</span></p></div>
<div class="cpabstractcardabstract"><p>Target speech extraction has attracted widespread attention. When microphone arrays are available, the additional spatial information can be helpful in extracting the target speech. We have recently proposed a channel decorrelation (CD) mechanism to extract the inter-channel differential information to enhance the reference channel encoder representation. Although the proposed mechanism has shown promising results for extracting the target speech from mixtures, the extraction performance is still limited by the nature of the original decorrelation theory. In this paper, we propose two methods to broaden the horizon of the original channel decorrelation, by replacing the original softmax-based inter-channel similarity between encoder representations, using an unrolled probability and a normalized cosine-based similarity at the dimensional-level. Moreover, new combination strategies of the CD-based spatial information and target speaker adaptation of parallel encoder outputs are also investigated. Experiments on the reverberant WSJ0 2-mix show that the improved CD can result in more discriminative differential information and the new adaptation strategy is also very effective to improve the target speech extraction.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jinjiang Liu|AUTHOR Jinjiang Liu]], [[Xueliang Zhang|AUTHOR Xueliang Zhang]]
</p><p class="cpabstractcardaffiliationlist">Inner Mongolia University, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1852–1856
</span></p></div>
<div class="cpabstractcardabstract"><p>For dual-channel speech enhancement, it is a promising idea to design an end-to-end model based on the traditional array signal processing guideline and the manifold space of multi-channel signals. We found that the idea above can be effectively implemented by the classical convolutional recurrent neural networks (CRN) architecture. We propose a very compact inplace gated convolutional recurrent neural network (inplace GCRN) for end-to-end multi-channel speech enhancement, which utilizes inplace-convolution for frequency pattern extraction and reconstruction. The inplace characteristics efficiently preserve spatial cues in each frequency bin for channel-wise long short-term memory neural networks (LSTM) tracing the spatial source. In addition, we come up with a new spectrum recovery method by predict amplitude mask, mapping, and phase, which effectively improves the speech quality.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[R.G. Prithvi Raj|AUTHOR R.G. Prithvi Raj]]^^1^^
, [[Rohit Kumar|AUTHOR Rohit Kumar]]^^2^^
, [[M.K. Jayesh|AUTHOR M.K. Jayesh]]^^1^^
, [[Anurenjan Purushothaman|AUTHOR Anurenjan Purushothaman]]^^2^^
, [[Sriram Ganapathy|AUTHOR Sriram Ganapathy]]^^2^^
, [[M.A. Basha Shaik|AUTHOR M.A. Basha Shaik]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Samsung, India; ^^2^^Indian Institute of Science, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1857–1861
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents the details of the SRIB-LEAP submission to the ConferencingSpeech challenge 2021. The challenge involved the task of multi-channel speech enhancement to improve the quality of far field speech from microphone arrays in a video conferencing room. We propose a two stage method involving a beamformer followed by single channel enhancement. For the beamformer, we incorporated self-attention mechanism as inter-channel processing layer in the filter-and-sum network (FaSNet), an end-to-end time-domain beamforming system. The single channel speech enhancement is done in log spectral domain using convolution neural network (CNN)-long short term memory (LSTM) based architecture. We achieved improvements in objective quality metrics — perceptual evaluation of speech quality (PESQ) of 0.5 on the noisy data. On subjective quality evaluation, the proposed approach improved the mean opinion score (MOS) by an absolute measure of 0.9 over the noisy audio.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Cheng Xue|AUTHOR Cheng Xue]]^^1^^
, [[Weilong Huang|AUTHOR Weilong Huang]]^^2^^
, [[Weiguang Chen|AUTHOR Weiguang Chen]]^^2^^
, [[Jinwei Feng|AUTHOR Jinwei Feng]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Alibaba, China; ^^2^^Alibaba, China; ^^3^^Alibaba, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1862–1866
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we propose a real-time multi-channel speech enhancement method for noise reduction and dereverberation in far-field environments. The proposed method consists of two components: differential beamforming and mask estimation network. The differential beamforming is employed to suppress the interference signals from non-target directions such that a relatively clean speech can be obtained. The mask estimation network with an attention model is developed to capture the signal correlation among different channels in the feature extraction stage and enhance the feature representation that needs to be reconstructed into the target speech in the estimation mask stage. In the inference phase, the spectrum after differential beamforming is filtered by the estimated mask to obtain the final output. The spectrum after differential beamforming can provide a higher signal-to-noise ratio (SNR) than the original spectrum, so the estimated mask can more easily filter out the noise. We conducted experiments on the ConferencingSpeech2021 challenge (INTERSPEECH 2021) dataset to evaluate the proposed method. With only 2.9M parameters, the proposed method achieved competitive performance.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hexin Liu|AUTHOR Hexin Liu]]^^1^^
, [[Leibny Paola García Perera|AUTHOR Leibny Paola García Perera]]^^2^^
, [[Xinyi Zhang|AUTHOR Xinyi Zhang]]^^1^^
, [[Justin Dauwels|AUTHOR Justin Dauwels]]^^3^^
, [[Andy W.H. Khong|AUTHOR Andy W.H. Khong]]^^1^^
, [[Sanjeev Khudanpur|AUTHOR Sanjeev Khudanpur]]^^2^^
, [[Suzy J. Styles|AUTHOR Suzy J. Styles]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^NTU, Singapore; ^^2^^Johns Hopkins University, USA; ^^3^^Technische Universiteit Delft, The Netherlands</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1489–1493
</span></p></div>
<div class="cpabstractcardabstract"><p>We propose two end-to-end neural configurations for language diarization on bilingual code-switching speech. The first, a BLSTM-E2E architecture, includes a set of stacked bidirectional LSTMs to compute embeddings and incorporates the deep clustering loss to enforce grouping of languages belonging to the same class. The second, an XSA-E2E architecture, is based on an x-vector model followed by a self-attention encoder. The former encodes frame-level features into segment-level embeddings while the latter considers all those embeddings to generate a sequence of segment-level language labels. We evaluated the proposed methods on the dataset obtained from the shared task B in WSTCSMC 2020 and our handcrafted simulated data from the SEAME dataset. Experimental results show that our proposed XSA-E2E architecture achieved a relative improvement of 12.1% in equal error rate and a 7.4% relative improvement on accuracy compared with the baseline algorithm in the WSTCSMC 2020 dataset. Our proposed XSA-E2E architecture achieved an accuracy of 89.84% with a baseline of 85.60% on the simulated data derived from the SEAME dataset.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Raphaël Duroselle|AUTHOR Raphaël Duroselle]], [[Md. Sahidullah|AUTHOR Md. Sahidullah]], [[Denis Jouvet|AUTHOR Denis Jouvet]], [[Irina Illina|AUTHOR Irina Illina]]
</p><p class="cpabstractcardaffiliationlist">Loria (UMR 7503), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1494–1498
</span></p></div>
<div class="cpabstractcardabstract"><p>Automatic speech recognition is complementary to language recognition. The language recognition systems exploit this complementarity by using frame-level bottleneck features extracted from neural networks trained with a phone recognition task. Recent methods apply frame-level bottleneck features extracted from an end-to-end sequence-to-sequence speech recognition model. In this work, we study an integrated approach of the training of the speech recognition feature extractor and language recognition modules. We show that for both classical phone recognition and end-to-end sequence-to-sequence features, sequential training of the two modules is not the optimal strategy. The feature extractor can be improved by supervision with the language identification loss, either in a fine-tuning step or in a multi-task training framework. Besides, we notice that end-to-end sequence-to-sequence bottleneck features are on par with classical phone recognition bottleneck features without requiring a forced alignment of the signal with target tokens. However, for sequence-to-sequence, the architecture of the model seems to play an important role; the Conformer architectures leads to much better results than the conventional stacked DNNs approach; and can even be trained directly with the LID module in an end-to-end approach.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hui Wang|AUTHOR Hui Wang]]^^1^^
, [[Lin Liu|AUTHOR Lin Liu]]^^2^^
, [[Yan Song|AUTHOR Yan Song]]^^1^^
, [[Lei Fang|AUTHOR Lei Fang]]^^2^^
, [[Ian McLoughlin|AUTHOR Ian McLoughlin]]^^3^^
, [[Li-Rong Dai|AUTHOR Li-Rong Dai]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^USTC, China; ^^2^^iFLYTEK, China; ^^3^^SIT, Singapore</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1499–1503
</span></p></div>
<div class="cpabstractcardabstract"><p>Language identification (LID) research has made tremendous progress in recent years, especially with the introduction of deep learning techniques. However, for real-world applications where the distribution of different language data is highly imbalanced, the performance of existing LID systems is still far from satisfactory. This raises the challenge of //long-tailed LID//. In this paper, we propose an effective weight moving average (WMA) based alternate decoupled learning algorithm, termed WADCL, for long-tailed LID. The system is divided into two components, a frontend feature extractor and a backend classifier. These are then alternately learned in an end-to-end manner using different sampling schemes to alleviate the distribution mismatch between training and test datasets. Furthermore, our WMA method aims to mitigate the side-effects of re-sampling schemes, by fusing the model parameters learned along the trajectory of stochastic gradient descent (SGD) optimization. To validate the effectiveness of the proposed WADCL algorithm, we evaluate and compare several systems over a language dataset constructed to match a long-tailed distribution based on real world application [1]. The experimental results from the long-tailed language dataset demonstrate that the proposed algorithm is able to achieve significant performance gains over existing state-of-the-art x-vector based LID methods.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Keqi Deng|AUTHOR Keqi Deng]], [[Songjun Cao|AUTHOR Songjun Cao]], [[Long Ma|AUTHOR Long Ma]]
</p><p class="cpabstractcardaffiliationlist">Tencent, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1504–1508
</span></p></div>
<div class="cpabstractcardabstract"><p>Recently, self-supervised pre-training has gained success in automatic speech recognition (ASR). However, considering the difference between speech accents in real scenarios, how to identify accents and use accent features to improve ASR is still challenging. In this paper, we employ the self-supervised pre-training method for both accent identification and accented speech recognition tasks. For the former task, a standard deviation constraint loss (SDC-loss) based end-to-end (E2E) architecture is proposed to identify accents under the same language. As for accented speech recognition task, we design an accent-dependent ASR system, which can utilize additional accent input features. Furthermore, we propose a frame-level accent feature, which is extracted based on the proposed accent identification model and can be dynamically adjusted. We pre-train our models using 960 hours unlabeled LibriSpeech dataset and fine-tune them on AESRC2020 speech dataset. The experimental results show that our proposed accent-dependent ASR system is significantly ahead of the AESRC2020 baseline and achieves 6.5% relative word error rate (WER) reduction compared with our accent-independent ASR system.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zhiyun Fan|AUTHOR Zhiyun Fan]], [[Meng Li|AUTHOR Meng Li]], [[Shiyu Zhou|AUTHOR Shiyu Zhou]], [[Bo Xu|AUTHOR Bo Xu]]
</p><p class="cpabstractcardaffiliationlist">CAS, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1509–1513
</span></p></div>
<div class="cpabstractcardabstract"><p>wav2vec 2.0 is a recently proposed self-supervised framework for speech representation learning. It follows a two-stage training process of pre-training and fine-tuning, and performs well in speech recognition tasks especially ultra-low resource cases. In this work, we attempt to extend the self-supervised framework to speaker verification and language identification. First, we use some preliminary experiments to indicate that wav2vec 2.0 can capture the information about the speaker and language. Then we demonstrate the effectiveness of wav2vec 2.0 on the two tasks respectively. For speaker verification, we obtain a competitive result with the Equal Error Rate (EER) of 3.61% on the VoxCeleb1 dataset. For language identification, we obtain an EER of 12.02% on the 1 second condition and an EER of 3.47% on the full-length condition of the AP17-OLR dataset. Finally, we utilize one model to achieve the unified modeling by the multi-task learning for the two tasks.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[G. Ramesh|AUTHOR G. Ramesh]], [[C. Shiva Kumar|AUTHOR C. Shiva Kumar]], [[K. Sri Rama Murty|AUTHOR K. Sri Rama Murty]]
</p><p class="cpabstractcardaffiliationlist">IIT Hyderabad, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1514–1518
</span></p></div>
<div class="cpabstractcardabstract"><p>Phonotactic constraints characterize the sequence of permissible phoneme structures in a language and hence form an important cue for language identification (LID) task. As phonotactic constraints span across multiple phonemes, the short-term spectral analysis (20–30 ms) alone is not sufficient to capture them. The speech signal has to be analyzed over longer contexts (100s of milliseconds) in order to extract features representing the phonotactic constraints. The supervised senone classifiers, aimed at modeling triphone context, have been used for extracting language-specific features for the LID task. However, it is difficult to get large amounts of manually labeled data to train the supervised models. In this work, we explore a self-supervised approach to extract long-term contextual features for the LID task. We have used wav2vec architecture to extract contextualized representations from multiple frames of the speech signal. The contextualized representations extracted from the pre-trained wav2vec model are used for the LID task. The performance of the proposed features is evaluated on a dataset containing 7 Indian languages. The proposed self-supervised embeddings achieved 23% absolute improvement over the acoustic features and 3% absolute improvement over their supervised counterparts.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jicheng Zhang|AUTHOR Jicheng Zhang]]^^1^^
, [[Yizhou Peng|AUTHOR Yizhou Peng]]^^1^^
, [[Van Tung Pham|AUTHOR Van Tung Pham]]^^2^^
, [[Haihua Xu|AUTHOR Haihua Xu]]^^2^^
, [[Hao Huang|AUTHOR Hao Huang]]^^1^^
, [[Eng Siong Chng|AUTHOR Eng Siong Chng]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Xinjiang University, China; ^^2^^NTU, Singapore</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1519–1523
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we propose a single multi-task learning framework to perform End-to-End (E2E) speech recognition (ASR) and accent recognition (AR) simultaneously. The proposed framework is not only more compact but can also yield comparable or even better results than standalone systems. Specifically, we found that the overall performance is predominantly determined by the ASR task, and the E2E-based ASR pretraining is essential to achieve improved performance, particularly for the AR task. Additionally, we conduct several analyses of the proposed method. First, though the objective loss for the AR task is much smaller compared with its counterpart of ASR task, a smaller weighting factor with the AR task in the joint objective function is necessary to yield better results for each task. Second, we found that sharing only a few layers of the encoder yields better AR results than sharing the overall encoder. Experimentally, the proposed method produces WER results close to the best standalone E2E ASR ones, while it achieves 7.7% and 4.2% relative improvement over standalone and single-task-based joint recognition methods on test set for accent recognition respectively.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Moakala Tzudir|AUTHOR Moakala Tzudir]]^^1^^
, [[Shikha Baghel|AUTHOR Shikha Baghel]]^^1^^
, [[Priyankoo Sarmah|AUTHOR Priyankoo Sarmah]]^^1^^
, [[S.R. Mahadeva Prasanna|AUTHOR S.R. Mahadeva Prasanna]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^IIT Guwahati, India; ^^2^^IIT Dharwad, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1524–1528
</span></p></div>
<div class="cpabstractcardabstract"><p>Ao is an under-resourced Tibeto-Burman tonal language spoken in Nagaland, India. There are three distinct dialects of the language, namely, Chungli, Mongsen and Changki. The objective of dialect identification is to identify one dialect from the other within the same language family. The goal of this study is to ascertain the potential of excitation source features for automatic dialect identification in Ao. In this direction, Integrated Linear Prediction Residual (ILPR), an approximate representation of source signal, is explored. The log Mel spectrogram of ILPR (//S//,,Ext,,) signal is used to exploit the time-frequency characteristics of the excitation source. This work proposes attention based CNN-BiGRU architecture for automatic dialect identification tasks. Additionally, log Mel spectrogram (//S//,,VT,,), extracted from the pre-emphasized speech signal, is used as a baseline method. The (//S//,,VT,,) contains the vocal-tract characteristics of the speech signal. A significant performance improvement of (nearly) 6% accuracy is observed when the excitation source feature (//S//,,Ext,,) is combined with the vocal tract representation (//S//,,VT,,). To analyse the effect of segment duration, dialect identification performance is reported for three different durations, viz., 1 sec, 3 sec and 6 sec. The effect of gender in dialect identification task for Ao is also studied in this work.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shreya Khare|AUTHOR Shreya Khare]]^^1^^
, [[Ashish Mittal|AUTHOR Ashish Mittal]]^^1^^
, [[Anuj Diwan|AUTHOR Anuj Diwan]]^^2^^
, [[Sunita Sarawagi|AUTHOR Sunita Sarawagi]]^^2^^
, [[Preethi Jyothi|AUTHOR Preethi Jyothi]]^^2^^
, [[Samarth Bharadwaj|AUTHOR Samarth Bharadwaj]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^IBM, India; ^^2^^IIT Bombay, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1529–1533
</span></p></div>
<div class="cpabstractcardabstract"><p>Cross-lingual transfer of knowledge from high-resource languages to low-resource languages is an important research problem in automatic speech recognition (ASR). We propose a new strategy of transfer learning by pretraining using large amounts of speech in the high-resource language but with its text transliterated to the target low-resource language. This simple mapping of scripts explicitly encourages increased sharing between the output spaces of both languages and is surprisingly effective even when the high-resource and low-resource languages are from unrelated language families. The utility of our proposed technique is more evident in very low-resource scenarios, where better initializations are more beneficial. We evaluate our technique on a transformer ASR architecture and the state-of-the-art wav2vec2.0 ASR architecture, with English as the high-resource language and six languages as low-resource targets. With access to 1 hour of target speech, we obtain relative WER reductions of up to 8.2% compared to existing transfer-learning approaches.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ewan Dunbar|AUTHOR Ewan Dunbar]]^^1^^
, [[Mathieu Bernard|AUTHOR Mathieu Bernard]]^^2^^
, [[Nicolas Hamilakis|AUTHOR Nicolas Hamilakis]]^^2^^
, [[Tu Anh Nguyen|AUTHOR Tu Anh Nguyen]]^^2^^
, [[Maureen de Seyssel|AUTHOR Maureen de Seyssel]]^^2^^
, [[Patricia Rozé|AUTHOR Patricia Rozé]]^^2^^
, [[Morgane Rivière|AUTHOR Morgane Rivière]]^^3^^
, [[Eugene Kharitonov|AUTHOR Eugene Kharitonov]]^^3^^
, [[Emmanuel Dupoux|AUTHOR Emmanuel Dupoux]]^^4^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Toronto, Canada; ^^2^^LSCP (UMR 8554), France; ^^3^^Facebook, France; ^^4^^LSCP (UMR 8554), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1574–1578
</span></p></div>
<div class="cpabstractcardabstract"><p>We present the Zero Resource Speech Challenge 2021, which asks participants to learn a language model directly from audio, without any text or labels. The challenge is based on the Libri-light dataset, which provides up to 60k hours of audio from English audio books without any associated text. We provide a pipeline baseline system consisting on an encoder based on contrastive predictive coding (CPC), a quantizer (k-means) and a standard language model (BERT or LSTM). The metrics evaluate the learned representations at the acoustic (ABX discrimination), lexical (spot-the-word), syntactic (acceptability judgment) and semantic levels (similarity judgment). We present an overview of the eight submitted systems from four groups and discuss the main results.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Gautham Krishna Gudur|AUTHOR Gautham Krishna Gudur]], [[Satheesh Kumar Perepu|AUTHOR Satheesh Kumar Perepu]]
</p><p class="cpabstractcardaffiliationlist">Ericsson, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1579–1583
</span></p></div>
<div class="cpabstractcardabstract"><p>Federated learning is an effective way of extracting insights from different user devices while preserving the privacy of users. However, new classes with completely unseen data distributions can stream across any device in a federated learning setting, whose data cannot be accessed by the global server or other users. To this end, we propose a unified zero-shot framework to handle these aforementioned challenges during federated learning. We simulate two scenarios here — 1) when the new class labels are not reported by the user, the traditional FL setting is used; 2) when new class labels are reported by the user, we synthesize //Anonymized Data Impressions// by calculating class similarity matrices corresponding to each device’s new classes followed by unsupervised clustering to distinguish between new classes across different users. Moreover, our proposed framework can also handle statistical heterogeneities in both labels and models across the participating users. We empirically evaluate our framework on-device across different communication rounds (FL iterations) with new classes in both local and global updates, along with heterogeneous labels and models, on two widely used audio classification applications — keyword spotting and urban sound classification, and observe an average deterministic accuracy increase of ~4.041% and ~4.258% respectively.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Andrew Rouditchenko|AUTHOR Andrew Rouditchenko]]^^1^^
, [[Angie Boggust|AUTHOR Angie Boggust]]^^1^^
, [[David Harwath|AUTHOR David Harwath]]^^2^^
, [[Brian Chen|AUTHOR Brian Chen]]^^3^^
, [[Dhiraj Joshi|AUTHOR Dhiraj Joshi]]^^4^^
, [[Samuel Thomas|AUTHOR Samuel Thomas]]^^4^^
, [[Kartik Audhkhasi|AUTHOR Kartik Audhkhasi]]^^5^^
, [[Hilde Kuehne|AUTHOR Hilde Kuehne]]^^4^^
, [[Rameswar Panda|AUTHOR Rameswar Panda]]^^4^^
, [[Rogerio Feris|AUTHOR Rogerio Feris]]^^4^^
, [[Brian Kingsbury|AUTHOR Brian Kingsbury]]^^4^^
, [[Michael Picheny|AUTHOR Michael Picheny]]^^6^^
, [[Antonio Torralba|AUTHOR Antonio Torralba]]^^1^^
, [[James Glass|AUTHOR James Glass]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^MIT, USA; ^^2^^University of Texas at Austin, USA; ^^3^^Columbia University, USA; ^^4^^IBM, USA; ^^5^^Google, USA; ^^6^^NYU, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1584–1588
<a href="./IS2021/MEDIA/1312" class="externallinkbutton" target="_blank">{{$:/causal/ZIP Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>Current methods for learning visually grounded language from videos often rely on text annotation, such as human generated captions or machine generated automatic speech recognition (ASR) transcripts. In this work, we introduce the Audio-Video Language Network (AVLnet), a self-supervised network that learns a shared audio-visual embedding space directly from raw video inputs. To circumvent the need for text annotation, we learn audio-visual representations from randomly segmented video clips and their raw audio waveforms. We train AVLnet on HowTo100M, a large corpus of publicly available instructional videos, and evaluate on image retrieval and video retrieval tasks, achieving state-of-the-art performance. Finally, we perform analysis of AVLnet’s learned representations, showing our model utilizes speech and natural sounds to learn audio-visual concepts.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Siyuan Feng|AUTHOR Siyuan Feng]]^^1^^
, [[Piotr Żelasko|AUTHOR Piotr Żelasko]]^^2^^
, [[Laureano Moro-Velázquez|AUTHOR Laureano Moro-Velázquez]]^^2^^
, [[Odette Scharenborg|AUTHOR Odette Scharenborg]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Technische Universiteit Delft, The Netherlands; ^^2^^Johns Hopkins University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1534–1538
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper tackles automatically discovering phone-like acoustic units (AUD) from unlabeled speech data. Past studies usually proposed single-step approaches. We propose a two-stage approach: the first stage learns a subword-discriminative feature representation, and the second stage applies clustering to the learned representation and obtains phone-like clusters as the discovered acoustic units. In the first stage, a recently proposed method in the task of unsupervised subword modeling is improved by replacing a monolingual out-of-domain (OOD) ASR system with a multilingual one to create a subword-discriminative representation that is more language-independent. In the second stage, segment-level k-means is adopted, and two methods to represent the variable-length speech segments as fixed-dimension feature vectors are compared. Experiments on a very low-resource Mboshi language corpus show that our approach outperforms state-of-the-art AUD in both normalized mutual information (NMI) and F-score. The multilingual ASR improved upon the monolingual ASR in providing OOD phone labels and in estimating the phone boundaries. A comparison of our systems with and without knowing the ground-truth phone boundaries showed a 16% NMI performance gap, suggesting that the current approach can significantly benefit from improved phone boundary estimation.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Herman Kamper|AUTHOR Herman Kamper]], [[Benjamin van Niekerk|AUTHOR Benjamin van Niekerk]]
</p><p class="cpabstractcardaffiliationlist">Stellenbosch University, South Africa</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1539–1543
</span></p></div>
<div class="cpabstractcardabstract"><p>We investigate segmenting and clustering speech into low-bitrate phone-like sequences without supervision. We specifically constrain pretrained self-supervised vector-quantized (VQ) neural networks so that blocks of contiguous feature vectors are assigned to the same code, thereby giving a variable-rate segmentation of the speech into discrete units. Two segmentation methods are considered. In the first, features are greedily merged until a prespecified number of segments are reached. The second uses dynamic programming to optimize a squared error with a penalty term to encourage fewer but longer segments. We show that these VQ segmentation methods can be used without alteration across a wide range of tasks: unsupervised phone segmentation, ABX phone discrimination, same-different word discrimination, and as inputs to a symbolic word segmentation algorithm. The penalized dynamic programming method generally performs best. While performance on individual tasks is only comparable to the state-of-the-art in some cases, in all tasks a reasonable competing approach is outperformed at a substantially lower bitrate.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Dongwei Jiang|AUTHOR Dongwei Jiang]]^^1^^
, [[Wubo Li|AUTHOR Wubo Li]]^^2^^
, [[Miao Cao|AUTHOR Miao Cao]]^^2^^
, [[Wei Zou|AUTHOR Wei Zou]]^^2^^
, [[Xiangang Li|AUTHOR Xiangang Li]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^YuanFuDao, China; ^^2^^DiDi Chuxing, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1544–1548
</span></p></div>
<div class="cpabstractcardabstract"><p>Self-supervised visual pretraining has shown significant progress recently. Among those methods, SimCLR greatly advanced the state of the art in self-supervised and semi-supervised learning on ImageNet. The input feature representations for speech and visual tasks are both continuous, so it is natural to consider applying similar objective on speech representation learning. In this paper, we propose Speech SimCLR, a new self-supervised objective for speech representation learning. During training, Speech SimCLR applies augmentation on raw speech and its spectrogram. Its objective is the combination of contrastive loss that maximizes agreement between differently augmented samples in the latent space and reconstruction loss of input representation. The proposed method achieved competitive results on speech emotion recognition and speech recognition.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Christiaan Jacobs|AUTHOR Christiaan Jacobs]], [[Herman Kamper|AUTHOR Herman Kamper]]
</p><p class="cpabstractcardaffiliationlist">Stellenbosch University, South Africa</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1549–1553
</span></p></div>
<div class="cpabstractcardabstract"><p>Acoustic word embedding models map variable duration speech segments to fixed dimensional vectors, enabling efficient speech search and discovery. Previous work explored how embeddings can be obtained in zero-resource settings where no labelled data is available in the target language. The current best approach uses transfer learning: a single supervised multilingual model is trained using labelled data from multiple well-resourced languages and then applied to a target zero-resource language (without fine-tuning). However, it is still unclear how the specific choice of training languages affect downstream performance. Concretely, here we ask whether it is beneficial to use training languages related to the target. Using data from eleven languages spoken in Southern Africa, we experiment with adding data from different language families while controlling for the amount of data per language. In word discrimination and query-by-example search evaluations, we show that training on languages from the same family gives large improvements. Through finer-grained analysis, we show that training on even just a single related language gives the largest gain. We also find that adding data from unrelated languages generally doesn’t hurt performance.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Benjamin van Niekerk|AUTHOR Benjamin van Niekerk]], [[Leanne Nortje|AUTHOR Leanne Nortje]], [[Matthew Baas|AUTHOR Matthew Baas]], [[Herman Kamper|AUTHOR Herman Kamper]]
</p><p class="cpabstractcardaffiliationlist">Stellenbosch University, South Africa</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1554–1558
</span></p></div>
<div class="cpabstractcardabstract"><p>Contrastive predictive coding (CPC) aims to learn representations of speech by distinguishing future observations from a set of negative examples. Previous work has shown that linear classifiers trained on CPC features can accurately predict speaker and phone labels. However, it is unclear how the features actually capture speaker and phonetic information, and whether it is possible to normalize out the irrelevant details (depending on the downstream task). In this paper, we first show that the per-utterance mean of CPC features captures speaker information to a large extent. Concretely, we find that comparing means performs well on a speaker verification task. Next, probing experiments show that standardizing the features effectively removes speaker information. Based on this observation, we propose a speaker normalization step to improve acoustic unit discovery using K-means clustering of CPC features. Finally, we show that a language model trained on the resulting units achieves some of the best results in the ZeroSpeech2021 Challenge.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shun Takahashi|AUTHOR Shun Takahashi]], [[Sakriani Sakti|AUTHOR Sakriani Sakti]], [[Satoshi Nakamura|AUTHOR Satoshi Nakamura]]
</p><p class="cpabstractcardaffiliationlist">NAIST, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1559–1563
</span></p></div>
<div class="cpabstractcardabstract"><p>Discovering symbolic units from unannotated speech data is fundamental in zero resource speech technology. Previous studies focused on learning fixed-length frame units based on acoustic features. Although they achieve high quality, they also suffer from a high bit-rate due to time-frame encoding. In this work, to discover variable-length, low bit-rate speech representation from a limited amount of unannotated speech data, we propose an approach based on graph neural networks (GNNs), and we study the temporal closeness of salient speech features. Our approach is built upon vector-quantized neural networks (VQNNs), which learn discrete encoding by contrastive predictive coding (CPC). We exploit the predetermined finite set of embeddings (a codebook) used by VQNNs to encode input data. We consider a codebook a set of nodes in a directed graph, where each arc represents the transition from one feature to another. Subsequently, we extract and encode the topological features of nodes in the graph to cluster them using graph convolution. By this process, we can obtain coarsened speech representation. We evaluated our model on the English data set of the ZeroSpeech 2020 challenge on Track 2019. Our model successfully drops the bit rate while achieving high unit quality.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Takashi Maekaku|AUTHOR Takashi Maekaku]]^^1^^
, [[Xuankai Chang|AUTHOR Xuankai Chang]]^^2^^
, [[Yuya Fujita|AUTHOR Yuya Fujita]]^^1^^
, [[Li-Wei Chen|AUTHOR Li-Wei Chen]]^^3^^
, [[Shinji Watanabe|AUTHOR Shinji Watanabe]]^^3^^
, [[Alexander Rudnicky|AUTHOR Alexander Rudnicky]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Yahoo, Japan; ^^2^^Carnegie Mellon University, USA; ^^3^^Carnegie Mellon University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1564–1568
</span></p></div>
<div class="cpabstractcardabstract"><p>We present a system for the Zero Resource Speech Challenge 2021, which combines a Contrastive Predictive Coding (CPC) with deep cluster. In deep cluster, we first prepare pseudo-labels obtained by clustering the outputs of a CPC network with k-means. Then, we train an additional autoregressive model to classify the previously obtained pseudo-labels in a supervised manner. Phoneme discriminative representation is achieved by executing the second-round clustering with the outputs of the final layer of the autoregressive model. We show that replacing a Transformer layer with a Conformer layer leads to a further gain in a lexical metric. Experimental results show that a relative improvement of 35% in a phonetic metric, 1.5% in the lexical metric, and 2.3% in a syntactic metric are achieved compared to a baseline method of CPC-small which is trained on LibriSpeech 460h data. We achieve top results in this challenge with the syntactic metric.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xia Cui|AUTHOR Xia Cui]]^^1^^
, [[Amila Gamage|AUTHOR Amila Gamage]]^^2^^
, [[Terry Hanley|AUTHOR Terry Hanley]]^^1^^
, [[Tingting Mu|AUTHOR Tingting Mu]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Manchester, UK; ^^2^^VoiceIQ, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1569–1573
</span></p></div>
<div class="cpabstractcardabstract"><p>In order to protect vulnerable people in telemarketing, organisations have to investigate the speech recordings to identify them first. Typically, the investigation is manually conducted. As such, the procedure is costly and time-consuming. With an automatic vulnerability detection system, more vulnerable people can be identified and protected. A standard telephone conversation lasts around 5 minutes, the detection system is expected to be able to identify such a potential vulnerable speaker from speech segments. Due to the complexity of the vulnerability definition and the unavailable annotated vulnerability examples, this paper attempts to address the detection problem as three classification tasks: age classification, accent classification and patient/non-patient classification utilising publicly available datasets. In the proposed system, we trained three sub models using acoustic and textual features for each sub task. Each trained model was evaluated on multiple datasets and achieved competitive results compared to a strong baseline (i.e. in-dataset accuracy).</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Gyeong-Hoon Lee|AUTHOR Gyeong-Hoon Lee]], [[Tae-Woo Kim|AUTHOR Tae-Woo Kim]], [[Hanbin Bae|AUTHOR Hanbin Bae]], [[Min-Ji Lee|AUTHOR Min-Ji Lee]], [[Young-Ik Kim|AUTHOR Young-Ik Kim]], [[Hoon-Young Cho|AUTHOR Hoon-Young Cho]]
</p><p class="cpabstractcardaffiliationlist">NCSOFT, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1589–1593
</span></p></div>
<div class="cpabstractcardabstract"><p>Recently, end-to-end Korean singing voice systems have been designed to generate realistic singing voices. However, these systems still suffer from a lack of robustness in terms of pronunciation accuracy. In this paper, we propose N-Singer, a non-autoregressive Korean singing voice system, to synthesize accurate and pronounced Korean singing voices in parallel. N-Singer consists of a Transformer-based mel-generator, a convolutional network-based postnet, and voicing-aware discriminators. It can contribute in the following ways. First, for accurate pronunciation, N-Singer separately models linguistic and pitch information without other acoustic features. Second, to achieve improved mel-spectrograms, N-Singer uses a combination of Transformer-based modules and convolutional network-based modules. Third, in adversarial training, voicing-aware conditional discriminators are used to capture the harmonic features of voiced segments and noise components of unvoiced segments. The experimental results prove that N-Singer can synthesize a natural singing voice in parallel with a more accurate pronunciation than the baseline model.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Georgia Maniati|AUTHOR Georgia Maniati]]^^1^^
, [[Nikolaos Ellinas|AUTHOR Nikolaos Ellinas]]^^1^^
, [[Konstantinos Markopoulos|AUTHOR Konstantinos Markopoulos]]^^1^^
, [[Georgios Vamvoukakis|AUTHOR Georgios Vamvoukakis]]^^1^^
, [[June Sig Sung|AUTHOR June Sig Sung]]^^2^^
, [[Hyoungmin Park|AUTHOR Hyoungmin Park]]^^2^^
, [[Aimilios Chalamandaris|AUTHOR Aimilios Chalamandaris]]^^1^^
, [[Pirros Tsiakoulis|AUTHOR Pirros Tsiakoulis]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Samsung, Greece; ^^2^^Samsung, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1594–1598
</span></p></div>
<div class="cpabstractcardabstract"><p>The idea of using phonological features instead of phonemes as input to sequence-to-sequence TTS has been recently proposed for zero-shot multilingual speech synthesis. This approach is useful for code-switching, as it facilitates the seamless uttering of foreign text embedded in a stream of native text. In our work, we train a language-agnostic multispeaker model conditioned on a set of phonologically derived features common across different languages, with the goal of achieving cross-lingual speaker adaptation. We first experiment with the effect of language phonological similarity on cross-lingual TTS of several source-target language combinations. Subsequently, we fine-tune the model with very limited data of a new speaker’s voice in either a seen or an unseen language, and achieve synthetic speech of equal quality, while preserving the target speaker’s identity. With as few as 32 and 8 utterances of target speaker data, we obtain high speaker similarity scores and naturalness comparable to the corresponding literature. In the extreme case of only 2 available adaptation utterances, we find that our model behaves as a few-shot learner, as the performance is similar in both the seen and unseen adaptation language scenarios.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Haoyue Zhan|AUTHOR Haoyue Zhan]], [[Haitong Zhang|AUTHOR Haitong Zhang]], [[Wenjie Ou|AUTHOR Wenjie Ou]], [[Yue Lin|AUTHOR Yue Lin]]
</p><p class="cpabstractcardaffiliationlist">NetEase, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1599–1603
</span></p></div>
<div class="cpabstractcardabstract"><p>Cross-lingual text-to-speech (TTS) synthesis on monolingual corpora is still a challenging task, especially when many kinds of languages are involved. In this paper, we improve the cross-lingual TTS model on monolingual corpora with pitch contour information. We propose a method to obtain pitch contour sequences for different languages without manual annotation, and extend the Tacotron-based TTS model with the proposed Pitch Contour Extraction (PCE) module. Our experimental results show that the proposed approach can effectively improve the naturalness and consistency of synthesized mixed-lingual utterances.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zhenchuan Yang|AUTHOR Zhenchuan Yang]]^^1^^
, [[Weibin Zhang|AUTHOR Weibin Zhang]]^^2^^
, [[Yufei Liu|AUTHOR Yufei Liu]]^^3^^
, [[Xiaofen Xing|AUTHOR Xiaofen Xing]]^^3^^
</p><p class="cpabstractcardaffiliationlist">^^1^^SCUT, China; ^^2^^VoiceAI Technologies, China; ^^3^^SCUT, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1604–1608
</span></p></div>
<div class="cpabstractcardabstract"><p>Intra-lingual voice conversion has achieved great progress recently in terms of naturalness and similarity. However, in cross-lingual voice conversion, there is still an urgent need to improve the quality of the converted speech, especially with nonparallel training data. Previous works usually use Phonetic Posteriorgrams (PPGs) as the linguistic representations. In the case of cross-lingual voice conversion, the linguistic information is therefore represented as PPGs. It is well-known that PPGs may suffer from word dropping and mispronunciation, especially when the input speech is noisy. In addition, systems using PPGs can only convert the input into a known target language that is seen during training. This paper proposes an any-to-many voice conversion system based on disentangled universal linguistic representations (ULRs), which are extracted from a mix-lingual phoneme recognition system. Two methods are proposed to remove speaker information from ULRs. Experimental results show that the proposed method can effectively improve the converted speech objectively and subjectively. The system can also convert speech utterances naturally even if the language is not seen during training.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zhengchen Liu|AUTHOR Zhengchen Liu]]^^1^^
, [[Chenfeng Miao|AUTHOR Chenfeng Miao]]^^1^^
, [[Qingying Zhu|AUTHOR Qingying Zhu]]^^1^^
, [[Minchuan Chen|AUTHOR Minchuan Chen]]^^1^^
, [[Jun Ma|AUTHOR Jun Ma]]^^1^^
, [[Shaojun Wang|AUTHOR Shaojun Wang]]^^1^^
, [[Jing Xiao|AUTHOR Jing Xiao]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Ping An Technology, China; ^^2^^Ping An Technology, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1609–1613
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, we present EfficientSing, a Chinese singing voice synthesis (SVS) system based on a non-autoregressive duration-free acoustic model and HiFi-GAN neural vocoder. Different from many existing SVS methods, no auxiliary duration prediction module is needed in this work, since a newly proposed monotonic alignment modeling mechanism is adopted. Moreover, we follow the non-autoregressive architecture of EfficientTTS with some singing-specific adaption, making training and inference fully parallel and efficient. HiFi-GAN vocoder is adopted to improve the voice quality of synthesized songs and inference efficiency. Both objective and subjective experimental results show that the proposed system can produce quite natural and high-fidelity songs and outperform the Tacotron-based baseline in terms of pronunciation, pitch and rhythm.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Detai Xin|AUTHOR Detai Xin]], [[Yuki Saito|AUTHOR Yuki Saito]], [[Shinnosuke Takamichi|AUTHOR Shinnosuke Takamichi]], [[Tomoki Koriyama|AUTHOR Tomoki Koriyama]], [[Hiroshi Saruwatari|AUTHOR Hiroshi Saruwatari]]
</p><p class="cpabstractcardaffiliationlist">University of Tokyo, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1614–1618
</span></p></div>
<div class="cpabstractcardabstract"><p>We present a cross-lingual speaker adaptation method based on domain adaptation and a speaker consistency loss for text-to-speech (TTS) synthesis. Existing monolingual speaker adaptation methods based on direct fine-tuning are not applicable for cross-lingual data. The proposed method first trains a language-independent speaker encoder by speaker verification using domain adaption on multilingual data, including the source and the target languages. Then the proposed method trains a monolingual multi-speaker TTS model on the source language’s data using the speaker embeddings generated by the speaker encoder. To adapt the TTS model of the source language to new speakers the proposed method uses a speaker consistency loss to maximize the cosine similarity between speaker embeddings generated from the natural speech and the same speaker’s synthesized speech. This makes fine-tuning the TTS model of source language on speech data of target language become possible. We conduct experiments on multi-speaker English and Japanese datasets with 207 speakers in total. Results of comprehensive experiments demonstrate that the proposed method can significantly improve speech naturalness compared to the baseline method.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Zengqiang Shang|AUTHOR Zengqiang Shang]]^^1^^
, [[Zhihua Huang|AUTHOR Zhihua Huang]]^^2^^
, [[Haozhe Zhang|AUTHOR Haozhe Zhang]]^^1^^
, [[Pengyuan Zhang|AUTHOR Pengyuan Zhang]]^^1^^
, [[Yonghong Yan|AUTHOR Yonghong Yan]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^CAS, China; ^^2^^UCAS, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1619–1623
</span></p></div>
<div class="cpabstractcardabstract"><p>Recently multilingual TTS systems using only monolingual datasets have obtained significant improvement. However, the quality of cross-language speech synthesis is not comparable to the speaker’s own language and often comes with a heavy foreign accent. This paper proposed a multi-speaker multi-style multi-language speech synthesis system (M3), which improves the speech quality by introducing a fine-grained style encoder and overcomes the non-authentic accent problem through cross-speaker style transfer. To avoid leaking timbre information into style encoder, we utilized a speaker conditional variational encoder and conducted adversarial speaker training using the gradient reversal layer. Then, we built a Mixture Density Network (MDN) for mapping text to extracted style vectors for each speaker. At the inference stage, cross-language style transfer could be achieved by assigning any speaker’s style type in the target language. Our system uses existing speaker style and genuinely avoids foreign accents. In the MOS-speech-naturalness, the proposed method generally achieves 4.0 and significantly outperform the baseline system.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ege Kesim|AUTHOR Ege Kesim]], [[Engin Erzin|AUTHOR Engin Erzin]]
</p><p class="cpabstractcardaffiliationlist">Koç University, Turkey</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1624–1628
</span></p></div>
<div class="cpabstractcardabstract"><p>Talking head generation is an active research problem. It has been widely studied as a direct speech-to-video or two stage speech-to-landmarks-to-video mapping problem. In this study, our main motivation is to assess individual and joint contributions of the speech and facial landmarks to the talking head generation quality through a state-of-the-art generative adversarial network (GAN) architecture. Incorporating frame and sequence discriminators and a feature matching loss, we investigate performances of speech only, landmark only and joint speech and landmark driven talking head generation on the CREMA-D dataset. Objective evaluations using the peak signal-to-noise ratio (PSNR), structural similarity index (SSIM) and landmark distance (LMD) indicate that while landmarks bring PSNR and SSIM improvements to the speech driven system, speech brings LMD improvement to the landmark driven system. Furthermore, feature matching is observed to improve the speech driven talking head generation models significantly.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Shijing Si|AUTHOR Shijing Si]], [[Jianzong Wang|AUTHOR Jianzong Wang]], [[Xiaoyang Qu|AUTHOR Xiaoyang Qu]], [[Ning Cheng|AUTHOR Ning Cheng]], [[Wenqi Wei|AUTHOR Wenqi Wei]], [[Xinghua Zhu|AUTHOR Xinghua Zhu]], [[Jing Xiao|AUTHOR Jing Xiao]]
</p><p class="cpabstractcardaffiliationlist">Ping An Technology, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1629–1633
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper investigates a novel task of talking face video generation solely from speeches. The speech-to-video generation technique can spark interesting applications in entertainment, customer service, and human-computer-interaction industries. Indeed, the timbre, accent and speed in speeches could contain rich information relevant to speakers’ appearance. The challenge mainly lies in disentangling the distinct visual attributes from audio signals. In this article, we propose a light-weight, cross-modal distillation method to extract disentangled emotional and identity information from unlabelled video inputs. The extracted features are then integrated by a generative adversarial network into talking face video clips. With carefully crafted discriminators, the proposed framework achieves realistic generation results. Experiments with observed individuals demonstrated that the proposed framework captures the emotional expressions solely from speeches, and produces spontaneous facial motion in the video output. Compared to the baseline method where speeches are combined with a static image of the speaker, the results of the proposed framework is almost indistinguishable. User studies also show that the proposed method outperforms the existing algorithms in terms of emotion expression in the generated videos.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Junhyeok Lee|AUTHOR Junhyeok Lee]], [[Seungu Han|AUTHOR Seungu Han]]
</p><p class="cpabstractcardaffiliationlist">MINDs Lab, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1634–1638
</span></p></div>
<div class="cpabstractcardabstract"><p>In this work, we introduce //NU-Wave//, the first neural audio upsampling model to produce waveforms of sampling rate 48kHz from coarse 16kHz or 24kHz inputs, while prior works could generate only up to 16kHz. NU-Wave is the first diffusion probabilistic model for audio super-resolution which is engineered based on neural vocoders. NU-Wave generates high-quality audio that achieves high performance in terms of signal-to-noise ratio (SNR), log-spectral distance (LSD), and accuracy of the ABX test. In all cases, NU-Wave outperforms the baseline models despite the substantially smaller model capacity (3.0M parameters) than baselines (5.4–21%). The audio samples of our model are publicly available, and the code will be made available soon.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Adam Gabryś|AUTHOR Adam Gabryś]]^^1^^
, [[Yunlong Jiao|AUTHOR Yunlong Jiao]]^^2^^
, [[Viacheslav Klimkov|AUTHOR Viacheslav Klimkov]]^^3^^
, [[Daniel Korzekwa|AUTHOR Daniel Korzekwa]]^^1^^
, [[Roberto Barra-Chicote|AUTHOR Roberto Barra-Chicote]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Amazon, Poland; ^^2^^Amazon, UK; ^^3^^Amazon, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1679–1683
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper proposes a general enhancement to the Normalizing Flows (NF) used in neural vocoding. As a case study, we improve expressive speech vocoding with a revamped Parallel Wavenet (PW). Specifically, we propose to extend the affine transformation of PW to the more expressive invertible non-affine function. The greater expressiveness of the improved PW leads to better-perceived signal quality and naturalness in the waveform reconstruction and text-to-speech (TTS) tasks. We evaluate the model across different speaking styles on a multi-speaker, multi-lingual dataset. In the waveform reconstruction task, the proposed model closes the naturalness and signal quality gap from the original PW to recordings by 10%, and from other state-of-the-art neural vocoding systems by more than 60%. We also demonstrate improvements in objective metrics on the evaluation test set with L2 Spectral Distance and Cross-Entropy reduced by 3% and 6‰ comparing to the affine PW. Furthermore, we extend the probability density distillation procedure proposed by the original PW paper, so that it works with any non-affine invertible and differentiable function.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Gauri P. Prajapati|AUTHOR Gauri P. Prajapati]], [[Dipesh K. Singh|AUTHOR Dipesh K. Singh]], [[Preet P. Amin|AUTHOR Preet P. Amin]], [[Hemant A. Patil|AUTHOR Hemant A. Patil]]
</p><p class="cpabstractcardaffiliationlist">DA-IICT, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1684–1688
</span></p></div>
<div class="cpabstractcardabstract"><p>With the rise in usage of voice assistants and spoken language interfaces, important concerns regarding voice data privacy have been prompted. In an attempt to reduce the threat of attacks on voice data, in this paper, we propose a speaker anonymization system based on CycleGAN. This method modifies the speaker’s gender and accent information from the original speech signal. The proposed method gives a more natural-sounding anonymized voice in addition to a de-identified speaker. We have chosen baseline-1 of The Voice Privacy Challenge-2020 as our baseline system. Training of CycleGAN, ASR, and ASV experiments are performed on the subset of Librispeech corpus. In this paper, the double anonymization technique is also explored in which the CycleGAN-based anonymization technique is adopted on top of the baseline system. Experimental results show that combining the proposed method with the x-vector and neural source-filter (NSF) model-based method (baseline system) gives up to 5.61% relative improvement in EER of original-anonymized, enroll-trial pairs. However, it gives up to 19.30% relative improvement in EER for anonymized-anonymized enroll-trial pairs. We observed that along with the good speaker de-identification, the anonymized utterances have adequate speech intelligibility and naturalness.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ju Lin|AUTHOR Ju Lin]]^^1^^
, [[Yun Wang|AUTHOR Yun Wang]]^^2^^
, [[Kaustubh Kalgaonkar|AUTHOR Kaustubh Kalgaonkar]]^^2^^
, [[Gil Keren|AUTHOR Gil Keren]]^^2^^
, [[Didi Zhang|AUTHOR Didi Zhang]]^^2^^
, [[Christian Fuegen|AUTHOR Christian Fuegen]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Clemson University, USA; ^^2^^Facebook, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1689–1693
</span></p></div>
<div class="cpabstractcardabstract"><p>Algorithms for speech bandwidth extension (BWE) may work in either the time domain or the frequency domain. Time-domain methods often do not sufficiently recover the high-frequency content of speech signals; frequency-domain methods are better at recovering the spectral envelope, but have difficulty reconstructing the details of the waveform. In this paper, we propose a two-stage approach for BWE, which enjoys the advantages of both time- and frequency-domain methods. The first stage is a frequency-domain neural network, which predicts the high-frequency part of the wide-band spectrogram from the narrow-band input spectrogram. The wide-band spectrogram is then converted into a time-domain waveform, and passed through the second stage to refine the temporal details. For the first stage, we compare a convolutional recurrent network (CRN) with a temporal convolutional network (TCN), and find that the latter is able to capture long-span dependencies equally well as the former while using a lot fewer parameters. For the second stage, we enhance the Wave-U-Net architecture with a multi-resolution short-time Fourier transform (MSTFT) loss function. A series of comprehensive experiments show that the proposed system achieves superior performance in speech enhancement (measured by both time- and frequency-domain metrics) as well as speech recognition.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Joon Byun|AUTHOR Joon Byun]]^^1^^
, [[Seungmin Shin|AUTHOR Seungmin Shin]]^^1^^
, [[Youngcheol Park|AUTHOR Youngcheol Park]]^^1^^
, [[Jongmo Sung|AUTHOR Jongmo Sung]]^^2^^
, [[Seungkwon Beack|AUTHOR Seungkwon Beack]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Yonsei University, Korea; ^^2^^ETRI, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1694–1698
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents a loss function to compensate for the perceptual loss of the deep neural network (DNN)-based speech coder. By utilizing the psychoacoustic model (PAM), we design a loss function to maximize the mask-to-noise ratio (MNR) in multi-resolution Mel-frequency scales. Also, a perceptual entropy (PE)-based weighting scheme is incorporated onto the MNR loss so that the DNN model focuses more on perceptually important Mel-frequency bands. The proposed loss function was tested on a CNN-based autoencoder implementing the softmax quantization and entropy-based bitrate control. Objective and subjective tests conducted with speech signals showed that the proposed loss function produced higher perceptual quality than the previous perceptual loss functions.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Dimitrios Stoidis|AUTHOR Dimitrios Stoidis]], [[Andrea Cavallaro|AUTHOR Andrea Cavallaro]]
</p><p class="cpabstractcardaffiliationlist">Queen Mary University of London, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1699–1703
</span></p></div>
<div class="cpabstractcardabstract"><p>Besides its linguistic content, our speech is rich in biometric information that can be inferred by classifiers. Learning privacy-preserving representations for speech signals enables downstream tasks without sharing unnecessary, private information about an individual. In this paper, we show that protecting gender information in speech is more effective than modelling speaker-identity information only when generating a non-sensitive representation of speech. Our method relies on reconstructing speech by decoding linguistic content along with gender information using a variational autoencoder. Specifically, we exploit disentangled representation learning to encode information about different attributes into separate subspaces that can be factorised independently. We present a novel way to encode gender information and disentangle two sensitive biometric identifiers, namely gender and identity, in a privacy-protecting setting. Experiments on the LibriSpeech dataset show that gender recognition and speaker verification can be reduced to a random guess, protecting against classification-based attacks.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Gang-Xuan Lin|AUTHOR Gang-Xuan Lin]], [[Shih-Wei Hu|AUTHOR Shih-Wei Hu]], [[Yen-Ju Lu|AUTHOR Yen-Ju Lu]], [[Yu Tsao|AUTHOR Yu Tsao]], [[Chun-Shien Lu|AUTHOR Chun-Shien Lu]]
</p><p class="cpabstractcardaffiliationlist">Academia Sinica, Taiwan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1639–1643
</span></p></div>
<div class="cpabstractcardabstract"><p>Audio super-resolution (ASR) aims to reconstruct the high-resolution signal from its corresponding low-resolution one, which is hard while the correlation between them is low.
In this paper, we propose a learning model, QISTA-Net-Audio, to solve ASR in a paradigm of linear inverse problem. QISTA-Net-Audio is composed of two components. First, an audio waveform can be presented as a complex-valued spectrum, which is composed of a real and an imaginary part, in the frequency domain. We treat the real and imaginary parts as an image, and predict a high-resolution spectrum but only keep the phase information from the viewpoint of image reconstruction. Second, we predict the magnitude information by solving the sparse signal reconstruction problem. By combining the predicted magnitude and the phase together, we can recover the high-resolution waveform. Comparison with the state-of-the-art method MfNet [1], in terms of measure metrics SNR, PESQ, and STOI, demonstrates the superior performance of our method.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Liang Wen|AUTHOR Liang Wen]]^^1^^
, [[Lizhong Wang|AUTHOR Lizhong Wang]]^^1^^
, [[Xue Wen|AUTHOR Xue Wen]]^^1^^
, [[Yuxing Zheng|AUTHOR Yuxing Zheng]]^^1^^
, [[Youngo Park|AUTHOR Youngo Park]]^^2^^
, [[Kwang Pyo Choi|AUTHOR Kwang Pyo Choi]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Samsung, China; ^^2^^Samsung, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1644–1648
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper proposes X-net, a jointly learned scale-down and scale-up architecture for data pre- and post-processing in voice calls, as a means to bandwidth extension over band-limited channels. Scale-down and scale-up are deployed separately on transmitter and receiver to perform down- and upsampling. Separate supervisions are used on the submodules so that X-net can work properly even if one submodule is missing. A two-stage training method is used to learn X-net for improved perceptual quality. Results show that jointly learned X-net achieves promising improvement over blind audio super-resolution by both objective and subjective metrics, even in a lightweight implementation with only 1k parameters.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kexun Zhang|AUTHOR Kexun Zhang]]^^1^^
, [[Yi Ren|AUTHOR Yi Ren]]^^1^^
, [[Changliang Xu|AUTHOR Changliang Xu]]^^2^^
, [[Zhou Zhao|AUTHOR Zhou Zhao]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Zhejiang University, China; ^^2^^Xinhua News Agency, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1649–1653
<a href="./IS2021/MEDIA/0892" class="externallinkbutton" target="_blank">{{$:/causal/ZIP Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>Audio super-resolution is the task of constructing a high-resolution (HR) audio from a low-resolution (LR) audio by adding the missing band. Previous methods based on convolutional neural networks and mean squared error training objective have relatively low performance, while adversarial generative models are difficult to train and tune. Recently, normalizing flow has attracted a lot of attention for its high performance, simple training and fast inference. In this paper, we propose WSRGlow, a Glow-based waveform generative model to perform audio super-resolution. Specifically, 1) we integrate WaveNet and Glow to directly maximize the exact likelihood of the target HR audio conditioned on LR information; and 2) to exploit the audio information from low-resolution audio, we propose an LR audio encoder and an STFT encoder, which encode the LR information from the time domain and frequency domain respectively. The experimental results show that the proposed model is easier to train and outperforms the previous works in terms of both objective and perceptual quality. WSRGlow is also the first model to produce 48kHz waveforms from 12kHz LR audio. Audio samples are publicly available.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Jiangyan Yi|AUTHOR Jiangyan Yi]]^^1^^
, [[Ye Bai|AUTHOR Ye Bai]]^^2^^
, [[Jianhua Tao|AUTHOR Jianhua Tao]]^^1^^
, [[Haoxin Ma|AUTHOR Haoxin Ma]]^^1^^
, [[Zhengkun Tian|AUTHOR Zhengkun Tian]]^^1^^
, [[Chenglong Wang|AUTHOR Chenglong Wang]]^^1^^
, [[Tao Wang|AUTHOR Tao Wang]]^^1^^
, [[Ruibo Fu|AUTHOR Ruibo Fu]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^CAS, China; ^^2^^CAS, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1654–1658
</span></p></div>
<div class="cpabstractcardabstract"><p>Diverse promising datasets have been designed to further the development of fake audio detection, such as ASVspoof databases. However, previous datasets ignore an attacking situation, in which the hacker hides some small fake clips in real speech audio. This poses a serious threat since that it is difficult to distinguish the small fake clip from the whole speech utterance. Therefore, this paper develops such a dataset for half-truth audio detection (HAD). Partially fake audio in the HAD dataset involves only changing a few words in an utterance. The audio of the words is generated with the very latest state-of-the-art speech synthesis technology. We can not only detect fake utterances but also localize manipulated regions in a speech using this dataset. Some benchmark results are presented on this dataset. The results show that partially fake audio presents much more challenging than fully fake audio for fake audio detection.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Bhusan Chettri|AUTHOR Bhusan Chettri]]^^1^^
, [[Rosa González Hautamäki|AUTHOR Rosa González Hautamäki]]^^1^^
, [[Md. Sahidullah|AUTHOR Md. Sahidullah]]^^2^^
, [[Tomi Kinnunen|AUTHOR Tomi Kinnunen]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Eastern Finland, Finland; ^^2^^Loria (UMR 7503), France</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1659–1663
</span></p></div>
<div class="cpabstractcardabstract"><p>Voice anti-spoofing aims at classifying a given utterance either as a bonafide human sample, or a spoofing attack (e.g. synthetic or replayed sample). Many anti-spoofing methods have been proposed but most of them fail to generalize across domains (corpora) — and we do not know //why//. We outline a novel interpretative framework for gauging the impact of data quality upon anti-spoofing performance. Our within- and between-domain experiments pool data from seven public corpora and three anti-spoofing methods based on Gaussian mixture and convolutive neural network models. We assess the impacts of long-term spectral information, speaker population (through x-vector speaker embeddings), signal-to-noise ratio, and selected voice quality features.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Youngju Cheon|AUTHOR Youngju Cheon]]^^1^^
, [[Soojoong Hwang|AUTHOR Soojoong Hwang]]^^1^^
, [[Sangwook Han|AUTHOR Sangwook Han]]^^1^^
, [[Inseon Jang|AUTHOR Inseon Jang]]^^2^^
, [[Jong Won Shin|AUTHOR Jong Won Shin]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^GIST, Korea; ^^2^^ETRI, Korea</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1664–1668
</span></p></div>
<div class="cpabstractcardabstract"><p>Various approaches have been proposed to improve the quality of the speech coded at low bitrates. Recently, deep neural networks have also been used for speech coding, providing a high quality of speech with low bitrates. Although designing an entire codec with neural networks may be more effective, backward compatibility with the existing codecs can be desirable so that the systems with the legacy codec can still decode the coded bitstream. In this paper, we propose to generate side information based on neural networks for an existing codec and enhance the decoded speech with another neural networks using the side information. The vector-quantization variational autoencoder (VQ-VAE) is applied to generate vector-quantized side information and reconstruct the residual features, which are the difference between the features extracted from the original and decoded signals. The post-processor in the decoder side, which is another neural network, takes the decoded signal of the main codec and the reconstructed residual features to estimate the features for the original signal. Experimental results show that the proposed method can significantly improve the quality of the enhanced signals with additional bitrate of 0.6 kbps for two of the implementations of the high-efficiency advanced audio coding (HE-AAC) v1.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Lukas Drude|AUTHOR Lukas Drude]]^^1^^
, [[Jahn Heymann|AUTHOR Jahn Heymann]]^^1^^
, [[Andreas Schwarz|AUTHOR Andreas Schwarz]]^^1^^
, [[Jean-Marc Valin|AUTHOR Jean-Marc Valin]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Amazon, Germany; ^^2^^Amazon, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1669–1673
</span></p></div>
<div class="cpabstractcardabstract"><p>Automatic speech recognition (ASR) in the cloud allows the use of larger models and more powerful multi-channel signal processing front-ends compared to on-device processing. However, it also adds an inherent latency due to the transmission of the audio signal, especially when transmitting multiple channels of a microphone array. One way to reduce the network bandwidth requirements is client-side compression with a lossy codec such as Opus. However, this compression can have a detrimental effect especially on multi-channel ASR front-ends, due to the distortion and loss of spatial information introduced by the codec. In this publication, we propose an improved approach for the compression of microphone array signals based on Opus, using a modified joint channel coding approach and additionally introducing a multi-channel spatial decorrelating transform to reduce redundancy in the transmission. We illustrate the effect of the proposed approach on the spatial information retained in multi-channel signals after compression, and evaluate the performance on far-field ASR with a multi-channel beamforming front-end. We demonstrate that our approach can lead to a 37.5% bitrate reduction or a 5.1% relative word error rate (WER) reduction for a fixed bitrate budget in a seven channel setup.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Ingo Siegert|AUTHOR Ingo Siegert]]
</p><p class="cpabstractcardaffiliationlist">OvG Universität Magdeburg, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1674–1678
</span></p></div>
<div class="cpabstractcardabstract"><p>The use of modern voice assistants has rapidly grown and they can be found in more and more households. By design, these systems have to scan every sound in their surroundings waiting for their respective wake-word before being able to react to the users’ commands. The drawback of this method is that phonetic similar expressions can activate the voice assistant and thus speech utterances or whole private conversations will be recorded and streamed to the cloud back-end for further processing. Many news articles and scientific work reported on inaccurate wake-word detection. Resulting in at least a user’s confusion or at worst security breaches. The current paper is based on a broader analysis of phonetic similar accidental triggers conducted by Schönherr et al., they presented a systematic analysis to detect accidental triggers, using a pronouncing dictionary and a weighted, phone-based Levenshtein distance. In this work, the previously identified accidental triggers are recorded by several speakers under various conditions to investigate the influence of phonetic variances (i.e. intonation and speaking/articulation rate) on the robustness of accidental triggers in a real-world environment.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yahya Aldholmi|AUTHOR Yahya Aldholmi]], [[Rawan Aldhafyan|AUTHOR Rawan Aldhafyan]], [[Asma Alqahtani|AUTHOR Asma Alqahtani]]
</p><p class="cpabstractcardaffiliationlist">King Saud University, Saudi Arabia</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1704–1707
</span></p></div>
<div class="cpabstractcardabstract"><p>This experiment investigated how Arabic speakers perceive synthetic Standard Arabic speech rate produced by Google TTS, at normal vs. accelerated rates. Twenty syntactically identical Standard Arabic sentences with a similar length (//M//= 22 syllables per sentence, //SD//= 1) were auditorily presented in a female voice to thirty female participants who were instructed to rate the tempo of the normal (//M//~ 4.5 syllable per second) and accelerated (by 10%, 20%, and 30%) stimuli on a 1–7 Likert scale (1= extremely slow, 4= normal, 7= extremely fast). The results show that differences in the four-condition synthetic speech rates were reflected in the ratings provided by the participants: the more the speech was accelerated, the higher rating it received. More importantly, the findings support the observation that the current normal speech rate of Google TTS synthetic speech is not perceived as normal by Arabic speakers, but rather is perceived as slow. This may negatively affect the likelihood that users are comfortable using this technology. Hence, the outcome of this study does not only call for further investigation into Standard Arabic synthetic speech rates, but also reveals the need to define a baseline for a natural speech rate in Arabic.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Puneet Kumar|AUTHOR Puneet Kumar]]^^1^^
, [[Vishesh Kaushik|AUTHOR Vishesh Kaushik]]^^2^^
, [[Balasubramanian Raman|AUTHOR Balasubramanian Raman]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^IIT Roorkee, India; ^^2^^IIT Kanpur, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1748–1752
<a href="./IS2021/MEDIA/1718" class="externallinkbutton" target="_blank">{{$:/causal/ZIP Button}}</a>
</span></p></div>
<div class="cpabstractcardabstract"><p>In this paper, a multimodal speech emotion recognition system has been developed, and a novel technique to explain its predictions has been proposed. The audio and textual features are extracted separately using attention-based Gated Recurrent Unit (GRU) and pre-trained Bidirectional Encoder Representations from Transformers (BERT), respectively. Then they are concatenated and used to predict the final emotion class. The weighted and unweighted emotion recognition accuracy of 71.7% and 75.0% has been achieved on Emotional Dyadic Motion Capture (IEMOCAP) dataset containing speech utterances and corresponding text transcripts. The training and predictions of network layers have been analyzed qualitatively through emotion embedding plots and quantitatively by analyzing the intersection matrices for various emotion classes’ embeddings.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Biao Zeng|AUTHOR Biao Zeng]]^^1^^
, [[Rui Wang|AUTHOR Rui Wang]]^^2^^
, [[Guoxing Yu|AUTHOR Guoxing Yu]]^^3^^
, [[Christian Dobel|AUTHOR Christian Dobel]]^^4^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of South Wales, UK; ^^2^^Guangdong Pharmaceutical University, China; ^^3^^University of Bristol, UK; ^^4^^FSU Jena, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1753–1756
</span></p></div>
<div class="cpabstractcardabstract"><p>This study investigated Chinese speakers’ eye movements when they were asked to identify audiovisual Mandarin lexical tones and vowels. In the lexical tone identification task, Chinese speakers were presented with an audiovisual clip of Mandarin monosyllables (/ă/, /à/, /ĭ/, /ì/) and asked to identify whether the syllables were presented in a dipping (/ă/, /ĭ/) or falling tone (/à/, /ì/). In the vowel identification task, they were asked to identify whether the vowels were /a/ or /i/ regardless of lexical tone. These audiovisual syllables were presented in clear, noisy, and silent conditions. An eye-tracker recorded the participants’ eye movements.
Results showed participants gazed more at the mouth than the eyes in both lexical tones and vowels. Additionally, when acoustic conditions degraded from clear to noisy and eventually silent, Chinese speakers increased their gaze towards the mouth rather than the eyes. These findings suggest the mouth to be the primary area that is utilised during audiovisual speech perception. The similar patterns of eye movements between vowels and lexical tones indicate that the mouth acts as a perceptual cue that provides articulatory information.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Takanori Ashihara|AUTHOR Takanori Ashihara]], [[Takafumi Moriya|AUTHOR Takafumi Moriya]], [[Makio Kashino|AUTHOR Makio Kashino]]
</p><p class="cpabstractcardaffiliationlist">NTT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1757–1761
</span></p></div>
<div class="cpabstractcardabstract"><p>Humans have a sophisticated capability to robustly handle incomplete sensory input, as often happens in real environments. In earlier studies, the robustness of human speech perception was observed qualitatively by spectrally and temporally degraded stimuli. The current study investigates how machine speech recognition, especially end-to-end automatic speech recognition (E2E-ASR), can yield similar robustness against distorted acoustic cues. To evaluate the performance of E2E-ASR, we employ four types of distorted speech based on previous studies: locally time-reversed speech, noise-vocoded speech, phonemic restoration, and modulation-filtered speech. Those stimuli are synthesized by spectral and/or temporal manipulation from original speech samples whose human speech intelligibility scores have been well-reported. An experiment was conducted on the TED-LIUM2 for English and the Corpus of Spontaneous Japanese (CSJ) for Japanese. We found that while there is a tendency to exhibit similar robustness in some experiments, full recovery from the harmful effect of the severe spectral degradation is not achieved.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Takeshi Kishiyama|AUTHOR Takeshi Kishiyama]]
</p><p class="cpabstractcardaffiliationlist">University of Tokyo, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1708–1712
</span></p></div>
<div class="cpabstractcardabstract"><p>Research has shown that listeners perceive illusory vowels inside consonant clusters that are not allowed in their L1. This phenomenon has been examined using several psycholinguistic and computational models, including hidden Markov models (HMMs), applied to human phoneme perception. However, the inference algorithm of HMMs assumes that parallel processing, which has not been proven to have psychological reality, is a valid cognitive process. This study tested the psychological reality of parallel processing by attempting to duplicate two results from previous studies: First, listeners perceive an illusory vowel in consonant clusters that are not permissible in their L1. Second, the illusory vowel is based on the characteristics of the preceding consonant, indicating that listeners integrate phonotactics and acoustic information. The experiment manipulated the number of candidates that the model can refer to, and the algorithm can be considered parallel when it allows models to use more than two candidates that are stored in memory. In addition, the transition probabilities between consonants were manipulated to represent the different phonotactics. The results showed that only the parallel processing condition reproduced the two observations above, supporting the psychological reality of parallel processing.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Anupama Chingacham|AUTHOR Anupama Chingacham]], [[Vera Demberg|AUTHOR Vera Demberg]], [[Dietrich Klakow|AUTHOR Dietrich Klakow]]
</p><p class="cpabstractcardaffiliationlist">Universität des Saarlandes, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1713–1717
</span></p></div>
<div class="cpabstractcardabstract"><p>Listening in noisy environments can be difficult even for individuals with a normal hearing thresholds. The speech signal can be masked by noise, which may lead to word misperceptions on the side of the listener, and overall difficulty to understand the message. To mitigate hearing difficulties on listeners, a co-operative speaker utilizes voice modulation strategies like Lombard speech to generate noise-robust utterances, and similar solutions have been developed for speech synthesis systems. In this work, we propose an alternate solution of choosing noise-robust lexical paraphrases to represent an intended meaning. Our results show that lexical paraphrases differ in their intelligibility in noise. We evaluate the intelligibility of synonyms in context and find that choosing a lexical unit that is less risky to be misheard than its synonym introduced an average gain in comprehension of 37% at SNR -5 dB and 21% at SNR 0 dB for babble noise.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Olympia Simantiraki|AUTHOR Olympia Simantiraki]]^^1^^
, [[Martin Cooke|AUTHOR Martin Cooke]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universidad del País Vasco, Spain; ^^2^^Ikerbasque, Spain</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1718–1722
</span></p></div>
<div class="cpabstractcardabstract"><p>Most of what we know about speech perception has been gleaned from tests in which listeners respond to stimuli chosen by an experimenter. This paper presents SPEECHADJUSTER, an open source tool that reverses the roles of listener and experimenter by allowing listeners direct control of speech characteristics in real-time. This change of paradigm enables listener preferences — reflecting factors such as cognitive effort, naturalness or distortion — to be measured directly, without recourse to rating scales. Incorporation of a test phase in which listener preferences are frozen also enables intelligibility to be estimated within the same trial. Offline computation and smooth online interpolation within the tool permits the impact of changes in practically any target speech feature (e.g. fundamental frequency or spectral slope) or background characteristic (e.g. noise spectrum), regardless of complexity, to be measured. The paper describes the tool’s capabilities, presents a range of visualisations, and notes some potential applications and limitations.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Susumu Saito|AUTHOR Susumu Saito]], [[Yuta Ide|AUTHOR Yuta Ide]], [[Teppei Nakano|AUTHOR Teppei Nakano]], [[Tetsuji Ogawa|AUTHOR Tetsuji Ogawa]]
</p><p class="cpabstractcardaffiliationlist">Waseda University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1723–1727
</span></p></div>
<div class="cpabstractcardabstract"><p>This paper presents VocalTurk, a feasibility study of crowdsourced speaker identification based on our worker dataset collected in Amazon Mechanical Turk. Crowdsourced data labeling has already been acknowledged in speech data processing nowadays, but empirical analysis that answer to common questions such as “//how accurate are workers capable of labeling speech data?//” and “//what does a good speech-labeling microtask interface look like?//” still remain underexplored, which would limit the quality and scale of the dataset collection. Focusing on the speaker identification task in particular, we thus conducted two studies in Amazon Mechanical Turk: i) hired 3,800+ unique workers to test their performances and confidences in giving answers to voice pair comparison tasks, and ii) additionally assigned more-difficult tasks of //1-vs-N// voice set comparisons to 350+ top-scoring workers to test their accuracy-speed performances across patterns of N = 1, 3, 5. The results revealed some positive findings that would motivate speech researchers toward crowdsourced data labeling, such as that the top-scoring workers were capable of giving labels to our voice comparison pairs with 99% accuracy after majority voting, as well as they were even capable of batch-labeling which significantly shortened up to 34% of their completion time but still with no statistically-significant degradation in accuracy.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Min Xu|AUTHOR Min Xu]]^^1^^
, [[Jing Shao|AUTHOR Jing Shao]]^^2^^
, [[Lan Wang|AUTHOR Lan Wang]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^CAS, China; ^^2^^HKBU, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1728–1732
</span></p></div>
<div class="cpabstractcardabstract"><p>Paralinguistic information is as important as linguistic information. Being familiar with talker’s voice may facilitate speech perception, especially in challenging conditions. Previous studies have suggested that aging and age-related hearing loss lead to the deterioration of the phonetic and phonological processing ability. The current study aims to explore whether these two factors exert effects on the talker’ voice discrimination. Three groups of participants, including young adults (YA) and older adults (OA) with and without hearing loss, were tested on talker discrimination in four types of stimuli varying in language familiarity: Mandarin real words, pseudowords, Arabic words and reversed Mandarin words. The results showed that OA with and without hearing loss performed worse than YA in both nonnative and native conditions. OA with hearing loss further performed worse than OA with normal hearing in Mandarin real word condition. These findings indicated that aging and hearing loss affected both low-level phonetic and high-level phonological processing, but hearing loss had extra effect on phonological processing. Altogether, these results implied that OA could not utilize phonetic and phonological cues as effectively as YA, and OA with hearing loss encountered more difficulties in utilizing phonological cues in talker discrimination.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Yuqing Zhang|AUTHOR Yuqing Zhang]]^^1^^
, [[Zhu Li|AUTHOR Zhu Li]]^^1^^
, [[Bin Wu|AUTHOR Bin Wu]]^^2^^
, [[Yanlu Xie|AUTHOR Yanlu Xie]]^^1^^
, [[Binghuai Lin|AUTHOR Binghuai Lin]]^^3^^
, [[Jinsong Zhang|AUTHOR Jinsong Zhang]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^BLCU, China; ^^2^^NAIST, Japan; ^^3^^Tencent, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1733–1737
</span></p></div>
<div class="cpabstractcardabstract"><p>Work on communicative efficiency has hypothesized that phonological contrasts signaling more meaning distinctions (i.e., of high functional load (FL)) tend to have the least articulatory complexity and the highest perceptual salience. However, only a few studies have examined the preference for perceptual distinctiveness based on the traditional measures of FL (e.g., the number of minimal pairs, the change in entropy of the lexicon), which are weak in modeling contexts of individual words. And little attention has been devoted to investigating the need to minimize effort. This study explores whether and how the communicative pressures to minimize the likelihood of confusion and minimize articulatory effort influence phonemic contrasts’ functional contributions to speech communication. We used a revised definition of FL capable of modeling contextual information (i.e., the change in mutual information between phoneme sequences and spoken texts after the contrast in question is neutralized) and quantified information contributions of phonemic contrasts in English. The results indicated that FL of each phoneme pair increased significantly with its perceptual distinctiveness, and decreased significantly with articulatory complexity of the phoneme requiring less articulatory effort in the contrast. Altogether, these findings suggest that communicative pressures modulate the work a phonemic contrast does in distinguishing words.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Camryn Terblanche|AUTHOR Camryn Terblanche]]^^1^^
, [[Philip Harrison|AUTHOR Philip Harrison]]^^2^^
, [[Amelia J. Gully|AUTHOR Amelia J. Gully]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^University of Cape Town, South Africa; ^^2^^University of York, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1738–1742
</span></p></div>
<div class="cpabstractcardabstract"><p>Over the past few years attention has been focused on the automatic detection of spoofing in the context of automatic speaker verification (ASV) systems. However, little is known about how well humans perform at detecting spoofed speech, particularly under degraded conditions. Using the latest synthesis technologies from ASVspoof 2019, this paper explores human judgements of speech authenticity by considering three common channel degradations — a GSM network, a VoIP network, and background noise — in conjunction with varying synthesis quality. The results reveal that channel degradation reduces the size of the perceptual difference between genuine and spoofed speech, and overall participants correctly identified human and spoofed speech only 56% of the time. In background noise and GSM transmission, lower-quality synthetic speech was judged as more human, and in VoIP transmission all speech, including genuine recordings, was judged as less human. Under all conditions, state-of-the-art synthetic speech was judged as human, or more human than, genuine recorded speech. The paper also considers the listener factors which may contribute to an individual’s spoofing detection performance, and finds that a listener’s familiarity with the accents involved, their age, and the audio equipment used for playback, have an effect on their spoofing detection performance.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Marieke Einfeldt|AUTHOR Marieke Einfeldt]]^^1^^
, [[Rita Sevastjanova|AUTHOR Rita Sevastjanova]]^^1^^
, [[Katharina Zahner-Ritter|AUTHOR Katharina Zahner-Ritter]]^^2^^
, [[Ekaterina Kazak|AUTHOR Ekaterina Kazak]]^^3^^
, [[Bettina Braun|AUTHOR Bettina Braun]]^^1^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Universität Konstanz, Germany; ^^2^^Universität Trier, Germany; ^^3^^University of Manchester, UK</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1743–1747
</span></p></div>
<div class="cpabstractcardabstract"><p>Studying the relative weighting of different cues for the interpretation of a linguistic phenomenon is a core element in psycholinguistic research. This research needs to strike a balance between two things: generalisability to diverse lexical settings, which requires a high number of different lexicalisations and the investigation of a large number of different cues, which requires a high number of different test conditions. Optimizing both is impossible with classical psycholinguistic designs as this would leave the participants with too many experimental trials. Previously we showed that Active Learning (AL) systems allow to test numerous conditions (eight) and items (32) within the same experiment. As stimulus selection was informed by the system’s learning mechanism, AL sped-up the labelling process. In the present study, we extend the use case to an experiment with 16 conditions, manipulated through four binary factors (the experimental setting and three prosodic cues; two levels each). Our findings show that the AL system correctly predicted the intended result pattern after twelve trials only. Hence, AL further confirmed previous findings and proved to be an efficient tool, which offers a promising solution to complex study designs in psycholinguistic research.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Thai-Son Nguyen|AUTHOR Thai-Son Nguyen]], [[Sebastian Stüker|AUTHOR Sebastian Stüker]], [[Alex Waibel|AUTHOR Alex Waibel]]
</p><p class="cpabstractcardaffiliationlist">KIT, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1762–1766
</span></p></div>
<div class="cpabstractcardabstract"><p>Achieving super-human performance in recognizing human speech has been a goal for several decades as researchers have worked on increasingly challenging tasks. In the 1990’s it was discovered, that conversational speech between two humans turns out to be considerably more difficult than read speech as hesitations, disfluencies, false starts and sloppy articulation complicate acoustic processing and require robust joint handling of acoustic, lexical and language context. Early attempts with statistical models could only reach word error rates (WER) of over 50% which is far from human performance with shows a WER of around 5.5%. Neural hybrid models and recent attention-based encoder-decoder models have considerably improved performance as such contexts can now be learned in an integral fashion. However, processing such contexts requires an entire utterance presentation and thus introduces unwanted delays before a recognition result can be output. In this paper, we address performance //as well as// latency. We present results for a system that can achieve super-human performance, i.e. a WER of 5.0% on the Switchboard conversational benchmark, at a word based latency of only 1 second behind a speaker’s speech. The system uses multiple attention-based encoder-decoder networks integrated within a novel low latency incremental inference approach.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Thibault Doutre|AUTHOR Thibault Doutre]], [[Wei Han|AUTHOR Wei Han]], [[Chung-Cheng Chiu|AUTHOR Chung-Cheng Chiu]], [[Ruoming Pang|AUTHOR Ruoming Pang]], [[Olivier Siohan|AUTHOR Olivier Siohan]], [[Liangliang Cao|AUTHOR Liangliang Cao]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1807–1811
</span></p></div>
<div class="cpabstractcardabstract"><p>Streaming end-to-end automatic speech recognition (ASR) systems are widely used in everyday applications that require transcribing speech to text in real-time. Their minimal latency makes them suitable for such tasks. Unlike their non-streaming counterparts, streaming models are constrained to be causal with no future context and suffer from higher word error rates (WER). To improve streaming models, a recent study [1] proposed to distill a non-streaming teacher model on unsupervised utterances, and then train a streaming student using the teachers’ predictions. However, the performance gap between teacher and student WERs remains high. In this paper, we aim to close this gap by using a diversified set of non-streaming teacher models and combining them using Recognizer Output Voting Error Reduction (ROVER). In particular, we show that, despite being weaker than RNN-T models, CTC models are remarkable teachers. Further, by fusing RNN-T and CTC models together, we build the strongest teachers. The resulting student models drastically improve upon streaming models of previous work [1]: the WER decreases by 41% on Spanish, 27% on Portuguese, and 13% on French.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kartik Audhkhasi|AUTHOR Kartik Audhkhasi]], [[Tongzhou Chen|AUTHOR Tongzhou Chen]], [[Bhuvana Ramabhadran|AUTHOR Bhuvana Ramabhadran]], [[Pedro J. Moreno|AUTHOR Pedro J. Moreno]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1812–1816
</span></p></div>
<div class="cpabstractcardabstract"><p>Streaming automatic speech recognition (ASR) hypothesizes words as soon as the input audio arrives, whereas non-streaming ASR can potentially wait for the completion of the entire utterance to hypothesize words. Streaming and non-streaming ASR systems have typically used different acoustic encoders. Recent work has attempted to unify them by either jointly training a fixed stack of streaming and non-streaming layers or using knowledge distillation during training to ensure consistency between the streaming and non-streaming predictions. We propose mixture model (MiMo) attention as a simpler and theoretically-motivated alternative that replaces only the attention mechanism, requires no change to the training loss, and allows greater flexibility of switching between streaming and non-streaming mode during inference. Our experiments on the public Librispeech data set and a few Indic language data sets show that MiMo attention endows a single ASR model with the ability to operate in both streaming and non-streaming modes without any overhead and without significant loss in accuracy compared to separately-trained streaming and non-streaming models. We also illustrate this benefit of MiMo attention in a second-pass rescoring setting.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Hirofumi Inaguma|AUTHOR Hirofumi Inaguma]], [[Tatsuya Kawahara|AUTHOR Tatsuya Kawahara]]
</p><p class="cpabstractcardaffiliationlist">Kyoto University, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1817–1821
</span></p></div>
<div class="cpabstractcardabstract"><p>While attention-based encoder-decoder (AED) models have been successfully extended to the online variants for streaming automatic speech recognition (ASR), such as monotonic chunkwise attention (MoChA), the models still have a large label emission latency because of the unconstrained end-to-end training objective. Previous works tackled this problem by leveraging alignment information to control the timing to emit tokens during training. In this work, we propose a simple //alignment-free// regularization method, //StableEmit//, to encourage MoChA to emit tokens earlier. StableEmit discounts the selection probabilities in hard monotonic attention for token boundary detection by a constant factor and regularizes them to recover the total attention mass during training. As a result, the scale of the selection probabilities is increased, and the values can reach a threshold for token emission earlier, leading to a reduction of emission latency and deletion errors. Moreover, StableEmit can be combined with methods that constraint alignments to further improve the accuracy and latency. Experimental evaluations with LSTM and Conformer encoders demonstrate that StableEmit significantly reduces the recognition errors and the emission latency simultaneously. We also show that the use of alignment information is complementary in both metrics.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Niko Moritz|AUTHOR Niko Moritz]], [[Takaaki Hori|AUTHOR Takaaki Hori]], [[Jonathan Le Roux|AUTHOR Jonathan Le Roux]]
</p><p class="cpabstractcardaffiliationlist">MERL, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1822–1826
</span></p></div>
<div class="cpabstractcardabstract"><p>Attention-based end-to-end automatic speech recognition (ASR) systems have recently demonstrated state-of-the-art results for numerous tasks. However, the application of self-attention and attention-based encoder-decoder models remains challenging for streaming ASR, where each word must be recognized shortly after it was spoken. In this work, we present the dual causal/non-causal self-attention (DCN) architecture, which in contrast to restricted self-attention prevents the overall context to grow beyond the look-ahead of a single layer when used in a deep architecture. DCN is compared to chunk-based and restricted self-attention using streaming transformer and conformer architectures, showing improved ASR performance over restricted self-attention and competitive ASR results compared to chunk-based self-attention, while providing the advantage of frame-synchronous processing. Combined with triggered attention, the proposed streaming end-to-end ASR systems obtained state-of-the-art results on the LibriSpeech, HKUST, and Switchboard ASR tasks.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Kwangyoun Kim|AUTHOR Kwangyoun Kim]]^^1^^
, [[Felix Wu|AUTHOR Felix Wu]]^^1^^
, [[Prashant Sridhar|AUTHOR Prashant Sridhar]]^^1^^
, [[Kyu J. Han|AUTHOR Kyu J. Han]]^^1^^
, [[Shinji Watanabe|AUTHOR Shinji Watanabe]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^ASAPP, USA; ^^2^^Carnegie Mellon University, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1827–1831
</span></p></div>
<div class="cpabstractcardabstract"><p>Automatic speech recognition (ASR) models make fewer errors when more surrounding speech information is presented as context. Unfortunately, acquiring a larger future context leads to higher latency. There exists an inevitable trade-off between speed and accuracy. Naïvely, to fit different latency requirements, people have to store multiple models and pick the best one under the constraints. Instead, a more desirable approach is to have a single model that can dynamically adjust its latency based on different constraints, which we refer to as //Multi-mode ASR//. A Multi-mode ASR model can fulfill various latency requirements during inference — when a larger latency becomes acceptable, the model can process longer future context to achieve higher accuracy and when a latency budget is not flexible, the model can be less dependent on future context but still achieve reliable accuracy. In pursuit of Multi-mode ASR, we propose //Stochastic Future Context//, a simple training procedure that samples one streaming configuration in each iteration. Through extensive experiments on AISHELL-1 and LibriSpeech datasets, we show that a Multi-mode ASR model rivals, if not surpasses, a set of competitive streaming baselines trained with different latency budgets.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Vikas Joshi|AUTHOR Vikas Joshi]]^^1^^
, [[Amit Das|AUTHOR Amit Das]]^^2^^
, [[Eric Sun|AUTHOR Eric Sun]]^^2^^
, [[Rupesh R. Mehta|AUTHOR Rupesh R. Mehta]]^^1^^
, [[Jinyu Li|AUTHOR Jinyu Li]]^^2^^
, [[Yifan Gong|AUTHOR Yifan Gong]]^^2^^
</p><p class="cpabstractcardaffiliationlist">^^1^^Microsoft, India; ^^2^^Microsoft, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1767–1771
</span></p></div>
<div class="cpabstractcardabstract"><p>Improving multilingual end-to-end (E2E) automatic speech recognition (ASR) systems have manifold advantages. They simplify the training strategy, are easier to scale and exhibit better performance over monolingual models. However, it is still challenging to use a single multilingual model to recognize multiple languages without knowing the input language, as most multilingual models assume the availability of the input language. In this paper, we introduce multi-softmax model to improve the multilingual recurrent neural network transducer (RNN-T) models, by having language specific softmax, joint and embedding layers, while sharing rest of the parameters. We extend the multi-softmax model to work without knowing the input language, by integrating a language identification (LID) model, that estimates the LID on-the-fly and also does the recognition at the same time. The multi-softmax model outperforms monolingual models with an average word error rate relative (WERR) reduction of 4.65% on Indian languages. Finetuning further improves the WERR reduction to 12.2%. The multi-softmax model with on-the-fly LID estimation, shows WERR reduction of 13.86% compared to the multilingual baseline.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Duc Le|AUTHOR Duc Le]], [[Mahaveer Jain|AUTHOR Mahaveer Jain]], [[Gil Keren|AUTHOR Gil Keren]], [[Suyoun Kim|AUTHOR Suyoun Kim]], [[Yangyang Shi|AUTHOR Yangyang Shi]], [[Jay Mahadeokar|AUTHOR Jay Mahadeokar]], [[Julian Chan|AUTHOR Julian Chan]], [[Yuan Shangguan|AUTHOR Yuan Shangguan]], [[Christian Fuegen|AUTHOR Christian Fuegen]], [[Ozlem Kalinli|AUTHOR Ozlem Kalinli]], [[Yatharth Saraf|AUTHOR Yatharth Saraf]], [[Michael L. Seltzer|AUTHOR Michael L. Seltzer]]
</p><p class="cpabstractcardaffiliationlist">Facebook, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1772–1776
</span></p></div>
<div class="cpabstractcardabstract"><p>How to leverage dynamic contextual information in end-to-end speech recognition has remained an active research area. Previous solutions to this problem were either designed for specialized use cases that did not generalize well to open-domain scenarios, did not scale to large biasing lists, or underperformed on rare long-tail words. We address these limitations by proposing a novel solution that combines shallow fusion, trie-based deep biasing, and neural network language model contextualization. These techniques result in significant 19.5% relative Word Error Rate improvement over existing contextual biasing approaches and 5.4%–9.3% improvement compared to a strong hybrid baseline on both open-domain and constrained contextualization tasks, where the targets consist of mostly rare long-tail words. Our final system remains lightweight and modular, allowing for quick modification without model re-training.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Tara N. Sainath|AUTHOR Tara N. Sainath]], [[Yanzhang He|AUTHOR Yanzhang He]], [[Arun Narayanan|AUTHOR Arun Narayanan]], [[Rami Botros|AUTHOR Rami Botros]], [[Ruoming Pang|AUTHOR Ruoming Pang]], [[David Rybach|AUTHOR David Rybach]], [[Cyril Allauzen|AUTHOR Cyril Allauzen]], [[Ehsan Variani|AUTHOR Ehsan Variani]], [[James Qin|AUTHOR James Qin]], [[Quoc-Nam Le-The|AUTHOR Quoc-Nam Le-The]], [[Shuo-Yiin Chang|AUTHOR Shuo-Yiin Chang]], [[Bo Li|AUTHOR Bo Li]], [[Anmol Gulati|AUTHOR Anmol Gulati]], [[Jiahui Yu|AUTHOR Jiahui Yu]], [[Chung-Cheng Chiu|AUTHOR Chung-Cheng Chiu]], [[Diamantino Caseiro|AUTHOR Diamantino Caseiro]], [[Wei Li|AUTHOR Wei Li]], [[Qiao Liang|AUTHOR Qiao Liang]], [[Pat Rondon|AUTHOR Pat Rondon]]
</p><p class="cpabstractcardaffiliationlist">Google, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1777–1781
</span></p></div>
<div class="cpabstractcardabstract"><p>On-device end-to-end (E2E) models have shown improvements over a conventional model on Search test sets in both quality, as measured by Word Error Rate (WER) [1], and latency [2], measured by the time the result is finalized after the user stops speaking. However, the E2E model is trained on a small fraction of audio-text pairs compared to the 100 billion text utterances that a conventional language model (LM) is trained with. Thus E2E models perform poorly on rare words and phrases. In this paper, building upon the two-pass streaming Cascaded Encoder E2E model [3], we explore using a Hybrid Autoregressive Transducer (HAT) [4] factorization to better integrate an on-device neural LM trained on text-only data. Furthermore, to further improve decoder latency we introduce a non-recurrent embedding decoder, in place of the typical LSTM decoder, into the Cascaded Encoder model. Overall, we present a streaming on-device model that incorporates an external neural LM and outperforms the conventional model in both search and rare-word quality, as well as latency, and is 318× smaller.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Liang Lu|AUTHOR Liang Lu]], [[Naoyuki Kanda|AUTHOR Naoyuki Kanda]], [[Jinyu Li|AUTHOR Jinyu Li]], [[Yifan Gong|AUTHOR Yifan Gong]]
</p><p class="cpabstractcardaffiliationlist">Microsoft, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1782–1786
</span></p></div>
<div class="cpabstractcardabstract"><p>In multi-talker scenarios such as meetings and conversations, speech processing systems are usually required to transcribe the audio as well as identify the speakers for downstream applications. Since overlapped speech is common in this case, conventional approaches usually address this problem in a cascaded fashion that involves speech separation, speech recognition and speaker identification that are trained independently. In this paper, we propose Streaming Unmixing, Recognition and Identification Transducer (SURIT) — a new framework that deals with this problem in an end-to-end streaming fashion. SURIT employs the recurrent neural network transducer (RNN-T) as the backbone for both speech recognition and speaker identification. We validate our idea on the LibrispeechMix dataset — a multi-talker dataset derived from Librispeech, and present encouraging results.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Takafumi Moriya|AUTHOR Takafumi Moriya]], [[Tomohiro Tanaka|AUTHOR Tomohiro Tanaka]], [[Takanori Ashihara|AUTHOR Takanori Ashihara]], [[Tsubasa Ochiai|AUTHOR Tsubasa Ochiai]], [[Hiroshi Sato|AUTHOR Hiroshi Sato]], [[Atsushi Ando|AUTHOR Atsushi Ando]], [[Ryo Masumura|AUTHOR Ryo Masumura]], [[Marc Delcroix|AUTHOR Marc Delcroix]], [[Taichi Asami|AUTHOR Taichi Asami]]
</p><p class="cpabstractcardaffiliationlist">NTT, Japan</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1787–1791
</span></p></div>
<div class="cpabstractcardabstract"><p>We present a novel architecture with its decoding approach for improving recurrent neural network-transducer (RNN-T) performance. RNN-T is promising for building time-synchronous automatic speech recognition (ASR) systems and thus enhancing streaming ASR applications. We note that encoder-decoder-based sequence-to-sequence models (S2S) have been also used successfully by the ASR community. In this paper, we integrate these popular models in the RNN-T+S2S approach; higher recognition performance than either is achieved due to their integration. However, it is generally deemed to be complicated to use S2S in streaming systems, because the attention mechanism can use arbitrarily long past and future contexts during decoding. Our RNN-T+S2S is composed of the shared encoder, an RNN-T decoder and a triggered attention-based decoder which uses time restricted encoder outputs for attention weight computation. By using the trigger points generated from RNN-T outputs, the S2S branch of RNN-T+S2S activates only when the triggers are detected, which makes streaming ASR practical. Experiments on public and private datasets created to research various tasks demonstrate that our proposal can yield superior recognition performance.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Andreas Schwarz|AUTHOR Andreas Schwarz]], [[Ilya Sklyar|AUTHOR Ilya Sklyar]], [[Simon Wiesler|AUTHOR Simon Wiesler]]
</p><p class="cpabstractcardaffiliationlist">Amazon, Germany</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1792–1796
</span></p></div>
<div class="cpabstractcardabstract"><p>We present a training scheme for streaming automatic speech recognition (ASR) based on recurrent neural network transducers (RNN-T) which allows the encoder network to learn to exploit context audio from a stream, using segmented or partially labeled sequences of the stream during training. We show that the use of context audio during training and inference can lead to word error rate reductions of more than 6% in a realistic production setting for a voice assistant ASR system. We investigate the effect of the proposed training approach on acoustically challenging data containing background speech and present data points which indicate that this approach helps the network learn both speaker and environment adaptation. To gain further insight into the ability of a long short-term memory (LSTM) based ASR encoder to exploit long-term context, we also visualize RNN-T loss gradients with respect to the input.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Lu Huang|AUTHOR Lu Huang]], [[Jingyu Sun|AUTHOR Jingyu Sun]], [[Yufeng Tang|AUTHOR Yufeng Tang]], [[Junfeng Hou|AUTHOR Junfeng Hou]], [[Jinkun Chen|AUTHOR Jinkun Chen]], [[Jun Zhang|AUTHOR Jun Zhang]], [[Zejun Ma|AUTHOR Zejun Ma]]
</p><p class="cpabstractcardaffiliationlist">ByteDance, China</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1797–1801
</span></p></div>
<div class="cpabstractcardabstract"><p>This work describes an encoder pre-training procedure using frame-wise label to improve the training of streaming recurrent neural network transducer (RNN-T) model. Streaming RNN-T trained from scratch usually performs worse than non-streaming RNN-T. Although it is common to address this issue through pre-training components of RNN-T with other criteria or frame-wise alignment guidance, the alignment is not easily available in end-to-end manner. In this work, frame-wise alignment, used to pre-train streaming RNN-T’s encoder, is generated without using a HMM-based system. Therefore an all-neural framework equipping HMM-free encoder pre-training is constructed. This is achieved by expanding the spikes of CTC model to their left/right blank frames, and two expanding strategies are proposed. To our best knowledge, this is the first work to simulate HMM-based frame-wise label using CTC model for pre-training. Experiments conducted on LibriSpeech and MLS English tasks show the proposed pre-training procedure, compared with random initialization, reduces the WER by relatively 5%~11% and the emission latency by 60 ms. Besides, the method is lexicon-free, so it is friendly to new languages without manually designed lexicon.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Xiaodong Cui|AUTHOR Xiaodong Cui]], [[Brian Kingsbury|AUTHOR Brian Kingsbury]], [[George Saon|AUTHOR George Saon]], [[David Haws|AUTHOR David Haws]], [[Zoltán Tüske|AUTHOR Zoltán Tüske]]
</p><p class="cpabstractcardaffiliationlist">IBM, USA</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{||$:/causal/View PDF File Button}} page 1802–1806
</span></p></div>
<div class="cpabstractcardabstract"><p>When recurrent neural network transducers (RNNTs) are trained using the typical maximum likelihood criterion, the prediction network is trained only on ground truth label sequences. This leads to a mismatch during inference, known as exposure bias, when the model must deal with label sequences containing errors. In this paper we investigate approaches to reducing exposure bias in training to improve the generalization of RNNT models for automatic speech recognition (ASR). A label-preserving input perturbation to the prediction network is introduced. The input token sequences are perturbed using SwitchOut and scheduled sampling based on an additional token language model. Experiments conducted on the 300-hour Switchboard dataset demonstrate their effectiveness. By reducing the exposure bias, we show that we can further improve the accuracy of a high-performance RNNT ASR model and obtain state-of-the-art results on the 300-hour Switchboard dataset.</p></div>
\rules except wikilink
<div class="cpabstractcardauthorarea"><p class="cpabstractcardauthornames">[[Sriram Ganapathy|AUTHOR Sriram Ganapathy]]
</p><p class="cpabstractcardaffiliationlist">Indian Institute of Science, India</p></div>
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Current Session Button}}{{||$:/causal/Preceding Paper Button}}
<span class="cpprevnextanchortext">PAPER</span>
{{||$:/causal/Next Paper Button}}
</p>
<p class="lineheightforbuttons"><span class="cpabscardpdfandmediabutton">
{{$:/causal/NO-PDF Marker}} ////
</span></p></div>
<div class="cpabstractcardabstract"><p>The investigation of acoustic biomarkers of respiratory diseases has societal and public health impact following the onset of COVID-19 pandemic. The efforts in the pre-pandemic period focused on developing smartphone friendly diagnostic tools for the detection of chronic pulmonary diseases, Tuberculosis and asthmatic conditions using cough sounds. During the past two years, several research works of varying scales have been undertaken by the speech and signal processing community for analyzing the acoustic symptoms of COVID. The motivation for the development of acoustic-based tools for COVID diagnostics arises from the key limitations of cost, time, and safety of the current gold standard in COVID testing, namely the reverse transcription polymerase chain reaction (RT-PCR) testing.
In this talk, I will survey the major efforts undertaken by groups across the world in i) developing data resources of acoustic signals for COVID-19 diagnostics, and ii) designing models and learning algorithms for tool development. The landscape of data resources ranges from controlled hospital recordings to crowdsourced smartphone-based data. While the primary signal modality recorded is the cough data, the impact of COVID on other modalities like breathing, speech and symptom data are also studied. In the talk, I will also discuss the considerations in designing data representations and machine learning models for COVID detection from acoustic data. The pointers to open-source data resources and tools will be highlighted with the aim of encouraging budding researchers to pursue this important direction.
The talk will conclude by remarking about the progress made by our group, Coswara, where a multi-modal combination of information from several modalities shows the potential to surpass regulatory requirements needed for a rapid acoustic-based point of care testing (POCT) tool. </p></div>
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Friday 3 September 2021, Room A+B|<|
|^Chairs: |^John Hansen|
|^ |^Hynek Bořil|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210309.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-O-1-1|PAPER Fri-A-O-1-1 — Unsupervised Representation Learning for Speech Activity Detection in the Fearless Steps Challenge 2021]]</div>|<div class="cpsessionviewpapertitle">Unsupervised Representation Learning for Speech Activity Detection in the Fearless Steps Challenge 2021</div><div class="cpsessionviewpaperauthor">[[Pablo Gimeno|AUTHOR Pablo Gimeno]], [[Alfonso Ortega|AUTHOR Alfonso Ortega]], [[Antonio Miguel|AUTHOR Antonio Miguel]], [[Eduardo Lleida|AUTHOR Eduardo Lleida]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210651.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-O-1-2|PAPER Fri-A-O-1-2 — The Application of Learnable STRF Kernels to the 2021 Fearless Steps Phase-03 SAD Challenge]]</div>|<div class="cpsessionviewpapertitle">The Application of Learnable STRF Kernels to the 2021 Fearless Steps Phase-03 SAD Challenge</div><div class="cpsessionviewpaperauthor">[[Tyler Vuong|AUTHOR Tyler Vuong]], [[Yangyang Xia|AUTHOR Yangyang Xia]], [[Richard M. Stern|AUTHOR Richard M. Stern]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211058.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-O-1-3|PAPER Fri-A-O-1-3 — Speech Activity Detection Based on Multilingual Speech Recognition System]]</div>|<div class="cpsessionviewpapertitle">Speech Activity Detection Based on Multilingual Speech Recognition System</div><div class="cpsessionviewpaperauthor">[[Seyyed Saeed Sarfjoo|AUTHOR Seyyed Saeed Sarfjoo]], [[Srikanth Madikeri|AUTHOR Srikanth Madikeri]], [[Petr Motlicek|AUTHOR Petr Motlicek]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211234.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-O-1-4|PAPER Fri-A-O-1-4 — Voice Activity Detection with Teacher-Student Domain Emulation]]</div>|<div class="cpsessionviewpapertitle">Voice Activity Detection with Teacher-Student Domain Emulation</div><div class="cpsessionviewpaperauthor">[[Jarrod Luckenbaugh|AUTHOR Jarrod Luckenbaugh]], [[Samuel Abplanalp|AUTHOR Samuel Abplanalp]], [[Rachel Gonzalez|AUTHOR Rachel Gonzalez]], [[Daniel Fulford|AUTHOR Daniel Fulford]], [[David Gard|AUTHOR David Gard]], [[Carlos Busso|AUTHOR Carlos Busso]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211456.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-O-1-5|PAPER Fri-A-O-1-5 — EML Online Speech Activity Detection for the Fearless Steps Challenge Phase-III]]</div>|<div class="cpsessionviewpapertitle">EML Online Speech Activity Detection for the Fearless Steps Challenge Phase-III</div><div class="cpsessionviewpaperauthor">[[Omid Ghahabi|AUTHOR Omid Ghahabi]], [[Volker Fischer|AUTHOR Volker Fischer]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Friday 3 September 2021, Room D|<|
|^Chairs: |^Jan Chorowski|
|^ |^Jean-Luc Gauvain|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211316.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-O-2-1|PAPER Fri-A-O-2-1 — Device Playback Augmentation with Echo Cancellation for Keyword Spotting]]</div>|<div class="cpsessionviewpapertitle">Device Playback Augmentation with Echo Cancellation for Keyword Spotting</div><div class="cpsessionviewpaperauthor">[[Kuba Łopatka|AUTHOR Kuba Łopatka]], [[Katarzyna Kaszuba-Miotke|AUTHOR Katarzyna Kaszuba-Miotke]], [[Piotr Klinke|AUTHOR Piotr Klinke]], [[Paweł Trella|AUTHOR Paweł Trella]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211399.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-O-2-2|PAPER Fri-A-O-2-2 — End-to-End Open Vocabulary Keyword Search]]</div>|<div class="cpsessionviewpapertitle">End-to-End Open Vocabulary Keyword Search</div><div class="cpsessionviewpaperauthor">[[Bolaji Yusuf|AUTHOR Bolaji Yusuf]], [[Alican Gok|AUTHOR Alican Gok]], [[Batuhan Gundogdu|AUTHOR Batuhan Gundogdu]], [[Murat Saraclar|AUTHOR Murat Saraclar]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211464.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-O-2-3|PAPER Fri-A-O-2-3 — Semantic Sentence Similarity: Size does not Always Matter]]</div>|<div class="cpsessionviewpapertitle">Semantic Sentence Similarity: Size does not Always Matter</div><div class="cpsessionviewpaperauthor">[[Danny Merkx|AUTHOR Danny Merkx]], [[Stefan L. Frank|AUTHOR Stefan L. Frank]], [[Mirjam Ernestus|AUTHOR Mirjam Ernestus]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211704.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-O-2-4|PAPER Fri-A-O-2-4 — Spoken Term Detection and Relevance Score Estimation Using Dot-Product of Pronunciation Embeddings]]</div>|<div class="cpsessionviewpapertitle">Spoken Term Detection and Relevance Score Estimation Using Dot-Product of Pronunciation Embeddings</div><div class="cpsessionviewpaperauthor">[[Jan Švec|AUTHOR Jan Švec]], [[Luboš Šmídl|AUTHOR Luboš Šmídl]], [[Josef V. Psutka|AUTHOR Josef V. Psutka]], [[Aleš Pražák|AUTHOR Aleš Pražák]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211762.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-O-2-5|PAPER Fri-A-O-2-5 — Toward Genre Adapted Closed Captioning]]</div>|<div class="cpsessionviewpapertitle">Toward Genre Adapted Closed Captioning</div><div class="cpsessionviewpaperauthor">[[François Buet|AUTHOR François Buet]], [[François Yvon|AUTHOR François Yvon]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Friday 3 September 2021, (Virtual)|<|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218021.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-S&T-1-1|PAPER Fri-A-S&T-1-1 — Interactive and Real-Time Acoustic Measurement Tools for Speech Data Acquisition and Presentation: Application of an Extended Member of Time Stretched Pulses]]</div>|<div class="cpsessionviewpapertitle">Interactive and Real-Time Acoustic Measurement Tools for Speech Data Acquisition and Presentation: Application of an Extended Member of Time Stretched Pulses</div><div class="cpsessionviewpaperauthor">[[Hideki Kawahara|AUTHOR Hideki Kawahara]], [[Kohei Yatabe|AUTHOR Kohei Yatabe]], [[Ken-Ichi Sakakibara|AUTHOR Ken-Ichi Sakakibara]], [[Mitsunori Mizumachi|AUTHOR Mitsunori Mizumachi]], [[Masanori Morise|AUTHOR Masanori Morise]], [[Hideki Banno|AUTHOR Hideki Banno]], [[Toshio Irino|AUTHOR Toshio Irino]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218023.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-S&T-1-2|PAPER Fri-A-S&T-1-2 — Save Your Voice: Voice Banking and TTS for Anyone]]</div>|<div class="cpsessionviewpapertitle">Save Your Voice: Voice Banking and TTS for Anyone</div><div class="cpsessionviewpaperauthor">[[Daniel Tihelka|AUTHOR Daniel Tihelka]], [[Markéta Řezáčková|AUTHOR Markéta Řezáčková]], [[Martin Grůber|AUTHOR Martin Grůber]], [[Zdeněk Hanzlíček|AUTHOR Zdeněk Hanzlíček]], [[Jakub Vít|AUTHOR Jakub Vít]], [[Jindřich Matoušek|AUTHOR Jindřich Matoušek]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218024.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-S&T-1-3|PAPER Fri-A-S&T-1-3 — NeMo (Inverse) Text Normalization: From Development to Production]]</div>|<div class="cpsessionviewpapertitle">NeMo (Inverse) Text Normalization: From Development to Production</div><div class="cpsessionviewpaperauthor">[[Yang Zhang|AUTHOR Yang Zhang]], [[Evelina Bakhturina|AUTHOR Evelina Bakhturina]], [[Boris Ginsburg|AUTHOR Boris Ginsburg]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218025.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-S&T-1-4|PAPER Fri-A-S&T-1-4 — Lalilo: A Reading Assistant for Children Featuring Speech Recognition-Based Reading Mistake Detection]]</div>|<div class="cpsessionviewpapertitle">Lalilo: A Reading Assistant for Children Featuring Speech Recognition-Based Reading Mistake Detection</div><div class="cpsessionviewpaperauthor">[[Corentin Hembise|AUTHOR Corentin Hembise]], [[Lucile Gelin|AUTHOR Lucile Gelin]], [[Morgane Daniel|AUTHOR Morgane Daniel]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218026.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-S&T-1-5|PAPER Fri-A-S&T-1-5 — Automatic Radiology Report Editing Through Voice]]</div>|<div class="cpsessionviewpapertitle">Automatic Radiology Report Editing Through Voice</div><div class="cpsessionviewpaperauthor">[[Manh Hung Nguyen|AUTHOR Manh Hung Nguyen]], [[Vu Hoang|AUTHOR Vu Hoang]], [[Tu Anh Nguyen|AUTHOR Tu Anh Nguyen]], [[Trung H. Bui|AUTHOR Trung H. Bui]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218028.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-S&T-1-6|PAPER Fri-A-S&T-1-6 — WittyKiddy: Multilingual Spoken Language Learning for Kids]]</div>|<div class="cpsessionviewpapertitle">WittyKiddy: Multilingual Spoken Language Learning for Kids</div><div class="cpsessionviewpaperauthor">[[Ke Shi|AUTHOR Ke Shi]], [[Kye Min Tan|AUTHOR Kye Min Tan]], [[Huayun Zhang|AUTHOR Huayun Zhang]], [[Siti Umairah Md. Salleh|AUTHOR Siti Umairah Md. Salleh]], [[Shikang Ni|AUTHOR Shikang Ni]], [[Nancy F. Chen|AUTHOR Nancy F. Chen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218032.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-S&T-1-7|PAPER Fri-A-S&T-1-7 — Duplex Conversation in Outbound Agent System]]</div>|<div class="cpsessionviewpapertitle">Duplex Conversation in Outbound Agent System</div><div class="cpsessionviewpaperauthor">[[Chunxiang Jin|AUTHOR Chunxiang Jin]], [[Minghui Yang|AUTHOR Minghui Yang]], [[Zujie Wen|AUTHOR Zujie Wen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218033.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-S&T-1-8|PAPER Fri-A-S&T-1-8 — Web Interface for Estimating Articulatory Movements in Speech Production from Acoustics and Text]]</div>|<div class="cpsessionviewpapertitle">Web Interface for Estimating Articulatory Movements in Speech Production from Acoustics and Text</div><div class="cpsessionviewpaperauthor">[[Sathvik Udupa|AUTHOR Sathvik Udupa]], [[Anwesha Roy|AUTHOR Anwesha Roy]], [[Abhayjeet Singh|AUTHOR Abhayjeet Singh]], [[Aravind Illa|AUTHOR Aravind Illa]], [[Prasanta Kumar Ghosh|AUTHOR Prasanta Kumar Ghosh]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Friday 3 September 2021, Room Lacina|<|
|^Chairs: |^Ross Cutler|
|^ |^Ando Saabas|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211870.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-SS-1-1|PAPER Fri-A-SS-1-1 — INTERSPEECH 2021 Acoustic Echo Cancellation Challenge]]</div>|<div class="cpsessionviewpapertitle">INTERSPEECH 2021 Acoustic Echo Cancellation Challenge</div><div class="cpsessionviewpaperauthor">[[Ross Cutler|AUTHOR Ross Cutler]], [[Ando Saabas|AUTHOR Ando Saabas]], [[Tanel Parnamaa|AUTHOR Tanel Parnamaa]], [[Markus Loide|AUTHOR Markus Loide]], [[Sten Sootla|AUTHOR Sten Sootla]], [[Marju Purin|AUTHOR Marju Purin]], [[Hannes Gamper|AUTHOR Hannes Gamper]], [[Sebastian Braun|AUTHOR Sebastian Braun]], [[Karsten Sorensen|AUTHOR Karsten Sorensen]], [[Robert Aichner|AUTHOR Robert Aichner]], [[Sriram Srinivasan|AUTHOR Sriram Srinivasan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210085.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-SS-1-2|PAPER Fri-A-SS-1-2 — Acoustic Echo Cancellation with Cross-Domain Learning]]</div>|<div class="cpsessionviewpapertitle">Acoustic Echo Cancellation with Cross-Domain Learning</div><div class="cpsessionviewpaperauthor">[[Lukas Pfeifenberger|AUTHOR Lukas Pfeifenberger]], [[Matthias Zoehrer|AUTHOR Matthias Zoehrer]], [[Franz Pernkopf|AUTHOR Franz Pernkopf]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211359.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-SS-1-3|PAPER Fri-A-SS-1-3 — F-T-LSTM Based Complex Network for Joint Acoustic Echo Cancellation and Speech Enhancement]]</div>|<div class="cpsessionviewpapertitle">F-T-LSTM Based Complex Network for Joint Acoustic Echo Cancellation and Speech Enhancement</div><div class="cpsessionviewpaperauthor">[[Shimin Zhang|AUTHOR Shimin Zhang]], [[Yuxiang Kong|AUTHOR Yuxiang Kong]], [[Shubo Lv|AUTHOR Shubo Lv]], [[Yanxin Hu|AUTHOR Yanxin Hu]], [[Lei Xie|AUTHOR Lei Xie]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211590.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-SS-1-4|PAPER Fri-A-SS-1-4 — Y²-Net FCRN for Acoustic Echo and Noise Suppression]]</div>|<div class="cpsessionviewpapertitle">Y²-Net FCRN for Acoustic Echo and Noise Suppression</div><div class="cpsessionviewpaperauthor">[[Ernst Seidel|AUTHOR Ernst Seidel]], [[Jan Franzen|AUTHOR Jan Franzen]], [[Maximilian Strake|AUTHOR Maximilian Strake]], [[Tim Fingscheidt|AUTHOR Tim Fingscheidt]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212022.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-SS-1-5|PAPER Fri-A-SS-1-5 — Acoustic Echo Cancellation Using Deep Complex Neural Network with Nonlinear Magnitude Compression and Phase Information]]</div>|<div class="cpsessionviewpapertitle">Acoustic Echo Cancellation Using Deep Complex Neural Network with Nonlinear Magnitude Compression and Phase Information</div><div class="cpsessionviewpaperauthor">[[Renhua Peng|AUTHOR Renhua Peng]], [[Linjuan Cheng|AUTHOR Linjuan Cheng]], [[Chengshi Zheng|AUTHOR Chengshi Zheng]], [[Xiaodong Li|AUTHOR Xiaodong Li]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210722.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-SS-1-6|PAPER Fri-A-SS-1-6 — Nonlinear Acoustic Echo Cancellation with Deep Learning]]</div>|<div class="cpsessionviewpapertitle">Nonlinear Acoustic Echo Cancellation with Deep Learning</div><div class="cpsessionviewpaperauthor">[[Amir Ivry|AUTHOR Amir Ivry]], [[Israel Cohen|AUTHOR Israel Cohen]], [[Baruch Berdugo|AUTHOR Baruch Berdugo]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Friday 3 September 2021, Room C|<|
|^Chairs: |^Katrin Tomanek|
|^ |^Jordan Green|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> ||<div class="cpsessionviewpapertitle">Introduction</div><div class="cpsessionviewpaperauthor"></div>|
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> ||<div class="cpsessionviewpapertitle">Short Presentations of Papers</div><div class="cpsessionviewpaperauthor"></div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211384.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-SS-2-1|PAPER Fri-A-SS-2-1 — Automatic Speech Recognition of Disordered Speech: Personalized Models Outperforming Human Listeners on Short Phrases]]</div>|<div class="cpsessionviewpapertitle">Automatic Speech Recognition of Disordered Speech: Personalized Models Outperforming Human Listeners on Short Phrases</div><div class="cpsessionviewpaperauthor">[[Jordan R. Green|AUTHOR Jordan R. Green]], [[Robert L. MacDonald|AUTHOR Robert L. MacDonald]], [[Pan-Pan Jiang|AUTHOR Pan-Pan Jiang]], [[Julie Cattiau|AUTHOR Julie Cattiau]], [[Rus Heywood|AUTHOR Rus Heywood]], [[Richard Cave|AUTHOR Richard Cave]], [[Katie Seaver|AUTHOR Katie Seaver]], [[Marilyn A. Ladewig|AUTHOR Marilyn A. Ladewig]], [[Jimmy Tobin|AUTHOR Jimmy Tobin]], [[Michael P. Brenner|AUTHOR Michael P. Brenner]], [[Philip C. Nelson|AUTHOR Philip C. Nelson]], [[Katrin Tomanek|AUTHOR Katrin Tomanek]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211801.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-SS-2-2|PAPER Fri-A-SS-2-2 — Investigating the Utility of Multimodal Conversational Technology and Audiovisual Analytic Measures for the Assessment and Monitoring of Amyotrophic Lateral Sclerosis at Scale]]</div>|<div class="cpsessionviewpapertitle">Investigating the Utility of Multimodal Conversational Technology and Audiovisual Analytic Measures for the Assessment and Monitoring of Amyotrophic Lateral Sclerosis at Scale</div><div class="cpsessionviewpaperauthor">[[Michael Neumann|AUTHOR Michael Neumann]], [[Oliver Roesler|AUTHOR Oliver Roesler]], [[Jackson Liscombe|AUTHOR Jackson Liscombe]], [[Hardik Kothare|AUTHOR Hardik Kothare]], [[David Suendermann-Oeft|AUTHOR David Suendermann-Oeft]], [[David Pautler|AUTHOR David Pautler]], [[Indu Navar|AUTHOR Indu Navar]], [[Aria Anvar|AUTHOR Aria Anvar]], [[Jochen Kumm|AUTHOR Jochen Kumm]], [[Raquel Norel|AUTHOR Raquel Norel]], [[Ernest Fraenkel|AUTHOR Ernest Fraenkel]], [[Alexander V. Sherman|AUTHOR Alexander V. Sherman]], [[James D. Berry|AUTHOR James D. Berry]], [[Gary L. Pattee|AUTHOR Gary L. Pattee]], [[Jun Wang|AUTHOR Jun Wang]], [[Jordan R. Green|AUTHOR Jordan R. Green]], [[Vikram Ramanarayanan|AUTHOR Vikram Ramanarayanan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212212.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-SS-2-3|PAPER Fri-A-SS-2-3 — Handling Acoustic Variation in Dysarthric Speech Recognition Systems Through Model Combination]]</div>|<div class="cpsessionviewpapertitle">Handling Acoustic Variation in Dysarthric Speech Recognition Systems Through Model Combination</div><div class="cpsessionviewpaperauthor">[[Enno Hermann|AUTHOR Enno Hermann]], [[Mathew Magimai-Doss|AUTHOR Mathew Magimai-Doss]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210060.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-SS-2-4|PAPER Fri-A-SS-2-4 — Spectro-Temporal Deep Features for Disordered Speech Assessment and Recognition]]</div>|<div class="cpsessionviewpapertitle">Spectro-Temporal Deep Features for Disordered Speech Assessment and Recognition</div><div class="cpsessionviewpaperauthor">[[Mengzhe Geng|AUTHOR Mengzhe Geng]], [[Shansong Liu|AUTHOR Shansong Liu]], [[Jianwei Yu|AUTHOR Jianwei Yu]], [[Xurong Xie|AUTHOR Xurong Xie]], [[Shoukang Hu|AUTHOR Shoukang Hu]], [[Zi Ye|AUTHOR Zi Ye]], [[Zengrui Jin|AUTHOR Zengrui Jin]], [[Xunying Liu|AUTHOR Xunying Liu]], [[Helen Meng|AUTHOR Helen Meng]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210099.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-SS-2-5|PAPER Fri-A-SS-2-5 — Speaking with a KN95 Face Mask: ASR Performance and Speaker Compensation]]</div>|<div class="cpsessionviewpapertitle">Speaking with a KN95 Face Mask: ASR Performance and Speaker Compensation</div><div class="cpsessionviewpaperauthor">[[Sarah E. Gutz|AUTHOR Sarah E. Gutz]], [[Hannah P. Rowe|AUTHOR Hannah P. Rowe]], [[Jordan R. Green|AUTHOR Jordan R. Green]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210168.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-SS-2-6|PAPER Fri-A-SS-2-6 — Adversarial Data Augmentation for Disordered Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Adversarial Data Augmentation for Disordered Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Zengrui Jin|AUTHOR Zengrui Jin]], [[Mengzhe Geng|AUTHOR Mengzhe Geng]], [[Xurong Xie|AUTHOR Xurong Xie]], [[Jianwei Yu|AUTHOR Jianwei Yu]], [[Shansong Liu|AUTHOR Shansong Liu]], [[Xunying Liu|AUTHOR Xunying Liu]], [[Helen Meng|AUTHOR Helen Meng]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210173.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-SS-2-7|PAPER Fri-A-SS-2-7 — Variational Auto-Encoder Based Variability Encoding for Dysarthric Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Variational Auto-Encoder Based Variability Encoding for Dysarthric Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Xurong Xie|AUTHOR Xurong Xie]], [[Rukiye Ruzi|AUTHOR Rukiye Ruzi]], [[Xunying Liu|AUTHOR Xunying Liu]], [[Lan Wang|AUTHOR Lan Wang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210285.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-SS-2-8|PAPER Fri-A-SS-2-8 — Learning Explicit Prosody Models and Deep Speaker Embeddings for Atypical Voice Conversion]]</div>|<div class="cpsessionviewpapertitle">Learning Explicit Prosody Models and Deep Speaker Embeddings for Atypical Voice Conversion</div><div class="cpsessionviewpaperauthor">[[Disong Wang|AUTHOR Disong Wang]], [[Songxiang Liu|AUTHOR Songxiang Liu]], [[Lifa Sun|AUTHOR Lifa Sun]], [[Xixin Wu|AUTHOR Xixin Wu]], [[Xunying Liu|AUTHOR Xunying Liu]], [[Helen Meng|AUTHOR Helen Meng]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210289.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-SS-2-9|PAPER Fri-A-SS-2-9 — Bayesian Parametric and Architectural Domain Adaptation of LF-MMI Trained TDNNs for Elderly and Dysarthric Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Bayesian Parametric and Architectural Domain Adaptation of LF-MMI Trained TDNNs for Elderly and Dysarthric Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Jiajun Deng|AUTHOR Jiajun Deng]], [[Fabian Ritter Gutierrez|AUTHOR Fabian Ritter Gutierrez]], [[Shoukang Hu|AUTHOR Shoukang Hu]], [[Mengzhe Geng|AUTHOR Mengzhe Geng]], [[Xurong Xie|AUTHOR Xurong Xie]], [[Zi Ye|AUTHOR Zi Ye]], [[Shansong Liu|AUTHOR Shansong Liu]], [[Jianwei Yu|AUTHOR Jianwei Yu]], [[Xunying Liu|AUTHOR Xunying Liu]], [[Helen Meng|AUTHOR Helen Meng]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210330.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-SS-2-10|PAPER Fri-A-SS-2-10 — A Voice-Activated Switch for Persons with Motor and Speech Impairments: Isolated-Vowel Spotting Using Neural Networks]]</div>|<div class="cpsessionviewpapertitle">A Voice-Activated Switch for Persons with Motor and Speech Impairments: Isolated-Vowel Spotting Using Neural Networks</div><div class="cpsessionviewpaperauthor">[[Shanqing Cai|AUTHOR Shanqing Cai]], [[Lisie Lillianfeld|AUTHOR Lisie Lillianfeld]], [[Katie Seaver|AUTHOR Katie Seaver]], [[Jordan R. Green|AUTHOR Jordan R. Green]], [[Michael P. Brenner|AUTHOR Michael P. Brenner]], [[Philip C. Nelson|AUTHOR Philip C. Nelson]], [[D. Sculley|AUTHOR D. Sculley]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210676.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-SS-2-11|PAPER Fri-A-SS-2-11 — Conformer Parrotron: A Faster and Stronger End-to-End Speech Conversion and Recognition Model for Atypical Speech]]</div>|<div class="cpsessionviewpapertitle">Conformer Parrotron: A Faster and Stronger End-to-End Speech Conversion and Recognition Model for Atypical Speech</div><div class="cpsessionviewpaperauthor">[[Zhehuai Chen|AUTHOR Zhehuai Chen]], [[Bhuvana Ramabhadran|AUTHOR Bhuvana Ramabhadran]], [[Fadi Biadsy|AUTHOR Fadi Biadsy]], [[Xia Zhang|AUTHOR Xia Zhang]], [[Youzheng Chen|AUTHOR Youzheng Chen]], [[Liyang Jiang|AUTHOR Liyang Jiang]], [[Fang Chu|AUTHOR Fang Chu]], [[Rohan Doshi|AUTHOR Rohan Doshi]], [[Pedro J. Moreno|AUTHOR Pedro J. Moreno]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210697.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-SS-2-12|PAPER Fri-A-SS-2-12 — Disordered Speech Data Collection: Lessons Learned at 1 Million Utterances from Project Euphonia]]</div>|<div class="cpsessionviewpapertitle">Disordered Speech Data Collection: Lessons Learned at 1 Million Utterances from Project Euphonia</div><div class="cpsessionviewpaperauthor">[[Robert L. MacDonald|AUTHOR Robert L. MacDonald]], [[Pan-Pan Jiang|AUTHOR Pan-Pan Jiang]], [[Julie Cattiau|AUTHOR Julie Cattiau]], [[Rus Heywood|AUTHOR Rus Heywood]], [[Richard Cave|AUTHOR Richard Cave]], [[Katie Seaver|AUTHOR Katie Seaver]], [[Marilyn A. Ladewig|AUTHOR Marilyn A. Ladewig]], [[Jimmy Tobin|AUTHOR Jimmy Tobin]], [[Michael P. Brenner|AUTHOR Michael P. Brenner]], [[Philip C. Nelson|AUTHOR Philip C. Nelson]], [[Jordan R. Green|AUTHOR Jordan R. Green]], [[Katrin Tomanek|AUTHOR Katrin Tomanek]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211353.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-SS-2-13|PAPER Fri-A-SS-2-13 — Automatic Severity Classification of Korean Dysarthric Speech Using Phoneme-Level Pronunciation Features]]</div>|<div class="cpsessionviewpapertitle">Automatic Severity Classification of Korean Dysarthric Speech Using Phoneme-Level Pronunciation Features</div><div class="cpsessionviewpaperauthor">[[Eun Jung Yeo|AUTHOR Eun Jung Yeo]], [[Sunhee Kim|AUTHOR Sunhee Kim]], [[Minhwa Chung|AUTHOR Minhwa Chung]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211913.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-SS-2-14|PAPER Fri-A-SS-2-14 — Comparing Supervised Models and Learned Speech Representations for Classifying Intelligibility of Disordered Speech on Selected Phrases]]</div>|<div class="cpsessionviewpapertitle">Comparing Supervised Models and Learned Speech Representations for Classifying Intelligibility of Disordered Speech on Selected Phrases</div><div class="cpsessionviewpaperauthor">[[Subhashini Venugopalan|AUTHOR Subhashini Venugopalan]], [[Joel Shor|AUTHOR Joel Shor]], [[Manoj Plakal|AUTHOR Manoj Plakal]], [[Jimmy Tobin|AUTHOR Jimmy Tobin]], [[Katrin Tomanek|AUTHOR Katrin Tomanek]], [[Jordan R. Green|AUTHOR Jordan R. Green]], [[Michael P. Brenner|AUTHOR Michael P. Brenner]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212006.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-SS-2-15|PAPER Fri-A-SS-2-15 — Analysis and Tuning of a Voice Assistant System for Dysfluent Speech]]</div>|<div class="cpsessionviewpapertitle">Analysis and Tuning of a Voice Assistant System for Dysfluent Speech</div><div class="cpsessionviewpaperauthor">[[Vikramjit Mitra|AUTHOR Vikramjit Mitra]], [[Zifang Huang|AUTHOR Zifang Huang]], [[Colin Lea|AUTHOR Colin Lea]], [[Lauren Tooley|AUTHOR Lauren Tooley]], [[Sarah Wu|AUTHOR Sarah Wu]], [[Darren Botten|AUTHOR Darren Botten]], [[Ashwini Palekar|AUTHOR Ashwini Palekar]], [[Shrinath Thelapurath|AUTHOR Shrinath Thelapurath]], [[Panayiotis Georgiou|AUTHOR Panayiotis Georgiou]], [[Sachin Kajarekar|AUTHOR Sachin Kajarekar]], [[Jefferey Bigham|AUTHOR Jefferey Bigham]]</div>|
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> ||<div class="cpsessionviewpapertitle">Group Discussion</div><div class="cpsessionviewpaperauthor"></div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Friday 3 September 2021, (Virtual)|<|
|^Chairs: |^Kate Knill|
|^ |^Michael Picheny|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210038.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-1-1|PAPER Fri-A-V-1-1 — Weakly-Supervised Word-Level Pronunciation Error Detection in Non-Native English Speech]]</div>|<div class="cpsessionviewpapertitle">Weakly-Supervised Word-Level Pronunciation Error Detection in Non-Native English Speech</div><div class="cpsessionviewpaperauthor">[[Daniel Korzekwa|AUTHOR Daniel Korzekwa]], [[Jaime Lorenzo-Trueba|AUTHOR Jaime Lorenzo-Trueba]], [[Thomas Drugman|AUTHOR Thomas Drugman]], [[Shira Calamaro|AUTHOR Shira Calamaro]], [[Bozena Kostek|AUTHOR Bozena Kostek]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210101.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-1-2|PAPER Fri-A-V-1-2 — End-to-End Speaker-Attributed ASR with Transformer]]</div>|<div class="cpsessionviewpapertitle">End-to-End Speaker-Attributed ASR with Transformer</div><div class="cpsessionviewpaperauthor">[[Naoyuki Kanda|AUTHOR Naoyuki Kanda]], [[Guoli Ye|AUTHOR Guoli Ye]], [[Yashesh Gaur|AUTHOR Yashesh Gaur]], [[Xiaofei Wang|AUTHOR Xiaofei Wang]], [[Zhong Meng|AUTHOR Zhong Meng]], [[Zhuo Chen|AUTHOR Zhuo Chen]], [[Takuya Yoshioka|AUTHOR Takuya Yoshioka]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210691.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-1-3|PAPER Fri-A-V-1-3 — Understanding Medical Conversations: Rich Transcription, Confidence Scores & Information Extraction]]</div>|<div class="cpsessionviewpapertitle">Understanding Medical Conversations: Rich Transcription, Confidence Scores & Information Extraction</div><div class="cpsessionviewpaperauthor">[[Hagen Soltau|AUTHOR Hagen Soltau]], [[Mingqiu Wang|AUTHOR Mingqiu Wang]], [[Izhak Shafran|AUTHOR Izhak Shafran]], [[Laurent El Shafey|AUTHOR Laurent El Shafey]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210745.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-1-4|PAPER Fri-A-V-1-4 — Phone-Level Pronunciation Scoring for Spanish Speakers Learning English Using a GOP-DNN System]]</div>|<div class="cpsessionviewpapertitle">Phone-Level Pronunciation Scoring for Spanish Speakers Learning English Using a GOP-DNN System</div><div class="cpsessionviewpaperauthor">[[Jazmín Vidal|AUTHOR Jazmín Vidal]], [[Cyntia Bonomi|AUTHOR Cyntia Bonomi]], [[Marcelo Sancinetti|AUTHOR Marcelo Sancinetti]], [[Luciana Ferrer|AUTHOR Luciana Ferrer]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210777.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-1-5|PAPER Fri-A-V-1-5 — Explore wav2vec 2.0 for Mispronunciation Detection]]</div>|<div class="cpsessionviewpapertitle">Explore wav2vec 2.0 for Mispronunciation Detection</div><div class="cpsessionviewpaperauthor">[[Xiaoshuo Xu|AUTHOR Xiaoshuo Xu]], [[Yueteng Kang|AUTHOR Yueteng Kang]], [[Songjun Cao|AUTHOR Songjun Cao]], [[Binghuai Lin|AUTHOR Binghuai Lin]], [[Long Ma|AUTHOR Long Ma]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210853.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-1-6|PAPER Fri-A-V-1-6 — Lexical Density Analysis of Word Productions in Japanese English Using Acoustic Word Embeddings]]</div>|<div class="cpsessionviewpapertitle">Lexical Density Analysis of Word Productions in Japanese English Using Acoustic Word Embeddings</div><div class="cpsessionviewpaperauthor">[[Shintaro Ando|AUTHOR Shintaro Ando]], [[Nobuaki Minematsu|AUTHOR Nobuaki Minematsu]], [[Daisuke Saito|AUTHOR Daisuke Saito]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210931.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-1-7|PAPER Fri-A-V-1-7 — Deep Feature Transfer Learning for Automatic Pronunciation Assessment]]</div>|<div class="cpsessionviewpapertitle">Deep Feature Transfer Learning for Automatic Pronunciation Assessment</div><div class="cpsessionviewpaperauthor">[[Binghuai Lin|AUTHOR Binghuai Lin]], [[Liyuan Wang|AUTHOR Liyuan Wang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211258.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-1-8|PAPER Fri-A-V-1-8 — Multilingual Speech Evaluation: Case Studies on English, Malay and Tamil]]</div>|<div class="cpsessionviewpapertitle">Multilingual Speech Evaluation: Case Studies on English, Malay and Tamil</div><div class="cpsessionviewpaperauthor">[[Huayun Zhang|AUTHOR Huayun Zhang]], [[Ke Shi|AUTHOR Ke Shi]], [[Nancy F. Chen|AUTHOR Nancy F. Chen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211344.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-1-9|PAPER Fri-A-V-1-9 — A Study on Fine-Tuning wav2vec2.0 Model for the Task of Mispronunciation Detection and Diagnosis]]</div>|<div class="cpsessionviewpapertitle">A Study on Fine-Tuning wav2vec2.0 Model for the Task of Mispronunciation Detection and Diagnosis</div><div class="cpsessionviewpaperauthor">[[Linkai Peng|AUTHOR Linkai Peng]], [[Kaiqi Fu|AUTHOR Kaiqi Fu]], [[Binghuai Lin|AUTHOR Binghuai Lin]], [[Dengfeng Ke|AUTHOR Dengfeng Ke]], [[Jinsong Zhan|AUTHOR Jinsong Zhan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211402.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-1-10|PAPER Fri-A-V-1-10 — The Impact of ASR on the Automatic Analysis of Linguistic Complexity and Sophistication in Spontaneous L2 Speech]]</div>|<div class="cpsessionviewpapertitle">The Impact of ASR on the Automatic Analysis of Linguistic Complexity and Sophistication in Spontaneous L2 Speech</div><div class="cpsessionviewpaperauthor">[[Yu Qiao|AUTHOR Yu Qiao]], [[Wei Zhou|AUTHOR Wei Zhou]], [[Elma Kerz|AUTHOR Elma Kerz]], [[Ralf Schlüter|AUTHOR Ralf Schlüter]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211981.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-1-11|PAPER Fri-A-V-1-11 — End-to-End Rich Transcription-Style Automatic Speech Recognition with Semi-Supervised Learning]]</div>|<div class="cpsessionviewpapertitle">End-to-End Rich Transcription-Style Automatic Speech Recognition with Semi-Supervised Learning</div><div class="cpsessionviewpaperauthor">[[Tomohiro Tanaka|AUTHOR Tomohiro Tanaka]], [[Ryo Masumura|AUTHOR Ryo Masumura]], [[Mana Ihori|AUTHOR Mana Ihori]], [[Akihiko Takashima|AUTHOR Akihiko Takashima]], [[Shota Orihashi|AUTHOR Shota Orihashi]], [[Naoki Makishima|AUTHOR Naoki Makishima]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212140.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-1-12|PAPER Fri-A-V-1-12 — “You don’t understand me!”: Comparing ASR Results for L1 and L2 Speakers of Swedish]]</div>|<div class="cpsessionviewpapertitle">“You don’t understand me!”: Comparing ASR Results for L1 and L2 Speakers of Swedish</div><div class="cpsessionviewpaperauthor">[[Ronald Cumbal|AUTHOR Ronald Cumbal]], [[Birger Moell|AUTHOR Birger Moell]], [[José Lopes|AUTHOR José Lopes]], [[Olov Engwall|AUTHOR Olov Engwall]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211571.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-1-13|PAPER Fri-A-V-1-13 — NeMo Inverse Text Normalization: From Development to Production]]</div>|<div class="cpsessionviewpapertitle">NeMo Inverse Text Normalization: From Development to Production</div><div class="cpsessionviewpaperauthor">[[Yang Zhang|AUTHOR Yang Zhang]], [[Evelina Bakhturina|AUTHOR Evelina Bakhturina]], [[Kyle Gorman|AUTHOR Kyle Gorman]], [[Boris Ginsburg|AUTHOR Boris Ginsburg]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211132.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-1-14|PAPER Fri-A-V-1-14 — Improvement of Automatic English Pronunciation Assessment with Small Number of Utterances Using Sentence Speakability]]</div>|<div class="cpsessionviewpapertitle">Improvement of Automatic English Pronunciation Assessment with Small Number of Utterances Using Sentence Speakability</div><div class="cpsessionviewpaperauthor">[[Satsuki Naijo|AUTHOR Satsuki Naijo]], [[Akinori Ito|AUTHOR Akinori Ito]], [[Takashi Nose|AUTHOR Takashi Nose]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Friday 3 September 2021, (Virtual)|<|
|^Chairs: |^Chi-Chun Lee|
|^ |^Carol Espy-Wilson|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211761.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-2-1|PAPER Fri-A-V-2-1 — Affect Recognition Through Scalogram and Multi-Resolution Cochleagram Features]]</div>|<div class="cpsessionviewpapertitle">Affect Recognition Through Scalogram and Multi-Resolution Cochleagram Features</div><div class="cpsessionviewpaperauthor">[[Fasih Haider|AUTHOR Fasih Haider]], [[Saturnino Luz|AUTHOR Saturnino Luz]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210718.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-2-2|PAPER Fri-A-V-2-2 — A Speech Emotion Recognition Framework for Better Discrimination of Confusions]]</div>|<div class="cpsessionviewpapertitle">A Speech Emotion Recognition Framework for Better Discrimination of Confusions</div><div class="cpsessionviewpaperauthor">[[Jiawang Liu|AUTHOR Jiawang Liu]], [[Haoxiang Wang|AUTHOR Haoxiang Wang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210785.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-2-3|PAPER Fri-A-V-2-3 — Speech Emotion Recognition via Multi-Level Cross-Modal Distillation]]</div>|<div class="cpsessionviewpapertitle">Speech Emotion Recognition via Multi-Level Cross-Modal Distillation</div><div class="cpsessionviewpaperauthor">[[Ruichen Li|AUTHOR Ruichen Li]], [[Jinming Zhao|AUTHOR Jinming Zhao]], [[Qin Jin|AUTHOR Qin Jin]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210809.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-2-4|PAPER Fri-A-V-2-4 — Audio-Visual Speech Emotion Recognition by Disentangling Emotion and Identity Attributes]]</div>|<div class="cpsessionviewpapertitle">Audio-Visual Speech Emotion Recognition by Disentangling Emotion and Identity Attributes</div><div class="cpsessionviewpaperauthor">[[Koichiro Ito|AUTHOR Koichiro Ito]], [[Takuya Fujioka|AUTHOR Takuya Fujioka]], [[Qinghua Sun|AUTHOR Qinghua Sun]], [[Kenji Nagamatsu|AUTHOR Kenji Nagamatsu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211000.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-2-5|PAPER Fri-A-V-2-5 — Parametric Distributions to Model Numerical Emotion Labels]]</div>|<div class="cpsessionviewpapertitle">Parametric Distributions to Model Numerical Emotion Labels</div><div class="cpsessionviewpaperauthor">[[Deboshree Bose|AUTHOR Deboshree Bose]], [[Vidhyasaharan Sethu|AUTHOR Vidhyasaharan Sethu]], [[Eliathamby Ambikairajah|AUTHOR Eliathamby Ambikairajah]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211133.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-2-6|PAPER Fri-A-V-2-6 — Metric Learning Based Feature Representation with Gated Fusion Model for Speech Emotion Recognition]]</div>|<div class="cpsessionviewpapertitle">Metric Learning Based Feature Representation with Gated Fusion Model for Speech Emotion Recognition</div><div class="cpsessionviewpaperauthor">[[Yuan Gao|AUTHOR Yuan Gao]], [[Jiaxing Liu|AUTHOR Jiaxing Liu]], [[Longbiao Wang|AUTHOR Longbiao Wang]], [[Jianwu Dang|AUTHOR Jianwu Dang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211852.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-2-7|PAPER Fri-A-V-2-7 — Speech Emotion Recognition with Multi-Task Learning]]</div>|<div class="cpsessionviewpapertitle">Speech Emotion Recognition with Multi-Task Learning</div><div class="cpsessionviewpaperauthor">[[Xingyu Cai|AUTHOR Xingyu Cai]], [[Jiahong Yuan|AUTHOR Jiahong Yuan]], [[Renjie Zheng|AUTHOR Renjie Zheng]], [[Liang Huang|AUTHOR Liang Huang]], [[Kenneth Church|AUTHOR Kenneth Church]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211960.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-2-8|PAPER Fri-A-V-2-8 — Generalized Dilated CNN Models for Depression Detection Using Inverted Vocal Tract Variables]]</div>|<div class="cpsessionviewpapertitle">Generalized Dilated CNN Models for Depression Detection Using Inverted Vocal Tract Variables</div><div class="cpsessionviewpaperauthor">[[Nadee Seneviratne|AUTHOR Nadee Seneviratne]], [[Carol Espy-Wilson|AUTHOR Carol Espy-Wilson]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212004.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-2-9|PAPER Fri-A-V-2-9 — Learning Mutual Correlation in Multimodal Transformer for Speech Emotion Recognition]]</div>|<div class="cpsessionviewpapertitle">Learning Mutual Correlation in Multimodal Transformer for Speech Emotion Recognition</div><div class="cpsessionviewpaperauthor">[[Yuhua Wang|AUTHOR Yuhua Wang]], [[Guang Shen|AUTHOR Guang Shen]], [[Yuezhu Xu|AUTHOR Yuezhu Xu]], [[Jiahang Li|AUTHOR Jiahang Li]], [[Zhengdao Zhao|AUTHOR Zhengdao Zhao]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212067.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-2-10|PAPER Fri-A-V-2-10 — Time-Frequency Representation Learning with Graph Convolutional Network for Dialogue-Level Speech Emotion Recognition]]</div>|<div class="cpsessionviewpapertitle">Time-Frequency Representation Learning with Graph Convolutional Network for Dialogue-Level Speech Emotion Recognition</div><div class="cpsessionviewpaperauthor">[[Jiaxing Liu|AUTHOR Jiaxing Liu]], [[Yaodong Song|AUTHOR Yaodong Song]], [[Longbiao Wang|AUTHOR Longbiao Wang]], [[Jianwu Dang|AUTHOR Jianwu Dang]], [[Ruiguo Yu|AUTHOR Ruiguo Yu]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Friday 3 September 2021, (Virtual)|<|
|^Chairs: |^Liang Lu|
|^ |^Yanzhang He|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210141.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-3-1|PAPER Fri-A-V-3-1 — Compressing 1D Time-Channel Separable Convolutions Using Sparse Random Ternary Matrices]]</div>|<div class="cpsessionviewpapertitle">Compressing 1D Time-Channel Separable Convolutions Using Sparse Random Ternary Matrices</div><div class="cpsessionviewpaperauthor">[[Gonçalo Mordido|AUTHOR Gonçalo Mordido]], [[Matthijs Van keirsbilck|AUTHOR Matthijs Van keirsbilck]], [[Alexander Keller|AUTHOR Alexander Keller]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210007.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-3-2|PAPER Fri-A-V-3-2 — Weakly Supervised Construction of ASR Systems from Massive Video Data]]</div>|<div class="cpsessionviewpapertitle">Weakly Supervised Construction of ASR Systems from Massive Video Data</div><div class="cpsessionviewpaperauthor">[[Mengli Cheng|AUTHOR Mengli Cheng]], [[Chengyu Wang|AUTHOR Chengyu Wang]], [[Jun Huang|AUTHOR Jun Huang]], [[Xiaobo Wang|AUTHOR Xiaobo Wang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210383.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-3-3|PAPER Fri-A-V-3-3 — Broadcasted Residual Learning for Efficient Keyword Spotting]]</div>|<div class="cpsessionviewpapertitle">Broadcasted Residual Learning for Efficient Keyword Spotting</div><div class="cpsessionviewpaperauthor">[[Byeonggeun Kim|AUTHOR Byeonggeun Kim]], [[Simyung Chang|AUTHOR Simyung Chang]], [[Jinkyu Lee|AUTHOR Jinkyu Lee]], [[Dooyong Sung|AUTHOR Dooyong Sung]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210797.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-3-4|PAPER Fri-A-V-3-4 — CoDERT: Distilling Encoder Representations with Co-Learning for Transducer-Based Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">CoDERT: Distilling Encoder Representations with Co-Learning for Transducer-Based Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Rupak Vignesh Swaminathan|AUTHOR Rupak Vignesh Swaminathan]], [[Brian King|AUTHOR Brian King]], [[Grant P. Strimel|AUTHOR Grant P. Strimel]], [[Jasha Droppo|AUTHOR Jasha Droppo]], [[Athanasios Mouchtaris|AUTHOR Athanasios Mouchtaris]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210819.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-3-5|PAPER Fri-A-V-3-5 — Extremely Low Footprint End-to-End ASR System for Smart Device]]</div>|<div class="cpsessionviewpapertitle">Extremely Low Footprint End-to-End ASR System for Smart Device</div><div class="cpsessionviewpaperauthor">[[Zhifu Gao|AUTHOR Zhifu Gao]], [[Yiwu Yao|AUTHOR Yiwu Yao]], [[Shiliang Zhang|AUTHOR Shiliang Zhang]], [[Jun Yang|AUTHOR Jun Yang]], [[Ming Lei|AUTHOR Ming Lei]], [[Ian McLoughlin|AUTHOR Ian McLoughlin]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211887.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-3-6|PAPER Fri-A-V-3-6 — Dissecting User-Perceived Latency of On-Device E2E Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Dissecting User-Perceived Latency of On-Device E2E Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Yuan Shangguan|AUTHOR Yuan Shangguan]], [[Rohit Prabhavalkar|AUTHOR Rohit Prabhavalkar]], [[Hang Su|AUTHOR Hang Su]], [[Jay Mahadeokar|AUTHOR Jay Mahadeokar]], [[Yangyang Shi|AUTHOR Yangyang Shi]], [[Jiatong Zhou|AUTHOR Jiatong Zhou]], [[Chunyang Wu|AUTHOR Chunyang Wu]], [[Duc Le|AUTHOR Duc Le]], [[Ozlem Kalinli|AUTHOR Ozlem Kalinli]], [[Christian Fuegen|AUTHOR Christian Fuegen]], [[Michael L. Seltzer|AUTHOR Michael L. Seltzer]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210712.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-3-7|PAPER Fri-A-V-3-7 — Amortized Neural Networks for Low-Latency Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Amortized Neural Networks for Low-Latency Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Jonathan Macoskey|AUTHOR Jonathan Macoskey]], [[Grant P. Strimel|AUTHOR Grant P. Strimel]], [[Jinru Su|AUTHOR Jinru Su]], [[Ariya Rastrow|AUTHOR Ariya Rastrow]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210212.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-3-8|PAPER Fri-A-V-3-8 — Tied & Reduced RNN-T Decoder]]</div>|<div class="cpsessionviewpapertitle">Tied & Reduced RNN-T Decoder</div><div class="cpsessionviewpaperauthor">[[Rami Botros|AUTHOR Rami Botros]], [[Tara N. Sainath|AUTHOR Tara N. Sainath]], [[Robert David|AUTHOR Robert David]], [[Emmanuel Guzman|AUTHOR Emmanuel Guzman]], [[Wei Li|AUTHOR Wei Li]], [[Yanzhang He|AUTHOR Yanzhang He]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210248.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-3-9|PAPER Fri-A-V-3-9 — PQK: Model Compression via Pruning, Quantization, and Knowledge Distillation]]</div>|<div class="cpsessionviewpapertitle">PQK: Model Compression via Pruning, Quantization, and Knowledge Distillation</div><div class="cpsessionviewpaperauthor">[[Jangho Kim|AUTHOR Jangho Kim]], [[Simyung Chang|AUTHOR Simyung Chang]], [[Nojun Kwak|AUTHOR Nojun Kwak]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210354.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-3-10|PAPER Fri-A-V-3-10 — Collaborative Training of Acoustic Encoders for Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Collaborative Training of Acoustic Encoders for Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Varun Nagaraja|AUTHOR Varun Nagaraja]], [[Yangyang Shi|AUTHOR Yangyang Shi]], [[Ganesh Venkatesh|AUTHOR Ganesh Venkatesh]], [[Ozlem Kalinli|AUTHOR Ozlem Kalinli]], [[Michael L. Seltzer|AUTHOR Michael L. Seltzer]], [[Vikas Chandra|AUTHOR Vikas Chandra]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210415.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-3-11|PAPER Fri-A-V-3-11 — Efficient Conformer with Prob-Sparse Attention Mechanism for End-to-End Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Efficient Conformer with Prob-Sparse Attention Mechanism for End-to-End Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Xiong Wang|AUTHOR Xiong Wang]], [[Sining Sun|AUTHOR Sining Sun]], [[Lei Xie|AUTHOR Lei Xie]], [[Long Ma|AUTHOR Long Ma]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210456.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-3-12|PAPER Fri-A-V-3-12 — The Energy and Carbon Footprint of Training End-to-End Speech Recognizers]]</div>|<div class="cpsessionviewpapertitle">The Energy and Carbon Footprint of Training End-to-End Speech Recognizers</div><div class="cpsessionviewpaperauthor">[[Titouan Parcollet|AUTHOR Titouan Parcollet]], [[Mirco Ravanelli|AUTHOR Mirco Ravanelli]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Friday 3 September 2021, (Virtual)|<|
|^Chairs: |^Anthony Larcher|
|^ |^Rohan Kumar Das|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211209.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-4-1|PAPER Fri-A-V-4-1 — Graph-Based Label Propagation for Semi-Supervised Speaker Identification]]</div>|<div class="cpsessionviewpapertitle">Graph-Based Label Propagation for Semi-Supervised Speaker Identification</div><div class="cpsessionviewpaperauthor">[[Long Chen|AUTHOR Long Chen]], [[Venkatesh Ravichandran|AUTHOR Venkatesh Ravichandran]], [[Andreas Stolcke|AUTHOR Andreas Stolcke]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210003.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-4-2|PAPER Fri-A-V-4-2 — Fusion of Embeddings Networks for Robust Combination of Text Dependent and Independent Speaker Recognition]]</div>|<div class="cpsessionviewpapertitle">Fusion of Embeddings Networks for Robust Combination of Text Dependent and Independent Speaker Recognition</div><div class="cpsessionviewpaperauthor">[[Ruirui Li|AUTHOR Ruirui Li]], [[Chelsea J.-T. Ju|AUTHOR Chelsea J.-T. Ju]], [[Zeya Chen|AUTHOR Zeya Chen]], [[Hongda Mao|AUTHOR Hongda Mao]], [[Oguz Elibol|AUTHOR Oguz Elibol]], [[Andreas Stolcke|AUTHOR Andreas Stolcke]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210114.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-4-3|PAPER Fri-A-V-4-3 — A Generative Model for Duration-Dependent Score Calibration]]</div>|<div class="cpsessionviewpapertitle">A Generative Model for Duration-Dependent Score Calibration</div><div class="cpsessionviewpaperauthor">[[Sandro Cumani|AUTHOR Sandro Cumani]], [[Salvatore Sarni|AUTHOR Salvatore Sarni]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210641.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-4-4|PAPER Fri-A-V-4-4 — Dr-Vectors: Decision Residual Networks and an Improved Loss for Speaker Recognition]]</div>|<div class="cpsessionviewpapertitle">Dr-Vectors: Decision Residual Networks and an Improved Loss for Speaker Recognition</div><div class="cpsessionviewpaperauthor">[[Jason Pelecanos|AUTHOR Jason Pelecanos]], [[Quan Wang|AUTHOR Quan Wang]], [[Ignacio Lopez Moreno|AUTHOR Ignacio Lopez Moreno]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210681.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-4-5|PAPER Fri-A-V-4-5 — Multi-Channel Speaker Verification for Single and Multi-Talker Speech]]</div>|<div class="cpsessionviewpapertitle">Multi-Channel Speaker Verification for Single and Multi-Talker Speech</div><div class="cpsessionviewpaperauthor">[[Saurabh Kataria|AUTHOR Saurabh Kataria]], [[Shi-Xiong Zhang|AUTHOR Shi-Xiong Zhang]], [[Dong Yu|AUTHOR Dong Yu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210822.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-4-6|PAPER Fri-A-V-4-6 — Chronological Self-Training for Real-Time Speaker Diarization]]</div>|<div class="cpsessionviewpapertitle">Chronological Self-Training for Real-Time Speaker Diarization</div><div class="cpsessionviewpaperauthor">[[Dirk Padfield|AUTHOR Dirk Padfield]], [[Daniel J. Liebling|AUTHOR Daniel J. Liebling]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-4-7|PAPER Fri-A-V-4-7 — Adaptive Margin Circle Loss for Speaker Verification]]</div>|<div class="cpsessionviewpapertitle">Adaptive Margin Circle Loss for Speaker Verification</div><div class="cpsessionviewpaperauthor">[[Runqiu Xiao|AUTHOR Runqiu Xiao]], [[Xiaoxiao Miao|AUTHOR Xiaoxiao Miao]], [[Wenchao Wang|AUTHOR Wenchao Wang]], [[Pengyuan Zhang|AUTHOR Pengyuan Zhang]], [[Bin Cai|AUTHOR Bin Cai]], [[Liuping Luo|AUTHOR Liuping Luo]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211211.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-4-8|PAPER Fri-A-V-4-8 — Presentation Matters: Evaluating Speaker Identification Tasks]]</div>|<div class="cpsessionviewpapertitle">Presentation Matters: Evaluating Speaker Identification Tasks</div><div class="cpsessionviewpaperauthor">[[Benjamin O’Brien|AUTHOR Benjamin O’Brien]], [[Christine Meunier|AUTHOR Christine Meunier]], [[Alain Ghio|AUTHOR Alain Ghio]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212021.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-4-9|PAPER Fri-A-V-4-9 — Automatic Error Correction for Speaker Embedding Learning with Noisy Labels]]</div>|<div class="cpsessionviewpapertitle">Automatic Error Correction for Speaker Embedding Learning with Noisy Labels</div><div class="cpsessionviewpaperauthor">[[Fuchuan Tong|AUTHOR Fuchuan Tong]], [[Yan Liu|AUTHOR Yan Liu]], [[Song Li|AUTHOR Song Li]], [[Jie Wang|AUTHOR Jie Wang]], [[Lin Li|AUTHOR Lin Li]], [[Qingyang Hong|AUTHOR Qingyang Hong]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212161.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-4-10|PAPER Fri-A-V-4-10 — An Integrated Framework for Two-Pass Personalized Voice Trigger]]</div>|<div class="cpsessionviewpapertitle">An Integrated Framework for Two-Pass Personalized Voice Trigger</div><div class="cpsessionviewpaperauthor">[[Dexin Liao|AUTHOR Dexin Liao]], [[Jing Li|AUTHOR Jing Li]], [[Yiming Zhi|AUTHOR Yiming Zhi]], [[Song Li|AUTHOR Song Li]], [[Qingyang Hong|AUTHOR Qingyang Hong]], [[Lin Li|AUTHOR Lin Li]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212190.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-4-11|PAPER Fri-A-V-4-11 — Masked Proxy Loss for Text-Independent Speaker Verification]]</div>|<div class="cpsessionviewpapertitle">Masked Proxy Loss for Text-Independent Speaker Verification</div><div class="cpsessionviewpaperauthor">[[Jiachen Lian|AUTHOR Jiachen Lian]], [[Aiswarya Vinod Kumar|AUTHOR Aiswarya Vinod Kumar]], [[Hira Dhamyal|AUTHOR Hira Dhamyal]], [[Bhiksha Raj|AUTHOR Bhiksha Raj]], [[Rita Singh|AUTHOR Rita Singh]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Friday 3 September 2021, (Virtual)|<|
|^Chairs: |^Julie Carson-Berndsen|
|^ |^Javier Latorre|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210838.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-5-1|PAPER Fri-A-V-5-1 — STYLER: Style Factor Modeling with Rapidity and Robustness via Speech Decomposition for Expressive and Controllable Neural Text to Speech]]</div>|<div class="cpsessionviewpapertitle">STYLER: Style Factor Modeling with Rapidity and Robustness via Speech Decomposition for Expressive and Controllable Neural Text to Speech</div><div class="cpsessionviewpaperauthor">[[Keon Lee|AUTHOR Keon Lee]], [[Kyumin Park|AUTHOR Kyumin Park]], [[Daeyoung Kim|AUTHOR Daeyoung Kim]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211236.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-5-2|PAPER Fri-A-V-5-2 — Reinforcement Learning for Emotional Text-to-Speech Synthesis with Improved Emotion Discriminability]]</div>|<div class="cpsessionviewpapertitle">Reinforcement Learning for Emotional Text-to-Speech Synthesis with Improved Emotion Discriminability</div><div class="cpsessionviewpaperauthor">[[Rui Liu|AUTHOR Rui Liu]], [[Berrak Sisman|AUTHOR Berrak Sisman]], [[Haizhou Li|AUTHOR Haizhou Li]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210307.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-5-3|PAPER Fri-A-V-5-3 — Emotional Prosody Control for Speech Generation]]</div>|<div class="cpsessionviewpapertitle">Emotional Prosody Control for Speech Generation</div><div class="cpsessionviewpaperauthor">[[Sarath Sivaprasad|AUTHOR Sarath Sivaprasad]], [[Saiteja Kosgi|AUTHOR Saiteja Kosgi]], [[Vineet Gandhi|AUTHOR Vineet Gandhi]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210412.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-5-4|PAPER Fri-A-V-5-4 — Controllable Context-Aware Conversational Speech Synthesis]]</div>|<div class="cpsessionviewpapertitle">Controllable Context-Aware Conversational Speech Synthesis</div><div class="cpsessionviewpaperauthor">[[Jian Cong|AUTHOR Jian Cong]], [[Shan Yang|AUTHOR Shan Yang]], [[Na Hu|AUTHOR Na Hu]], [[Guangzhi Li|AUTHOR Guangzhi Li]], [[Lei Xie|AUTHOR Lei Xie]], [[Dan Su|AUTHOR Dan Su]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210465.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-5-5|PAPER Fri-A-V-5-5 — Expressive Text-to-Speech Using Style Tag]]</div>|<div class="cpsessionviewpapertitle">Expressive Text-to-Speech Using Style Tag</div><div class="cpsessionviewpaperauthor">[[Minchan Kim|AUTHOR Minchan Kim]], [[Sung Jun Cheon|AUTHOR Sung Jun Cheon]], [[Byoung Jin Choi|AUTHOR Byoung Jin Choi]], [[Jong Jin Kim|AUTHOR Jong Jin Kim]], [[Nam Soo Kim|AUTHOR Nam Soo Kim]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210584.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-5-6|PAPER Fri-A-V-5-6 — Adaptive Text to Speech for Spontaneous Style]]</div>|<div class="cpsessionviewpapertitle">Adaptive Text to Speech for Spontaneous Style</div><div class="cpsessionviewpaperauthor">[[Yuzi Yan|AUTHOR Yuzi Yan]], [[Xu Tan|AUTHOR Xu Tan]], [[Bohan Li|AUTHOR Bohan Li]], [[Guangyan Zhang|AUTHOR Guangyan Zhang]], [[Tao Qin|AUTHOR Tao Qin]], [[Sheng Zhao|AUTHOR Sheng Zhao]], [[Yuan Shen|AUTHOR Yuan Shen]], [[Wei-Qiang Zhang|AUTHOR Wei-Qiang Zhang]], [[Tie-Yan Liu|AUTHOR Tie-Yan Liu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210947.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-5-7|PAPER Fri-A-V-5-7 — Towards Multi-Scale Style Control for Expressive Speech Synthesis]]</div>|<div class="cpsessionviewpapertitle">Towards Multi-Scale Style Control for Expressive Speech Synthesis</div><div class="cpsessionviewpaperauthor">[[Xiang Li|AUTHOR Xiang Li]], [[Changhe Song|AUTHOR Changhe Song]], [[Jingbei Li|AUTHOR Jingbei Li]], [[Zhiyong Wu|AUTHOR Zhiyong Wu]], [[Jia Jia|AUTHOR Jia Jia]], [[Helen Meng|AUTHOR Helen Meng]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210979.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-5-8|PAPER Fri-A-V-5-8 — Cross-Speaker Style Transfer with Prosody Bottleneck in Neural Speech Synthesis]]</div>|<div class="cpsessionviewpapertitle">Cross-Speaker Style Transfer with Prosody Bottleneck in Neural Speech Synthesis</div><div class="cpsessionviewpaperauthor">[[Shifeng Pan|AUTHOR Shifeng Pan]], [[Lei He|AUTHOR Lei He]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211129.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-5-9|PAPER Fri-A-V-5-9 — Fine-Grained Style Modeling, Transfer and Prediction in Text-to-Speech Synthesis via Phone-Level Content-Style Disentanglement]]</div>|<div class="cpsessionviewpapertitle">Fine-Grained Style Modeling, Transfer and Prediction in Text-to-Speech Synthesis via Phone-Level Content-Style Disentanglement</div><div class="cpsessionviewpaperauthor">[[Daxin Tan|AUTHOR Daxin Tan]], [[Tan Lee|AUTHOR Tan Lee]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211407.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-5-10|PAPER Fri-A-V-5-10 — Improving Performance of Seen and Unseen Speech Style Transfer in End-to-End Neural TTS]]</div>|<div class="cpsessionviewpapertitle">Improving Performance of Seen and Unseen Speech Style Transfer in End-to-End Neural TTS</div><div class="cpsessionviewpaperauthor">[[Xiaochun An|AUTHOR Xiaochun An]], [[Frank K. Soong|AUTHOR Frank K. Soong]], [[Lei Xie|AUTHOR Lei Xie]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211446.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-5-11|PAPER Fri-A-V-5-11 — Synthesis of Expressive Speaking Styles with Limited Training Data in a Multi-Speaker, Prosody-Controllable Sequence-to-Sequence Architecture]]</div>|<div class="cpsessionviewpapertitle">Synthesis of Expressive Speaking Styles with Limited Training Data in a Multi-Speaker, Prosody-Controllable Sequence-to-Sequence Architecture</div><div class="cpsessionviewpaperauthor">[[Slava Shechtman|AUTHOR Slava Shechtman]], [[Raul Fernandez|AUTHOR Raul Fernandez]], [[Alexander Sorin|AUTHOR Alexander Sorin]], [[David Haws|AUTHOR David Haws]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Friday 3 September 2021, (Virtual)|<|
|^Chairs: |^Martin Radfar|
|^ |^Isabel Trancoso|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210618.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-6-1|PAPER Fri-A-V-6-1 — Intent Detection and Slot Filling for Vietnamese]]</div>|<div class="cpsessionviewpapertitle">Intent Detection and Slot Filling for Vietnamese</div><div class="cpsessionviewpaperauthor">[[Mai Hoang Dao|AUTHOR Mai Hoang Dao]], [[Thinh Hung Truong|AUTHOR Thinh Hung Truong]], [[Dat Quoc Nguyen|AUTHOR Dat Quoc Nguyen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210055.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-6-2|PAPER Fri-A-V-6-2 — Augmenting Slot Values and Contexts for Spoken Language Understanding with Pretrained Models]]</div>|<div class="cpsessionviewpapertitle">Augmenting Slot Values and Contexts for Spoken Language Understanding with Pretrained Models</div><div class="cpsessionviewpaperauthor">[[Haitao Lin|AUTHOR Haitao Lin]], [[Lu Xiang|AUTHOR Lu Xiang]], [[Yu Zhou|AUTHOR Yu Zhou]], [[Jiajun Zhang|AUTHOR Jiajun Zhang]], [[Chengqing Zong|AUTHOR Chengqing Zong]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210335.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-6-3|PAPER Fri-A-V-6-3 — The Impact of Intent Distribution Mismatch on Semi-Supervised Spoken Language Understanding]]</div>|<div class="cpsessionviewpapertitle">The Impact of Intent Distribution Mismatch on Semi-Supervised Spoken Language Understanding</div><div class="cpsessionviewpaperauthor">[[Judith Gaspers|AUTHOR Judith Gaspers]], [[Quynh Do|AUTHOR Quynh Do]], [[Daniil Sorokin|AUTHOR Daniil Sorokin]], [[Patrick Lehnen|AUTHOR Patrick Lehnen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210402.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-6-4|PAPER Fri-A-V-6-4 — Knowledge Distillation from BERT Transformer to Speech Transformer for Intent Classification]]</div>|<div class="cpsessionviewpapertitle">Knowledge Distillation from BERT Transformer to Speech Transformer for Intent Classification</div><div class="cpsessionviewpaperauthor">[[Yidi Jiang|AUTHOR Yidi Jiang]], [[Bidisha Sharma|AUTHOR Bidisha Sharma]], [[Maulik Madhavi|AUTHOR Maulik Madhavi]], [[Haizhou Li|AUTHOR Haizhou Li]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210501.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-6-5|PAPER Fri-A-V-6-5 — Three-Module Modeling For End-to-End Spoken Language Understanding Using Pre-Trained DNN-HMM-Based Acoustic-Phonetic Model]]</div>|<div class="cpsessionviewpapertitle">Three-Module Modeling For End-to-End Spoken Language Understanding Using Pre-Trained DNN-HMM-Based Acoustic-Phonetic Model</div><div class="cpsessionviewpaperauthor">[[Nick J.C. Wang|AUTHOR Nick J.C. Wang]], [[Lu Wang|AUTHOR Lu Wang]], [[Yandan Sun|AUTHOR Yandan Sun]], [[Haimei Kang|AUTHOR Haimei Kang]], [[Dejun Zhang|AUTHOR Dejun Zhang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210788.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-6-6|PAPER Fri-A-V-6-6 — Speak or Chat with Me: End-to-End Spoken Language Understanding System with Flexible Inputs]]</div>|<div class="cpsessionviewpapertitle">Speak or Chat with Me: End-to-End Spoken Language Understanding System with Flexible Inputs</div><div class="cpsessionviewpaperauthor">[[Sujeong Cha|AUTHOR Sujeong Cha]], [[Wangrui Hou|AUTHOR Wangrui Hou]], [[Hyun Jung|AUTHOR Hyun Jung]], [[My Phung|AUTHOR My Phung]], [[Michael Picheny|AUTHOR Michael Picheny]], [[Hong-Kwang J. Kuo|AUTHOR Hong-Kwang J. Kuo]], [[Samuel Thomas|AUTHOR Samuel Thomas]], [[Edmilson Morais|AUTHOR Edmilson Morais]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210818.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-6-7|PAPER Fri-A-V-6-7 — End-to-End Cross-Lingual Spoken Language Understanding Model with Multilingual Pretraining]]</div>|<div class="cpsessionviewpapertitle">End-to-End Cross-Lingual Spoken Language Understanding Model with Multilingual Pretraining</div><div class="cpsessionviewpaperauthor">[[Xianwei Zhang|AUTHOR Xianwei Zhang]], [[Liang He|AUTHOR Liang He]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211816.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-6-8|PAPER Fri-A-V-6-8 — Factorization-Aware Training of Transformers for Natural Language Understanding on the Edge]]</div>|<div class="cpsessionviewpapertitle">Factorization-Aware Training of Transformers for Natural Language Understanding on the Edge</div><div class="cpsessionviewpaperauthor">[[Hamidreza Saghir|AUTHOR Hamidreza Saghir]], [[Samridhi Choudhary|AUTHOR Samridhi Choudhary]], [[Sepehr Eghbali|AUTHOR Sepehr Eghbali]], [[Clement Chung|AUTHOR Clement Chung]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211826.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-6-9|PAPER Fri-A-V-6-9 — End-to-End Spoken Language Understanding for Generalized Voice Assistants]]</div>|<div class="cpsessionviewpapertitle">End-to-End Spoken Language Understanding for Generalized Voice Assistants</div><div class="cpsessionviewpaperauthor">[[Michael Saxon|AUTHOR Michael Saxon]], [[Samridhi Choudhary|AUTHOR Samridhi Choudhary]], [[Joseph P. McKenna|AUTHOR Joseph P. McKenna]], [[Athanasios Mouchtaris|AUTHOR Athanasios Mouchtaris]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212044.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-A-V-6-10|PAPER Fri-A-V-6-10 — Bi-Directional Joint Neural Networks for Intent Classification and Slot Filling]]</div>|<div class="cpsessionviewpapertitle">Bi-Directional Joint Neural Networks for Intent Classification and Slot Filling</div><div class="cpsessionviewpaperauthor">[[Soyeon Caren Han|AUTHOR Soyeon Caren Han]], [[Siqu Long|AUTHOR Siqu Long]], [[Huichun Li|AUTHOR Huichun Li]], [[Henry Weld|AUTHOR Henry Weld]], [[Josiah Poon|AUTHOR Josiah Poon]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|15:00–16:00, Friday 3 September 2021, Room A+B|<|
|^Chair: |^TBD|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> |^<div class="cpsessionviewpapercode">[[Fri-Keynote|PAPER Fri-Keynote — Language Modeling and Artificial Intelligence]]</div>|<div class="cpsessionviewpapertitle">Language Modeling and Artificial Intelligence</div><div class="cpsessionviewpaperauthor">[[Tomáš Mikolov|AUTHOR Tomáš Mikolov]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Friday 3 September 2021, Room A+B|<|
|^Chairs: |^Richard Stern|
|^ |^Alessio Brutti|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211190.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-O-1-1|PAPER Fri-M-O-1-1 — Self-Attention Channel Combinator Frontend for End-to-End Multichannel Far-Field Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Self-Attention Channel Combinator Frontend for End-to-End Multichannel Far-Field Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Rong Gong|AUTHOR Rong Gong]], [[Carl Quillen|AUTHOR Carl Quillen]], [[Dushyant Sharma|AUTHOR Dushyant Sharma]], [[Andrew Goderre|AUTHOR Andrew Goderre]], [[José Laínez|AUTHOR José Laínez]], [[Ljubomir Milanović|AUTHOR Ljubomir Milanović]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211237.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-O-1-2|PAPER Fri-M-O-1-2 — ETLT 2021: Shared Task on Automatic Speech Recognition for Non-Native Children’s Speech]]</div>|<div class="cpsessionviewpapertitle">ETLT 2021: Shared Task on Automatic Speech Recognition for Non-Native Children’s Speech</div><div class="cpsessionviewpaperauthor">[[R. Gretter|AUTHOR R. Gretter]], [[Marco Matassoni|AUTHOR Marco Matassoni]], [[D. Falavigna|AUTHOR D. Falavigna]], [[A. Misra|AUTHOR A. Misra]], [[C.W. Leong|AUTHOR C.W. Leong]], [[K. Knill|AUTHOR K. Knill]], [[L. Wang|AUTHOR L. Wang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211241.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-O-1-3|PAPER Fri-M-O-1-3 — Age-Invariant Training for End-to-End Child Speech Recognition Using Adversarial Multi-Task Learning]]</div>|<div class="cpsessionviewpapertitle">Age-Invariant Training for End-to-End Child Speech Recognition Using Adversarial Multi-Task Learning</div><div class="cpsessionviewpaperauthor">[[Lars Rumberg|AUTHOR Lars Rumberg]], [[Hanna Ehlert|AUTHOR Hanna Ehlert]], [[Ulrike Lüdtke|AUTHOR Ulrike Lüdtke]], [[Jörn Ostermann|AUTHOR Jörn Ostermann]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211315.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-O-1-4|PAPER Fri-M-O-1-4 — Learning to Rank Microphones for Distant Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Learning to Rank Microphones for Distant Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Samuele Cornell|AUTHOR Samuele Cornell]], [[Alessio Brutti|AUTHOR Alessio Brutti]], [[Marco Matassoni|AUTHOR Marco Matassoni]], [[Stefano Squartini|AUTHOR Stefano Squartini]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212202.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-O-1-5|PAPER Fri-M-O-1-5 — Simulating Reading Mistakes for Child Speech Transformer-Based Phone Recognition]]</div>|<div class="cpsessionviewpapertitle">Simulating Reading Mistakes for Child Speech Transformer-Based Phone Recognition</div><div class="cpsessionviewpaperauthor">[[Lucile Gelin|AUTHOR Lucile Gelin]], [[Thomas Pellegrini|AUTHOR Thomas Pellegrini]], [[Julien Pinquier|AUTHOR Julien Pinquier]], [[Morgane Daniel|AUTHOR Morgane Daniel]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Friday 3 September 2021, Room C|<|
|^Chairs: |^Esther Klabbers|
|^ |^Gérard Bailly|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210275.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-O-2-1|PAPER Fri-M-O-2-1 — Alternate Endings: Improving Prosody for Incremental Neural TTS with Predicted Future Text Input]]</div>|<div class="cpsessionviewpapertitle">Alternate Endings: Improving Prosody for Incremental Neural TTS with Predicted Future Text Input</div><div class="cpsessionviewpaperauthor">[[Brooke Stephenson|AUTHOR Brooke Stephenson]], [[Thomas Hueber|AUTHOR Thomas Hueber]], [[Laurent Girin|AUTHOR Laurent Girin]], [[Laurent Besacier|AUTHOR Laurent Besacier]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211538.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-O-2-2|PAPER Fri-M-O-2-2 — Exploring Emotional Prototypes in a High Dimensional TTS Latent Space]]</div>|<div class="cpsessionviewpapertitle">Exploring Emotional Prototypes in a High Dimensional TTS Latent Space</div><div class="cpsessionviewpaperauthor">[[Pol van Rijn|AUTHOR Pol van Rijn]], [[Silvan Mertes|AUTHOR Silvan Mertes]], [[Dominik Schiller|AUTHOR Dominik Schiller]], [[Peter M.C. Harrison|AUTHOR Peter M.C. Harrison]], [[Pauline Larrouy-Maestri|AUTHOR Pauline Larrouy-Maestri]], [[Elisabeth André|AUTHOR Elisabeth André]], [[Nori Jacoby|AUTHOR Nori Jacoby]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211583.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-O-2-3|PAPER Fri-M-O-2-3 — Ctrl-P: Temporal Control of Prosodic Variation for Speech Synthesis]]</div>|<div class="cpsessionviewpapertitle">Ctrl-P: Temporal Control of Prosodic Variation for Speech Synthesis</div><div class="cpsessionviewpaperauthor">[[Devang S. Ram Mohan|AUTHOR Devang S. Ram Mohan]], [[Vivian Hu|AUTHOR Vivian Hu]], [[Tian Huey Teh|AUTHOR Tian Huey Teh]], [[Alexandra Torresquintero|AUTHOR Alexandra Torresquintero]], [[Christopher G.R. Wallis|AUTHOR Christopher G.R. Wallis]], [[Marlene Staib|AUTHOR Marlene Staib]], [[Lorenzo Foglianti|AUTHOR Lorenzo Foglianti]], [[Jiameng Gao|AUTHOR Jiameng Gao]], [[Simon King|AUTHOR Simon King]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211610.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-O-2-4|PAPER Fri-M-O-2-4 — ADEPT: A Dataset for Evaluating Prosody Transfer]]</div>|<div class="cpsessionviewpapertitle">ADEPT: A Dataset for Evaluating Prosody Transfer</div><div class="cpsessionviewpaperauthor">[[Alexandra Torresquintero|AUTHOR Alexandra Torresquintero]], [[Tian Huey Teh|AUTHOR Tian Huey Teh]], [[Christopher G.R. Wallis|AUTHOR Christopher G.R. Wallis]], [[Marlene Staib|AUTHOR Marlene Staib]], [[Devang S. Ram Mohan|AUTHOR Devang S. Ram Mohan]], [[Vivian Hu|AUTHOR Vivian Hu]], [[Lorenzo Foglianti|AUTHOR Lorenzo Foglianti]], [[Jiameng Gao|AUTHOR Jiameng Gao]], [[Simon King|AUTHOR Simon King]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210125.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-O-2-5|PAPER Fri-M-O-2-5 — Prosodic Boundary Prediction Model for Vietnamese Text-To-Speech]]</div>|<div class="cpsessionviewpapertitle">Prosodic Boundary Prediction Model for Vietnamese Text-To-Speech</div><div class="cpsessionviewpaperauthor">[[Nguyen Thi Thu Trang|AUTHOR Nguyen Thi Thu Trang]], [[Nguyen Hoang Ky|AUTHOR Nguyen Hoang Ky]], [[Albert Rilliard|AUTHOR Albert Rilliard]], [[Christophe d’Alessandro|AUTHOR Christophe d’Alessandro]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Friday 3 September 2021, Room D|<|
|^Chairs: |^Zbynek Koldovský|
|^ |^Kateřina Žmolíková|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210493.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-O-3-1|PAPER Fri-M-O-3-1 — Many-Speakers Single Channel Speech Separation with Optimal Permutation Training]]</div>|<div class="cpsessionviewpapertitle">Many-Speakers Single Channel Speech Separation with Optimal Permutation Training</div><div class="cpsessionviewpaperauthor">[[Shaked Dovrat|AUTHOR Shaked Dovrat]], [[Eliya Nachmani|AUTHOR Eliya Nachmani]], [[Lior Wolf|AUTHOR Lior Wolf]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211230.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-O-3-2|PAPER Fri-M-O-3-2 — Combating Reverberation in NTF-Based Speech Separation Using a Sub-Source Weighted Multichannel Wiener Filter and Linear Prediction]]</div>|<div class="cpsessionviewpapertitle">Combating Reverberation in NTF-Based Speech Separation Using a Sub-Source Weighted Multichannel Wiener Filter and Linear Prediction</div><div class="cpsessionviewpaperauthor">[[Mieszko Fraś|AUTHOR Mieszko Fraś]], [[Marcin Witkowski|AUTHOR Marcin Witkowski]], [[Konrad Kowalczyk|AUTHOR Konrad Kowalczyk]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211418.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-O-3-3|PAPER Fri-M-O-3-3 — A Hands-On Comparison of DNNs for Dialog Separation Using Transfer Learning from Music Source Separation]]</div>|<div class="cpsessionviewpapertitle">A Hands-On Comparison of DNNs for Dialog Separation Using Transfer Learning from Music Source Separation</div><div class="cpsessionviewpaperauthor">[[Martin Strauss|AUTHOR Martin Strauss]], [[Jouni Paulus|AUTHOR Jouni Paulus]], [[Matteo Torcoli|AUTHOR Matteo Torcoli]], [[Bernd Edler|AUTHOR Bernd Edler]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211552.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-O-3-4|PAPER Fri-M-O-3-4 — GlobalPhone Mix-To-Separate Out of 2: A Multilingual 2000 Speakers Mixtures Database for Speech Separation]]</div>|<div class="cpsessionviewpapertitle">GlobalPhone Mix-To-Separate Out of 2: A Multilingual 2000 Speakers Mixtures Database for Speech Separation</div><div class="cpsessionviewpaperauthor">[[Marvin Borsdorf|AUTHOR Marvin Borsdorf]], [[Chenglin Xu|AUTHOR Chenglin Xu]], [[Haizhou Li|AUTHOR Haizhou Li]], [[Tanja Schultz|AUTHOR Tanja Schultz]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Friday 3 September 2021, Room Lacina|<|
|^Chairs: |^Srikanth Madikeri|
|^ |^Emily Prud’hommeaux|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> ||<div class="cpsessionviewpapertitle">Introduction by the Session Chairs</div><div class="cpsessionviewpaperauthor"></div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211930.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-SS-1-1|PAPER Fri-M-SS-1-1 — OpenASR20: An Open Challenge for Automatic Speech Recognition of Conversational Telephone Speech in Low-Resource Languages]]</div>|<div class="cpsessionviewpapertitle">OpenASR20: An Open Challenge for Automatic Speech Recognition of Conversational Telephone Speech in Low-Resource Languages</div><div class="cpsessionviewpaperauthor">[[Kay Peterson|AUTHOR Kay Peterson]], [[Audrey Tong|AUTHOR Audrey Tong]], [[Yan Yu|AUTHOR Yan Yu]]</div>|
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> ||<div class="cpsessionviewpapertitle">Buffer / Break</div><div class="cpsessionviewpaperauthor"></div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211778.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-SS-1-2|PAPER Fri-M-SS-1-2 — Multitask Adaptation with Lattice-Free MMI for Multi-Genre Speech Recognition of Low Resource Languages]]</div>|<div class="cpsessionviewpapertitle">Multitask Adaptation with Lattice-Free MMI for Multi-Genre Speech Recognition of Low Resource Languages</div><div class="cpsessionviewpaperauthor">[[Srikanth Madikeri|AUTHOR Srikanth Madikeri]], [[Petr Motlicek|AUTHOR Petr Motlicek]], [[Hervé Bourlard|AUTHOR Hervé Bourlard]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210067.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-SS-1-3|PAPER Fri-M-SS-1-3 — An Improved Wav2Vec 2.0 Pre-Training Approach Using Enhanced Local Dependency Modeling for Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">An Improved Wav2Vec 2.0 Pre-Training Approach Using Enhanced Local Dependency Modeling for Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Qiu-shi Zhu|AUTHOR Qiu-shi Zhu]], [[Jie Zhang|AUTHOR Jie Zhang]], [[Ming-hui Wu|AUTHOR Ming-hui Wu]], [[Xin Fang|AUTHOR Xin Fang]], [[Li-Rong Dai|AUTHOR Li-Rong Dai]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210358.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-SS-1-4|PAPER Fri-M-SS-1-4 — Systems for Low-Resource Speech Recognition Tasks in Open Automatic Speech Recognition and Formosa Speech Recognition Challenges]]</div>|<div class="cpsessionviewpapertitle">Systems for Low-Resource Speech Recognition Tasks in Open Automatic Speech Recognition and Formosa Speech Recognition Challenges</div><div class="cpsessionviewpaperauthor">[[Hung-Pang Lin|AUTHOR Hung-Pang Lin]], [[Yu-Jia Zhang|AUTHOR Yu-Jia Zhang]], [[Chia-Ping Chen|AUTHOR Chia-Ping Chen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211063.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-SS-1-5|PAPER Fri-M-SS-1-5 — The TNT Team System Descriptions of Cantonese and Mongolian for IARPA OpenASR20]]</div>|<div class="cpsessionviewpapertitle">The TNT Team System Descriptions of Cantonese and Mongolian for IARPA OpenASR20</div><div class="cpsessionviewpaperauthor">[[Jing Zhao|AUTHOR Jing Zhao]], [[Zhiqiang Lv|AUTHOR Zhiqiang Lv]], [[Ambyera Han|AUTHOR Ambyera Han]], [[Guan-Bo Wang|AUTHOR Guan-Bo Wang]], [[Guixin Shi|AUTHOR Guixin Shi]], [[Jian Kang|AUTHOR Jian Kang]], [[Jinghao Yan|AUTHOR Jinghao Yan]], [[Pengfei Hu|AUTHOR Pengfei Hu]], [[Shen Huang|AUTHOR Shen Huang]], [[Wei-Qiang Zhang|AUTHOR Wei-Qiang Zhang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211086.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-SS-1-6|PAPER Fri-M-SS-1-6 — Combining Hybrid and End-to-End Approaches for the OpenASR20 Challenge]]</div>|<div class="cpsessionviewpapertitle">Combining Hybrid and End-to-End Approaches for the OpenASR20 Challenge</div><div class="cpsessionviewpaperauthor">[[Tanel Alumäe|AUTHOR Tanel Alumäe]], [[Jiaming Kong|AUTHOR Jiaming Kong]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211970.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-SS-1-7|PAPER Fri-M-SS-1-7 — One Size Does Not Fit All in Resource-Constrained ASR]]</div>|<div class="cpsessionviewpapertitle">One Size Does Not Fit All in Resource-Constrained ASR</div><div class="cpsessionviewpaperauthor">[[Ethan Morris|AUTHOR Ethan Morris]], [[Robbie Jimerson|AUTHOR Robbie Jimerson]], [[Emily Prud’hommeaux|AUTHOR Emily Prud’hommeaux]]</div>|
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> ||<div class="cpsessionviewpapertitle">Buffer / Break</div><div class="cpsessionviewpaperauthor"></div>|
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> ||<div class="cpsessionviewpapertitle">Concluding Remarks by the Session Chairs</div><div class="cpsessionviewpaperauthor"></div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Friday 3 September 2021, (Virtual)|<|
|^Chairs: |^Joakim Gustafson|
|^ |^Anne Bonneau|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210021.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-1-1|PAPER Fri-M-V-1-1 — Cross-Linguistic Perception of the Japanese Singleton/Geminate Contrast: Korean, Mandarin and Mongolian Compared]]</div>|<div class="cpsessionviewpapertitle">Cross-Linguistic Perception of the Japanese Singleton/Geminate Contrast: Korean, Mandarin and Mongolian Compared</div><div class="cpsessionviewpaperauthor">[[Kimiko Tsukada|AUTHOR Kimiko Tsukada]], [[Yurong|AUTHOR Yurong]], [[Joo-Yeon Kim|AUTHOR Joo-Yeon Kim]], [[Jeong-Im Han|AUTHOR Jeong-Im Han]], [[John Hajek|AUTHOR John Hajek]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210086.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-1-2|PAPER Fri-M-V-1-2 — Detection of Lexical Stress Errors in Non-Native (L2) English with Data Augmentation and Attention]]</div>|<div class="cpsessionviewpapertitle">Detection of Lexical Stress Errors in Non-Native (L2) English with Data Augmentation and Attention</div><div class="cpsessionviewpaperauthor">[[Daniel Korzekwa|AUTHOR Daniel Korzekwa]], [[Roberto Barra-Chicote|AUTHOR Roberto Barra-Chicote]], [[Szymon Zaporowski|AUTHOR Szymon Zaporowski]], [[Grzegorz Beringer|AUTHOR Grzegorz Beringer]], [[Jaime Lorenzo-Trueba|AUTHOR Jaime Lorenzo-Trueba]], [[Alicja Serafinowicz|AUTHOR Alicja Serafinowicz]], [[Jasha Droppo|AUTHOR Jasha Droppo]], [[Thomas Drugman|AUTHOR Thomas Drugman]], [[Bozena Kostek|AUTHOR Bozena Kostek]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210315.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-1-3|PAPER Fri-M-V-1-3 — Testing Acoustic Voice Quality Classification Across Languages and Speech Styles]]</div>|<div class="cpsessionviewpapertitle">Testing Acoustic Voice Quality Classification Across Languages and Speech Styles</div><div class="cpsessionviewpaperauthor">[[Bettina Braun|AUTHOR Bettina Braun]], [[Nicole Dehé|AUTHOR Nicole Dehé]], [[Marieke Einfeldt|AUTHOR Marieke Einfeldt]], [[Daniela Wochner|AUTHOR Daniela Wochner]], [[Katharina Zahner-Ritter|AUTHOR Katharina Zahner-Ritter]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210316.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-1-4|PAPER Fri-M-V-1-4 — Acquisition of Prosodic Focus Marking by Three- to Six-Year-Old Children Learning Mandarin Chinese]]</div>|<div class="cpsessionviewpapertitle">Acquisition of Prosodic Focus Marking by Three- to Six-Year-Old Children Learning Mandarin Chinese</div><div class="cpsessionviewpaperauthor">[[Qianyutong Zhang|AUTHOR Qianyutong Zhang]], [[Kexin Lyu|AUTHOR Kexin Lyu]], [[Zening Chen|AUTHOR Zening Chen]], [[Ping Tang|AUTHOR Ping Tang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210372.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-1-5|PAPER Fri-M-V-1-5 — Adaptive Listening Difficulty Detection for L2 Learners Through Moderating ASR Resources]]</div>|<div class="cpsessionviewpapertitle">Adaptive Listening Difficulty Detection for L2 Learners Through Moderating ASR Resources</div><div class="cpsessionviewpaperauthor">[[Maryam Sadat Mirzaei|AUTHOR Maryam Sadat Mirzaei]], [[Kourosh Meshgi|AUTHOR Kourosh Meshgi]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210581.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-1-6|PAPER Fri-M-V-1-6 — F₀ Patterns of L2 English Speech by Mandarin Chinese Learners]]</div>|<div class="cpsessionviewpapertitle">F₀ Patterns of L2 English Speech by Mandarin Chinese Learners</div><div class="cpsessionviewpaperauthor">[[Hongwei Ding|AUTHOR Hongwei Ding]], [[Binghuai Lin|AUTHOR Binghuai Lin]], [[Liyuan Wang|AUTHOR Liyuan Wang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210843.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-1-7|PAPER Fri-M-V-1-7 — A Neural Network-Based Noise Compensation Method for Pronunciation Assessment]]</div>|<div class="cpsessionviewpapertitle">A Neural Network-Based Noise Compensation Method for Pronunciation Assessment</div><div class="cpsessionviewpaperauthor">[[Binghuai Lin|AUTHOR Binghuai Lin]], [[Liyuan Wang|AUTHOR Liyuan Wang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211003.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-1-8|PAPER Fri-M-V-1-8 — Phonetic Distance and Surprisal in Multilingual Priming: Evidence from Slavic]]</div>|<div class="cpsessionviewpapertitle">Phonetic Distance and Surprisal in Multilingual Priming: Evidence from Slavic</div><div class="cpsessionviewpaperauthor">[[Jacek Kudera|AUTHOR Jacek Kudera]], [[Philip Georgis|AUTHOR Philip Georgis]], [[Bernd Möbius|AUTHOR Bernd Möbius]], [[Tania Avgustinova|AUTHOR Tania Avgustinova]], [[Dietrich Klakow|AUTHOR Dietrich Klakow]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211082.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-1-9|PAPER Fri-M-V-1-9 — A Preliminary Study on Discourse Prosody Encoding in L1 and L2 English Spontaneous Narratives]]</div>|<div class="cpsessionviewpapertitle">A Preliminary Study on Discourse Prosody Encoding in L1 and L2 English Spontaneous Narratives</div><div class="cpsessionviewpaperauthor">[[Yuqing Zhang|AUTHOR Yuqing Zhang]], [[Zhu Li|AUTHOR Zhu Li]], [[Binghuai Lin|AUTHOR Binghuai Lin]], [[Jinsong Zhang|AUTHOR Jinsong Zhang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211467.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-1-10|PAPER Fri-M-V-1-10 — Transformer Based End-to-End Mispronunciation Detection and Diagnosis]]</div>|<div class="cpsessionviewpapertitle">Transformer Based End-to-End Mispronunciation Detection and Diagnosis</div><div class="cpsessionviewpaperauthor">[[Minglin Wu|AUTHOR Minglin Wu]], [[Kun Li|AUTHOR Kun Li]], [[Wai-Kim Leung|AUTHOR Wai-Kim Leung]], [[Helen Meng|AUTHOR Helen Meng]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211545.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-1-11|PAPER Fri-M-V-1-11 — L1 Identification from L2 Speech Using Neural Spectrogram Analysis]]</div>|<div class="cpsessionviewpapertitle">L1 Identification from L2 Speech Using Neural Spectrogram Analysis</div><div class="cpsessionviewpaperauthor">[[Calbert Graham|AUTHOR Calbert Graham]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Friday 3 September 2021, (Virtual)|<|
|^Chairs: |^Daniel Brenner|
|^ |^Priyankoo Sarmah|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211823.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-2-1|PAPER Fri-M-V-2-1 — Leveraging Real-Time MRI for Illuminating Linguistic Velum Action]]</div>|<div class="cpsessionviewpapertitle">Leveraging Real-Time MRI for Illuminating Linguistic Velum Action</div><div class="cpsessionviewpaperauthor">[[Miran Oh|AUTHOR Miran Oh]], [[Dani Byrd|AUTHOR Dani Byrd]], [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210187.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-2-2|PAPER Fri-M-V-2-2 — Segmental Alignment of English Syllables with Singleton and Cluster Onsets]]</div>|<div class="cpsessionviewpapertitle">Segmental Alignment of English Syllables with Singleton and Cluster Onsets</div><div class="cpsessionviewpaperauthor">[[Zirui Liu|AUTHOR Zirui Liu]], [[Yi Xu|AUTHOR Yi Xu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210685.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-2-3|PAPER Fri-M-V-2-3 — Exploration of Welsh English Pre-Aspiration: How Wide-Spread is it?]]</div>|<div class="cpsessionviewpapertitle">Exploration of Welsh English Pre-Aspiration: How Wide-Spread is it?</div><div class="cpsessionviewpaperauthor">[[Míša Hejná|AUTHOR Míša Hejná]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211056.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-2-4|PAPER Fri-M-V-2-4 — Revisiting Recall Effects of Filler Particles in German and English]]</div>|<div class="cpsessionviewpapertitle">Revisiting Recall Effects of Filler Particles in German and English</div><div class="cpsessionviewpaperauthor">[[Beeke Muhlack|AUTHOR Beeke Muhlack]], [[Mikey Elmers|AUTHOR Mikey Elmers]], [[Heiner Drenhaus|AUTHOR Heiner Drenhaus]], [[Jürgen Trouvain|AUTHOR Jürgen Trouvain]], [[Marjolein van Os|AUTHOR Marjolein van Os]], [[Raphael Werner|AUTHOR Raphael Werner]], [[Margarita Ryzhova|AUTHOR Margarita Ryzhova]], [[Bernd Möbius|AUTHOR Bernd Möbius]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211122.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-2-5|PAPER Fri-M-V-2-5 — How Reliable Are Phonetic Data Collected Remotely? Comparison of Recording Devices and Environments on Acoustic Measurements]]</div>|<div class="cpsessionviewpapertitle">How Reliable Are Phonetic Data Collected Remotely? Comparison of Recording Devices and Environments on Acoustic Measurements</div><div class="cpsessionviewpaperauthor">[[Chunyu Ge|AUTHOR Chunyu Ge]], [[Yixuan Xiong|AUTHOR Yixuan Xiong]], [[Peggy Mok|AUTHOR Peggy Mok]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211326.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-2-6|PAPER Fri-M-V-2-6 — A Cross-Dialectal Comparison of Apical Vowels in Beijing Mandarin, Northeastern Mandarin and Southwestern Mandarin: An EMA and Ultrasound Study]]</div>|<div class="cpsessionviewpapertitle">A Cross-Dialectal Comparison of Apical Vowels in Beijing Mandarin, Northeastern Mandarin and Southwestern Mandarin: An EMA and Ultrasound Study</div><div class="cpsessionviewpaperauthor">[[Jing Huang|AUTHOR Jing Huang]], [[Feng-fan Hsieh|AUTHOR Feng-fan Hsieh]], [[Yueh-chin Chang|AUTHOR Yueh-chin Chang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211379.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-2-7|PAPER Fri-M-V-2-7 — Dissecting the Aero-Acoustic Parameters of Open Articulatory Transitions]]</div>|<div class="cpsessionviewpapertitle">Dissecting the Aero-Acoustic Parameters of Open Articulatory Transitions</div><div class="cpsessionviewpaperauthor">[[Mark Gibson|AUTHOR Mark Gibson]], [[Oihane Muxika|AUTHOR Oihane Muxika]], [[Marianne Pouplier|AUTHOR Marianne Pouplier]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211400.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-2-8|PAPER Fri-M-V-2-8 — Quantifying Vocal Tract Shape Variation and its Acoustic Impact: A Geometric Morphometric Approach]]</div>|<div class="cpsessionviewpapertitle">Quantifying Vocal Tract Shape Variation and its Acoustic Impact: A Geometric Morphometric Approach</div><div class="cpsessionviewpaperauthor">[[Amelia J. Gully|AUTHOR Amelia J. Gully]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211481.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-2-9|PAPER Fri-M-V-2-9 — Speech Perception and Loanword Adaptations: The Case of Copy-Vowel Epenthesis]]</div>|<div class="cpsessionviewpapertitle">Speech Perception and Loanword Adaptations: The Case of Copy-Vowel Epenthesis</div><div class="cpsessionviewpaperauthor">[[Adriana Guevara-Rukoz|AUTHOR Adriana Guevara-Rukoz]], [[Shi Yu|AUTHOR Shi Yu]], [[Sharon Peperkamp|AUTHOR Sharon Peperkamp]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211640.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-2-10|PAPER Fri-M-V-2-10 — Speakers Coarticulate Less When Facing Real and Imagined Communicative Difficulties: An Analysis of Read and Spontaneous Speech from the LUCID Corpus]]</div>|<div class="cpsessionviewpapertitle">Speakers Coarticulate Less When Facing Real and Imagined Communicative Difficulties: An Analysis of Read and Spontaneous Speech from the LUCID Corpus</div><div class="cpsessionviewpaperauthor">[[Zhe-chen Guo|AUTHOR Zhe-chen Guo]], [[Rajka Smiljanic|AUTHOR Rajka Smiljanic]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211649.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-2-11|PAPER Fri-M-V-2-11 — Developmental Changes of Vowel Acoustics in Adolescents]]</div>|<div class="cpsessionviewpapertitle">Developmental Changes of Vowel Acoustics in Adolescents</div><div class="cpsessionviewpaperauthor">[[Einar Meister|AUTHOR Einar Meister]], [[Lya Meister|AUTHOR Lya Meister]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211724.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-2-12|PAPER Fri-M-V-2-12 — Context and Co-Text Influence on the Accuracy Production of Italian L2 Non-Native Sounds]]</div>|<div class="cpsessionviewpapertitle">Context and Co-Text Influence on the Accuracy Production of Italian L2 Non-Native Sounds</div><div class="cpsessionviewpaperauthor">[[Sonia d’Apolito|AUTHOR Sonia d’Apolito]], [[Barbara Gili Fivela|AUTHOR Barbara Gili Fivela]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211846.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-2-13|PAPER Fri-M-V-2-13 — A New Vowel Normalization for Sociophonetics]]</div>|<div class="cpsessionviewpapertitle">A New Vowel Normalization for Sociophonetics</div><div class="cpsessionviewpaperauthor">[[Wilbert Heeringa|AUTHOR Wilbert Heeringa]], [[Hans Van de Velde|AUTHOR Hans Van de Velde]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212167.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-2-14|PAPER Fri-M-V-2-14 — The Pacific Expansion: Optimizing Phonetic Transcription of Archival Corpora]]</div>|<div class="cpsessionviewpapertitle">The Pacific Expansion: Optimizing Phonetic Transcription of Archival Corpora</div><div class="cpsessionviewpaperauthor">[[Rosey Billington|AUTHOR Rosey Billington]], [[Hywel Stoakes|AUTHOR Hywel Stoakes]], [[Nick Thieberger|AUTHOR Nick Thieberger]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Friday 3 September 2021, (Virtual)|<|
|^Chairs: |^Takaaki Hori|
|^ |^Tatsuya Kawahara|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211367.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-3-1|PAPER Fri-M-V-3-1 — FSR: Accelerating the Inference Process of Transducer-Based Models by Applying Fast-Skip Regularization]]</div>|<div class="cpsessionviewpapertitle">FSR: Accelerating the Inference Process of Transducer-Based Models by Applying Fast-Skip Regularization</div><div class="cpsessionviewpaperauthor">[[Zhengkun Tian|AUTHOR Zhengkun Tian]], [[Jiangyan Yi|AUTHOR Jiangyan Yi]], [[Ye Bai|AUTHOR Ye Bai]], [[Jianhua Tao|AUTHOR Jianhua Tao]], [[Shuai Zhang|AUTHOR Shuai Zhang]], [[Zhengqi Wen|AUTHOR Zhengqi Wen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211716.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-3-2|PAPER Fri-M-V-3-2 — LT-LM: A Novel Non-Autoregressive Language Model for Single-Shot Lattice Rescoring]]</div>|<div class="cpsessionviewpapertitle">LT-LM: A Novel Non-Autoregressive Language Model for Single-Shot Lattice Rescoring</div><div class="cpsessionviewpaperauthor">[[Anton Mitrofanov|AUTHOR Anton Mitrofanov]], [[Mariya Korenevskaya|AUTHOR Mariya Korenevskaya]], [[Ivan Podluzhny|AUTHOR Ivan Podluzhny]], [[Yuri Khokhlov|AUTHOR Yuri Khokhlov]], [[Aleksandr Laptev|AUTHOR Aleksandr Laptev]], [[Andrei Andrusenko|AUTHOR Andrei Andrusenko]], [[Aleksei Ilin|AUTHOR Aleksei Ilin]], [[Maxim Korenevsky|AUTHOR Maxim Korenevsky]], [[Ivan Medennikov|AUTHOR Ivan Medennikov]], [[Aleksei Romanenko|AUTHOR Aleksei Romanenko]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210658.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-3-3|PAPER Fri-M-V-3-3 — A Hybrid Seq-2-Seq ASR Design for On-Device and Server Applications]]</div>|<div class="cpsessionviewpapertitle">A Hybrid Seq-2-Seq ASR Design for On-Device and Server Applications</div><div class="cpsessionviewpaperauthor">[[Cyril Allauzen|AUTHOR Cyril Allauzen]], [[Ehsan Variani|AUTHOR Ehsan Variani]], [[Michael Riley|AUTHOR Michael Riley]], [[David Rybach|AUTHOR David Rybach]], [[Hao Zhang|AUTHOR Hao Zhang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211107.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-3-4|PAPER Fri-M-V-3-4 — VAD-Free Streaming Hybrid CTC/Attention ASR for Unsegmented Recording]]</div>|<div class="cpsessionviewpapertitle">VAD-Free Streaming Hybrid CTC/Attention ASR for Unsegmented Recording</div><div class="cpsessionviewpaperauthor">[[Hirofumi Inaguma|AUTHOR Hirofumi Inaguma]], [[Tatsuya Kawahara|AUTHOR Tatsuya Kawahara]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211983.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-3-5|PAPER Fri-M-V-3-5 — WeNet: Production Oriented Streaming and Non-Streaming End-to-End Speech Recognition Toolkit]]</div>|<div class="cpsessionviewpapertitle">WeNet: Production Oriented Streaming and Non-Streaming End-to-End Speech Recognition Toolkit</div><div class="cpsessionviewpaperauthor">[[Zhuoyuan Yao|AUTHOR Zhuoyuan Yao]], [[Di Wu|AUTHOR Di Wu]], [[Xiong Wang|AUTHOR Xiong Wang]], [[Binbin Zhang|AUTHOR Binbin Zhang]], [[Fan Yu|AUTHOR Fan Yu]], [[Chao Yang|AUTHOR Chao Yang]], [[Zhendong Peng|AUTHOR Zhendong Peng]], [[Xiaoyu Chen|AUTHOR Xiaoyu Chen]], [[Lei Xie|AUTHOR Lei Xie]], [[Xin Lei|AUTHOR Xin Lei]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211992.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-3-6|PAPER Fri-M-V-3-6 — Cross-Modal Transformer-Based Neural Correction Models for Automatic Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Cross-Modal Transformer-Based Neural Correction Models for Automatic Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Tomohiro Tanaka|AUTHOR Tomohiro Tanaka]], [[Ryo Masumura|AUTHOR Ryo Masumura]], [[Mana Ihori|AUTHOR Mana Ihori]], [[Akihiko Takashima|AUTHOR Akihiko Takashima]], [[Takafumi Moriya|AUTHOR Takafumi Moriya]], [[Takanori Ashihara|AUTHOR Takanori Ashihara]], [[Shota Orihashi|AUTHOR Shota Orihashi]], [[Naoki Makishima|AUTHOR Naoki Makishima]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210176.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-3-7|PAPER Fri-M-V-3-7 — Deep Neural Network Calibration for E2E Speech Recognition System]]</div>|<div class="cpsessionviewpapertitle">Deep Neural Network Calibration for E2E Speech Recognition System</div><div class="cpsessionviewpaperauthor">[[Mun-Hak Lee|AUTHOR Mun-Hak Lee]], [[Joon-Hyuk Chang|AUTHOR Joon-Hyuk Chang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210690.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-3-8|PAPER Fri-M-V-3-8 — Residual Energy-Based Models for End-to-End Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Residual Energy-Based Models for End-to-End Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Qiujia Li|AUTHOR Qiujia Li]], [[Yu Zhang|AUTHOR Yu Zhang]], [[Bo Li|AUTHOR Bo Li]], [[Liangliang Cao|AUTHOR Liangliang Cao]], [[Philip C. Woodland|AUTHOR Philip C. Woodland]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211207.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-3-9|PAPER Fri-M-V-3-9 — Multi-Task Learning for End-to-End ASR Word and Utterance Confidence with Deletion Prediction]]</div>|<div class="cpsessionviewpapertitle">Multi-Task Learning for End-to-End ASR Word and Utterance Confidence with Deletion Prediction</div><div class="cpsessionviewpaperauthor">[[David Qiu|AUTHOR David Qiu]], [[Yanzhang He|AUTHOR Yanzhang He]], [[Qiujia Li|AUTHOR Qiujia Li]], [[Yu Zhang|AUTHOR Yu Zhang]], [[Liangliang Cao|AUTHOR Liangliang Cao]], [[Ian McGraw|AUTHOR Ian McGraw]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211516.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-3-10|PAPER Fri-M-V-3-10 — Insights on Neural Representations for End-to-End Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Insights on Neural Representations for End-to-End Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Anna Ollerenshaw|AUTHOR Anna Ollerenshaw]], [[Md. Asif Jalal|AUTHOR Md. Asif Jalal]], [[Thomas Hain|AUTHOR Thomas Hain]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211666.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-3-11|PAPER Fri-M-V-3-11 — Sequence-Level Confidence Classifier for ASR Utterance Accuracy and Application to Acoustic Models]]</div>|<div class="cpsessionviewpapertitle">Sequence-Level Confidence Classifier for ASR Utterance Accuracy and Application to Acoustic Models</div><div class="cpsessionviewpaperauthor">[[Amber Afshan|AUTHOR Amber Afshan]], [[Kshitiz Kumar|AUTHOR Kshitiz Kumar]], [[Jian Wu|AUTHOR Jian Wu]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Friday 3 September 2021, (Virtual)|<|
|^Chairs: |^Hema Murthy|
|^ |^Géza Németh|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211936.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-4-1|PAPER Fri-M-V-4-1 — Unsupervised Learning of Disentangled Speech Content and Style Representation]]</div>|<div class="cpsessionviewpapertitle">Unsupervised Learning of Disentangled Speech Content and Style Representation</div><div class="cpsessionviewpaperauthor">[[Andros Tjandra|AUTHOR Andros Tjandra]], [[Ruoming Pang|AUTHOR Ruoming Pang]], [[Yu Zhang|AUTHOR Yu Zhang]], [[Shigeki Karita|AUTHOR Shigeki Karita]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210885.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-4-2|PAPER Fri-M-V-4-2 — Label Embedding for Chinese Grapheme-to-Phoneme Conversion]]</div>|<div class="cpsessionviewpapertitle">Label Embedding for Chinese Grapheme-to-Phoneme Conversion</div><div class="cpsessionviewpaperauthor">[[Eunbi Choi|AUTHOR Eunbi Choi]], [[Hwa-Yeon Kim|AUTHOR Hwa-Yeon Kim]], [[Jong-Hwan Kim|AUTHOR Jong-Hwan Kim]], [[Jae-Min Kim|AUTHOR Jae-Min Kim]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211087.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-4-3|PAPER Fri-M-V-4-3 — PDF: Polyphone Disambiguation in Chinese by Using FLAT]]</div>|<div class="cpsessionviewpapertitle">PDF: Polyphone Disambiguation in Chinese by Using FLAT</div><div class="cpsessionviewpaperauthor">[[Haiteng Zhang|AUTHOR Haiteng Zhang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211232.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-4-4|PAPER Fri-M-V-4-4 — Improving Polyphone Disambiguation for Mandarin Chinese by Combining Mix-Pooling Strategy and Window-Based Attention]]</div>|<div class="cpsessionviewpapertitle">Improving Polyphone Disambiguation for Mandarin Chinese by Combining Mix-Pooling Strategy and Window-Based Attention</div><div class="cpsessionviewpaperauthor">[[Junjie Li|AUTHOR Junjie Li]], [[Zhiyu Zhang|AUTHOR Zhiyu Zhang]], [[Minchuan Chen|AUTHOR Minchuan Chen]], [[Jun Ma|AUTHOR Jun Ma]], [[Shaojun Wang|AUTHOR Shaojun Wang]], [[Jing Xiao|AUTHOR Jing Xiao]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210502.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-4-5|PAPER Fri-M-V-4-5 — Polyphone Disambiguation in Mandarin Chinese with Semi-Supervised Learning]]</div>|<div class="cpsessionviewpapertitle">Polyphone Disambiguation in Mandarin Chinese with Semi-Supervised Learning</div><div class="cpsessionviewpaperauthor">[[Yi Shi|AUTHOR Yi Shi]], [[Congyi Wang|AUTHOR Congyi Wang]], [[Yu Chen|AUTHOR Yu Chen]], [[Bin Wang|AUTHOR Bin Wang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210609.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-4-6|PAPER Fri-M-V-4-6 — A Neural-Network-Based Approach to Identifying Speakers in Novels]]</div>|<div class="cpsessionviewpapertitle">A Neural-Network-Based Approach to Identifying Speakers in Novels</div><div class="cpsessionviewpaperauthor">[[Yue Chen|AUTHOR Yue Chen]], [[Zhen-Hua Ling|AUTHOR Zhen-Hua Ling]], [[Qing-Feng Liu|AUTHOR Qing-Feng Liu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211092.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-4-7|PAPER Fri-M-V-4-7 — UnitNet-Based Hybrid Speech Synthesis]]</div>|<div class="cpsessionviewpapertitle">UnitNet-Based Hybrid Speech Synthesis</div><div class="cpsessionviewpaperauthor">[[Xiao Zhou|AUTHOR Xiao Zhou]], [[Zhen-Hua Ling|AUTHOR Zhen-Hua Ling]], [[Li-Rong Dai|AUTHOR Li-Rong Dai]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210946.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-4-8|PAPER Fri-M-V-4-8 — Dynamically Adaptive Machine Speech Chain Inference for TTS in Noisy Environment: Listen and Speak Louder]]</div>|<div class="cpsessionviewpapertitle">Dynamically Adaptive Machine Speech Chain Inference for TTS in Noisy Environment: Listen and Speak Louder</div><div class="cpsessionviewpaperauthor">[[Sashi Novitasari|AUTHOR Sashi Novitasari]], [[Sakriani Sakti|AUTHOR Sakriani Sakti]], [[Satoshi Nakamura|AUTHOR Satoshi Nakamura]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211192.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-4-9|PAPER Fri-M-V-4-9 — LinearSpeech: Parallel Text-to-Speech with Linear Complexity]]</div>|<div class="cpsessionviewpapertitle">LinearSpeech: Parallel Text-to-Speech with Linear Complexity</div><div class="cpsessionviewpaperauthor">[[Haozhe Zhang|AUTHOR Haozhe Zhang]], [[Zhihua Huang|AUTHOR Zhihua Huang]], [[Zengqiang Shang|AUTHOR Zengqiang Shang]], [[Pengyuan Zhang|AUTHOR Pengyuan Zhang]], [[Yonghong Yan|AUTHOR Yonghong Yan]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Friday 3 September 2021, (Virtual)|<|
|^Chairs: |^Tanja Schultz|
|^ |^Mahadeva Prasanna|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210083.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-5-1|PAPER Fri-M-V-5-1 — An Agent for Competing with Humans in a Deceptive Game Based on Vocal Cues]]</div>|<div class="cpsessionviewpapertitle">An Agent for Competing with Humans in a Deceptive Game Based on Vocal Cues</div><div class="cpsessionviewpaperauthor">[[Noa Mansbach|AUTHOR Noa Mansbach]], [[Evgeny Hershkovitch Neiterman|AUTHOR Evgeny Hershkovitch Neiterman]], [[Amos Azaria|AUTHOR Amos Azaria]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210378.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-5-2|PAPER Fri-M-V-5-2 — A Multi-Branch Deep Learning Network for Automated Detection of COVID-19]]</div>|<div class="cpsessionviewpapertitle">A Multi-Branch Deep Learning Network for Automated Detection of COVID-19</div><div class="cpsessionviewpaperauthor">[[Ahmed Fakhry|AUTHOR Ahmed Fakhry]], [[Xinyi Jiang|AUTHOR Xinyi Jiang]], [[Jaclyn Xiao|AUTHOR Jaclyn Xiao]], [[Gunvant Chaudhari|AUTHOR Gunvant Chaudhari]], [[Asriel Han|AUTHOR Asriel Han]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210438.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-5-3|PAPER Fri-M-V-5-3 — RW-Resnet: A Novel Speech Anti-Spoofing Model Using Raw Waveform]]</div>|<div class="cpsessionviewpapertitle">RW-Resnet: A Novel Speech Anti-Spoofing Model Using Raw Waveform</div><div class="cpsessionviewpaperauthor">[[Youxuan Ma|AUTHOR Youxuan Ma]], [[Zongze Ren|AUTHOR Zongze Ren]], [[Shugong Xu|AUTHOR Shugong Xu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210524.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-5-4|PAPER Fri-M-V-5-4 — Fake Audio Detection in Resource-Constrained Settings Using Microfeatures]]</div>|<div class="cpsessionviewpapertitle">Fake Audio Detection in Resource-Constrained Settings Using Microfeatures</div><div class="cpsessionviewpaperauthor">[[Hira Dhamyal|AUTHOR Hira Dhamyal]], [[Ayesha Ali|AUTHOR Ayesha Ali]], [[Ihsan Ayyub Qazi|AUTHOR Ihsan Ayyub Qazi]], [[Agha Ali Raza|AUTHOR Agha Ali Raza]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210630.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-5-5|PAPER Fri-M-V-5-5 — Coughing-Based Recognition of Covid-19 with Spatial Attentive ConvLSTM Recurrent Neural Networks]]</div>|<div class="cpsessionviewpapertitle">Coughing-Based Recognition of Covid-19 with Spatial Attentive ConvLSTM Recurrent Neural Networks</div><div class="cpsessionviewpaperauthor">[[Tianhao Yan|AUTHOR Tianhao Yan]], [[Hao Meng|AUTHOR Hao Meng]], [[Emilia Parada-Cabaleiro|AUTHOR Emilia Parada-Cabaleiro]], [[Shuo Liu|AUTHOR Shuo Liu]], [[Meishu Song|AUTHOR Meishu Song]], [[Björn W. Schuller|AUTHOR Björn W. Schuller]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210636.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-5-6|PAPER Fri-M-V-5-6 — Knowledge Distillation for Singing Voice Detection]]</div>|<div class="cpsessionviewpapertitle">Knowledge Distillation for Singing Voice Detection</div><div class="cpsessionviewpaperauthor">[[Soumava Paul|AUTHOR Soumava Paul]], [[Gurunath Reddy M.|AUTHOR Gurunath Reddy M.]], [[K. Sreenivasa Rao|AUTHOR K. Sreenivasa Rao]], [[Partha Pratim Das|AUTHOR Partha Pratim Das]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210861.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-5-7|PAPER Fri-M-V-5-7 — Age Estimation with Speech-Age Model for Heterogeneous Speech Datasets]]</div>|<div class="cpsessionviewpapertitle">Age Estimation with Speech-Age Model for Heterogeneous Speech Datasets</div><div class="cpsessionviewpaperauthor">[[Ryu Takeda|AUTHOR Ryu Takeda]], [[Kazunori Komatani|AUTHOR Kazunori Komatani]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211142.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-5-8|PAPER Fri-M-V-5-8 — Open-Set Audio Classification with Limited Training Resources Based on Augmentation Enhanced Variational Auto-Encoder GAN with Detection-Classification Joint Training]]</div>|<div class="cpsessionviewpapertitle">Open-Set Audio Classification with Limited Training Resources Based on Augmentation Enhanced Variational Auto-Encoder GAN with Detection-Classification Joint Training</div><div class="cpsessionviewpaperauthor">[[Kah Kuan Teh|AUTHOR Kah Kuan Teh]], [[Huy Dat Tran|AUTHOR Huy Dat Tran]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211245.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-5-9|PAPER Fri-M-V-5-9 — Deep Spectral-Cepstral Fusion for Shouted and Normal Speech Classification]]</div>|<div class="cpsessionviewpapertitle">Deep Spectral-Cepstral Fusion for Shouted and Normal Speech Classification</div><div class="cpsessionviewpaperauthor">[[Takahiro Fukumori|AUTHOR Takahiro Fukumori]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211592.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-5-10|PAPER Fri-M-V-5-10 — Automatic Detection of Shouted Speech Segments in Indian News Debates]]</div>|<div class="cpsessionviewpapertitle">Automatic Detection of Shouted Speech Segments in Indian News Debates</div><div class="cpsessionviewpaperauthor">[[Shikha Baghel|AUTHOR Shikha Baghel]], [[Mrinmoy Bhattacharjee|AUTHOR Mrinmoy Bhattacharjee]], [[S.R. Mahadeva Prasanna|AUTHOR S.R. Mahadeva Prasanna]], [[Prithwijit Guha|AUTHOR Prithwijit Guha]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211705.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-5-11|PAPER Fri-M-V-5-11 — Generalized Spoofing Detection Inspired from Audio Generation Artifacts]]</div>|<div class="cpsessionviewpapertitle">Generalized Spoofing Detection Inspired from Audio Generation Artifacts</div><div class="cpsessionviewpaperauthor">[[Yang Gao|AUTHOR Yang Gao]], [[Tyler Vuong|AUTHOR Tyler Vuong]], [[Mahsa Elyasi|AUTHOR Mahsa Elyasi]], [[Gaurav Bharaj|AUTHOR Gaurav Bharaj]], [[Rita Singh|AUTHOR Rita Singh]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212138.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-5-12|PAPER Fri-M-V-5-12 — Overlapped Speech Detection Based on Spectral and Spatial Feature Fusion]]</div>|<div class="cpsessionviewpapertitle">Overlapped Speech Detection Based on Spectral and Spatial Feature Fusion</div><div class="cpsessionviewpaperauthor">[[Weiguang Chen|AUTHOR Weiguang Chen]], [[Van Tung Pham|AUTHOR Van Tung Pham]], [[Eng Siong Chng|AUTHOR Eng Siong Chng]], [[Xionghu Zhong|AUTHOR Xionghu Zhong]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Friday 3 September 2021, (Virtual)|<|
|^Chairs: |^Jan Trmal|
|^ |^Xiaodong Cui|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210678.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-6-1|PAPER Fri-M-V-6-1 — Do Acoustic Word Embeddings Capture Phonological Similarity? An Empirical Study]]</div>|<div class="cpsessionviewpapertitle">Do Acoustic Word Embeddings Capture Phonological Similarity? An Empirical Study</div><div class="cpsessionviewpaperauthor">[[Badr M. Abdullah|AUTHOR Badr M. Abdullah]], [[Marius Mosbach|AUTHOR Marius Mosbach]], [[Iuliia Zaitova|AUTHOR Iuliia Zaitova]], [[Bernd Möbius|AUTHOR Bernd Möbius]], [[Dietrich Klakow|AUTHOR Dietrich Klakow]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210097.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-6-2|PAPER Fri-M-V-6-2 — Paraphrase Label Alignment for Voice Application Retrieval in Spoken Language Understanding]]</div>|<div class="cpsessionviewpapertitle">Paraphrase Label Alignment for Voice Application Retrieval in Spoken Language Understanding</div><div class="cpsessionviewpaperauthor">[[Zheng Gao|AUTHOR Zheng Gao]], [[Radhika Arava|AUTHOR Radhika Arava]], [[Qian Hu|AUTHOR Qian Hu]], [[Xibin Gao|AUTHOR Xibin Gao]], [[Thahir Mohamed|AUTHOR Thahir Mohamed]], [[Wei Xiao|AUTHOR Wei Xiao]], [[Mohamed AbdelHady|AUTHOR Mohamed AbdelHady]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210204.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-6-3|PAPER Fri-M-V-6-3 — Personalized Keyphrase Detection Using Speaker and Environment Information]]</div>|<div class="cpsessionviewpapertitle">Personalized Keyphrase Detection Using Speaker and Environment Information</div><div class="cpsessionviewpaperauthor">[[Rajeev Rikhye|AUTHOR Rajeev Rikhye]], [[Quan Wang|AUTHOR Quan Wang]], [[Qiao Liang|AUTHOR Qiao Liang]], [[Yanzhang He|AUTHOR Yanzhang He]], [[Ding Zhao|AUTHOR Ding Zhao]], [[Yiteng Huang|AUTHOR Yiteng Huang]], [[Arun Narayanan|AUTHOR Arun Narayanan]], [[Ian McGraw|AUTHOR Ian McGraw]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211428.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-6-4|PAPER Fri-M-V-6-4 — Streaming Transformer for Hardware Efficient Voice Trigger Detection and False Trigger Mitigation]]</div>|<div class="cpsessionviewpapertitle">Streaming Transformer for Hardware Efficient Voice Trigger Detection and False Trigger Mitigation</div><div class="cpsessionviewpaperauthor">[[Vineet Garg|AUTHOR Vineet Garg]], [[Wonil Chang|AUTHOR Wonil Chang]], [[Siddharth Sigtia|AUTHOR Siddharth Sigtia]], [[Saurabh Adya|AUTHOR Saurabh Adya]], [[Pramod Simha|AUTHOR Pramod Simha]], [[Pranay Dighe|AUTHOR Pranay Dighe]], [[Chandra Dhir|AUTHOR Chandra Dhir]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211966.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-6-5|PAPER Fri-M-V-6-5 — Few-Shot Keyword Spotting in Any Language]]</div>|<div class="cpsessionviewpapertitle">Few-Shot Keyword Spotting in Any Language</div><div class="cpsessionviewpaperauthor">[[Mark Mazumder|AUTHOR Mark Mazumder]], [[Colby Banbury|AUTHOR Colby Banbury]], [[Josh Meyer|AUTHOR Josh Meyer]], [[Pete Warden|AUTHOR Pete Warden]], [[Vijay Janapa Reddi|AUTHOR Vijay Janapa Reddi]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210136.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-6-6|PAPER Fri-M-V-6-6 — Text Anchor Based Metric Learning for Small-Footprint Keyword Spotting]]</div>|<div class="cpsessionviewpapertitle">Text Anchor Based Metric Learning for Small-Footprint Keyword Spotting</div><div class="cpsessionviewpaperauthor">[[Li Wang|AUTHOR Li Wang]], [[Rongzhi Gu|AUTHOR Rongzhi Gu]], [[Nuo Chen|AUTHOR Nuo Chen]], [[Yuexian Zou|AUTHOR Yuexian Zou]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210147.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-6-7|PAPER Fri-M-V-6-7 — A Meta-Learning Approach for User-Defined Spoken Term Classification with Varying Classes and Examples]]</div>|<div class="cpsessionviewpapertitle">A Meta-Learning Approach for User-Defined Spoken Term Classification with Varying Classes and Examples</div><div class="cpsessionviewpaperauthor">[[Yangbin Chen|AUTHOR Yangbin Chen]], [[Tom Ko|AUTHOR Tom Ko]], [[Jianping Wang|AUTHOR Jianping Wang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210400.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-6-8|PAPER Fri-M-V-6-8 — Auxiliary Sequence Labeling Tasks for Disfluency Detection]]</div>|<div class="cpsessionviewpapertitle">Auxiliary Sequence Labeling Tasks for Disfluency Detection</div><div class="cpsessionviewpaperauthor">[[Dongyub Lee|AUTHOR Dongyub Lee]], [[Byeongil Ko|AUTHOR Byeongil Ko]], [[Myeong Cheol Shin|AUTHOR Myeong Cheol Shin]], [[Taesun Whang|AUTHOR Taesun Whang]], [[Daniel Lee|AUTHOR Daniel Lee]], [[Eunhwa Kim|AUTHOR Eunhwa Kim]], [[Eunggyun Kim|AUTHOR Eunggyun Kim]], [[Jaechoon Jo|AUTHOR Jaechoon Jo]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210458.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-6-9|PAPER Fri-M-V-6-9 — Energy-Friendly Keyword Spotting System Using Add-Based Convolution]]</div>|<div class="cpsessionviewpapertitle">Energy-Friendly Keyword Spotting System Using Add-Based Convolution</div><div class="cpsessionviewpaperauthor">[[Hang Zhou|AUTHOR Hang Zhou]], [[Wenchao Hu|AUTHOR Wenchao Hu]], [[Yu Ting Yeung|AUTHOR Yu Ting Yeung]], [[Xiao Chen|AUTHOR Xiao Chen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210602.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-6-10|PAPER Fri-M-V-6-10 — The 2020 Personalized Voice Trigger Challenge: Open Datasets, Evaluation Metrics, Baseline System and Results]]</div>|<div class="cpsessionviewpapertitle">The 2020 Personalized Voice Trigger Challenge: Open Datasets, Evaluation Metrics, Baseline System and Results</div><div class="cpsessionviewpaperauthor">[[Yan Jia|AUTHOR Yan Jia]], [[Xingming Wang|AUTHOR Xingming Wang]], [[Xiaoyi Qin|AUTHOR Xiaoyi Qin]], [[Yinping Zhang|AUTHOR Yinping Zhang]], [[Xuyang Wang|AUTHOR Xuyang Wang]], [[Junjie Wang|AUTHOR Junjie Wang]], [[Dong Zhang|AUTHOR Dong Zhang]], [[Ming Li|AUTHOR Ming Li]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210817.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-6-11|PAPER Fri-M-V-6-11 — Auto-KWS 2021 Challenge: Task, Datasets, and Baselines]]</div>|<div class="cpsessionviewpapertitle">Auto-KWS 2021 Challenge: Task, Datasets, and Baselines</div><div class="cpsessionviewpaperauthor">[[Jingsong Wang|AUTHOR Jingsong Wang]], [[Yuxuan He|AUTHOR Yuxuan He]], [[Chunyu Zhao|AUTHOR Chunyu Zhao]], [[Qijie Shao|AUTHOR Qijie Shao]], [[Wei-Wei Tu|AUTHOR Wei-Wei Tu]], [[Tom Ko|AUTHOR Tom Ko]], [[Hung-yi Lee|AUTHOR Hung-yi Lee]], [[Lei Xie|AUTHOR Lei Xie]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211286.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-6-12|PAPER Fri-M-V-6-12 — Keyword Transformer: A Self-Attention Model for Keyword Spotting]]</div>|<div class="cpsessionviewpapertitle">Keyword Transformer: A Self-Attention Model for Keyword Spotting</div><div class="cpsessionviewpaperauthor">[[Axel Berg|AUTHOR Axel Berg]], [[Mark O’Connor|AUTHOR Mark O’Connor]], [[Miguel Tairum Cruz|AUTHOR Miguel Tairum Cruz]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211395.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-6-13|PAPER Fri-M-V-6-13 — Teaching Keyword Spotters to Spot New Keywords with Limited Examples]]</div>|<div class="cpsessionviewpapertitle">Teaching Keyword Spotters to Spot New Keywords with Limited Examples</div><div class="cpsessionviewpaperauthor">[[Abhijeet Awasthi|AUTHOR Abhijeet Awasthi]], [[Kevin Kilgour|AUTHOR Kevin Kilgour]], [[Hassan Rom|AUTHOR Hassan Rom]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Friday 3 September 2021, (Virtual)|<|
|^Chairs: |^Anna Silnova|
|^ |^Massimiliano Todisco|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210702.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-7-1|PAPER Fri-M-V-7-1 — A Comparative Study on Recent Neural Spoofing Countermeasures for Synthetic Speech Detection]]</div>|<div class="cpsessionviewpapertitle">A Comparative Study on Recent Neural Spoofing Countermeasures for Synthetic Speech Detection</div><div class="cpsessionviewpaperauthor">[[Xin Wang|AUTHOR Xin Wang]], [[Junichi Yamagishi|AUTHOR Junichi Yamagishi]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210738.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-7-2|PAPER Fri-M-V-7-2 — An Initial Investigation for Detecting Partially Spoofed Audio]]</div>|<div class="cpsessionviewpapertitle">An Initial Investigation for Detecting Partially Spoofed Audio</div><div class="cpsessionviewpaperauthor">[[Lin Zhang|AUTHOR Lin Zhang]], [[Xin Wang|AUTHOR Xin Wang]], [[Erica Cooper|AUTHOR Erica Cooper]], [[Junichi Yamagishi|AUTHOR Junichi Yamagishi]], [[Jose Patino|AUTHOR Jose Patino]], [[Nicholas Evans|AUTHOR Nicholas Evans]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210847.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-7-3|PAPER Fri-M-V-7-3 — Siamese Network with wav2vec Feature for Spoofing Speech Detection]]</div>|<div class="cpsessionviewpapertitle">Siamese Network with wav2vec Feature for Spoofing Speech Detection</div><div class="cpsessionviewpaperauthor">[[Yang Xie|AUTHOR Yang Xie]], [[Zhenchuan Zhang|AUTHOR Zhenchuan Zhang]], [[Yingchun Yang|AUTHOR Yingchun Yang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210960.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-7-4|PAPER Fri-M-V-7-4 — Cross-Database Replay Detection in Terminal-Dependent Speaker Verification]]</div>|<div class="cpsessionviewpapertitle">Cross-Database Replay Detection in Terminal-Dependent Speaker Verification</div><div class="cpsessionviewpaperauthor">[[Xingliang Cheng|AUTHOR Xingliang Cheng]], [[Mingxing Xu|AUTHOR Mingxing Xu]], [[Thomas Fang Zheng|AUTHOR Thomas Fang Zheng]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211281.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-7-5|PAPER Fri-M-V-7-5 — The Effect of Silence and Dual-Band Fusion in Anti-Spoofing System]]</div>|<div class="cpsessionviewpapertitle">The Effect of Silence and Dual-Band Fusion in Anti-Spoofing System</div><div class="cpsessionviewpaperauthor">[[Yuxiang Zhang|AUTHOR Yuxiang Zhang]], [[Wenchao Wang|AUTHOR Wenchao Wang]], [[Pengyuan Zhang|AUTHOR Pengyuan Zhang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211343.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-7-6|PAPER Fri-M-V-7-6 — Pairing Weak with Strong: Twin Models for Defending Against Adversarial Attack on Speaker Verification]]</div>|<div class="cpsessionviewpapertitle">Pairing Weak with Strong: Twin Models for Defending Against Adversarial Attack on Speaker Verification</div><div class="cpsessionviewpaperauthor">[[Zhiyuan Peng|AUTHOR Zhiyuan Peng]], [[Xu Li|AUTHOR Xu Li]], [[Tan Lee|AUTHOR Tan Lee]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211404.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-7-7|PAPER Fri-M-V-7-7 — Attention-Based Convolutional Neural Network for ASV Spoofing Detection]]</div>|<div class="cpsessionviewpapertitle">Attention-Based Convolutional Neural Network for ASV Spoofing Detection</div><div class="cpsessionviewpaperauthor">[[Hefei Ling|AUTHOR Hefei Ling]], [[Leichao Huang|AUTHOR Leichao Huang]], [[Junrui Huang|AUTHOR Junrui Huang]], [[Baiyan Zhang|AUTHOR Baiyan Zhang]], [[Ping Li|AUTHOR Ping Li]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211452.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-7-8|PAPER Fri-M-V-7-8 — Voting for the Right Answer: Adversarial Defense for Speaker Verification]]</div>|<div class="cpsessionviewpapertitle">Voting for the Right Answer: Adversarial Defense for Speaker Verification</div><div class="cpsessionviewpaperauthor">[[Haibin Wu|AUTHOR Haibin Wu]], [[Yang Zhang|AUTHOR Yang Zhang]], [[Zhiyong Wu|AUTHOR Zhiyong Wu]], [[Dong Wang|AUTHOR Dong Wang]], [[Hung-yi Lee|AUTHOR Hung-yi Lee]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211522.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-7-9|PAPER Fri-M-V-7-9 — Visualizing Classifier Adjacency Relations: A Case Study in Speaker Verification and Voice Anti-Spoofing]]</div>|<div class="cpsessionviewpapertitle">Visualizing Classifier Adjacency Relations: A Case Study in Speaker Verification and Voice Anti-Spoofing</div><div class="cpsessionviewpaperauthor">[[Tomi Kinnunen|AUTHOR Tomi Kinnunen]], [[Andreas Nautsch|AUTHOR Andreas Nautsch]], [[Md. Sahidullah|AUTHOR Md. Sahidullah]], [[Nicholas Evans|AUTHOR Nicholas Evans]], [[Xin Wang|AUTHOR Xin Wang]], [[Massimiliano Todisco|AUTHOR Massimiliano Todisco]], [[Héctor Delgado|AUTHOR Héctor Delgado]], [[Junichi Yamagishi|AUTHOR Junichi Yamagishi]], [[Kong Aik Lee|AUTHOR Kong Aik Lee]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211759.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-7-10|PAPER Fri-M-V-7-10 — Representation Learning to Classify and Detect Adversarial Attacks Against Speaker and Speech Recognition Systems]]</div>|<div class="cpsessionviewpapertitle">Representation Learning to Classify and Detect Adversarial Attacks Against Speaker and Speech Recognition Systems</div><div class="cpsessionviewpaperauthor">[[Jesús Villalba|AUTHOR Jesús Villalba]], [[Sonal Joshi|AUTHOR Sonal Joshi]], [[Piotr Żelasko|AUTHOR Piotr Żelasko]], [[Najim Dehak|AUTHOR Najim Dehak]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211820.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-7-11|PAPER Fri-M-V-7-11 — An Empirical Study on Channel Effects for Synthetic Voice Spoofing Countermeasure Systems]]</div>|<div class="cpsessionviewpapertitle">An Empirical Study on Channel Effects for Synthetic Voice Spoofing Countermeasure Systems</div><div class="cpsessionviewpaperauthor">[[You Zhang|AUTHOR You Zhang]], [[Ge Zhu|AUTHOR Ge Zhu]], [[Fei Jiang|AUTHOR Fei Jiang]], [[Zhiyao Duan|AUTHOR Zhiyao Duan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212125.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-7-12|PAPER Fri-M-V-7-12 — Channel-Wise Gated Res2Net: Towards Robust Detection of Synthetic Speech Attacks]]</div>|<div class="cpsessionviewpapertitle">Channel-Wise Gated Res2Net: Towards Robust Detection of Synthetic Speech Attacks</div><div class="cpsessionviewpaperauthor">[[Xu Li|AUTHOR Xu Li]], [[Xixin Wu|AUTHOR Xixin Wu]], [[Hui Lu|AUTHOR Hui Lu]], [[Xunying Liu|AUTHOR Xunying Liu]], [[Helen Meng|AUTHOR Helen Meng]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211187.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Fri-M-V-7-13|PAPER Fri-M-V-7-13 — Partially-Connected Differentiable Architecture Search for Deepfake and Spoofing Detection]]</div>|<div class="cpsessionviewpapertitle">Partially-Connected Differentiable Architecture Search for Deepfake and Spoofing Detection</div><div class="cpsessionviewpaperauthor">[[Wanying Ge|AUTHOR Wanying Ge]], [[Michele Panariello|AUTHOR Michele Panariello]], [[Jose Patino|AUTHOR Jose Patino]], [[Massimiliano Todisco|AUTHOR Massimiliano Todisco]], [[Nicholas Evans|AUTHOR Nicholas Evans]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:00–14:00, Friday 3 September 2021, Room A+B|<|
|^Chair: |^TBD|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> |^<div class="cpsessionviewpapercode">[[Fri-Survey|PAPER Fri-Survey — Child Language Acquisition Studied with Wearables]]</div>|<div class="cpsessionviewpapertitle">Child Language Acquisition Studied with Wearables</div><div class="cpsessionviewpaperauthor">[[Alejandrina Cristia|AUTHOR Alejandrina Cristia]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Author Index Button}}
</p></div>
|cpborderless|k
|cptablecelltopbottomspace2|k
|cpsessionlisttable|k
|^<div class="cpsessionlistsessioncode">[[Tue-M-O-1|SESSION Tue-M-O-1 — Speech Synthesis: Other Topics]]</div> |^<div class="cpsessionlistsessionname">Speech Synthesis: Other Topics</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-M-O-2|SESSION Tue-M-O-2 — Disordered Speech]]</div> |^<div class="cpsessionlistsessionname">Disordered Speech</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-M-O-3|SESSION Tue-M-O-3 — Speech Signal Analysis and Representation II]]</div> |^<div class="cpsessionlistsessionname">Speech Signal Analysis and Representation II</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-M-V-1|SESSION Tue-M-V-1 — Feature, Embedding and Neural Architecture for Speaker Recognition]]</div> |^<div class="cpsessionlistsessionname">Feature, Embedding and Neural Architecture for Speaker Recognition</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-M-V-2|SESSION Tue-M-V-2 — Speech Synthesis: Toward End-to-End Synthesis II]]</div> |^<div class="cpsessionlistsessionname">Speech Synthesis: Toward End-to-End Synthesis II</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-M-V-3|SESSION Tue-M-V-3 — Speech Enhancement and Intelligibility]]</div> |^<div class="cpsessionlistsessionname">Speech Enhancement and Intelligibility</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-M-V-4|SESSION Tue-M-V-4 — Spoken Dialogue Systems I]]</div> |^<div class="cpsessionlistsessionname">Spoken Dialogue Systems I</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-M-V-5|SESSION Tue-M-V-5 — Topics in ASR: Robustness, Feature Extraction, and Far-Field ASR]]</div> |^<div class="cpsessionlistsessionname">Topics in ASR: Robustness, Feature Extraction, and Far-Field ASR</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-M-V-6|SESSION Tue-M-V-6 — Voice Activity Detection and Keyword Spotting]]</div> |^<div class="cpsessionlistsessionname">Voice Activity Detection and Keyword Spotting</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-M-V-7|SESSION Tue-M-V-7 — Voice and Voicing]]</div> |^<div class="cpsessionlistsessionname">Voice and Voicing</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-M-SS-1|SESSION Tue-M-SS-1 — The INTERSPEECH 2021 Computational Paralinguistics Challenge (ComParE) — COVID-19 Cough, COVID-19 Speech, Escalation & Primates]]</div> |^<div class="cpsessionlistsessionname">The INTERSPEECH 2021 Computational Paralinguistics Challenge (ComParE) — COVID-19 Cough, COVID-19 Speech, Escalation & Primates</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-Survey|SESSION Tue-Survey — Survey Talk 1: Heidi Christensen]]</div> |^<div class="cpsessionlistsessionname">Survey Talk 1: Heidi Christensen</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-A-O-1|SESSION Tue-A-O-1 — Embedding and Network Architecture for Speaker Recognition]]</div> |^<div class="cpsessionlistsessionname">Embedding and Network Architecture for Speaker Recognition</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-A-O-2|SESSION Tue-A-O-2 — Speech Perception I]]</div> |^<div class="cpsessionlistsessionname">Speech Perception I</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-A-V-1|SESSION Tue-A-V-1 — Acoustic Event Detection and Acoustic Scene Classification]]</div> |^<div class="cpsessionlistsessionname">Acoustic Event Detection and Acoustic Scene Classification</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-A-V-2|SESSION Tue-A-V-2 — Diverse Modes of Speech Acquisition and Processing]]</div> |^<div class="cpsessionlistsessionname">Diverse Modes of Speech Acquisition and Processing</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-A-V-3|SESSION Tue-A-V-3 — Multi-Channel Speech Enhancement and Hearing Aids]]</div> |^<div class="cpsessionlistsessionname">Multi-Channel Speech Enhancement and Hearing Aids</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-A-V-4|SESSION Tue-A-V-4 — Self-Supervision and Semi-Supervision for Neural ASR Training]]</div> |^<div class="cpsessionlistsessionname">Self-Supervision and Semi-Supervision for Neural ASR Training</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-A-V-5|SESSION Tue-A-V-5 — Spoken Language Processing I]]</div> |^<div class="cpsessionlistsessionname">Spoken Language Processing I</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-A-V-6|SESSION Tue-A-V-6 — Voice Conversion and Adaptation II]]</div> |^<div class="cpsessionlistsessionname">Voice Conversion and Adaptation II</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-A-SS-1|SESSION Tue-A-SS-1 — Privacy-Preserving Machine Learning for Audio & Speech Processing]]</div> |^<div class="cpsessionlistsessionname">Privacy-Preserving Machine Learning for Audio & Speech Processing</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-A-SS-2|SESSION Tue-A-SS-2 — The First DiCOVA Challenge: Diagnosis of COVID-19 Using Acoustics]]</div> |^<div class="cpsessionlistsessionname">The First DiCOVA Challenge: Diagnosis of COVID-19 Using Acoustics</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-A-S&T-1|SESSION Tue-A-S&T-1 — Show and Tell 1]]</div> |^<div class="cpsessionlistsessionname">Show and Tell 1</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-Keynote|SESSION Tue-Keynote — Keynote 1: Hermann Ney]]</div> |^<div class="cpsessionlistsessionname">Keynote 1: Hermann Ney</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-E-O-1|SESSION Tue-E-O-1 — ASR Technologies and Systems]]</div> |^<div class="cpsessionlistsessionname">ASR Technologies and Systems</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-E-O-2|SESSION Tue-E-O-2 — Phonation and Voicing]]</div> |^<div class="cpsessionlistsessionname">Phonation and Voicing</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-E-O-3|SESSION Tue-E-O-3 — Health and Affect I]]</div> |^<div class="cpsessionlistsessionname">Health and Affect I</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-E-V-1|SESSION Tue-E-V-1 — Robust Speaker Recognition]]</div> |^<div class="cpsessionlistsessionname">Robust Speaker Recognition</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-E-V-2|SESSION Tue-E-V-2 — Source Separation, Dereverberation and Echo Cancellation]]</div> |^<div class="cpsessionlistsessionname">Source Separation, Dereverberation and Echo Cancellation</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-E-V-3|SESSION Tue-E-V-3 — Speech Signal Analysis and Representation I]]</div> |^<div class="cpsessionlistsessionname">Speech Signal Analysis and Representation I</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-E-V-4|SESSION Tue-E-V-4 — Spoken Language Understanding I]]</div> |^<div class="cpsessionlistsessionname">Spoken Language Understanding I</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-E-V-5|SESSION Tue-E-V-5 — Topics in ASR: Adaptation, Transfer Learning, Children’s Speech, and Low-Resource Settings]]</div> |^<div class="cpsessionlistsessionname">Topics in ASR: Adaptation, Transfer Learning, Children’s Speech, and Low-Resource Settings</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-E-V-6|SESSION Tue-E-V-6 — Voice Conversion and Adaptation I]]</div> |^<div class="cpsessionlistsessionname">Voice Conversion and Adaptation I</div> |
|^<div class="cpsessionlistsessioncode">[[Tue-E-SS-1|SESSION Tue-E-SS-1 — Voice Quality Characterization for Clinical Voice Assessment: Voice Production, Acoustics, and Auditory Perception]]</div> |^<div class="cpsessionlistsessionname">Voice Quality Characterization for Clinical Voice Assessment: Voice Production, Acoustics, and Auditory Perception</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-M-O-1|SESSION Wed-M-O-1 — Miscellaneous Topics in ASR]]</div> |^<div class="cpsessionlistsessionname">Miscellaneous Topics in ASR</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-M-O-2|SESSION Wed-M-O-2 — Phonetics I]]</div> |^<div class="cpsessionlistsessionname">Phonetics I</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-M-O-3|SESSION Wed-M-O-3 — Target Speaker Detection, Localization and Separation]]</div> |^<div class="cpsessionlistsessionname">Target Speaker Detection, Localization and Separation</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-M-V-1|SESSION Wed-M-V-1 — Language and Accent Recognition]]</div> |^<div class="cpsessionlistsessionname">Language and Accent Recognition</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-M-V-2|SESSION Wed-M-V-2 — Low-Resource Speech Recognition]]</div> |^<div class="cpsessionlistsessionname">Low-Resource Speech Recognition</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-M-V-3|SESSION Wed-M-V-3 — Speech Synthesis: Singing, Multimodal, Crosslingual Synthesis]]</div> |^<div class="cpsessionlistsessionname">Speech Synthesis: Singing, Multimodal, Crosslingual Synthesis</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-M-V-4|SESSION Wed-M-V-4 — Speech Coding and Privacy]]</div> |^<div class="cpsessionlistsessionname">Speech Coding and Privacy</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-M-V-5|SESSION Wed-M-V-5 — Speech Perception II]]</div> |^<div class="cpsessionlistsessionname">Speech Perception II</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-M-V-6|SESSION Wed-M-V-6 — Streaming for ASR/RNN Transducers]]</div> |^<div class="cpsessionlistsessionname">Streaming for ASR/RNN Transducers</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-M-SS-1|SESSION Wed-M-SS-1 — ConferencingSpeech 2021 Challenge: Far-Field Multi-Channel Speech Enhancement for Video Conferencing]]</div> |^<div class="cpsessionlistsessionname">ConferencingSpeech 2021 Challenge: Far-Field Multi-Channel Speech Enhancement for Video Conferencing</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-Survey|SESSION Wed-Survey — Survey Talk 2: Sriram Ganapathy]]</div> |^<div class="cpsessionlistsessionname">Survey Talk 2: Sriram Ganapathy</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-Keynote|SESSION Wed-Keynote — Keynote 2: Pascale Fung]]</div> |^<div class="cpsessionlistsessionname">Keynote 2: Pascale Fung</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-A-O-1|SESSION Wed-A-O-1 — Language Modeling and Text-Based Innovations for ASR]]</div> |^<div class="cpsessionlistsessionname">Language Modeling and Text-Based Innovations for ASR</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-A-O-2|SESSION Wed-A-O-2 — Speaker, Language, and Privacy]]</div> |^<div class="cpsessionlistsessionname">Speaker, Language, and Privacy</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-A-O-3|SESSION Wed-A-O-3 — Assessment of Pathological Speech and Language I]]</div> |^<div class="cpsessionlistsessionname">Assessment of Pathological Speech and Language I</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-A-V-1|SESSION Wed-A-V-1 — Communication and Interaction, Multimodality]]</div> |^<div class="cpsessionlistsessionname">Communication and Interaction, Multimodality</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-A-V-2|SESSION Wed-A-V-2 — Language and Lexical Modeling for ASR]]</div> |^<div class="cpsessionlistsessionname">Language and Lexical Modeling for ASR</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-A-V-3|SESSION Wed-A-V-3 — Novel Neural Network Architectures for ASR]]</div> |^<div class="cpsessionlistsessionname">Novel Neural Network Architectures for ASR</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-A-V-4|SESSION Wed-A-V-4 — Speech Localization, Enhancement, and Quality Assessment]]</div> |^<div class="cpsessionlistsessionname">Speech Localization, Enhancement, and Quality Assessment</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-A-V-5|SESSION Wed-A-V-5 — Speech Synthesis: Neural Waveform Generation]]</div> |^<div class="cpsessionlistsessionname">Speech Synthesis: Neural Waveform Generation</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-A-V-6|SESSION Wed-A-V-6 — Spoken Machine Translation]]</div> |^<div class="cpsessionlistsessionname">Spoken Machine Translation</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-A-SS-1|SESSION Wed-A-SS-1 — SdSV Challenge 2021: Analysis and Exploration of New Ideas on Short-Duration Speaker Verification]]</div> |^<div class="cpsessionlistsessionname">SdSV Challenge 2021: Analysis and Exploration of New Ideas on Short-Duration Speaker Verification</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-A-S&T-1|SESSION Wed-A-S&T-1 — Show and Tell 2]]</div> |^<div class="cpsessionlistsessionname">Show and Tell 2</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-E-O-1|SESSION Wed-E-O-1 — Graph and End-to-End Learning for Speaker Recognition]]</div> |^<div class="cpsessionlistsessionname">Graph and End-to-End Learning for Speaker Recognition</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-E-O-2|SESSION Wed-E-O-2 — Spoken Language Processing II]]</div> |^<div class="cpsessionlistsessionname">Spoken Language Processing II</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-E-O-3|SESSION Wed-E-O-3 — Speech and Audio Analysis]]</div> |^<div class="cpsessionlistsessionname">Speech and Audio Analysis</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-E-V-1|SESSION Wed-E-V-1 — Cross/Multi-Lingual and Code-Switched ASR]]</div> |^<div class="cpsessionlistsessionname">Cross/Multi-Lingual and Code-Switched ASR</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-E-V-2|SESSION Wed-E-V-2 — Health and Affect II]]</div> |^<div class="cpsessionlistsessionname">Health and Affect II</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-E-V-3|SESSION Wed-E-V-3 — Neural Network Training Methods for ASR]]</div> |^<div class="cpsessionlistsessionname">Neural Network Training Methods for ASR</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-E-V-4|SESSION Wed-E-V-4 — Prosodic Features and Structure]]</div> |^<div class="cpsessionlistsessionname">Prosodic Features and Structure</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-E-V-5|SESSION Wed-E-V-5 — Single-Channel Speech Enhancement]]</div> |^<div class="cpsessionlistsessionname">Single-Channel Speech Enhancement</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-E-V-6|SESSION Wed-E-V-6 — Speech Synthesis: Tools, Data, Evaluation]]</div> |^<div class="cpsessionlistsessionname">Speech Synthesis: Tools, Data, Evaluation</div> |
|^<div class="cpsessionlistsessioncode">[[Wed-E-SS-1|SESSION Wed-E-SS-1 — INTERSPEECH 2021 Deep Noise Suppression Challenge]]</div> |^<div class="cpsessionlistsessionname">INTERSPEECH 2021 Deep Noise Suppression Challenge</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-M-O-1|SESSION Thu-M-O-1 — Neural Network Training Methods and Architectures for ASR]]</div> |^<div class="cpsessionlistsessionname">Neural Network Training Methods and Architectures for ASR</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-M-O-2|SESSION Thu-M-O-2 — Emotion and Sentiment Analysis I]]</div> |^<div class="cpsessionlistsessionname">Emotion and Sentiment Analysis I</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-M-O-3|SESSION Thu-M-O-3 — Linguistic Components in End-to-End ASR]]</div> |^<div class="cpsessionlistsessionname">Linguistic Components in End-to-End ASR</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-M-V-1|SESSION Thu-M-V-1 — Assessment of Pathological Speech and Language II]]</div> |^<div class="cpsessionlistsessionname">Assessment of Pathological Speech and Language II</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-M-V-2|SESSION Thu-M-V-2 — Multimodal Systems]]</div> |^<div class="cpsessionlistsessionname">Multimodal Systems</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-M-V-3|SESSION Thu-M-V-3 — Source Separation I]]</div> |^<div class="cpsessionlistsessionname">Source Separation I</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-M-V-4|SESSION Thu-M-V-4 — Speaker Diarization I]]</div> |^<div class="cpsessionlistsessionname">Speaker Diarization I</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-M-V-5|SESSION Thu-M-V-5 — Speech Synthesis: Prosody Modeling I]]</div> |^<div class="cpsessionlistsessionname">Speech Synthesis: Prosody Modeling I</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-M-V-6|SESSION Thu-M-V-6 — Speech Production II]]</div> |^<div class="cpsessionlistsessionname">Speech Production II</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-M-V-7|SESSION Thu-M-V-7 — Spoken Dialogue Systems II]]</div> |^<div class="cpsessionlistsessionname">Spoken Dialogue Systems II</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-M-SS-1|SESSION Thu-M-SS-1 — Oriental Language Recognition]]</div> |^<div class="cpsessionlistsessionname">Oriental Language Recognition</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-M-SS-2|SESSION Thu-M-SS-2 — Automatic Speech Recognition in Air Traffic Management]]</div> |^<div class="cpsessionlistsessionname">Automatic Speech Recognition in Air Traffic Management</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-M-S&T-1|SESSION Thu-M-S&T-1 — Show and Tell 3]]</div> |^<div class="cpsessionlistsessionname">Show and Tell 3</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-Survey|SESSION Thu-Survey — Survey Talk 3: Karen Livescu]]</div> |^<div class="cpsessionlistsessionname">Survey Talk 3: Karen Livescu</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-Keynote|SESSION Thu-Keynote — Keynote 3: Mounya Elhilali]]</div> |^<div class="cpsessionlistsessionname">Keynote 3: Mounya Elhilali</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-A-O-1|SESSION Thu-A-O-1 — Speech Production I]]</div> |^<div class="cpsessionlistsessionname">Speech Production I</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-A-O-2|SESSION Thu-A-O-2 — Speech Enhancement and Coding]]</div> |^<div class="cpsessionlistsessionname">Speech Enhancement and Coding</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-A-V-1|SESSION Thu-A-V-1 — Emotion and Sentiment Analysis II]]</div> |^<div class="cpsessionlistsessionname">Emotion and Sentiment Analysis II</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-A-V-2|SESSION Thu-A-V-2 — Multi- and Cross-Lingual ASR, Other Topics in ASR]]</div> |^<div class="cpsessionlistsessionname">Multi- and Cross-Lingual ASR, Other Topics in ASR</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-A-V-3|SESSION Thu-A-V-3 — Source Separation II]]</div> |^<div class="cpsessionlistsessionname">Source Separation II</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-A-V-4|SESSION Thu-A-V-4 — Speaker Diarization II]]</div> |^<div class="cpsessionlistsessionname">Speaker Diarization II</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-A-V-5|SESSION Thu-A-V-5 — Speech Synthesis: Toward End-to-End Synthesis I]]</div> |^<div class="cpsessionlistsessionname">Speech Synthesis: Toward End-to-End Synthesis I</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-A-V-6|SESSION Thu-A-V-6 — Tools, Corpora and Resources]]</div> |^<div class="cpsessionlistsessionname">Tools, Corpora and Resources</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-A-SS-1|SESSION Thu-A-SS-1 — Non-Autoregressive Sequential Modeling for Speech Processing]]</div> |^<div class="cpsessionlistsessionname">Non-Autoregressive Sequential Modeling for Speech Processing</div> |
|^<div class="cpsessionlistsessioncode">[[Thu-A-SS-2|SESSION Thu-A-SS-2 — The ADReSSo Challenge: Detecting Cognitive Decline Using Speech Only]]</div> |^<div class="cpsessionlistsessionname">The ADReSSo Challenge: Detecting Cognitive Decline Using Speech Only</div> |
|^<div class="cpsessionlistsessioncode">[[Fri-M-O-1|SESSION Fri-M-O-1 — Robust and Far-Field ASR]]</div> |^<div class="cpsessionlistsessionname">Robust and Far-Field ASR</div> |
|^<div class="cpsessionlistsessioncode">[[Fri-M-O-2|SESSION Fri-M-O-2 — Speech Synthesis: Prosody Modeling II]]</div> |^<div class="cpsessionlistsessionname">Speech Synthesis: Prosody Modeling II</div> |
|^<div class="cpsessionlistsessioncode">[[Fri-M-O-3|SESSION Fri-M-O-3 — Source Separation III]]</div> |^<div class="cpsessionlistsessionname">Source Separation III</div> |
|^<div class="cpsessionlistsessioncode">[[Fri-M-V-1|SESSION Fri-M-V-1 — Non-Native Speech]]</div> |^<div class="cpsessionlistsessionname">Non-Native Speech</div> |
|^<div class="cpsessionlistsessioncode">[[Fri-M-V-2|SESSION Fri-M-V-2 — Phonetics II]]</div> |^<div class="cpsessionlistsessionname">Phonetics II</div> |
|^<div class="cpsessionlistsessioncode">[[Fri-M-V-3|SESSION Fri-M-V-3 — Search/Decoding Techniques and Confidence Measures for ASR]]</div> |^<div class="cpsessionlistsessionname">Search/Decoding Techniques and Confidence Measures for ASR</div> |
|^<div class="cpsessionlistsessioncode">[[Fri-M-V-4|SESSION Fri-M-V-4 — Speech Synthesis: Linguistic Processing, Paradigms and Other Topics]]</div> |^<div class="cpsessionlistsessionname">Speech Synthesis: Linguistic Processing, Paradigms and Other Topics</div> |
|^<div class="cpsessionlistsessioncode">[[Fri-M-V-5|SESSION Fri-M-V-5 — Speech Type Classification and Diagnosis]]</div> |^<div class="cpsessionlistsessionname">Speech Type Classification and Diagnosis</div> |
|^<div class="cpsessionlistsessioncode">[[Fri-M-V-6|SESSION Fri-M-V-6 — Spoken Term Detection & Voice Search]]</div> |^<div class="cpsessionlistsessionname">Spoken Term Detection & Voice Search</div> |
|^<div class="cpsessionlistsessioncode">[[Fri-M-V-7|SESSION Fri-M-V-7 — Voice Anti-Spoofing and Countermeasure]]</div> |^<div class="cpsessionlistsessionname">Voice Anti-Spoofing and Countermeasure</div> |
|^<div class="cpsessionlistsessioncode">[[Fri-M-SS-1|SESSION Fri-M-SS-1 — OpenASR20 and Low Resource ASR Development]]</div> |^<div class="cpsessionlistsessionname">OpenASR20 and Low Resource ASR Development</div> |
|^<div class="cpsessionlistsessioncode">[[Fri-Survey|SESSION Fri-Survey — Survey Talk 4: Alejandrina Cristia]]</div> |^<div class="cpsessionlistsessionname">Survey Talk 4: Alejandrina Cristia</div> |
|^<div class="cpsessionlistsessioncode">[[Fri-Keynote|SESSION Fri-Keynote — Keynote 4: Tomáš Mikolov]]</div> |^<div class="cpsessionlistsessionname">Keynote 4: Tomáš Mikolov</div> |
|^<div class="cpsessionlistsessioncode">[[Fri-A-O-1|SESSION Fri-A-O-1 — Voice Activity Detection]]</div> |^<div class="cpsessionlistsessionname">Voice Activity Detection</div> |
|^<div class="cpsessionlistsessioncode">[[Fri-A-O-2|SESSION Fri-A-O-2 — Keyword Search and Spoken Language Processing]]</div> |^<div class="cpsessionlistsessionname">Keyword Search and Spoken Language Processing</div> |
|^<div class="cpsessionlistsessioncode">[[Fri-A-V-1|SESSION Fri-A-V-1 — Applications in Transcription, Education and Learning]]</div> |^<div class="cpsessionlistsessionname">Applications in Transcription, Education and Learning</div> |
|^<div class="cpsessionlistsessioncode">[[Fri-A-V-2|SESSION Fri-A-V-2 — Emotion and Sentiment Analysis III]]</div> |^<div class="cpsessionlistsessionname">Emotion and Sentiment Analysis III</div> |
|^<div class="cpsessionlistsessioncode">[[Fri-A-V-3|SESSION Fri-A-V-3 — Resource-Constrained ASR]]</div> |^<div class="cpsessionlistsessionname">Resource-Constrained ASR</div> |
|^<div class="cpsessionlistsessioncode">[[Fri-A-V-4|SESSION Fri-A-V-4 — Speaker Recognition: Applications]]</div> |^<div class="cpsessionlistsessionname">Speaker Recognition: Applications</div> |
|^<div class="cpsessionlistsessioncode">[[Fri-A-V-5|SESSION Fri-A-V-5 — Speech Synthesis: Speaking Style and Emotion]]</div> |^<div class="cpsessionlistsessionname">Speech Synthesis: Speaking Style and Emotion</div> |
|^<div class="cpsessionlistsessioncode">[[Fri-A-V-6|SESSION Fri-A-V-6 — Spoken Language Understanding II]]</div> |^<div class="cpsessionlistsessionname">Spoken Language Understanding II</div> |
|^<div class="cpsessionlistsessioncode">[[Fri-A-SS-1|SESSION Fri-A-SS-1 — INTERSPEECH 2021 Acoustic Echo Cancellation Challenge]]</div> |^<div class="cpsessionlistsessionname">INTERSPEECH 2021 Acoustic Echo Cancellation Challenge</div> |
|^<div class="cpsessionlistsessioncode">[[Fri-A-SS-2|SESSION Fri-A-SS-2 — Speech Recognition of Atypical Speech]]</div> |^<div class="cpsessionlistsessionname">Speech Recognition of Atypical Speech</div> |
|^<div class="cpsessionlistsessioncode">[[Fri-A-S&T-1|SESSION Fri-A-S&T-1 — Show and Tell 4]]</div> |^<div class="cpsessionlistsessionname">Show and Tell 4</div> |
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Thursday 2 September 2021, Room C|<|
|^Chairs: |^Pavel Šturm|
|^ |^Petra Wagner|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210184.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-O-1-1|PAPER Thu-A-O-1-1 — Towards the Prediction of the Vocal Tract Shape from the Sequence of Phonemes to be Articulated]]</div>|<div class="cpsessionviewpapertitle">Towards the Prediction of the Vocal Tract Shape from the Sequence of Phonemes to be Articulated</div><div class="cpsessionviewpaperauthor">[[Vinicius Ribeiro|AUTHOR Vinicius Ribeiro]], [[Karyna Isaieva|AUTHOR Karyna Isaieva]], [[Justine Leclere|AUTHOR Justine Leclere]], [[Pierre-André Vuissoz|AUTHOR Pierre-André Vuissoz]], [[Yves Laprie|AUTHOR Yves Laprie]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210975.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-O-1-2|PAPER Thu-A-O-1-2 — Comparison of the Finite Element Method, the Multimodal Method and the Transmission-Line Model for the Computation of Vocal Tract Transfer Functions]]</div>|<div class="cpsessionviewpapertitle">Comparison of the Finite Element Method, the Multimodal Method and the Transmission-Line Model for the Computation of Vocal Tract Transfer Functions</div><div class="cpsessionviewpaperauthor">[[Rémi Blandin|AUTHOR Rémi Blandin]], [[Marc Arnela|AUTHOR Marc Arnela]], [[Simon Félix|AUTHOR Simon Félix]], [[Jean-Baptiste Doc|AUTHOR Jean-Baptiste Doc]], [[Peter Birkholz|AUTHOR Peter Birkholz]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211539.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-O-1-3|PAPER Thu-A-O-1-3 — Effects of Time Pressure and Spontaneity on Phonotactic Innovations in German Dialogues]]</div>|<div class="cpsessionviewpapertitle">Effects of Time Pressure and Spontaneity on Phonotactic Innovations in German Dialogues</div><div class="cpsessionviewpaperauthor">[[Petra Wagner|AUTHOR Petra Wagner]], [[Sina Zarrieß|AUTHOR Sina Zarrieß]], [[Joana Cholin|AUTHOR Joana Cholin]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211732.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-O-1-4|PAPER Thu-A-O-1-4 — Importance of Parasagittal Sensor Information in Tongue Motion Capture Through a Diphonic Analysis]]</div>|<div class="cpsessionviewpapertitle">Importance of Parasagittal Sensor Information in Tongue Motion Capture Through a Diphonic Analysis</div><div class="cpsessionviewpaperauthor">[[Salvador Medina|AUTHOR Salvador Medina]], [[Sarah Taylor|AUTHOR Sarah Taylor]], [[Mark Tiede|AUTHOR Mark Tiede]], [[Alexander Hauptmann|AUTHOR Alexander Hauptmann]], [[Iain Matthews|AUTHOR Iain Matthews]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211604.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-O-1-5|PAPER Thu-A-O-1-5 — Learning Robust Speech Representation with an Articulatory-Regularized Variational Autoencoder]]</div>|<div class="cpsessionviewpapertitle">Learning Robust Speech Representation with an Articulatory-Regularized Variational Autoencoder</div><div class="cpsessionviewpaperauthor">[[Marc-Antoine Georges|AUTHOR Marc-Antoine Georges]], [[Laurent Girin|AUTHOR Laurent Girin]], [[Jean-Luc Schwartz|AUTHOR Jean-Luc Schwartz]], [[Thomas Hueber|AUTHOR Thomas Hueber]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211881.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-O-1-6|PAPER Thu-A-O-1-6 — Changes in Glottal Source Parameter Values with Light to Moderate Physical Load]]</div>|<div class="cpsessionviewpapertitle">Changes in Glottal Source Parameter Values with Light to Moderate Physical Load</div><div class="cpsessionviewpaperauthor">[[Heather Weston|AUTHOR Heather Weston]], [[Laura L. Koenig|AUTHOR Laura L. Koenig]], [[Susanne Fuchs|AUTHOR Susanne Fuchs]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Thursday 2 September 2021, Room A+B|<|
|^Chairs: |^Vladimir Malenovsky|
|^ |^Lukas Pfeifenberger|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210867.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-O-2-1|PAPER Thu-A-O-2-1 — End-to-End Optimized Multi-Stage Vector Quantization of Spectral Envelopes for Speech and Audio Coding]]</div>|<div class="cpsessionviewpapertitle">End-to-End Optimized Multi-Stage Vector Quantization of Spectral Envelopes for Speech and Audio Coding</div><div class="cpsessionviewpaperauthor">[[Mohammad Hassan Vali|AUTHOR Mohammad Hassan Vali]], [[Tom Bäckström|AUTHOR Tom Bäckström]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211184.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-O-2-2|PAPER Thu-A-O-2-2 — Fusion-Net: Time-Frequency Information Fusion Y-Network for Speech Enhancement]]</div>|<div class="cpsessionviewpapertitle">Fusion-Net: Time-Frequency Information Fusion Y-Network for Speech Enhancement</div><div class="cpsessionviewpaperauthor">[[Santhan Kumar Reddy Nareddula|AUTHOR Santhan Kumar Reddy Nareddula]], [[Subrahmanyam Gorthi|AUTHOR Subrahmanyam Gorthi]], [[Rama Krishna Sai S. Gorthi|AUTHOR Rama Krishna Sai S. Gorthi]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211878.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-O-2-3|PAPER Thu-A-O-2-3 — N-MTTL SI Model: Non-Intrusive Multi-Task Transfer Learning-Based Speech Intelligibility Prediction Model with Scenery Classification]]</div>|<div class="cpsessionviewpapertitle">N-MTTL SI Model: Non-Intrusive Multi-Task Transfer Learning-Based Speech Intelligibility Prediction Model with Scenery Classification</div><div class="cpsessionviewpaperauthor">[[Ĺuboš Marcinek|AUTHOR Ĺuboš Marcinek]], [[Michael Stone|AUTHOR Michael Stone]], [[Rebecca Millman|AUTHOR Rebecca Millman]], [[Patrick Gaydecki|AUTHOR Patrick Gaydecki]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Thursday 2 September 2021, Room D|<|
|^Chairs: |^Yuya Fujita|
|^ |^Katrin Kirchhoff|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> ||<div class="cpsessionviewpapertitle">Introduction</div><div class="cpsessionviewpaperauthor"></div>|
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> ||<div class="cpsessionviewpapertitle">Short Presentations</div><div class="cpsessionviewpaperauthor"></div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211955.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-SS-1-1|PAPER Thu-A-SS-1-1 — An Improved Single Step Non-Autoregressive Transformer for Automatic Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">An Improved Single Step Non-Autoregressive Transformer for Automatic Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Ruchao Fan|AUTHOR Ruchao Fan]], [[Wei Chu|AUTHOR Wei Chu]], [[Peng Chang|AUTHOR Peng Chang]], [[Jing Xiao|AUTHOR Jing Xiao]], [[Abeer Alwan|AUTHOR Abeer Alwan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212155.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-SS-1-2|PAPER Thu-A-SS-1-2 — Multi-Speaker ASR Combining Non-Autoregressive Conformer CTC and Conditional Speaker Chain]]</div>|<div class="cpsessionviewpapertitle">Multi-Speaker ASR Combining Non-Autoregressive Conformer CTC and Conditional Speaker Chain</div><div class="cpsessionviewpaperauthor">[[Pengcheng Guo|AUTHOR Pengcheng Guo]], [[Xuankai Chang|AUTHOR Xuankai Chang]], [[Shinji Watanabe|AUTHOR Shinji Watanabe]], [[Lei Xie|AUTHOR Lei Xie]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210337.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-SS-1-3|PAPER Thu-A-SS-1-3 — Pushing the Limits of Non-Autoregressive Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Pushing the Limits of Non-Autoregressive Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Edwin G. Ng|AUTHOR Edwin G. Ng]], [[Chung-Cheng Chiu|AUTHOR Chung-Cheng Chiu]], [[Yu Zhang|AUTHOR Yu Zhang]], [[William Chan|AUTHOR William Chan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210349.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-SS-1-4|PAPER Thu-A-SS-1-4 — Non-Autoregressive Predictive Coding for Learning Speech Representations from Local Dependencies]]</div>|<div class="cpsessionviewpapertitle">Non-Autoregressive Predictive Coding for Learning Speech Representations from Local Dependencies</div><div class="cpsessionviewpaperauthor">[[Alexander H. Liu|AUTHOR Alexander H. Liu]], [[Yu-An Chung|AUTHOR Yu-An Chung]], [[James Glass|AUTHOR James Glass]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210911.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-SS-1-5|PAPER Thu-A-SS-1-5 — Relaxing the Conditional Independence Assumption of CTC-Based ASR by Conditioning on Intermediate Predictions]]</div>|<div class="cpsessionviewpapertitle">Relaxing the Conditional Independence Assumption of CTC-Based ASR by Conditioning on Intermediate Predictions</div><div class="cpsessionviewpaperauthor">[[Jumon Nozaki|AUTHOR Jumon Nozaki]], [[Tatsuya Komatsu|AUTHOR Tatsuya Komatsu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211131.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-SS-1-6|PAPER Thu-A-SS-1-6 — Toward Streaming ASR with Non-Autoregressive Insertion-Based Model]]</div>|<div class="cpsessionviewpapertitle">Toward Streaming ASR with Non-Autoregressive Insertion-Based Model</div><div class="cpsessionviewpaperauthor">[[Yuya Fujita|AUTHOR Yuya Fujita]], [[Tianzi Wang|AUTHOR Tianzi Wang]], [[Shinji Watanabe|AUTHOR Shinji Watanabe]], [[Motoi Omachi|AUTHOR Motoi Omachi]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211171.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-SS-1-7|PAPER Thu-A-SS-1-7 — Layer Pruning on Demand with Intermediate CTC]]</div>|<div class="cpsessionviewpapertitle">Layer Pruning on Demand with Intermediate CTC</div><div class="cpsessionviewpaperauthor">[[Jaesong Lee|AUTHOR Jaesong Lee]], [[Jingu Kang|AUTHOR Jingu Kang]], [[Shinji Watanabe|AUTHOR Shinji Watanabe]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211449.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-SS-1-8|PAPER Thu-A-SS-1-8 — Real-Time End-to-End Monaural Multi-Speaker Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Real-Time End-to-End Monaural Multi-Speaker Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Song Li|AUTHOR Song Li]], [[Beibei Ouyang|AUTHOR Beibei Ouyang]], [[Fuchuan Tong|AUTHOR Fuchuan Tong]], [[Dexin Liao|AUTHOR Dexin Liao]], [[Lin Li|AUTHOR Lin Li]], [[Qingyang Hong|AUTHOR Qingyang Hong]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211556.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-SS-1-9|PAPER Thu-A-SS-1-9 — Streaming End-to-End ASR Based on Blockwise Non-Autoregressive Models]]</div>|<div class="cpsessionviewpapertitle">Streaming End-to-End ASR Based on Blockwise Non-Autoregressive Models</div><div class="cpsessionviewpaperauthor">[[Tianzi Wang|AUTHOR Tianzi Wang]], [[Yuya Fujita|AUTHOR Yuya Fujita]], [[Xuankai Chang|AUTHOR Xuankai Chang]], [[Shinji Watanabe|AUTHOR Shinji Watanabe]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211770.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-SS-1-10|PAPER Thu-A-SS-1-10 — TalkNet: Non-Autoregressive Depth-Wise Separable Convolutional Model for Speech Synthesis]]</div>|<div class="cpsessionviewpapertitle">TalkNet: Non-Autoregressive Depth-Wise Separable Convolutional Model for Speech Synthesis</div><div class="cpsessionviewpaperauthor">[[Stanislav Beliaev|AUTHOR Stanislav Beliaev]], [[Boris Ginsburg|AUTHOR Boris Ginsburg]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211897.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-SS-1-11|PAPER Thu-A-SS-1-11 — WaveGrad 2: Iterative Refinement for Text-to-Speech Synthesis]]</div>|<div class="cpsessionviewpapertitle">WaveGrad 2: Iterative Refinement for Text-to-Speech Synthesis</div><div class="cpsessionviewpaperauthor">[[Nanxin Chen|AUTHOR Nanxin Chen]], [[Yu Zhang|AUTHOR Yu Zhang]], [[Heiga Zen|AUTHOR Heiga Zen]], [[Ron J. Weiss|AUTHOR Ron J. Weiss]], [[Mohammad Norouzi|AUTHOR Mohammad Norouzi]], [[Najim Dehak|AUTHOR Najim Dehak]], [[William Chan|AUTHOR William Chan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211906.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-SS-1-12|PAPER Thu-A-SS-1-12 — Align-Denoise: Single-Pass Non-Autoregressive Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Align-Denoise: Single-Pass Non-Autoregressive Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Nanxin Chen|AUTHOR Nanxin Chen]], [[Piotr Żelasko|AUTHOR Piotr Żelasko]], [[Laureano Moro-Velázquez|AUTHOR Laureano Moro-Velázquez]], [[Jesús Villalba|AUTHOR Jesús Villalba]], [[Najim Dehak|AUTHOR Najim Dehak]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212121.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-SS-1-13|PAPER Thu-A-SS-1-13 — VAENAR-TTS: Variational Auto-Encoder Based Non-AutoRegressive Text-to-Speech Synthesis]]</div>|<div class="cpsessionviewpapertitle">VAENAR-TTS: Variational Auto-Encoder Based Non-AutoRegressive Text-to-Speech Synthesis</div><div class="cpsessionviewpaperauthor">[[Hui Lu|AUTHOR Hui Lu]], [[Zhiyong Wu|AUTHOR Zhiyong Wu]], [[Xixin Wu|AUTHOR Xixin Wu]], [[Xu Li|AUTHOR Xu Li]], [[Shiyin Kang|AUTHOR Shiyin Kang]], [[Xunying Liu|AUTHOR Xunying Liu]], [[Helen Meng|AUTHOR Helen Meng]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Thursday 2 September 2021, (Virtual)|<|
|^Chairs: |^Fasih Haider|
|^ |^Davida Fromm|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211220.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-SS-2-1|PAPER Thu-A-SS-2-1 — Detecting Cognitive Decline Using Speech Only: The ADReSSo Challenge]]</div>|<div class="cpsessionviewpapertitle">Detecting Cognitive Decline Using Speech Only: The ADReSSo Challenge</div><div class="cpsessionviewpaperauthor">[[Saturnino Luz|AUTHOR Saturnino Luz]], [[Fasih Haider|AUTHOR Fasih Haider]], [[Sofia de la Fuente|AUTHOR Sofia de la Fuente]], [[Davida Fromm|AUTHOR Davida Fromm]], [[Brian MacWhinney|AUTHOR Brian MacWhinney]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211589.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-SS-2-2|PAPER Thu-A-SS-2-2 — Influence of the Interviewer on the Automatic Assessment of Alzheimer’s Disease in the Context of the ADReSSo Challenge]]</div>|<div class="cpsessionviewpapertitle">Influence of the Interviewer on the Automatic Assessment of Alzheimer’s Disease in the Context of the ADReSSo Challenge</div><div class="cpsessionviewpaperauthor">[[P.A. Pérez-Toro|AUTHOR P.A. Pérez-Toro]], [[S.P. Bayerl|AUTHOR S.P. Bayerl]], [[T. Arias-Vergara|AUTHOR T. Arias-Vergara]], [[J.C. Vásquez-Correa|AUTHOR J.C. Vásquez-Correa]], [[P. Klumpp|AUTHOR P. Klumpp]], [[M. Schuster|AUTHOR M. Schuster]], [[Elmar Nöth|AUTHOR Elmar Nöth]], [[J.R. Orozco-Arroyave|AUTHOR J.R. Orozco-Arroyave]], [[K. Riedhammer|AUTHOR K. Riedhammer]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210332.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-SS-2-3|PAPER Thu-A-SS-2-3 — WavBERT: Exploiting Semantic and Non-Semantic Speech Using Wav2vec and BERT for Dementia Detection]]</div>|<div class="cpsessionviewpapertitle">WavBERT: Exploiting Semantic and Non-Semantic Speech Using Wav2vec and BERT for Dementia Detection</div><div class="cpsessionviewpaperauthor">[[Youxiang Zhu|AUTHOR Youxiang Zhu]], [[Abdelrahman Obyat|AUTHOR Abdelrahman Obyat]], [[Xiaohui Liang|AUTHOR Xiaohui Liang]], [[John A. Batsis|AUTHOR John A. Batsis]], [[Robert M. Roth|AUTHOR Robert M. Roth]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210753.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-SS-2-4|PAPER Thu-A-SS-2-4 — Alzheimer Disease Recognition Using Speech-Based Embeddings From Pre-Trained Models]]</div>|<div class="cpsessionviewpapertitle">Alzheimer Disease Recognition Using Speech-Based Embeddings From Pre-Trained Models</div><div class="cpsessionviewpaperauthor">[[Lara Gauder|AUTHOR Lara Gauder]], [[Leonardo Pepino|AUTHOR Leonardo Pepino]], [[Luciana Ferrer|AUTHOR Luciana Ferrer]], [[Pablo Riera|AUTHOR Pablo Riera]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210759.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-SS-2-5|PAPER Thu-A-SS-2-5 — Comparing Acoustic-Based Approaches for Alzheimer’s Disease Detection]]</div>|<div class="cpsessionviewpapertitle">Comparing Acoustic-Based Approaches for Alzheimer’s Disease Detection</div><div class="cpsessionviewpaperauthor">[[Aparna Balagopalan|AUTHOR Aparna Balagopalan]], [[Jekaterina Novikova|AUTHOR Jekaterina Novikova]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211415.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-SS-2-6|PAPER Thu-A-SS-2-6 — Alzheimer’s Disease Detection from Spontaneous Speech Through Combining Linguistic Complexity and (Dis)Fluency Features with Pretrained Language Models]]</div>|<div class="cpsessionviewpapertitle">Alzheimer’s Disease Detection from Spontaneous Speech Through Combining Linguistic Complexity and (Dis)Fluency Features with Pretrained Language Models</div><div class="cpsessionviewpaperauthor">[[Yu Qiao|AUTHOR Yu Qiao]], [[Xuefeng Yin|AUTHOR Xuefeng Yin]], [[Daniel Wiechmann|AUTHOR Daniel Wiechmann]], [[Elma Kerz|AUTHOR Elma Kerz]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211519.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-SS-2-7|PAPER Thu-A-SS-2-7 — Using the Outputs of Different Automatic Speech Recognition Paradigms for Acoustic- and BERT-Based Alzheimer’s Dementia Detection Through Spontaneous Speech]]</div>|<div class="cpsessionviewpapertitle">Using the Outputs of Different Automatic Speech Recognition Paradigms for Acoustic- and BERT-Based Alzheimer’s Dementia Detection Through Spontaneous Speech</div><div class="cpsessionviewpaperauthor">[[Yilin Pan|AUTHOR Yilin Pan]], [[Bahman Mirheidari|AUTHOR Bahman Mirheidari]], [[Jennifer M. Harris|AUTHOR Jennifer M. Harris]], [[Jennifer C. Thompson|AUTHOR Jennifer C. Thompson]], [[Matthew Jones|AUTHOR Matthew Jones]], [[Julie S. Snowden|AUTHOR Julie S. Snowden]], [[Daniel Blackburn|AUTHOR Daniel Blackburn]], [[Heidi Christensen|AUTHOR Heidi Christensen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211572.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-SS-2-8|PAPER Thu-A-SS-2-8 — Tackling the ADRESSO Challenge 2021: The MUET-RMIT System for Alzheimer’s Dementia Recognition from Spontaneous Speech]]</div>|<div class="cpsessionviewpapertitle">Tackling the ADRESSO Challenge 2021: The MUET-RMIT System for Alzheimer’s Dementia Recognition from Spontaneous Speech</div><div class="cpsessionviewpaperauthor">[[Zafi Sherhan Syed|AUTHOR Zafi Sherhan Syed]], [[Muhammad Shehram Shah Syed|AUTHOR Muhammad Shehram Shah Syed]], [[Margaret Lech|AUTHOR Margaret Lech]], [[Elena Pirogova|AUTHOR Elena Pirogova]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211633.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-SS-2-9|PAPER Thu-A-SS-2-9 — Alzheimer’s Dementia Recognition Using Acoustic, Lexical, Disfluency and Speech Pause Features Robust to Noisy Inputs]]</div>|<div class="cpsessionviewpapertitle">Alzheimer’s Dementia Recognition Using Acoustic, Lexical, Disfluency and Speech Pause Features Robust to Noisy Inputs</div><div class="cpsessionviewpaperauthor">[[Morteza Rohanian|AUTHOR Morteza Rohanian]], [[Julian Hough|AUTHOR Julian Hough]], [[Matthew Purver|AUTHOR Matthew Purver]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211850.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-SS-2-10|PAPER Thu-A-SS-2-10 — Automatic Detection and Assessment of Alzheimer Disease Using Speech and Language Technologies in Low-Resource Scenarios]]</div>|<div class="cpsessionviewpapertitle">Automatic Detection and Assessment of Alzheimer Disease Using Speech and Language Technologies in Low-Resource Scenarios</div><div class="cpsessionviewpaperauthor">[[Raghavendra Pappagari|AUTHOR Raghavendra Pappagari]], [[Jaejin Cho|AUTHOR Jaejin Cho]], [[Sonal Joshi|AUTHOR Sonal Joshi]], [[Laureano Moro-Velázquez|AUTHOR Laureano Moro-Velázquez]], [[Piotr Żelasko|AUTHOR Piotr Żelasko]], [[Jesús Villalba|AUTHOR Jesús Villalba]], [[Najim Dehak|AUTHOR Najim Dehak]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212002.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-SS-2-11|PAPER Thu-A-SS-2-11 — Automatic Detection of Alzheimer’s Disease Using Spontaneous Speech Only]]</div>|<div class="cpsessionviewpapertitle">Automatic Detection of Alzheimer’s Disease Using Spontaneous Speech Only</div><div class="cpsessionviewpaperauthor">[[Jun Chen|AUTHOR Jun Chen]], [[Jieping Ye|AUTHOR Jieping Ye]], [[Fengyi Tang|AUTHOR Fengyi Tang]], [[Jiayu Zhou|AUTHOR Jiayu Zhou]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212024.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-SS-2-12|PAPER Thu-A-SS-2-12 — Modular Multi-Modal Attention Network for Alzheimer’s Disease Detection Using Patient Audio and Language Data]]</div>|<div class="cpsessionviewpapertitle">Modular Multi-Modal Attention Network for Alzheimer’s Disease Detection Using Patient Audio and Language Data</div><div class="cpsessionviewpaperauthor">[[Ning Wang|AUTHOR Ning Wang]], [[Yupeng Cao|AUTHOR Yupeng Cao]], [[Shuai Hao|AUTHOR Shuai Hao]], [[Zongru Shao|AUTHOR Zongru Shao]], [[K.P. Subbalakshmi|AUTHOR K.P. Subbalakshmi]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Thursday 2 September 2021, (Virtual)|<|
|^Chairs: |^Carlos Busso|
|^ |^Shrikanth Narayanan|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211840.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-1-1|PAPER Thu-A-V-1-1 — Temporal Context in Speech Emotion Recognition]]</div>|<div class="cpsessionviewpapertitle">Temporal Context in Speech Emotion Recognition</div><div class="cpsessionviewpaperauthor">[[Yangyang Xia|AUTHOR Yangyang Xia]], [[Li-Wei Chen|AUTHOR Li-Wei Chen]], [[Alexander Rudnicky|AUTHOR Alexander Rudnicky]], [[Richard M. Stern|AUTHOR Richard M. Stern]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210158.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-1-2|PAPER Thu-A-V-1-2 — Learning Fine-Grained Cross Modality Excitement for Speech Emotion Recognition]]</div>|<div class="cpsessionviewpapertitle">Learning Fine-Grained Cross Modality Excitement for Speech Emotion Recognition</div><div class="cpsessionviewpaperauthor">[[Hang Li|AUTHOR Hang Li]], [[Wenbiao Ding|AUTHOR Wenbiao Ding]], [[Zhongqin Wu|AUTHOR Zhongqin Wu]], [[Zitao Liu|AUTHOR Zitao Liu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210303.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-1-3|PAPER Thu-A-V-1-3 — Automatic Analysis of the Emotional Content of Speech in Daylong Child-Centered Recordings from a Neonatal Intensive Care Unit]]</div>|<div class="cpsessionviewpapertitle">Automatic Analysis of the Emotional Content of Speech in Daylong Child-Centered Recordings from a Neonatal Intensive Care Unit</div><div class="cpsessionviewpaperauthor">[[Einari Vaaras|AUTHOR Einari Vaaras]], [[Sari Ahlqvist-Björkroth|AUTHOR Sari Ahlqvist-Björkroth]], [[Konstantinos Drossos|AUTHOR Konstantinos Drossos]], [[Okko Räsänen|AUTHOR Okko Räsänen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210487.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-1-4|PAPER Thu-A-V-1-4 — Multimodal Sentiment Analysis with Temporal Modality Attention]]</div>|<div class="cpsessionviewpapertitle">Multimodal Sentiment Analysis with Temporal Modality Attention</div><div class="cpsessionviewpaperauthor">[[Fan Qian|AUTHOR Fan Qian]], [[Jiqing Han|AUTHOR Jiqing Han]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210610.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-1-5|PAPER Thu-A-V-1-5 — Stochastic Process Regression for Cross-Cultural Speech Emotion Recognition]]</div>|<div class="cpsessionviewpapertitle">Stochastic Process Regression for Cross-Cultural Speech Emotion Recognition</div><div class="cpsessionviewpaperauthor">[[Mani Kumar T.|AUTHOR Mani Kumar T.]], [[Enrique Sanchez|AUTHOR Enrique Sanchez]], [[Georgios Tzimiropoulos|AUTHOR Georgios Tzimiropoulos]], [[Timo Giesbrecht|AUTHOR Timo Giesbrecht]], [[Michel Valstar|AUTHOR Michel Valstar]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210666.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-1-6|PAPER Thu-A-V-1-6 — Acted vs. Improvised: Domain Adaptation for Elicitation Approaches in Audio-Visual Emotion Recognition]]</div>|<div class="cpsessionviewpapertitle">Acted vs. Improvised: Domain Adaptation for Elicitation Approaches in Audio-Visual Emotion Recognition</div><div class="cpsessionviewpaperauthor">[[Haoqi Li|AUTHOR Haoqi Li]], [[Yelin Kim|AUTHOR Yelin Kim]], [[Cheng-Hao Kuo|AUTHOR Cheng-Hao Kuo]], [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210703.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-1-7|PAPER Thu-A-V-1-7 — Emotion Recognition from Speech Using wav2vec 2.0 Embeddings]]</div>|<div class="cpsessionviewpapertitle">Emotion Recognition from Speech Using wav2vec 2.0 Embeddings</div><div class="cpsessionviewpaperauthor">[[Leonardo Pepino|AUTHOR Leonardo Pepino]], [[Pablo Riera|AUTHOR Pablo Riera]], [[Luciana Ferrer|AUTHOR Luciana Ferrer]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211154.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-1-8|PAPER Thu-A-V-1-8 — Graph Isomorphism Network for Speech Emotion Recognition]]</div>|<div class="cpsessionviewpapertitle">Graph Isomorphism Network for Speech Emotion Recognition</div><div class="cpsessionviewpaperauthor">[[Jiawang Liu|AUTHOR Jiawang Liu]], [[Haoxiang Wang|AUTHOR Haoxiang Wang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212168.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-1-9|PAPER Thu-A-V-1-9 — Applying TDNN Architectures for Analyzing Duration Dependencies on Speech Emotion Recognition]]</div>|<div class="cpsessionviewpapertitle">Applying TDNN Architectures for Analyzing Duration Dependencies on Speech Emotion Recognition</div><div class="cpsessionviewpaperauthor">[[Pooja Kumawat|AUTHOR Pooja Kumawat]], [[Aurobinda Routray|AUTHOR Aurobinda Routray]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212217.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-1-10|PAPER Thu-A-V-1-10 — Acoustic Features and Neural Representations for Categorical Emotion Recognition from Speech]]</div>|<div class="cpsessionviewpapertitle">Acoustic Features and Neural Representations for Categorical Emotion Recognition from Speech</div><div class="cpsessionviewpaperauthor">[[Aaron Keesing|AUTHOR Aaron Keesing]], [[Yun Sing Koh|AUTHOR Yun Sing Koh]], [[Michael Witbrock|AUTHOR Michael Witbrock]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211723.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-1-11|PAPER Thu-A-V-1-11 — Leveraging Pre-Trained Language Model for Speech Sentiment Analysis]]</div>|<div class="cpsessionviewpapertitle">Leveraging Pre-Trained Language Model for Speech Sentiment Analysis</div><div class="cpsessionviewpaperauthor">[[Suwon Shon|AUTHOR Suwon Shon]], [[Pablo Brusco|AUTHOR Pablo Brusco]], [[Jing Pan|AUTHOR Jing Pan]], [[Kyu J. Han|AUTHOR Kyu J. Han]], [[Shinji Watanabe|AUTHOR Shinji Watanabe]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Thursday 2 September 2021, (Virtual)|<|
|^Chairs: |^Khe Sim|
|^ |^Alexei Baevski|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210057.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-2-1|PAPER Thu-A-V-2-1 — Cross-Domain Speech Recognition with Unsupervised Character-Level Distribution Matching]]</div>|<div class="cpsessionviewpapertitle">Cross-Domain Speech Recognition with Unsupervised Character-Level Distribution Matching</div><div class="cpsessionviewpaperauthor">[[Wenxin Hou|AUTHOR Wenxin Hou]], [[Jindong Wang|AUTHOR Jindong Wang]], [[Xu Tan|AUTHOR Xu Tan]], [[Tao Qin|AUTHOR Tao Qin]], [[Takahiro Shinozaki|AUTHOR Takahiro Shinozaki]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210102.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-2-2|PAPER Thu-A-V-2-2 — Large-Scale Pre-Training of End-to-End Multi-Talker ASR for Meeting Transcription with Single Distant Microphone]]</div>|<div class="cpsessionviewpapertitle">Large-Scale Pre-Training of End-to-End Multi-Talker ASR for Meeting Transcription with Single Distant Microphone</div><div class="cpsessionviewpaperauthor">[[Naoyuki Kanda|AUTHOR Naoyuki Kanda]], [[Guoli Ye|AUTHOR Guoli Ye]], [[Yu Wu|AUTHOR Yu Wu]], [[Yashesh Gaur|AUTHOR Yashesh Gaur]], [[Xiaofei Wang|AUTHOR Xiaofei Wang]], [[Zhong Meng|AUTHOR Zhong Meng]], [[Zhuo Chen|AUTHOR Zhuo Chen]], [[Takuya Yoshioka|AUTHOR Takuya Yoshioka]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210161.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-2-3|PAPER Thu-A-V-2-3 — On Minimum Word Error Rate Training of the Hybrid Autoregressive Transducer]]</div>|<div class="cpsessionviewpapertitle">On Minimum Word Error Rate Training of the Hybrid Autoregressive Transducer</div><div class="cpsessionviewpaperauthor">[[Liang Lu|AUTHOR Liang Lu]], [[Zhong Meng|AUTHOR Zhong Meng]], [[Naoyuki Kanda|AUTHOR Naoyuki Kanda]], [[Jinyu Li|AUTHOR Jinyu Li]], [[Yifan Gong|AUTHOR Yifan Gong]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210322.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-2-4|PAPER Thu-A-V-2-4 — Reducing Streaming ASR Model Delay with Self Alignment]]</div>|<div class="cpsessionviewpapertitle">Reducing Streaming ASR Model Delay with Self Alignment</div><div class="cpsessionviewpaperauthor">[[Jaeyoung Kim|AUTHOR Jaeyoung Kim]], [[Han Lu|AUTHOR Han Lu]], [[Anshuman Tripathi|AUTHOR Anshuman Tripathi]], [[Qian Zhang|AUTHOR Qian Zhang]], [[Hasim Sak|AUTHOR Hasim Sak]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210644.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-2-5|PAPER Thu-A-V-2-5 — Reduce and Reconstruct: ASR for Low-Resource Phonetic Languages]]</div>|<div class="cpsessionviewpapertitle">Reduce and Reconstruct: ASR for Low-Resource Phonetic Languages</div><div class="cpsessionviewpaperauthor">[[Anuj Diwan|AUTHOR Anuj Diwan]], [[Preethi Jyothi|AUTHOR Preethi Jyothi]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210796.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-2-6|PAPER Thu-A-V-2-6 — Knowledge Distillation Based Training of Universal ASR Source Models for Cross-Lingual Transfer]]</div>|<div class="cpsessionviewpapertitle">Knowledge Distillation Based Training of Universal ASR Source Models for Cross-Lingual Transfer</div><div class="cpsessionviewpaperauthor">[[Takashi Fukuda|AUTHOR Takashi Fukuda]], [[Samuel Thomas|AUTHOR Samuel Thomas]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210836.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-2-7|PAPER Thu-A-V-2-7 — Listen with Intent: Improving Speech Recognition with Audio-to-Intent Front-End]]</div>|<div class="cpsessionviewpapertitle">Listen with Intent: Improving Speech Recognition with Audio-to-Intent Front-End</div><div class="cpsessionviewpaperauthor">[[Swayambhu Nath Ray|AUTHOR Swayambhu Nath Ray]], [[Minhua Wu|AUTHOR Minhua Wu]], [[Anirudh Raju|AUTHOR Anirudh Raju]], [[Pegah Ghahremani|AUTHOR Pegah Ghahremani]], [[Raghavendra Bilgi|AUTHOR Raghavendra Bilgi]], [[Milind Rao|AUTHOR Milind Rao]], [[Harish Arsikere|AUTHOR Harish Arsikere]], [[Ariya Rastrow|AUTHOR Ariya Rastrow]], [[Andreas Stolcke|AUTHOR Andreas Stolcke]], [[Jasha Droppo|AUTHOR Jasha Droppo]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211668.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-2-8|PAPER Thu-A-V-2-8 — Exploring Targeted Universal Adversarial Perturbations to End-to-End ASR Models]]</div>|<div class="cpsessionviewpapertitle">Exploring Targeted Universal Adversarial Perturbations to End-to-End ASR Models</div><div class="cpsessionviewpaperauthor">[[Zhiyun Lu|AUTHOR Zhiyun Lu]], [[Wei Han|AUTHOR Wei Han]], [[Yu Zhang|AUTHOR Yu Zhang]], [[Liangliang Cao|AUTHOR Liangliang Cao]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211915.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-2-9|PAPER Thu-A-V-2-9 — Earnings-21: A Practical Benchmark for ASR in the Wild]]</div>|<div class="cpsessionviewpapertitle">Earnings-21: A Practical Benchmark for ASR in the Wild</div><div class="cpsessionviewpaperauthor">[[Miguel Del Rio|AUTHOR Miguel Del Rio]], [[Natalie Delworth|AUTHOR Natalie Delworth]], [[Ryan Westerman|AUTHOR Ryan Westerman]], [[Michelle Huang|AUTHOR Michelle Huang]], [[Nishchal Bhandari|AUTHOR Nishchal Bhandari]], [[Joseph Palakapilly|AUTHOR Joseph Palakapilly]], [[Quinten McNamara|AUTHOR Quinten McNamara]], [[Joshua Dong|AUTHOR Joshua Dong]], [[Piotr Żelasko|AUTHOR Piotr Żelasko]], [[Miguel Jetté|AUTHOR Miguel Jetté]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211949.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-2-10|PAPER Thu-A-V-2-10 — Improving Multilingual Transformer Transducer Models by Reducing Language Confusions]]</div>|<div class="cpsessionviewpapertitle">Improving Multilingual Transformer Transducer Models by Reducing Language Confusions</div><div class="cpsessionviewpaperauthor">[[Eric Sun|AUTHOR Eric Sun]], [[Jinyu Li|AUTHOR Jinyu Li]], [[Zhong Meng|AUTHOR Zhong Meng]], [[Yu Wu|AUTHOR Yu Wu]], [[Jian Xue|AUTHOR Jian Xue]], [[Shujie Liu|AUTHOR Shujie Liu]], [[Yifan Gong|AUTHOR Yifan Gong]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212231.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-2-11|PAPER Thu-A-V-2-11 — Arabic Code-Switching Speech Recognition Using Monolingual Data]]</div>|<div class="cpsessionviewpapertitle">Arabic Code-Switching Speech Recognition Using Monolingual Data</div><div class="cpsessionviewpaperauthor">[[Ahmed Ali|AUTHOR Ahmed Ali]], [[Shammur Absar Chowdhury|AUTHOR Shammur Absar Chowdhury]], [[Amir Hussein|AUTHOR Amir Hussein]], [[Yasser Hifny|AUTHOR Yasser Hifny]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Thursday 2 September 2021, (Virtual)|<|
|^Chairs: |^Mariem Bouafif|
|^ |^Lukas Drude|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210662.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-3-1|PAPER Thu-A-V-3-1 — Online Blind Audio Source Separation Using Recursive Expectation-Maximization]]</div>|<div class="cpsessionviewpapertitle">Online Blind Audio Source Separation Using Recursive Expectation-Maximization</div><div class="cpsessionviewpaperauthor">[[Aviad Eisenberg|AUTHOR Aviad Eisenberg]], [[Boaz Schwartz|AUTHOR Boaz Schwartz]], [[Sharon Gannot|AUTHOR Sharon Gannot]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211161.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-3-2|PAPER Thu-A-V-3-2 — Empirical Analysis of Generalized Iterative Speech Separation Networks]]</div>|<div class="cpsessionviewpapertitle">Empirical Analysis of Generalized Iterative Speech Separation Networks</div><div class="cpsessionviewpaperauthor">[[Yi Luo|AUTHOR Yi Luo]], [[Cong Han|AUTHOR Cong Han]], [[Nima Mesgarani|AUTHOR Nima Mesgarani]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211177.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-3-3|PAPER Thu-A-V-3-3 — Graph-PIT: Generalized Permutation Invariant Training for Continuous Separation of Arbitrary Numbers of Speakers]]</div>|<div class="cpsessionviewpapertitle">Graph-PIT: Generalized Permutation Invariant Training for Continuous Separation of Arbitrary Numbers of Speakers</div><div class="cpsessionviewpaperauthor">[[Thilo von Neumann|AUTHOR Thilo von Neumann]], [[Keisuke Kinoshita|AUTHOR Keisuke Kinoshita]], [[Christoph Boeddeker|AUTHOR Christoph Boeddeker]], [[Marc Delcroix|AUTHOR Marc Delcroix]], [[Reinhold Haeb-Umbach|AUTHOR Reinhold Haeb-Umbach]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211243.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-3-4|PAPER Thu-A-V-3-4 — Teacher-Student MixIT for Unsupervised and Semi-Supervised Speech Separation]]</div>|<div class="cpsessionviewpapertitle">Teacher-Student MixIT for Unsupervised and Semi-Supervised Speech Separation</div><div class="cpsessionviewpaperauthor">[[Jisi Zhang|AUTHOR Jisi Zhang]], [[Cătălin Zorilă|AUTHOR Cătălin Zorilă]], [[Rama Doddipatla|AUTHOR Rama Doddipatla]], [[Jon Barker|AUTHOR Jon Barker]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211369.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-3-5|PAPER Thu-A-V-3-5 — Few-Shot Learning of New Sound Classes for Target Sound Extraction]]</div>|<div class="cpsessionviewpapertitle">Few-Shot Learning of New Sound Classes for Target Sound Extraction</div><div class="cpsessionviewpaperauthor">[[Marc Delcroix|AUTHOR Marc Delcroix]], [[Jorge Bennasar Vázquez|AUTHOR Jorge Bennasar Vázquez]], [[Tsubasa Ochiai|AUTHOR Tsubasa Ochiai]], [[Keisuke Kinoshita|AUTHOR Keisuke Kinoshita]], [[Shoko Araki|AUTHOR Shoko Araki]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211372.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-3-6|PAPER Thu-A-V-3-6 — Binaural Speech Separation of Moving Speakers With Preserved Spatial Cues]]</div>|<div class="cpsessionviewpapertitle">Binaural Speech Separation of Moving Speakers With Preserved Spatial Cues</div><div class="cpsessionviewpaperauthor">[[Cong Han|AUTHOR Cong Han]], [[Yi Luo|AUTHOR Yi Luo]], [[Nima Mesgarani|AUTHOR Nima Mesgarani]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211378.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-3-7|PAPER Thu-A-V-3-7 — AvaTr: One-Shot Speaker Extraction with Transformers]]</div>|<div class="cpsessionviewpapertitle">AvaTr: One-Shot Speaker Extraction with Transformers</div><div class="cpsessionviewpaperauthor">[[Shell Xu Hu|AUTHOR Shell Xu Hu]], [[Md. Rifat Arefin|AUTHOR Md. Rifat Arefin]], [[Viet-Nhat Nguyen|AUTHOR Viet-Nhat Nguyen]], [[Alish Dipani|AUTHOR Alish Dipani]], [[Xaq Pitkow|AUTHOR Xaq Pitkow]], [[Andreas Savas Tolias|AUTHOR Andreas Savas Tolias]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211531.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-3-8|PAPER Thu-A-V-3-8 — Vocal Harmony Separation Using Time-Domain Neural Networks]]</div>|<div class="cpsessionviewpapertitle">Vocal Harmony Separation Using Time-Domain Neural Networks</div><div class="cpsessionviewpaperauthor">[[Saurjya Sarkar|AUTHOR Saurjya Sarkar]], [[Emmanouil Benetos|AUTHOR Emmanouil Benetos]], [[Mark Sandler|AUTHOR Mark Sandler]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211924.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-3-9|PAPER Thu-A-V-3-9 — Speaker Verification-Based Evaluation of Single-Channel Speech Separation]]</div>|<div class="cpsessionviewpapertitle">Speaker Verification-Based Evaluation of Single-Channel Speech Separation</div><div class="cpsessionviewpaperauthor">[[Matthew Maciejewski|AUTHOR Matthew Maciejewski]], [[Shinji Watanabe|AUTHOR Shinji Watanabe]], [[Sanjeev Khudanpur|AUTHOR Sanjeev Khudanpur]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212246.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-3-10|PAPER Thu-A-V-3-10 — Improved Speech Separation with Time-and-Frequency Cross-Domain Feature Selection]]</div>|<div class="cpsessionviewpapertitle">Improved Speech Separation with Time-and-Frequency Cross-Domain Feature Selection</div><div class="cpsessionviewpaperauthor">[[Tian Lan|AUTHOR Tian Lan]], [[Yuxin Qian|AUTHOR Yuxin Qian]], [[Yilan Lyu|AUTHOR Yilan Lyu]], [[Refuoe Mokhosi|AUTHOR Refuoe Mokhosi]], [[Wenxin Tai|AUTHOR Wenxin Tai]], [[Qiao Liu|AUTHOR Qiao Liu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212250.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-3-11|PAPER Thu-A-V-3-11 — Robust Speaker Extraction Network Based on Iterative Refined Adaptation]]</div>|<div class="cpsessionviewpapertitle">Robust Speaker Extraction Network Based on Iterative Refined Adaptation</div><div class="cpsessionviewpaperauthor">[[Chengyun Deng|AUTHOR Chengyun Deng]], [[Shiqian Ma|AUTHOR Shiqian Ma]], [[Yongtao Sha|AUTHOR Yongtao Sha]], [[Yi Zhang|AUTHOR Yi Zhang]], [[Hui Zhang|AUTHOR Hui Zhang]], [[Hui Song|AUTHOR Hui Song]], [[Fei Wang|AUTHOR Fei Wang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212260.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-3-12|PAPER Thu-A-V-3-12 — Neural Speaker Extraction with Speaker-Speech Cross-Attention Network]]</div>|<div class="cpsessionviewpapertitle">Neural Speaker Extraction with Speaker-Speech Cross-Attention Network</div><div class="cpsessionviewpaperauthor">[[Wupeng Wang|AUTHOR Wupeng Wang]], [[Chenglin Xu|AUTHOR Chenglin Xu]], [[Meng Ge|AUTHOR Meng Ge]], [[Haizhou Li|AUTHOR Haizhou Li]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211560.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-3-13|PAPER Thu-A-V-3-13 — Deep Audio-Visual Speech Separation Based on Facial Motion]]</div>|<div class="cpsessionviewpapertitle">Deep Audio-Visual Speech Separation Based on Facial Motion</div><div class="cpsessionviewpaperauthor">[[Rémi Rigal|AUTHOR Rémi Rigal]], [[Jacques Chodorowski|AUTHOR Jacques Chodorowski]], [[Benoît Zerr|AUTHOR Benoît Zerr]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Thursday 2 September 2021, (Virtual)|<|
|^Chairs: |^Andreas Stolcke|
|^ |^Leibny Paola García Perera|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210728.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-4-1|PAPER Thu-A-V-4-1 — LEAP Submission for the Third DIHARD Diarization Challenge]]</div>|<div class="cpsessionviewpapertitle">LEAP Submission for the Third DIHARD Diarization Challenge</div><div class="cpsessionviewpaperauthor">[[Prachi Singh|AUTHOR Prachi Singh]], [[Rajat Varma|AUTHOR Rajat Varma]], [[Venkat Krishnamohan|AUTHOR Venkat Krishnamohan]], [[Srikanth Raj Chetupalli|AUTHOR Srikanth Raj Chetupalli]], [[Sriram Ganapathy|AUTHOR Sriram Ganapathy]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210747.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-4-2|PAPER Thu-A-V-4-2 — Investigation of Spatial-Acoustic Features for Overlapping Speech Detection in Multiparty Meetings]]</div>|<div class="cpsessionviewpapertitle">Investigation of Spatial-Acoustic Features for Overlapping Speech Detection in Multiparty Meetings</div><div class="cpsessionviewpaperauthor">[[Shiliang Zhang|AUTHOR Shiliang Zhang]], [[Siqi Zheng|AUTHOR Siqi Zheng]], [[Weilong Huang|AUTHOR Weilong Huang]], [[Ming Lei|AUTHOR Ming Lei]], [[Hongbin Suo|AUTHOR Hongbin Suo]], [[Jinwei Feng|AUTHOR Jinwei Feng]], [[Zhijie Yan|AUTHOR Zhijie Yan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210750.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-4-3|PAPER Thu-A-V-4-3 — Target-Speaker Voice Activity Detection with Improved i-Vector Estimation for Unknown Number of Speaker]]</div>|<div class="cpsessionviewpapertitle">Target-Speaker Voice Activity Detection with Improved i-Vector Estimation for Unknown Number of Speaker</div><div class="cpsessionviewpaperauthor">[[Maokui He|AUTHOR Maokui He]], [[Desh Raj|AUTHOR Desh Raj]], [[Zili Huang|AUTHOR Zili Huang]], [[Jun Du|AUTHOR Jun Du]], [[Zhuo Chen|AUTHOR Zhuo Chen]], [[Shinji Watanabe|AUTHOR Shinji Watanabe]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210941.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-4-4|PAPER Thu-A-V-4-4 — ECAPA-TDNN Embeddings for Speaker Diarization]]</div>|<div class="cpsessionviewpapertitle">ECAPA-TDNN Embeddings for Speaker Diarization</div><div class="cpsessionviewpaperauthor">[[Nauman Dawalatabad|AUTHOR Nauman Dawalatabad]], [[Mirco Ravanelli|AUTHOR Mirco Ravanelli]], [[François Grondin|AUTHOR François Grondin]], [[Jenthe Thienpondt|AUTHOR Jenthe Thienpondt]], [[Brecht Desplanques|AUTHOR Brecht Desplanques]], [[Hwidong Na|AUTHOR Hwidong Na]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211004.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-4-5|PAPER Thu-A-V-4-5 — Advances in Integration of End-to-End Neural and Clustering-Based Diarization for Real Conversational Speech]]</div>|<div class="cpsessionviewpapertitle">Advances in Integration of End-to-End Neural and Clustering-Based Diarization for Real Conversational Speech</div><div class="cpsessionviewpaperauthor">[[Keisuke Kinoshita|AUTHOR Keisuke Kinoshita]], [[Marc Delcroix|AUTHOR Marc Delcroix]], [[Naohiro Tawara|AUTHOR Naohiro Tawara]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211208.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-4-6|PAPER Thu-A-V-4-6 — The Third DIHARD Diarization Challenge]]</div>|<div class="cpsessionviewpapertitle">The Third DIHARD Diarization Challenge</div><div class="cpsessionviewpaperauthor">[[Neville Ryant|AUTHOR Neville Ryant]], [[Prachi Singh|AUTHOR Prachi Singh]], [[Venkat Krishnamohan|AUTHOR Venkat Krishnamohan]], [[Rajat Varma|AUTHOR Rajat Varma]], [[Kenneth Church|AUTHOR Kenneth Church]], [[Christopher Cieri|AUTHOR Christopher Cieri]], [[Jun Du|AUTHOR Jun Du]], [[Sriram Ganapathy|AUTHOR Sriram Ganapathy]], [[Mark Liberman|AUTHOR Mark Liberman]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211377.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-4-7|PAPER Thu-A-V-4-7 — Robust End-to-End Speaker Diarization with Conformer and Additive Margin Penalty]]</div>|<div class="cpsessionviewpapertitle">Robust End-to-End Speaker Diarization with Conformer and Additive Margin Penalty</div><div class="cpsessionviewpaperauthor">[[Tsun-Yat Leung|AUTHOR Tsun-Yat Leung]], [[Lahiru Samarakoon|AUTHOR Lahiru Samarakoon]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211588.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-4-8|PAPER Thu-A-V-4-8 — Anonymous Speaker Clusters: Making Distinctions Between Anonymised Speech Recordings with Clustering Interface]]</div>|<div class="cpsessionviewpapertitle">Anonymous Speaker Clusters: Making Distinctions Between Anonymised Speech Recordings with Clustering Interface</div><div class="cpsessionviewpaperauthor">[[Benjamin O’Brien|AUTHOR Benjamin O’Brien]], [[Natalia Tomashenko|AUTHOR Natalia Tomashenko]], [[Anaïs Chanclu|AUTHOR Anaïs Chanclu]], [[Jean-François Bonastre|AUTHOR Jean-François Bonastre]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211807.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-4-9|PAPER Thu-A-V-4-9 — Speaker Diarization Using Two-Pass Leave-One-Out Gaussian PLDA Clustering of DNN Embeddings]]</div>|<div class="cpsessionviewpapertitle">Speaker Diarization Using Two-Pass Leave-One-Out Gaussian PLDA Clustering of DNN Embeddings</div><div class="cpsessionviewpaperauthor">[[Kiran Karra|AUTHOR Kiran Karra]], [[Alan McCree|AUTHOR Alan McCree]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Thursday 2 September 2021, (Virtual)|<|
|^Chairs: |^Simon King|
|^ |^Wei-Ning Hsu|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212039.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-5-1|PAPER Thu-A-V-5-1 — Federated Learning with Dynamic Transformer for Text to Speech]]</div>|<div class="cpsessionviewpapertitle">Federated Learning with Dynamic Transformer for Text to Speech</div><div class="cpsessionviewpaperauthor">[[Zhenhou Hong|AUTHOR Zhenhou Hong]], [[Jianzong Wang|AUTHOR Jianzong Wang]], [[Xiaoyang Qu|AUTHOR Xiaoyang Qu]], [[Jie Liu|AUTHOR Jie Liu]], [[Chendong Zhao|AUTHOR Chendong Zhao]], [[Jing Xiao|AUTHOR Jing Xiao]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210188.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-5-2|PAPER Thu-A-V-5-2 — LiteTTS: A Lightweight Mel-Spectrogram-Free Text-to-Wave Synthesizer Based on Generative Adversarial Networks]]</div>|<div class="cpsessionviewpapertitle">LiteTTS: A Lightweight Mel-Spectrogram-Free Text-to-Wave Synthesizer Based on Generative Adversarial Networks</div><div class="cpsessionviewpaperauthor">[[Huu-Kim Nguyen|AUTHOR Huu-Kim Nguyen]], [[Kihyuk Jeong|AUTHOR Kihyuk Jeong]], [[Seyun Um|AUTHOR Seyun Um]], [[Min-Jae Hwang|AUTHOR Min-Jae Hwang]], [[Eunwoo Song|AUTHOR Eunwoo Song]], [[Hong-Goo Kang|AUTHOR Hong-Goo Kang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210189.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-5-3|PAPER Thu-A-V-5-3 — Zero-Shot Text-to-Speech for Text-Based Insertion in Audio Narration]]</div>|<div class="cpsessionviewpapertitle">Zero-Shot Text-to-Speech for Text-Based Insertion in Audio Narration</div><div class="cpsessionviewpaperauthor">[[Chuanxin Tang|AUTHOR Chuanxin Tang]], [[Chong Luo|AUTHOR Chong Luo]], [[Zhiyuan Zhao|AUTHOR Zhiyuan Zhao]], [[Dacheng Yin|AUTHOR Dacheng Yin]], [[Yucheng Zhao|AUTHOR Yucheng Zhao]], [[Wenjun Zeng|AUTHOR Wenjun Zeng]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210469.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-5-4|PAPER Thu-A-V-5-4 — Diff-TTS: A Denoising Diffusion Model for Text-to-Speech]]</div>|<div class="cpsessionviewpapertitle">Diff-TTS: A Denoising Diffusion Model for Text-to-Speech</div><div class="cpsessionviewpaperauthor">[[Myeonghun Jeong|AUTHOR Myeonghun Jeong]], [[Hyeongju Kim|AUTHOR Hyeongju Kim]], [[Sung Jun Cheon|AUTHOR Sung Jun Cheon]], [[Byoung Jin Choi|AUTHOR Byoung Jin Choi]], [[Nam Soo Kim|AUTHOR Nam Soo Kim]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210471.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-5-5|PAPER Thu-A-V-5-5 — Hierarchical Context-Aware Transformers for Non-Autoregressive Text to Speech]]</div>|<div class="cpsessionviewpapertitle">Hierarchical Context-Aware Transformers for Non-Autoregressive Text to Speech</div><div class="cpsessionviewpaperauthor">[[Jae-Sung Bae|AUTHOR Jae-Sung Bae]], [[Taejun Bak|AUTHOR Taejun Bak]], [[Young-Sun Joo|AUTHOR Young-Sun Joo]], [[Hoon-Young Cho|AUTHOR Hoon-Young Cho]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210475.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-5-6|PAPER Thu-A-V-5-6 — Speech Resynthesis from Discrete Disentangled Self-Supervised Representations]]</div>|<div class="cpsessionviewpapertitle">Speech Resynthesis from Discrete Disentangled Self-Supervised Representations</div><div class="cpsessionviewpaperauthor">[[Adam Polyak|AUTHOR Adam Polyak]], [[Yossi Adi|AUTHOR Yossi Adi]], [[Jade Copet|AUTHOR Jade Copet]], [[Eugene Kharitonov|AUTHOR Eugene Kharitonov]], [[Kushal Lakhotia|AUTHOR Kushal Lakhotia]], [[Wei-Ning Hsu|AUTHOR Wei-Ning Hsu]], [[Abdelrahman Mohamed|AUTHOR Abdelrahman Mohamed]], [[Emmanuel Dupoux|AUTHOR Emmanuel Dupoux]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210528.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-5-7|PAPER Thu-A-V-5-7 — A Learned Conditional Prior for the VAE Acoustic Space of a TTS System]]</div>|<div class="cpsessionviewpapertitle">A Learned Conditional Prior for the VAE Acoustic Space of a TTS System</div><div class="cpsessionviewpaperauthor">[[Penny Karanasou|AUTHOR Penny Karanasou]], [[Sri Karlapati|AUTHOR Sri Karlapati]], [[Alexis Moinet|AUTHOR Alexis Moinet]], [[Arnaud Joly|AUTHOR Arnaud Joly]], [[Ammar Abbas|AUTHOR Ammar Abbas]], [[Simon Slangen|AUTHOR Simon Slangen]], [[Jaime Lorenzo-Trueba|AUTHOR Jaime Lorenzo-Trueba]], [[Thomas Drugman|AUTHOR Thomas Drugman]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210660.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-5-8|PAPER Thu-A-V-5-8 — A Universal Multi-Speaker Multi-Style Text-to-Speech via Disentangled Representation Learning Based on Rényi Divergence Minimization]]</div>|<div class="cpsessionviewpapertitle">A Universal Multi-Speaker Multi-Style Text-to-Speech via Disentangled Representation Learning Based on Rényi Divergence Minimization</div><div class="cpsessionviewpaperauthor">[[Dipjyoti Paul|AUTHOR Dipjyoti Paul]], [[Sankar Mukherjee|AUTHOR Sankar Mukherjee]], [[Yannis Pantazis|AUTHOR Yannis Pantazis]], [[Yannis Stylianou|AUTHOR Yannis Stylianou]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210806.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-5-9|PAPER Thu-A-V-5-9 — Relational Data Selection for Data Augmentation of Speaker-Dependent Multi-Band MelGAN Vocoder]]</div>|<div class="cpsessionviewpapertitle">Relational Data Selection for Data Augmentation of Speaker-Dependent Multi-Band MelGAN Vocoder</div><div class="cpsessionviewpaperauthor">[[Yi-Chiao Wu|AUTHOR Yi-Chiao Wu]], [[Cheng-Hung Hu|AUTHOR Cheng-Hung Hu]], [[Hung-Shin Lee|AUTHOR Hung-Shin Lee]], [[Yu-Huai Peng|AUTHOR Yu-Huai Peng]], [[Wen-Chin Huang|AUTHOR Wen-Chin Huang]], [[Yu Tsao|AUTHOR Yu Tsao]], [[Hsin-Min Wang|AUTHOR Hsin-Min Wang]], [[Tomoki Toda|AUTHOR Tomoki Toda]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210831.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-5-10|PAPER Thu-A-V-5-10 — Reinforce-Aligner: Reinforcement Alignment Search for Robust End-to-End Text-to-Speech]]</div>|<div class="cpsessionviewpapertitle">Reinforce-Aligner: Reinforcement Alignment Search for Robust End-to-End Text-to-Speech</div><div class="cpsessionviewpaperauthor">[[Hyunseung Chung|AUTHOR Hyunseung Chung]], [[Sang-Hoon Lee|AUTHOR Sang-Hoon Lee]], [[Seong-Whan Lee|AUTHOR Seong-Whan Lee]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210851.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-5-11|PAPER Thu-A-V-5-11 — Triple M: A Practical Text-to-Speech Synthesis System with Multi-Guidance Attention and Multi-Band Multi-Time LPCNet]]</div>|<div class="cpsessionviewpapertitle">Triple M: A Practical Text-to-Speech Synthesis System with Multi-Guidance Attention and Multi-Band Multi-Time LPCNet</div><div class="cpsessionviewpaperauthor">[[Shilun Lin|AUTHOR Shilun Lin]], [[Fenglong Xie|AUTHOR Fenglong Xie]], [[Li Meng|AUTHOR Li Meng]], [[Xinhui Li|AUTHOR Xinhui Li]], [[Li Lu|AUTHOR Li Lu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211774.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-5-12|PAPER Thu-A-V-5-12 — SC-GlowTTS: An Efficient Zero-Shot Multi-Speaker Text-To-Speech Model]]</div>|<div class="cpsessionviewpapertitle">SC-GlowTTS: An Efficient Zero-Shot Multi-Speaker Text-To-Speech Model</div><div class="cpsessionviewpaperauthor">[[Edresson Casanova|AUTHOR Edresson Casanova]], [[Christopher Shulby|AUTHOR Christopher Shulby]], [[Eren Gölge|AUTHOR Eren Gölge]], [[Nicolas Michael Müller|AUTHOR Nicolas Michael Müller]], [[Frederico Santos de Oliveira|AUTHOR Frederico Santos de Oliveira]], [[Arnaldo Candido Jr.|AUTHOR Arnaldo Candido Jr.]], [[Anderson da Silva Soares|AUTHOR Anderson da Silva Soares]], [[Sandra Maria Aluisio|AUTHOR Sandra Maria Aluisio]], [[Moacir Antonelli Ponti|AUTHOR Moacir Antonelli Ponti]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Thursday 2 September 2021, (Virtual)|<|
|^Chairs: |^Neville Ryant|
|^ |^Audrey Tong|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210245.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-6-1|PAPER Thu-A-V-6-1 — Spoken ObjectNet: A Bias-Controlled Spoken Caption Dataset]]</div>|<div class="cpsessionviewpapertitle">Spoken ObjectNet: A Bias-Controlled Spoken Caption Dataset</div><div class="cpsessionviewpaperauthor">[[Ian Palmer|AUTHOR Ian Palmer]], [[Andrew Rouditchenko|AUTHOR Andrew Rouditchenko]], [[Andrei Barbu|AUTHOR Andrei Barbu]], [[Boris Katz|AUTHOR Boris Katz]], [[James Glass|AUTHOR James Glass]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210011.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-6-2|PAPER Thu-A-V-6-2 — The Multilingual TEDx Corpus for Speech Recognition and Translation]]</div>|<div class="cpsessionviewpapertitle">The Multilingual TEDx Corpus for Speech Recognition and Translation</div><div class="cpsessionviewpaperauthor">[[Elizabeth Salesky|AUTHOR Elizabeth Salesky]], [[Matthew Wiesner|AUTHOR Matthew Wiesner]], [[Jacob Bremerman|AUTHOR Jacob Bremerman]], [[Roldano Cattoni|AUTHOR Roldano Cattoni]], [[Matteo Negri|AUTHOR Matteo Negri]], [[Marco Turchi|AUTHOR Marco Turchi]], [[Douglas W. Oard|AUTHOR Douglas W. Oard]], [[Matt Post|AUTHOR Matt Post]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211435.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-6-3|PAPER Thu-A-V-6-3 — Tusom2021: A Phonetically Transcribed Speech Dataset from an Endangered Language for Universal Phone Recognition Experiments]]</div>|<div class="cpsessionviewpapertitle">Tusom2021: A Phonetically Transcribed Speech Dataset from an Endangered Language for Universal Phone Recognition Experiments</div><div class="cpsessionviewpaperauthor">[[David R. Mortensen|AUTHOR David R. Mortensen]], [[Jordan Picone|AUTHOR Jordan Picone]], [[Xinjian Li|AUTHOR Xinjian Li]], [[Kathleen Siminyu|AUTHOR Kathleen Siminyu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211397.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-6-4|PAPER Thu-A-V-6-4 — AISHELL-4: An Open Source Dataset for Speech Enhancement, Separation, Recognition and Speaker Diarization in Conference Scenario]]</div>|<div class="cpsessionviewpapertitle">AISHELL-4: An Open Source Dataset for Speech Enhancement, Separation, Recognition and Speaker Diarization in Conference Scenario</div><div class="cpsessionviewpaperauthor">[[Yihui Fu|AUTHOR Yihui Fu]], [[Luyao Cheng|AUTHOR Luyao Cheng]], [[Shubo Lv|AUTHOR Shubo Lv]], [[Yukai Jv|AUTHOR Yukai Jv]], [[Yuxiang Kong|AUTHOR Yuxiang Kong]], [[Zhuo Chen|AUTHOR Zhuo Chen]], [[Yanxin Hu|AUTHOR Yanxin Hu]], [[Lei Xie|AUTHOR Lei Xie]], [[Jian Wu|AUTHOR Jian Wu]], [[Hui Bu|AUTHOR Hui Bu]], [[Xin Xu|AUTHOR Xin Xu]], [[Jun Du|AUTHOR Jun Du]], [[Jingdong Chen|AUTHOR Jingdong Chen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211965.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-6-5|PAPER Thu-A-V-6-5 — GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio]]</div>|<div class="cpsessionviewpapertitle">GigaSpeech: An Evolving, Multi-Domain ASR Corpus with 10,000 Hours of Transcribed Audio</div><div class="cpsessionviewpaperauthor">[[Guoguo Chen|AUTHOR Guoguo Chen]], [[Shuzhou Chai|AUTHOR Shuzhou Chai]], [[Guan-Bo Wang|AUTHOR Guan-Bo Wang]], [[Jiayu Du|AUTHOR Jiayu Du]], [[Wei-Qiang Zhang|AUTHOR Wei-Qiang Zhang]], [[Chao Weng|AUTHOR Chao Weng]], [[Dan Su|AUTHOR Dan Su]], [[Daniel Povey|AUTHOR Daniel Povey]], [[Jan Trmal|AUTHOR Jan Trmal]], [[Junbo Zhang|AUTHOR Junbo Zhang]], [[Mingjie Jin|AUTHOR Mingjie Jin]], [[Sanjeev Khudanpur|AUTHOR Sanjeev Khudanpur]], [[Shinji Watanabe|AUTHOR Shinji Watanabe]], [[Shuaijiang Zhao|AUTHOR Shuaijiang Zhao]], [[Wei Zou|AUTHOR Wei Zou]], [[Xiangang Li|AUTHOR Xiangang Li]], [[Xuchen Yao|AUTHOR Xuchen Yao]], [[Yongqing Wang|AUTHOR Yongqing Wang]], [[Zhao You|AUTHOR Zhao You]], [[Zhiyong Yan|AUTHOR Zhiyong Yan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212041.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-6-6|PAPER Thu-A-V-6-6 — Look Who’s Talking: Active Speaker Detection in the Wild]]</div>|<div class="cpsessionviewpapertitle">Look Who’s Talking: Active Speaker Detection in the Wild</div><div class="cpsessionviewpaperauthor">[[You Jin Kim|AUTHOR You Jin Kim]], [[Hee-Soo Heo|AUTHOR Hee-Soo Heo]], [[Soyeon Choe|AUTHOR Soyeon Choe]], [[Soo-Whan Chung|AUTHOR Soo-Whan Chung]], [[Yoohwan Kwon|AUTHOR Yoohwan Kwon]], [[Bong-Jin Lee|AUTHOR Bong-Jin Lee]], [[Youngki Kwon|AUTHOR Youngki Kwon]], [[Joon Son Chung|AUTHOR Joon Son Chung]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212000.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-6-7|PAPER Thu-A-V-6-7 — AusKidTalk: An Auditory-Visual Corpus of 3- to 12-Year-Old Australian Children’s Speech]]</div>|<div class="cpsessionviewpapertitle">AusKidTalk: An Auditory-Visual Corpus of 3- to 12-Year-Old Australian Children’s Speech</div><div class="cpsessionviewpaperauthor">[[Beena Ahmed|AUTHOR Beena Ahmed]], [[Kirrie J. Ballard|AUTHOR Kirrie J. Ballard]], [[Denis Burnham|AUTHOR Denis Burnham]], [[Tharmakulasingam Sirojan|AUTHOR Tharmakulasingam Sirojan]], [[Hadi Mehmood|AUTHOR Hadi Mehmood]], [[Dominique Estival|AUTHOR Dominique Estival]], [[Elise Baker|AUTHOR Elise Baker]], [[Felicity Cox|AUTHOR Felicity Cox]], [[Joanne Arciuli|AUTHOR Joanne Arciuli]], [[Titia Benders|AUTHOR Titia Benders]], [[Katherine Demuth|AUTHOR Katherine Demuth]], [[Barbara Kelly|AUTHOR Barbara Kelly]], [[Chloé Diskin-Holdaway|AUTHOR Chloé Diskin-Holdaway]], [[Mostafa Shahin|AUTHOR Mostafa Shahin]], [[Vidhyasaharan Sethu|AUTHOR Vidhyasaharan Sethu]], [[Julien Epps|AUTHOR Julien Epps]], [[Chwee Beng Lee|AUTHOR Chwee Beng Lee]], [[Eliathamby Ambikairajah|AUTHOR Eliathamby Ambikairajah]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210045.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-6-8|PAPER Thu-A-V-6-8 — Human-in-the-Loop Efficiency Analysis for Binary Classification in Edyson]]</div>|<div class="cpsessionviewpapertitle">Human-in-the-Loop Efficiency Analysis for Binary Classification in Edyson</div><div class="cpsessionviewpaperauthor">[[Per Fallgren|AUTHOR Per Fallgren]], [[Jens Edlund|AUTHOR Jens Edlund]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211636.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-6-9|PAPER Thu-A-V-6-9 — Annotation Confidence vs. Training Sample Size: Trade-Off Solution for Partially-Continuous Categorical Emotion Recognition]]</div>|<div class="cpsessionviewpapertitle">Annotation Confidence vs. Training Sample Size: Trade-Off Solution for Partially-Continuous Categorical Emotion Recognition</div><div class="cpsessionviewpaperauthor">[[Elena Ryumina|AUTHOR Elena Ryumina]], [[Oxana Verkholyak|AUTHOR Oxana Verkholyak]], [[Alexey Karpov|AUTHOR Alexey Karpov]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211905.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-6-10|PAPER Thu-A-V-6-10 — Europarl-ASR: A Large Corpus of Parliamentary Debates for Streaming ASR Benchmarking and Speech Data Filtering/Verbatimization]]</div>|<div class="cpsessionviewpapertitle">Europarl-ASR: A Large Corpus of Parliamentary Debates for Streaming ASR Benchmarking and Speech Data Filtering/Verbatimization</div><div class="cpsessionviewpaperauthor">[[Gonçal V. Garcés Díaz-Munío|AUTHOR Gonçal V. Garcés Díaz-Munío]], [[Joan-Albert Silvestre-Cerdà|AUTHOR Joan-Albert Silvestre-Cerdà]], [[Javier Jorge|AUTHOR Javier Jorge]], [[Adrià Giménez Pastor|AUTHOR Adrià Giménez Pastor]], [[Javier Iranzo-Sánchez|AUTHOR Javier Iranzo-Sánchez]], [[Pau Baquero-Arnal|AUTHOR Pau Baquero-Arnal]], [[Nahuel Roselló|AUTHOR Nahuel Roselló]], [[Alejandro Pérez-González-de-Martos|AUTHOR Alejandro Pérez-González-de-Martos]], [[Jorge Civera|AUTHOR Jorge Civera]], [[Albert Sanchis|AUTHOR Albert Sanchis]], [[Alfons Juan|AUTHOR Alfons Juan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211094.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-6-11|PAPER Thu-A-V-6-11 — Towards Automatic Speech to Sign Language Generation]]</div>|<div class="cpsessionviewpapertitle">Towards Automatic Speech to Sign Language Generation</div><div class="cpsessionviewpaperauthor">[[Parul Kapoor|AUTHOR Parul Kapoor]], [[Rudrabha Mukhopadhyay|AUTHOR Rudrabha Mukhopadhyay]], [[Sindhu B. Hegde|AUTHOR Sindhu B. Hegde]], [[Vinay Namboodiri|AUTHOR Vinay Namboodiri]], [[C.V. Jawahar|AUTHOR C.V. Jawahar]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211040.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-6-12|PAPER Thu-A-V-6-12 — kosp2e: Korean Speech to English Translation Corpus]]</div>|<div class="cpsessionviewpapertitle">kosp2e: Korean Speech to English Translation Corpus</div><div class="cpsessionviewpaperauthor">[[Won Ik Cho|AUTHOR Won Ik Cho]], [[Seok Min Kim|AUTHOR Seok Min Kim]], [[Hyunchang Cho|AUTHOR Hyunchang Cho]], [[Nam Soo Kim|AUTHOR Nam Soo Kim]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211259.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-A-V-6-13|PAPER Thu-A-V-6-13 — speechocean762: An Open-Source Non-Native English Speech Corpus for Pronunciation Assessment]]</div>|<div class="cpsessionviewpapertitle">speechocean762: An Open-Source Non-Native English Speech Corpus for Pronunciation Assessment</div><div class="cpsessionviewpaperauthor">[[Junbo Zhang|AUTHOR Junbo Zhang]], [[Zhiwen Zhang|AUTHOR Zhiwen Zhang]], [[Yongqing Wang|AUTHOR Yongqing Wang]], [[Zhiyong Yan|AUTHOR Zhiyong Yan]], [[Qiong Song|AUTHOR Qiong Song]], [[Yukai Huang|AUTHOR Yukai Huang]], [[Ke Li|AUTHOR Ke Li]], [[Daniel Povey|AUTHOR Daniel Povey]], [[Yujun Wang|AUTHOR Yujun Wang]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|15:00–16:00, Thursday 2 September 2021, Room A+B|<|
|^Chair: |^TBD|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> |^<div class="cpsessionviewpapercode">[[Thu-Keynote|PAPER Thu-Keynote — Adaptive Listening to Everyday Soundscapes]]</div>|<div class="cpsessionviewpapertitle">Adaptive Listening to Everyday Soundscapes</div><div class="cpsessionviewpaperauthor">[[Mounya Elhilali|AUTHOR Mounya Elhilali]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Thursday 2 September 2021, Room A+B|<|
|^Chairs: |^Sanjeev Khudanpur|
|^ |^Philip Garner|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210155.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-O-1-1|PAPER Thu-M-O-1-1 — Self-Paced Ensemble Learning for Speech and Audio Classification]]</div>|<div class="cpsessionviewpapertitle">Self-Paced Ensemble Learning for Speech and Audio Classification</div><div class="cpsessionviewpaperauthor">[[Nicolae-Cătălin Ristea|AUTHOR Nicolae-Cătălin Ristea]], [[Radu Tudor Ionescu|AUTHOR Radu Tudor Ionescu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210175.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-O-1-2|PAPER Thu-M-O-1-2 — Knowledge Distillation for Streaming Transformer–Transducer]]</div>|<div class="cpsessionviewpapertitle">Knowledge Distillation for Streaming Transformer–Transducer</div><div class="cpsessionviewpaperauthor">[[Atsushi Kojima|AUTHOR Atsushi Kojima]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210555.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-O-1-3|PAPER Thu-M-O-1-3 — Multi-Encoder Learning and Stream Fusion for Transformer-Based End-to-End Automatic Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Multi-Encoder Learning and Stream Fusion for Transformer-Based End-to-End Automatic Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Timo Lohrenz|AUTHOR Timo Lohrenz]], [[Zhengyang Li|AUTHOR Zhengyang Li]], [[Tim Fingscheidt|AUTHOR Tim Fingscheidt]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211027.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-O-1-4|PAPER Thu-M-O-1-4 — Conditional Independence for Pretext Task Selection in Self-Supervised Speech Representation Learning]]</div>|<div class="cpsessionviewpapertitle">Conditional Independence for Pretext Task Selection in Self-Supervised Speech Representation Learning</div><div class="cpsessionviewpaperauthor">[[Salah Zaiem|AUTHOR Salah Zaiem]], [[Titouan Parcollet|AUTHOR Titouan Parcollet]], [[Slim Essid|AUTHOR Slim Essid]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211255.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-O-1-5|PAPER Thu-M-O-1-5 — Investigating Methods to Improve Language Model Integration for Attention-Based Encoder-Decoder ASR Models]]</div>|<div class="cpsessionviewpapertitle">Investigating Methods to Improve Language Model Integration for Attention-Based Encoder-Decoder ASR Models</div><div class="cpsessionviewpaperauthor">[[Mohammad Zeineldeen|AUTHOR Mohammad Zeineldeen]], [[Aleksandr Glushko|AUTHOR Aleksandr Glushko]], [[Wilfried Michel|AUTHOR Wilfried Michel]], [[Albert Zeyer|AUTHOR Albert Zeyer]], [[Ralf Schlüter|AUTHOR Ralf Schlüter]], [[Hermann Ney|AUTHOR Hermann Ney]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211683.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-O-1-6|PAPER Thu-M-O-1-6 — Comparing CTC and LFMMI for Out-of-Domain Adaptation of wav2vec 2.0 Acoustic Model]]</div>|<div class="cpsessionviewpapertitle">Comparing CTC and LFMMI for Out-of-Domain Adaptation of wav2vec 2.0 Acoustic Model</div><div class="cpsessionviewpaperauthor">[[Apoorv Vyas|AUTHOR Apoorv Vyas]], [[Srikanth Madikeri|AUTHOR Srikanth Madikeri]], [[Hervé Bourlard|AUTHOR Hervé Bourlard]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Thursday 2 September 2021, Room C|<|
|^Chairs: |^Ioana Vasilescu|
|^ |^Nicole Holliday|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210573.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-O-2-1|PAPER Thu-M-O-2-1 — Speaker Attentive Speech Emotion Recognition]]</div>|<div class="cpsessionviewpapertitle">Speaker Attentive Speech Emotion Recognition</div><div class="cpsessionviewpaperauthor">[[Clément Le Moine|AUTHOR Clément Le Moine]], [[Nicolas Obin|AUTHOR Nicolas Obin]], [[Axel Roebel|AUTHOR Axel Roebel]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211438.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-O-2-2|PAPER Thu-M-O-2-2 — Separation of Emotional and Reconstruction Embeddings on Ladder Network to Improve Speech Emotion Recognition Robustness in Noisy Conditions]]</div>|<div class="cpsessionviewpapertitle">Separation of Emotional and Reconstruction Embeddings on Ladder Network to Improve Speech Emotion Recognition Robustness in Noisy Conditions</div><div class="cpsessionviewpaperauthor">[[Seong-Gyun Leem|AUTHOR Seong-Gyun Leem]], [[Daniel Fulford|AUTHOR Daniel Fulford]], [[Jukka-Pekka Onnela|AUTHOR Jukka-Pekka Onnela]], [[David Gard|AUTHOR David Gard]], [[Carlos Busso|AUTHOR Carlos Busso]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211739.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-O-2-3|PAPER Thu-M-O-2-3 — M³: MultiModal Masking Applied to Sentiment Analysis]]</div>|<div class="cpsessionviewpapertitle">M³: MultiModal Masking Applied to Sentiment Analysis</div><div class="cpsessionviewpaperauthor">[[Efthymios Georgiou|AUTHOR Efthymios Georgiou]], [[Georgios Paraskevopoulos|AUTHOR Georgios Paraskevopoulos]], [[Alexandros Potamianos|AUTHOR Alexandros Potamianos]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Thursday 2 September 2021, Room D|<|
|^Chairs: |^Petr Schwarz|
|^ |^Thomas Schaaf|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211035.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-O-3-1|PAPER Thu-M-O-3-1 — The CSTR System for Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages]]</div>|<div class="cpsessionviewpapertitle">The CSTR System for Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages</div><div class="cpsessionviewpaperauthor">[[Ondřej Klejch|AUTHOR Ondřej Klejch]], [[Electra Wallington|AUTHOR Electra Wallington]], [[Peter Bell|AUTHOR Peter Bell]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211623.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-O-3-2|PAPER Thu-M-O-3-2 — Acoustic Data-Driven Subword Modeling for End-to-End Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Acoustic Data-Driven Subword Modeling for End-to-End Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Wei Zhou|AUTHOR Wei Zhou]], [[Mohammad Zeineldeen|AUTHOR Mohammad Zeineldeen]], [[Zuoyun Zheng|AUTHOR Zuoyun Zheng]], [[Ralf Schlüter|AUTHOR Ralf Schlüter]], [[Hermann Ney|AUTHOR Hermann Ney]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211671.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-O-3-3|PAPER Thu-M-O-3-3 — Equivalence of Segmental and Neural Transducer Modeling: A Proof of Concept]]</div>|<div class="cpsessionviewpapertitle">Equivalence of Segmental and Neural Transducer Modeling: A Proof of Concept</div><div class="cpsessionviewpaperauthor">[[Wei Zhou|AUTHOR Wei Zhou]], [[Albert Zeyer|AUTHOR Albert Zeyer]], [[André Merboldt|AUTHOR André Merboldt]], [[Ralf Schlüter|AUTHOR Ralf Schlüter]], [[Hermann Ney|AUTHOR Hermann Ney]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211735.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-O-3-4|PAPER Thu-M-O-3-4 — Modeling Dialectal Variation for Swiss German Automatic Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Modeling Dialectal Variation for Swiss German Automatic Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Abbas Khosravani|AUTHOR Abbas Khosravani]], [[Philip N. Garner|AUTHOR Philip N. Garner]], [[Alexandros Lazaridis|AUTHOR Alexandros Lazaridis]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211756.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-O-3-5|PAPER Thu-M-O-3-5 — Out-of-Vocabulary Words Detection with Attention and CTC Alignments in an End-to-End ASR System]]</div>|<div class="cpsessionviewpapertitle">Out-of-Vocabulary Words Detection with Attention and CTC Alignments in an End-to-End ASR System</div><div class="cpsessionviewpaperauthor">[[Ekaterina Egorova|AUTHOR Ekaterina Egorova]], [[Hari Krishna Vydana|AUTHOR Hari Krishna Vydana]], [[Lukáš Burget|AUTHOR Lukáš Burget]], [[Jan Černocký|AUTHOR Jan Černocký]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212127.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-O-3-6|PAPER Thu-M-O-3-6 — Training Hybrid Models on Noisy Transliterated Transcripts for Code-Switched Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Training Hybrid Models on Noisy Transliterated Transcripts for Code-Switched Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Matthew Wiesner|AUTHOR Matthew Wiesner]], [[Mousmita Sarma|AUTHOR Mousmita Sarma]], [[Ashish Arora|AUTHOR Ashish Arora]], [[Desh Raj|AUTHOR Desh Raj]], [[Dongji Gao|AUTHOR Dongji Gao]], [[Ruizhe Huang|AUTHOR Ruizhe Huang]], [[Supreet Preet|AUTHOR Supreet Preet]], [[Moris Johnson|AUTHOR Moris Johnson]], [[Zikra Iqbal|AUTHOR Zikra Iqbal]], [[Nagendra Goel|AUTHOR Nagendra Goel]], [[Jan Trmal|AUTHOR Jan Trmal]], [[Leibny Paola García Perera|AUTHOR Leibny Paola García Perera]], [[Sanjeev Khudanpur|AUTHOR Sanjeev Khudanpur]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Thursday 2 September 2021, (Virtual)|<|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218015.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-S&T-1-1|PAPER Thu-M-S&T-1-1 — MoM: Minutes of Meeting Bot]]</div>|<div class="cpsessionviewpapertitle">MoM: Minutes of Meeting Bot</div><div class="cpsessionviewpaperauthor">[[Benjamin Milde|AUTHOR Benjamin Milde]], [[Tim Fischer|AUTHOR Tim Fischer]], [[Steffen Remus|AUTHOR Steffen Remus]], [[Chris Biemann|AUTHOR Chris Biemann]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218016.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-S&T-1-2|PAPER Thu-M-S&T-1-2 — Articulatory Data Recorder: A Framework for Real-Time Articulatory Data Recording]]</div>|<div class="cpsessionviewpapertitle">Articulatory Data Recorder: A Framework for Real-Time Articulatory Data Recording</div><div class="cpsessionviewpaperauthor">[[Alexander Wilbrandt|AUTHOR Alexander Wilbrandt]], [[Simon Stone|AUTHOR Simon Stone]], [[Peter Birkholz|AUTHOR Peter Birkholz]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218017.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-S&T-1-3|PAPER Thu-M-S&T-1-3 — The INGENIOUS Multilingual Operations App]]</div>|<div class="cpsessionviewpapertitle">The INGENIOUS Multilingual Operations App</div><div class="cpsessionviewpaperauthor">[[Joan Codina-Filbà|AUTHOR Joan Codina-Filbà]], [[Guillermo Cámbara|AUTHOR Guillermo Cámbara]], [[Alex Peiró-Lilja|AUTHOR Alex Peiró-Lilja]], [[Jens Grivolla|AUTHOR Jens Grivolla]], [[Roberto Carlini|AUTHOR Roberto Carlini]], [[Mireia Farrús|AUTHOR Mireia Farrús]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218018.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-S&T-1-4|PAPER Thu-M-S&T-1-4 — Digital Einstein Experience: Fast Text-to-Speech for Conversational AI]]</div>|<div class="cpsessionviewpapertitle">Digital Einstein Experience: Fast Text-to-Speech for Conversational AI</div><div class="cpsessionviewpaperauthor">[[Joanna Rownicka|AUTHOR Joanna Rownicka]], [[Kilian Sprenkamp|AUTHOR Kilian Sprenkamp]], [[Antonio Tripiana|AUTHOR Antonio Tripiana]], [[Volodymyr Gromoglasov|AUTHOR Volodymyr Gromoglasov]], [[Timo P. Kunz|AUTHOR Timo P. Kunz]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218019.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-S&T-1-5|PAPER Thu-M-S&T-1-5 — Live Subtitling for BigBlueButton with Open-Source Software]]</div>|<div class="cpsessionviewpapertitle">Live Subtitling for BigBlueButton with Open-Source Software</div><div class="cpsessionviewpaperauthor">[[Robert Geislinger|AUTHOR Robert Geislinger]], [[Benjamin Milde|AUTHOR Benjamin Milde]], [[Timo Baumann|AUTHOR Timo Baumann]], [[Chris Biemann|AUTHOR Chris Biemann]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218020.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-S&T-1-6|PAPER Thu-M-S&T-1-6 — Expressive Latvian Speech Synthesis for Dialog Systems]]</div>|<div class="cpsessionviewpapertitle">Expressive Latvian Speech Synthesis for Dialog Systems</div><div class="cpsessionviewpaperauthor">[[Dāvis Nicmanis|AUTHOR Dāvis Nicmanis]], [[Askars Salimbajevs|AUTHOR Askars Salimbajevs]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218022.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-S&T-1-7|PAPER Thu-M-S&T-1-7 — ViSTAFAE: A Visual Speech-Training Aid with Feedback of Articulatory Efforts]]</div>|<div class="cpsessionviewpapertitle">ViSTAFAE: A Visual Speech-Training Aid with Feedback of Articulatory Efforts</div><div class="cpsessionviewpaperauthor">[[Pramod H. Kachare|AUTHOR Pramod H. Kachare]], [[Prem C. Pandey|AUTHOR Prem C. Pandey]], [[Vishal Mane|AUTHOR Vishal Mane]], [[Hirak Dasgupta|AUTHOR Hirak Dasgupta]], [[K.S. Nataraj|AUTHOR K.S. Nataraj]], [[Akshada Rathod|AUTHOR Akshada Rathod]], [[Sheetal K. Pathak|AUTHOR Sheetal K. Pathak]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Thursday 2 September 2021, (Virtual)|<|
|^Chairs: |^Dong Wang|
|^ |^Qingyang Hong|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212171.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-SS-1-1|PAPER Thu-M-SS-1-1 — Oriental Language Recognition (OLR) 2020: Summary and Analysis]]</div>|<div class="cpsessionviewpapertitle">Oriental Language Recognition (OLR) 2020: Summary and Analysis</div><div class="cpsessionviewpaperauthor">[[Jing Li|AUTHOR Jing Li]], [[Binling Wang|AUTHOR Binling Wang]], [[Yiming Zhi|AUTHOR Yiming Zhi]], [[Zheng Li|AUTHOR Zheng Li]], [[Lin Li|AUTHOR Lin Li]], [[Qingyang Hong|AUTHOR Qingyang Hong]], [[Dong Wang|AUTHOR Dong Wang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210276.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-SS-1-2|PAPER Thu-M-SS-1-2 — Language Recognition on Unknown Conditions: The LORIA-Inria-MULTISPEECH System for AP20-OLR Challenge]]</div>|<div class="cpsessionviewpapertitle">Language Recognition on Unknown Conditions: The LORIA-Inria-MULTISPEECH System for AP20-OLR Challenge</div><div class="cpsessionviewpaperauthor">[[Raphaël Duroselle|AUTHOR Raphaël Duroselle]], [[Md. Sahidullah|AUTHOR Md. Sahidullah]], [[Denis Jouvet|AUTHOR Denis Jouvet]], [[Irina Illina|AUTHOR Irina Illina]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210056.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-SS-1-3|PAPER Thu-M-SS-1-3 — Dynamic Multi-Scale Convolution for Dialect Identification]]</div>|<div class="cpsessionviewpapertitle">Dynamic Multi-Scale Convolution for Dialect Identification</div><div class="cpsessionviewpaperauthor">[[Tianlong Kong|AUTHOR Tianlong Kong]], [[Shouyi Yin|AUTHOR Shouyi Yin]], [[Dawei Zhang|AUTHOR Dawei Zhang]], [[Wang Geng|AUTHOR Wang Geng]], [[Xin Wang|AUTHOR Xin Wang]], [[Dandan Song|AUTHOR Dandan Song]], [[Jinwen Huang|AUTHOR Jinwen Huang]], [[Huiyu Shi|AUTHOR Huiyu Shi]], [[Xiaorui Wang|AUTHOR Xiaorui Wang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210374.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-SS-1-4|PAPER Thu-M-SS-1-4 — An End-to-End Dialect Identification System with Transfer Learning from a Multilingual Automatic Speech Recognition Model]]</div>|<div class="cpsessionviewpapertitle">An End-to-End Dialect Identification System with Transfer Learning from a Multilingual Automatic Speech Recognition Model</div><div class="cpsessionviewpaperauthor">[[Ding Wang|AUTHOR Ding Wang]], [[Shuaishuai Ye|AUTHOR Shuaishuai Ye]], [[Xinhui Hu|AUTHOR Xinhui Hu]], [[Sheng Li|AUTHOR Sheng Li]], [[Xinkang Xu|AUTHOR Xinkang Xu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210807.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-SS-1-5|PAPER Thu-M-SS-1-5 — Language Recognition Based on Unsupervised Pretrained Models]]</div>|<div class="cpsessionviewpapertitle">Language Recognition Based on Unsupervised Pretrained Models</div><div class="cpsessionviewpaperauthor">[[Haibin Yu|AUTHOR Haibin Yu]], [[Jing Zhao|AUTHOR Jing Zhao]], [[Song Yang|AUTHOR Song Yang]], [[Zhongqin Wu|AUTHOR Zhongqin Wu]], [[Yuting Nie|AUTHOR Yuting Nie]], [[Wei-Qiang Zhang|AUTHOR Wei-Qiang Zhang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211167.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-SS-1-6|PAPER Thu-M-SS-1-6 — Additive Phoneme-Aware Margin Softmax Loss for Language Recognition]]</div>|<div class="cpsessionviewpapertitle">Additive Phoneme-Aware Margin Softmax Loss for Language Recognition</div><div class="cpsessionviewpaperauthor">[[Zheng Li|AUTHOR Zheng Li]], [[Yan Liu|AUTHOR Yan Liu]], [[Lin Li|AUTHOR Lin Li]], [[Qingyang Hong|AUTHOR Qingyang Hong]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Thursday 2 September 2021, Room Lacina|<|
|^Chairs: |^Hartmut Helmke|
|^ |^Pavel Kolčárek|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> ||<div class="cpsessionviewpapertitle">Introduction</div><div class="cpsessionviewpaperauthor"></div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210333.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-SS-2-1|PAPER Thu-M-SS-2-1 — Towards an Accent-Robust Approach for ATC Communications Transcription]]</div>|<div class="cpsessionviewpapertitle">Towards an Accent-Robust Approach for ATC Communications Transcription</div><div class="cpsessionviewpaperauthor">[[Nataly Jahchan|AUTHOR Nataly Jahchan]], [[Florentin Barbier|AUTHOR Florentin Barbier]], [[Ariyanidevi Dharma Gita|AUTHOR Ariyanidevi Dharma Gita]], [[Khaled Khelif|AUTHOR Khaled Khelif]], [[Estelle Delpech|AUTHOR Estelle Delpech]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211033.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-SS-2-2|PAPER Thu-M-SS-2-2 — Detecting English Speech in the Air Traffic Control Voice Communication]]</div>|<div class="cpsessionviewpapertitle">Detecting English Speech in the Air Traffic Control Voice Communication</div><div class="cpsessionviewpaperauthor">[[Igor Szöke|AUTHOR Igor Szöke]], [[Santosh Kesiraju|AUTHOR Santosh Kesiraju]], [[Ondřej Novotný|AUTHOR Ondřej Novotný]], [[Martin Kocour|AUTHOR Martin Kocour]], [[Karel Veselý|AUTHOR Karel Veselý]], [[Jan Černocký|AUTHOR Jan Černocký]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210935.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-SS-2-3|PAPER Thu-M-SS-2-3 — Robust Command Recognition for Lithuanian Air Traffic Control Tower Utterances]]</div>|<div class="cpsessionviewpapertitle">Robust Command Recognition for Lithuanian Air Traffic Control Tower Utterances</div><div class="cpsessionviewpaperauthor">[[Oliver Ohneiser|AUTHOR Oliver Ohneiser]], [[Seyyed Saeed Sarfjoo|AUTHOR Seyyed Saeed Sarfjoo]], [[Hartmut Helmke|AUTHOR Hartmut Helmke]], [[Shruthi Shetty|AUTHOR Shruthi Shetty]], [[Petr Motlicek|AUTHOR Petr Motlicek]], [[Matthias Kleinert|AUTHOR Matthias Kleinert]], [[Heiko Ehr|AUTHOR Heiko Ehr]], [[Šarūnas Murauskas|AUTHOR Šarūnas Murauskas]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211373.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-SS-2-4|PAPER Thu-M-SS-2-4 — Contextual Semi-Supervised Learning: An Approach to Leverage Air-Surveillance and Untranscribed ATC Data in ASR Systems]]</div>|<div class="cpsessionviewpapertitle">Contextual Semi-Supervised Learning: An Approach to Leverage Air-Surveillance and Untranscribed ATC Data in ASR Systems</div><div class="cpsessionviewpaperauthor">[[Juan Zuluaga-Gomez|AUTHOR Juan Zuluaga-Gomez]], [[Iuliia Nigmatulina|AUTHOR Iuliia Nigmatulina]], [[Amrutha Prasad|AUTHOR Amrutha Prasad]], [[Petr Motlicek|AUTHOR Petr Motlicek]], [[Karel Veselý|AUTHOR Karel Veselý]], [[Martin Kocour|AUTHOR Martin Kocour]], [[Igor Szöke|AUTHOR Igor Szöke]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211619.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-SS-2-5|PAPER Thu-M-SS-2-5 — Boosting of Contextual Information in ASR for Air-Traffic Call-Sign Recognition]]</div>|<div class="cpsessionviewpapertitle">Boosting of Contextual Information in ASR for Air-Traffic Call-Sign Recognition</div><div class="cpsessionviewpaperauthor">[[Martin Kocour|AUTHOR Martin Kocour]], [[Karel Veselý|AUTHOR Karel Veselý]], [[Alexander Blatt|AUTHOR Alexander Blatt]], [[Juan Zuluaga Gomez|AUTHOR Juan Zuluaga Gomez]], [[Igor Szöke|AUTHOR Igor Szöke]], [[Jan Černocký|AUTHOR Jan Černocký]], [[Dietrich Klakow|AUTHOR Dietrich Klakow]], [[Petr Motlicek|AUTHOR Petr Motlicek]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211650.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-SS-2-6|PAPER Thu-M-SS-2-6 — Modeling the Effect of Military Oxygen Masks on Speech Characteristics]]</div>|<div class="cpsessionviewpapertitle">Modeling the Effect of Military Oxygen Masks on Speech Characteristics</div><div class="cpsessionviewpaperauthor">[[Benjamin Elie|AUTHOR Benjamin Elie]], [[Jodie Gauvain|AUTHOR Jodie Gauvain]], [[Jean-Luc Gauvain|AUTHOR Jean-Luc Gauvain]], [[Lori Lamel|AUTHOR Lori Lamel]]</div>|
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> ||<div class="cpsessionviewpapertitle">Panel Discussion</div><div class="cpsessionviewpaperauthor"></div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Thursday 2 September 2021, (Virtual)|<|
|^Chairs: |^Marzieh Razavi|
|^ |^Fei Chen|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211189.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-1-1|PAPER Thu-M-V-1-1 — Speech Intelligibility of Dysarthric Speech: Human Scores and Acoustic-Phonetic Features]]</div>|<div class="cpsessionviewpapertitle">Speech Intelligibility of Dysarthric Speech: Human Scores and Acoustic-Phonetic Features</div><div class="cpsessionviewpaperauthor">[[Wei Xue|AUTHOR Wei Xue]], [[Roeland van Hout|AUTHOR Roeland van Hout]], [[Fleur Boogmans|AUTHOR Fleur Boogmans]], [[Mario Ganzeboom|AUTHOR Mario Ganzeboom]], [[Catia Cucchiarini|AUTHOR Catia Cucchiarini]], [[Helmer Strik|AUTHOR Helmer Strik]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212111.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-1-2|PAPER Thu-M-V-1-2 — Analyzing Short Term Dynamic Speech Features for Understanding Behavioral Traits of Children with Autism Spectrum Disorder]]</div>|<div class="cpsessionviewpapertitle">Analyzing Short Term Dynamic Speech Features for Understanding Behavioral Traits of Children with Autism Spectrum Disorder</div><div class="cpsessionviewpaperauthor">[[Young-Kyung Kim|AUTHOR Young-Kyung Kim]], [[Rimita Lahiri|AUTHOR Rimita Lahiri]], [[Md. Nasir|AUTHOR Md. Nasir]], [[So Hyun Kim|AUTHOR So Hyun Kim]], [[Somer Bishop|AUTHOR Somer Bishop]], [[Catherine Lord|AUTHOR Catherine Lord]], [[Shrikanth S. Narayanan|AUTHOR Shrikanth S. Narayanan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211239.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-1-3|PAPER Thu-M-V-1-3 — Vocalization Recognition of People with Profound Intellectual and Multiple Disabilities (PIMD) Using Machine Learning Algorithms]]</div>|<div class="cpsessionviewpapertitle">Vocalization Recognition of People with Profound Intellectual and Multiple Disabilities (PIMD) Using Machine Learning Algorithms</div><div class="cpsessionviewpaperauthor">[[Waldemar Jęśko|AUTHOR Waldemar Jęśko]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211862.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-1-4|PAPER Thu-M-V-1-4 — Phonetic Complexity, Speech Accuracy and Intelligibility Assessment of Italian Dysarthric Speech]]</div>|<div class="cpsessionviewpapertitle">Phonetic Complexity, Speech Accuracy and Intelligibility Assessment of Italian Dysarthric Speech</div><div class="cpsessionviewpaperauthor">[[Barbara Gili Fivela|AUTHOR Barbara Gili Fivela]], [[Vincenzo Sallustio|AUTHOR Vincenzo Sallustio]], [[Silvia Pede|AUTHOR Silvia Pede]], [[Danilo Patrocinio|AUTHOR Danilo Patrocinio]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211305.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-1-5|PAPER Thu-M-V-1-5 — Detection of Consonant Errors in Disordered Speech Based on Consonant-Vowel Segment Embedding]]</div>|<div class="cpsessionviewpapertitle">Detection of Consonant Errors in Disordered Speech Based on Consonant-Vowel Segment Embedding</div><div class="cpsessionviewpaperauthor">[[Si-Ioi Ng|AUTHOR Si-Ioi Ng]], [[Cymie Wing-Yee Ng|AUTHOR Cymie Wing-Yee Ng]], [[Jingyu Li|AUTHOR Jingyu Li]], [[Tan Lee|AUTHOR Tan Lee]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210069.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-1-6|PAPER Thu-M-V-1-6 — Assessing Posterior-Based Mispronunciation Detection on Field-Collected Recordings from Child Speech Therapy Sessions]]</div>|<div class="cpsessionviewpapertitle">Assessing Posterior-Based Mispronunciation Detection on Field-Collected Recordings from Child Speech Therapy Sessions</div><div class="cpsessionviewpaperauthor">[[Adam Hair|AUTHOR Adam Hair]], [[Guanlong Zhao|AUTHOR Guanlong Zhao]], [[Beena Ahmed|AUTHOR Beena Ahmed]], [[Kirrie J. Ballard|AUTHOR Kirrie J. Ballard]], [[Ricardo Gutierrez-Osuna|AUTHOR Ricardo Gutierrez-Osuna]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210915.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-1-7|PAPER Thu-M-V-1-7 — Identifying Cognitive Impairment Using Sentence Representation Vectors]]</div>|<div class="cpsessionviewpapertitle">Identifying Cognitive Impairment Using Sentence Representation Vectors</div><div class="cpsessionviewpaperauthor">[[Bahman Mirheidari|AUTHOR Bahman Mirheidari]], [[Yilin Pan|AUTHOR Yilin Pan]], [[Daniel Blackburn|AUTHOR Daniel Blackburn]], [[Ronan O’Malley|AUTHOR Ronan O’Malley]], [[Heidi Christensen|AUTHOR Heidi Christensen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211297.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-1-8|PAPER Thu-M-V-1-8 — Parental Spoken Scaffolding and Narrative Skills in Crowd-Sourced Storytelling Samples of Young Children]]</div>|<div class="cpsessionviewpapertitle">Parental Spoken Scaffolding and Narrative Skills in Crowd-Sourced Storytelling Samples of Young Children</div><div class="cpsessionviewpaperauthor">[[Zhengjun Yue|AUTHOR Zhengjun Yue]], [[Jon Barker|AUTHOR Jon Barker]], [[Heidi Christensen|AUTHOR Heidi Christensen]], [[Cristina McKean|AUTHOR Cristina McKean]], [[Elaine Ashton|AUTHOR Elaine Ashton]], [[Yvonne Wren|AUTHOR Yvonne Wren]], [[Swapnil Gadgil|AUTHOR Swapnil Gadgil]], [[Rebecca Bright|AUTHOR Rebecca Bright]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211320.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-1-9|PAPER Thu-M-V-1-9 — Uncertainty-Aware COVID-19 Detection from Imbalanced Sound Data]]</div>|<div class="cpsessionviewpapertitle">Uncertainty-Aware COVID-19 Detection from Imbalanced Sound Data</div><div class="cpsessionviewpaperauthor">[[Tong Xia|AUTHOR Tong Xia]], [[Jing Han|AUTHOR Jing Han]], [[Lorena Qendro|AUTHOR Lorena Qendro]], [[Ting Dang|AUTHOR Ting Dang]], [[Cecilia Mascolo|AUTHOR Cecilia Mascolo]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212139.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-1-10|PAPER Thu-M-V-1-10 — Unsupervised Domain Adaptation for Dysarthric Speech Detection via Domain Adversarial Training and Mutual Information Minimization]]</div>|<div class="cpsessionviewpapertitle">Unsupervised Domain Adaptation for Dysarthric Speech Detection via Domain Adversarial Training and Mutual Information Minimization</div><div class="cpsessionviewpaperauthor">[[Disong Wang|AUTHOR Disong Wang]], [[Liqun Deng|AUTHOR Liqun Deng]], [[Yu Ting Yeung|AUTHOR Yu Ting Yeung]], [[Xiao Chen|AUTHOR Xiao Chen]], [[Xunying Liu|AUTHOR Xunying Liu]], [[Helen Meng|AUTHOR Helen Meng]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212008.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-1-11|PAPER Thu-M-V-1-11 — Source and Vocal Tract Cues for Speech-Based Classification of Patients with Parkinson’s Disease and Healthy Subjects]]</div>|<div class="cpsessionviewpapertitle">Source and Vocal Tract Cues for Speech-Based Classification of Patients with Parkinson’s Disease and Healthy Subjects</div><div class="cpsessionviewpaperauthor">[[Tanuka Bhattacharjee|AUTHOR Tanuka Bhattacharjee]], [[Jhansi Mallela|AUTHOR Jhansi Mallela]], [[Yamini Belur|AUTHOR Yamini Belur]], [[Nalini Atchayaram|AUTHOR Nalini Atchayaram]], [[Ravi Yadav|AUTHOR Ravi Yadav]], [[Pradeep Reddy|AUTHOR Pradeep Reddy]], [[Dipanjan Gope|AUTHOR Dipanjan Gope]], [[Prasanta Kumar Ghosh|AUTHOR Prasanta Kumar Ghosh]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211810.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-1-12|PAPER Thu-M-V-1-12 — CLAC: A Speech Corpus of Healthy English Speakers]]</div>|<div class="cpsessionviewpapertitle">CLAC: A Speech Corpus of Healthy English Speakers</div><div class="cpsessionviewpaperauthor">[[R’mani Haulcy|AUTHOR R’mani Haulcy]], [[James Glass|AUTHOR James Glass]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Thursday 2 September 2021, (Virtual)|<|
|^Chairs: |^Jonas Beskow|
|^ |^Helen Meng|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210049.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-2-1|PAPER Thu-M-V-2-1 — Direct Multimodal Few-Shot Learning of Speech and Images]]</div>|<div class="cpsessionviewpapertitle">Direct Multimodal Few-Shot Learning of Speech and Images</div><div class="cpsessionviewpaperauthor">[[Leanne Nortje|AUTHOR Leanne Nortje]], [[Herman Kamper|AUTHOR Herman Kamper]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210096.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-2-2|PAPER Thu-M-V-2-2 — Talk, Don’t Write: A Study of Direct Speech-Based Image Retrieval]]</div>|<div class="cpsessionviewpapertitle">Talk, Don’t Write: A Study of Direct Speech-Based Image Retrieval</div><div class="cpsessionviewpaperauthor">[[Ramon Sanabria|AUTHOR Ramon Sanabria]], [[Austin Waters|AUTHOR Austin Waters]], [[Jason Baldridge|AUTHOR Jason Baldridge]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210287.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-2-3|PAPER Thu-M-V-2-3 — A Fast Discrete Two-Step Learning Hashing for Scalable Cross-Modal Retrieval]]</div>|<div class="cpsessionviewpapertitle">A Fast Discrete Two-Step Learning Hashing for Scalable Cross-Modal Retrieval</div><div class="cpsessionviewpaperauthor">[[Huan Zhao|AUTHOR Huan Zhao]], [[Kaili Ma|AUTHOR Kaili Ma]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210432.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-2-4|PAPER Thu-M-V-2-4 — Cross-Modal Knowledge Distillation Method for Automatic Cued Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Cross-Modal Knowledge Distillation Method for Automatic Cued Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Jianrong Wang|AUTHOR Jianrong Wang]], [[Ziyue Tang|AUTHOR Ziyue Tang]], [[Xuewei Li|AUTHOR Xuewei Li]], [[Mei Yu|AUTHOR Mei Yu]], [[Qiang Fang|AUTHOR Qiang Fang]], [[Li Liu|AUTHOR Li Liu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210435.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-2-5|PAPER Thu-M-V-2-5 — Attention-Based Keyword Localisation in Speech Using Visual Grounding]]</div>|<div class="cpsessionviewpapertitle">Attention-Based Keyword Localisation in Speech Using Visual Grounding</div><div class="cpsessionviewpaperauthor">[[Kayode Olaleye|AUTHOR Kayode Olaleye]], [[Herman Kamper|AUTHOR Herman Kamper]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210496.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-2-6|PAPER Thu-M-V-2-6 — Evaluation of Audio-Visual Alignments in Visually Grounded Speech Models]]</div>|<div class="cpsessionviewpapertitle">Evaluation of Audio-Visual Alignments in Visually Grounded Speech Models</div><div class="cpsessionviewpaperauthor">[[Khazar Khorrami|AUTHOR Khazar Khorrami]], [[Okko Räsänen|AUTHOR Okko Räsänen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210723.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-2-7|PAPER Thu-M-V-2-7 — Automatic Lip-Reading with Hierarchical Pyramidal Convolution and Self-Attention for Image Sequences with No Word Boundaries]]</div>|<div class="cpsessionviewpapertitle">Automatic Lip-Reading with Hierarchical Pyramidal Convolution and Self-Attention for Image Sequences with No Word Boundaries</div><div class="cpsessionviewpaperauthor">[[Hang Chen|AUTHOR Hang Chen]], [[Jun Du|AUTHOR Jun Du]], [[Yu Hu|AUTHOR Yu Hu]], [[Li-Rong Dai|AUTHOR Li-Rong Dai]], [[Bao-Cai Yin|AUTHOR Bao-Cai Yin]], [[Chin-Hui Lee|AUTHOR Chin-Hui Lee]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211352.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-2-8|PAPER Thu-M-V-2-8 — Cascaded Multilingual Audio-Visual Learning from Videos]]</div>|<div class="cpsessionviewpapertitle">Cascaded Multilingual Audio-Visual Learning from Videos</div><div class="cpsessionviewpaperauthor">[[Andrew Rouditchenko|AUTHOR Andrew Rouditchenko]], [[Angie Boggust|AUTHOR Angie Boggust]], [[David Harwath|AUTHOR David Harwath]], [[Samuel Thomas|AUTHOR Samuel Thomas]], [[Hilde Kuehne|AUTHOR Hilde Kuehne]], [[Brian Chen|AUTHOR Brian Chen]], [[Rameswar Panda|AUTHOR Rameswar Panda]], [[Rogerio Feris|AUTHOR Rogerio Feris]], [[Brian Kingsbury|AUTHOR Brian Kingsbury]], [[Michael Picheny|AUTHOR Michael Picheny]], [[James Glass|AUTHOR James Glass]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211360.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-2-9|PAPER Thu-M-V-2-9 — LiRA: Learning Visual Speech Representations from Audio Through Self-Supervision]]</div>|<div class="cpsessionviewpapertitle">LiRA: Learning Visual Speech Representations from Audio Through Self-Supervision</div><div class="cpsessionviewpaperauthor">[[Pingchuan Ma|AUTHOR Pingchuan Ma]], [[Rodrigo Mira|AUTHOR Rodrigo Mira]], [[Stavros Petridis|AUTHOR Stavros Petridis]], [[Björn W. Schuller|AUTHOR Björn W. Schuller]], [[Maja Pantic|AUTHOR Maja Pantic]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211621.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-2-10|PAPER Thu-M-V-2-10 — End-to-End Audio-Visual Speech Recognition for Overlapping Speech]]</div>|<div class="cpsessionviewpapertitle">End-to-End Audio-Visual Speech Recognition for Overlapping Speech</div><div class="cpsessionviewpaperauthor">[[Richard Rose|AUTHOR Richard Rose]], [[Olivier Siohan|AUTHOR Olivier Siohan]], [[Anshuman Tripathi|AUTHOR Anshuman Tripathi]], [[Otavio Braga|AUTHOR Otavio Braga]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212128.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-2-11|PAPER Thu-M-V-2-11 — Audio-Visual Multi-Talker Speech Recognition in a Cocktail Party]]</div>|<div class="cpsessionviewpapertitle">Audio-Visual Multi-Talker Speech Recognition in a Cocktail Party</div><div class="cpsessionviewpaperauthor">[[Yifei Wu|AUTHOR Yifei Wu]], [[Chenda Li|AUTHOR Chenda Li]], [[Song Yang|AUTHOR Song Yang]], [[Zhongqin Wu|AUTHOR Zhongqin Wu]], [[Yanmin Qian|AUTHOR Yanmin Qian]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Thursday 2 September 2021, (Virtual)|<|
|^Chairs: |^Keisuke Kinoshita|
|^ |^Min-Jae Hwang|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210142.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-3-1|PAPER Thu-M-V-3-1 — Ultra Fast Speech Separation Model with Teacher Student Learning]]</div>|<div class="cpsessionviewpapertitle">Ultra Fast Speech Separation Model with Teacher Student Learning</div><div class="cpsessionviewpaperauthor">[[Sanyuan Chen|AUTHOR Sanyuan Chen]], [[Yu Wu|AUTHOR Yu Wu]], [[Zhuo Chen|AUTHOR Zhuo Chen]], [[Jian Wu|AUTHOR Jian Wu]], [[Takuya Yoshioka|AUTHOR Takuya Yoshioka]], [[Shujie Liu|AUTHOR Shujie Liu]], [[Jinyu Li|AUTHOR Jinyu Li]], [[Xiangzhan Yu|AUTHOR Xiangzhan Yu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210164.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-3-2|PAPER Thu-M-V-3-2 — Group Delay Based Re-Weighted Sparse Recovery Algorithms for Robust and High-Resolution Source Separation in DOA Framework]]</div>|<div class="cpsessionviewpapertitle">Group Delay Based Re-Weighted Sparse Recovery Algorithms for Robust and High-Resolution Source Separation in DOA Framework</div><div class="cpsessionviewpaperauthor">[[Murtiza Ali|AUTHOR Murtiza Ali]], [[Ashwani Koul|AUTHOR Ashwani Koul]], [[Karan Nathwani|AUTHOR Karan Nathwani]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210338.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-3-3|PAPER Thu-M-V-3-3 — Continuous Speech Separation Using Speaker Inventory for Long Recording]]</div>|<div class="cpsessionviewpapertitle">Continuous Speech Separation Using Speaker Inventory for Long Recording</div><div class="cpsessionviewpaperauthor">[[Cong Han|AUTHOR Cong Han]], [[Yi Luo|AUTHOR Yi Luo]], [[Chenda Li|AUTHOR Chenda Li]], [[Tianyan Zhou|AUTHOR Tianyan Zhou]], [[Keisuke Kinoshita|AUTHOR Keisuke Kinoshita]], [[Shinji Watanabe|AUTHOR Shinji Watanabe]], [[Marc Delcroix|AUTHOR Marc Delcroix]], [[Hakan Erdogan|AUTHOR Hakan Erdogan]], [[John R. Hershey|AUTHOR John R. Hershey]], [[Nima Mesgarani|AUTHOR Nima Mesgarani]], [[Zhuo Chen|AUTHOR Zhuo Chen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210433.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-3-4|PAPER Thu-M-V-3-4 — Crossfire Conditional Generative Adversarial Networks for Singing Voice Extraction]]</div>|<div class="cpsessionviewpapertitle">Crossfire Conditional Generative Adversarial Networks for Singing Voice Extraction</div><div class="cpsessionviewpaperauthor">[[Weitao Yuan|AUTHOR Weitao Yuan]], [[Shengbei Wang|AUTHOR Shengbei Wang]], [[Xiangrui Li|AUTHOR Xiangrui Li]], [[Masashi Unoki|AUTHOR Masashi Unoki]], [[Wenwu Wang|AUTHOR Wenwu Wang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210504.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-3-5|PAPER Thu-M-V-3-5 — End-to-End Speech Separation Using Orthogonal Representation in Complex and Real Time-Frequency Domain]]</div>|<div class="cpsessionviewpapertitle">End-to-End Speech Separation Using Orthogonal Representation in Complex and Real Time-Frequency Domain</div><div class="cpsessionviewpaperauthor">[[Kai Wang|AUTHOR Kai Wang]], [[Hao Huang|AUTHOR Hao Huang]], [[Ying Hu|AUTHOR Ying Hu]], [[Zhihua Huang|AUTHOR Zhihua Huang]], [[Sheng Li|AUTHOR Sheng Li]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210523.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-3-6|PAPER Thu-M-V-3-6 — Efficient and Stable Adversarial Learning Using Unpaired Data for Unsupervised Multichannel Speech Separation]]</div>|<div class="cpsessionviewpapertitle">Efficient and Stable Adversarial Learning Using Unpaired Data for Unsupervised Multichannel Speech Separation</div><div class="cpsessionviewpaperauthor">[[Yu Nakagome|AUTHOR Yu Nakagome]], [[Masahito Togami|AUTHOR Masahito Togami]], [[Tetsuji Ogawa|AUTHOR Tetsuji Ogawa]], [[Tetsunori Kobayashi|AUTHOR Tetsunori Kobayashi]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210763.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-3-7|PAPER Thu-M-V-3-7 — Stabilizing Label Assignment for Speech Separation by Self-Supervised Pre-Training]]</div>|<div class="cpsessionviewpapertitle">Stabilizing Label Assignment for Speech Separation by Self-Supervised Pre-Training</div><div class="cpsessionviewpaperauthor">[[Sung-Feng Huang|AUTHOR Sung-Feng Huang]], [[Shun-Po Chuang|AUTHOR Shun-Po Chuang]], [[Da-Rong Liu|AUTHOR Da-Rong Liu]], [[Yi-Chen Chen|AUTHOR Yi-Chen Chen]], [[Gene-Ping Yang|AUTHOR Gene-Ping Yang]], [[Hung-yi Lee|AUTHOR Hung-yi Lee]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210858.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-3-8|PAPER Thu-M-V-3-8 — Dual-Path Filter Network: Speaker-Aware Modeling for Speech Separation]]</div>|<div class="cpsessionviewpapertitle">Dual-Path Filter Network: Speaker-Aware Modeling for Speech Separation</div><div class="cpsessionviewpaperauthor">[[Fan-Lin Wang|AUTHOR Fan-Lin Wang]], [[Yu-Huai Peng|AUTHOR Yu-Huai Peng]], [[Hung-Shin Lee|AUTHOR Hung-Shin Lee]], [[Hsin-Min Wang|AUTHOR Hsin-Min Wang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210921.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-3-9|PAPER Thu-M-V-3-9 — Investigation of Practical Aspects of Single Channel Speech Separation for ASR]]</div>|<div class="cpsessionviewpapertitle">Investigation of Practical Aspects of Single Channel Speech Separation for ASR</div><div class="cpsessionviewpaperauthor">[[Jian Wu|AUTHOR Jian Wu]], [[Zhuo Chen|AUTHOR Zhuo Chen]], [[Sanyuan Chen|AUTHOR Sanyuan Chen]], [[Yu Wu|AUTHOR Yu Wu]], [[Takuya Yoshioka|AUTHOR Takuya Yoshioka]], [[Naoyuki Kanda|AUTHOR Naoyuki Kanda]], [[Shujie Liu|AUTHOR Shujie Liu]], [[Jinyu Li|AUTHOR Jinyu Li]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211158.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-3-10|PAPER Thu-M-V-3-10 — Implicit Filter-and-Sum Network for End-to-End Multi-Channel Speech Separation]]</div>|<div class="cpsessionviewpapertitle">Implicit Filter-and-Sum Network for End-to-End Multi-Channel Speech Separation</div><div class="cpsessionviewpaperauthor">[[Yi Luo|AUTHOR Yi Luo]], [[Nima Mesgarani|AUTHOR Nima Mesgarani]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210430.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-3-11|PAPER Thu-M-V-3-11 — Generalized Spatio-Temporal RNN Beamformer for Target Speech Separation]]</div>|<div class="cpsessionviewpapertitle">Generalized Spatio-Temporal RNN Beamformer for Target Speech Separation</div><div class="cpsessionviewpaperauthor">[[Yong Xu|AUTHOR Yong Xu]], [[Zhuohuang Zhang|AUTHOR Zhuohuang Zhang]], [[Meng Yu|AUTHOR Meng Yu]], [[Shi-Xiong Zhang|AUTHOR Shi-Xiong Zhang]], [[Dong Yu|AUTHOR Dong Yu]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Thursday 2 September 2021, (Virtual)|<|
|^Chairs: |^Alicia Lozano-Diez|
|^ |^Jose Patino|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211909.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-4-1|PAPER Thu-M-V-4-1 — End-to-End Neural Diarization: From Transformer to Conformer]]</div>|<div class="cpsessionviewpapertitle">End-to-End Neural Diarization: From Transformer to Conformer</div><div class="cpsessionviewpaperauthor">[[Yi Chieh Liu|AUTHOR Yi Chieh Liu]], [[Eunjung Han|AUTHOR Eunjung Han]], [[Chul Lee|AUTHOR Chul Lee]], [[Andreas Stolcke|AUTHOR Andreas Stolcke]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210149.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-4-2|PAPER Thu-M-V-4-2 — Three-Class Overlapped Speech Detection Using a Convolutional Recurrent Neural Network]]</div>|<div class="cpsessionviewpapertitle">Three-Class Overlapped Speech Detection Using a Convolutional Recurrent Neural Network</div><div class="cpsessionviewpaperauthor">[[Jee-weon Jung|AUTHOR Jee-weon Jung]], [[Hee-Soo Heo|AUTHOR Hee-Soo Heo]], [[Youngki Kwon|AUTHOR Youngki Kwon]], [[Joon Son Chung|AUTHOR Joon Son Chung]], [[Bong-Jin Lee|AUTHOR Bong-Jin Lee]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210261.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-4-3|PAPER Thu-M-V-4-3 — Online Speaker Diarization Equipped with Discriminative Modeling and Guided Inference]]</div>|<div class="cpsessionviewpapertitle">Online Speaker Diarization Equipped with Discriminative Modeling and Guided Inference</div><div class="cpsessionviewpaperauthor">[[Xucheng Wan|AUTHOR Xucheng Wan]], [[Kai Liu|AUTHOR Kai Liu]], [[Huan Zhou|AUTHOR Huan Zhou]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210384.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-4-4|PAPER Thu-M-V-4-4 — Semi-Supervised Training with Pseudo-Labeling for End-To-End Neural Diarization]]</div>|<div class="cpsessionviewpapertitle">Semi-Supervised Training with Pseudo-Labeling for End-To-End Neural Diarization</div><div class="cpsessionviewpaperauthor">[[Yuki Takashima|AUTHOR Yuki Takashima]], [[Yusuke Fujita|AUTHOR Yusuke Fujita]], [[Shota Horiguchi|AUTHOR Shota Horiguchi]], [[Shinji Watanabe|AUTHOR Shinji Watanabe]], [[Leibny Paola García Perera|AUTHOR Leibny Paola García Perera]], [[Kenji Nagamatsu|AUTHOR Kenji Nagamatsu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210448.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-4-5|PAPER Thu-M-V-4-5 — Adapting Speaker Embeddings for Speaker Diarisation]]</div>|<div class="cpsessionviewpapertitle">Adapting Speaker Embeddings for Speaker Diarisation</div><div class="cpsessionviewpaperauthor">[[Youngki Kwon|AUTHOR Youngki Kwon]], [[Jee-weon Jung|AUTHOR Jee-weon Jung]], [[Hee-Soo Heo|AUTHOR Hee-Soo Heo]], [[You Jin Kim|AUTHOR You Jin Kim]], [[Bong-Jin Lee|AUTHOR Bong-Jin Lee]], [[Joon Son Chung|AUTHOR Joon Son Chung]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210516.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-4-6|PAPER Thu-M-V-4-6 — Scenario-Dependent Speaker Diarization for DIHARD-III Challenge]]</div>|<div class="cpsessionviewpapertitle">Scenario-Dependent Speaker Diarization for DIHARD-III Challenge</div><div class="cpsessionviewpaperauthor">[[Yu-Xuan Wang|AUTHOR Yu-Xuan Wang]], [[Jun Du|AUTHOR Jun Du]], [[Maokui He|AUTHOR Maokui He]], [[Shu-Tong Niu|AUTHOR Shu-Tong Niu]], [[Lei Sun|AUTHOR Lei Sun]], [[Chin-Hui Lee|AUTHOR Chin-Hui Lee]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210560.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-4-7|PAPER Thu-M-V-4-7 — End-To-End Speaker Segmentation for Overlap-Aware Resegmentation]]</div>|<div class="cpsessionviewpapertitle">End-To-End Speaker Segmentation for Overlap-Aware Resegmentation</div><div class="cpsessionviewpaperauthor">[[Hervé Bredin|AUTHOR Hervé Bredin]], [[Antoine Laurent|AUTHOR Antoine Laurent]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210708.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-4-8|PAPER Thu-M-V-4-8 — Online Streaming End-to-End Neural Diarization Handling Overlapping Speech and Flexible Numbers of Speakers]]</div>|<div class="cpsessionviewpapertitle">Online Streaming End-to-End Neural Diarization Handling Overlapping Speech and Flexible Numbers of Speakers</div><div class="cpsessionviewpaperauthor">[[Yawen Xue|AUTHOR Yawen Xue]], [[Shota Horiguchi|AUTHOR Shota Horiguchi]], [[Yusuke Fujita|AUTHOR Yusuke Fujita]], [[Yuki Takashima|AUTHOR Yuki Takashima]], [[Shinji Watanabe|AUTHOR Shinji Watanabe]], [[Leibny Paola García Perera|AUTHOR Leibny Paola García Perera]], [[Kenji Nagamatsu|AUTHOR Kenji Nagamatsu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210087.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-4-9|PAPER Thu-M-V-4-9 — A Thousand Words are Worth More Than One Recording: //Word-Embedding// Based Speaker Change Detection]]</div>|<div class="cpsessionviewpapertitle">A Thousand Words are Worth More Than One Recording: //Word-Embedding// Based Speaker Change Detection</div><div class="cpsessionviewpaperauthor">[[Or Haim Anidjar|AUTHOR Or Haim Anidjar]], [[Itshak Lapidot|AUTHOR Itshak Lapidot]], [[Chen Hajaj|AUTHOR Chen Hajaj]], [[Amit Dvir|AUTHOR Amit Dvir]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Thursday 2 September 2021, (Virtual)|<|
|^Chairs: |^Branislav Gerazov|
|^ |^Mahsa Elyasi|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210252.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-5-1|PAPER Thu-M-V-5-1 — Phrase Break Prediction with Bidirectional Encoder Representations in Japanese Text-to-Speech Synthesis]]</div>|<div class="cpsessionviewpapertitle">Phrase Break Prediction with Bidirectional Encoder Representations in Japanese Text-to-Speech Synthesis</div><div class="cpsessionviewpaperauthor">[[Kosuke Futamata|AUTHOR Kosuke Futamata]], [[Byeongseon Park|AUTHOR Byeongseon Park]], [[Ryuichi Yamamoto|AUTHOR Ryuichi Yamamoto]], [[Kentaro Tachibana|AUTHOR Kentaro Tachibana]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210562.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-5-2|PAPER Thu-M-V-5-2 — Improving Multi-Speaker TTS Prosody Variance with a Residual Encoder and Normalizing Flows]]</div>|<div class="cpsessionviewpapertitle">Improving Multi-Speaker TTS Prosody Variance with a Residual Encoder and Normalizing Flows</div><div class="cpsessionviewpaperauthor">[[Iván Vallés-Pérez|AUTHOR Iván Vallés-Pérez]], [[Julian Roth|AUTHOR Julian Roth]], [[Grzegorz Beringer|AUTHOR Grzegorz Beringer]], [[Roberto Barra-Chicote|AUTHOR Roberto Barra-Chicote]], [[Jasha Droppo|AUTHOR Jasha Droppo]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210802.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-5-3|PAPER Thu-M-V-5-3 — Rich Prosody Diversity Modelling with Phone-Level Mixture Density Network]]</div>|<div class="cpsessionviewpapertitle">Rich Prosody Diversity Modelling with Phone-Level Mixture Density Network</div><div class="cpsessionviewpaperauthor">[[Chenpeng Du|AUTHOR Chenpeng Du]], [[Kai Yu|AUTHOR Kai Yu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210826.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-5-4|PAPER Thu-M-V-5-4 — Phoneme Duration Modeling Using Speech Rhythm-Based Speaker Embeddings for Multi-Speaker Speech Synthesis]]</div>|<div class="cpsessionviewpapertitle">Phoneme Duration Modeling Using Speech Rhythm-Based Speaker Embeddings for Multi-Speaker Speech Synthesis</div><div class="cpsessionviewpaperauthor">[[Kenichi Fujita|AUTHOR Kenichi Fujita]], [[Atsushi Ando|AUTHOR Atsushi Ando]], [[Yusuke Ijima|AUTHOR Yusuke Ijima]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210883.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-5-5|PAPER Thu-M-V-5-5 — Fine-Grained Prosody Modeling in Neural Speech Synthesis Using ToBI Representation]]</div>|<div class="cpsessionviewpapertitle">Fine-Grained Prosody Modeling in Neural Speech Synthesis Using ToBI Representation</div><div class="cpsessionviewpaperauthor">[[Yuxiang Zou|AUTHOR Yuxiang Zou]], [[Shichao Liu|AUTHOR Shichao Liu]], [[Xiang Yin|AUTHOR Xiang Yin]], [[Haopeng Lin|AUTHOR Haopeng Lin]], [[Chunfeng Wang|AUTHOR Chunfeng Wang]], [[Haoyu Zhang|AUTHOR Haoyu Zhang]], [[Zejun Ma|AUTHOR Zejun Ma]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211012.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-5-6|PAPER Thu-M-V-5-6 — Intra-Sentential Speaking Rate Control in Neural Text-To-Speech for Automatic Dubbing]]</div>|<div class="cpsessionviewpapertitle">Intra-Sentential Speaking Rate Control in Neural Text-To-Speech for Automatic Dubbing</div><div class="cpsessionviewpaperauthor">[[Mayank Sharma|AUTHOR Mayank Sharma]], [[Yogesh Virkar|AUTHOR Yogesh Virkar]], [[Marcello Federico|AUTHOR Marcello Federico]], [[Roberto Barra-Chicote|AUTHOR Roberto Barra-Chicote]], [[Robert Enyedi|AUTHOR Robert Enyedi]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211049.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-5-7|PAPER Thu-M-V-5-7 — Applying the Information Bottleneck Principle to Prosodic Representation Learning]]</div>|<div class="cpsessionviewpapertitle">Applying the Information Bottleneck Principle to Prosodic Representation Learning</div><div class="cpsessionviewpaperauthor">[[Guangyan Zhang|AUTHOR Guangyan Zhang]], [[Ying Qin|AUTHOR Ying Qin]], [[Daxin Tan|AUTHOR Daxin Tan]], [[Tan Lee|AUTHOR Tan Lee]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211123.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-5-8|PAPER Thu-M-V-5-8 — A Prototypical Network Approach for Evaluating Generated Emotional Speech]]</div>|<div class="cpsessionviewpapertitle">A Prototypical Network Approach for Evaluating Generated Emotional Speech</div><div class="cpsessionviewpaperauthor">[[Alice Baird|AUTHOR Alice Baird]], [[Silvan Mertes|AUTHOR Silvan Mertes]], [[Manuel Milling|AUTHOR Manuel Milling]], [[Lukas Stappen|AUTHOR Lukas Stappen]], [[Thomas Wiest|AUTHOR Thomas Wiest]], [[Elisabeth André|AUTHOR Elisabeth André]], [[Björn W. Schuller|AUTHOR Björn W. Schuller]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Thursday 2 September 2021, (Virtual)|<|
|^Chairs: |^Amelia Gully|
|^ |^Rita Patel|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210231.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-6-1|PAPER Thu-M-V-6-1 — A Simplified Model for the Vocal Tract of [s] with Inclined Incisors]]</div>|<div class="cpsessionviewpapertitle">A Simplified Model for the Vocal Tract of [s] with Inclined Incisors</div><div class="cpsessionviewpaperauthor">[[Tsukasa Yoshinaga|AUTHOR Tsukasa Yoshinaga]], [[Kohei Tada|AUTHOR Kohei Tada]], [[Kazunori Nozaki|AUTHOR Kazunori Nozaki]], [[Akiyoshi Iida|AUTHOR Akiyoshi Iida]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210449.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-6-2|PAPER Thu-M-V-6-2 — Vocal-Tract Models to Visualize the Airstream of Human Breath and Droplets While Producing Speech]]</div>|<div class="cpsessionviewpapertitle">Vocal-Tract Models to Visualize the Airstream of Human Breath and Droplets While Producing Speech</div><div class="cpsessionviewpaperauthor">[[Takayuki Arai|AUTHOR Takayuki Arai]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210906.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-6-3|PAPER Thu-M-V-6-3 — Using Transposed Convolution for Articulatory-to-Acoustic Conversion from Real-Time MRI Data]]</div>|<div class="cpsessionviewpapertitle">Using Transposed Convolution for Articulatory-to-Acoustic Conversion from Real-Time MRI Data</div><div class="cpsessionviewpaperauthor">[[Ryo Tanji|AUTHOR Ryo Tanji]], [[Hidefumi Ohmura|AUTHOR Hidefumi Ohmura]], [[Kouichi Katsurada|AUTHOR Kouichi Katsurada]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210929.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-6-4|PAPER Thu-M-V-6-4 — Comparison Between Lumped-Mass Modeling and Flow Simulation of the Reed-Type Artificial Vocal Fold]]</div>|<div class="cpsessionviewpapertitle">Comparison Between Lumped-Mass Modeling and Flow Simulation of the Reed-Type Artificial Vocal Fold</div><div class="cpsessionviewpaperauthor">[[Rafia Inaam|AUTHOR Rafia Inaam]], [[Tsukasa Yoshinaga|AUTHOR Tsukasa Yoshinaga]], [[Takayuki Arai|AUTHOR Takayuki Arai]], [[Hiroshi Yokoyama|AUTHOR Hiroshi Yokoyama]], [[Akiyoshi Iida|AUTHOR Akiyoshi Iida]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211262.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-6-5|PAPER Thu-M-V-6-5 — Inhalations in Speech: Acoustic and Physiological Characteristics]]</div>|<div class="cpsessionviewpapertitle">Inhalations in Speech: Acoustic and Physiological Characteristics</div><div class="cpsessionviewpaperauthor">[[Raphael Werner|AUTHOR Raphael Werner]], [[Susanne Fuchs|AUTHOR Susanne Fuchs]], [[Jürgen Trouvain|AUTHOR Jürgen Trouvain]], [[Bernd Möbius|AUTHOR Bernd Möbius]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211422.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-6-6|PAPER Thu-M-V-6-6 — Model-Based Exploration of Linking Between Vowel Articulatory Space and Acoustic Space]]</div>|<div class="cpsessionviewpapertitle">Model-Based Exploration of Linking Between Vowel Articulatory Space and Acoustic Space</div><div class="cpsessionviewpaperauthor">[[Anqi Xu|AUTHOR Anqi Xu]], [[Daniel van Niekerk|AUTHOR Daniel van Niekerk]], [[Branislav Gerazov|AUTHOR Branislav Gerazov]], [[Paul Konstantin Krug|AUTHOR Paul Konstantin Krug]], [[Santitham Prom-on|AUTHOR Santitham Prom-on]], [[Peter Birkholz|AUTHOR Peter Birkholz]], [[Yi Xu|AUTHOR Yi Xu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211496.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-6-7|PAPER Thu-M-V-6-7 — Take a Breath: Respiratory Sounds Improve Recollection in Synthetic Speech]]</div>|<div class="cpsessionviewpapertitle">Take a Breath: Respiratory Sounds Improve Recollection in Synthetic Speech</div><div class="cpsessionviewpaperauthor">[[Mikey Elmers|AUTHOR Mikey Elmers]], [[Raphael Werner|AUTHOR Raphael Werner]], [[Beeke Muhlack|AUTHOR Beeke Muhlack]], [[Bernd Möbius|AUTHOR Bernd Möbius]], [[Jürgen Trouvain|AUTHOR Jürgen Trouvain]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211746.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-6-8|PAPER Thu-M-V-6-8 — Modeling Sensorimotor Adaptation in Speech Through Alterations to Forward and Inverse Models]]</div>|<div class="cpsessionviewpapertitle">Modeling Sensorimotor Adaptation in Speech Through Alterations to Forward and Inverse Models</div><div class="cpsessionviewpaperauthor">[[Taijing Chen|AUTHOR Taijing Chen]], [[Adam Lammert|AUTHOR Adam Lammert]], [[Benjamin Parrell|AUTHOR Benjamin Parrell]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212073.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-6-9|PAPER Thu-M-V-6-9 — Mixture of Orthogonal Sequences Made from Extended Time-Stretched Pulses Enables Measurement of Involuntary Voice Fundamental Frequency Response to Pitch Perturbation]]</div>|<div class="cpsessionviewpapertitle">Mixture of Orthogonal Sequences Made from Extended Time-Stretched Pulses Enables Measurement of Involuntary Voice Fundamental Frequency Response to Pitch Perturbation</div><div class="cpsessionviewpaperauthor">[[Hideki Kawahara|AUTHOR Hideki Kawahara]], [[Toshie Matsui|AUTHOR Toshie Matsui]], [[Kohei Yatabe|AUTHOR Kohei Yatabe]], [[Ken-Ichi Sakakibara|AUTHOR Ken-Ichi Sakakibara]], [[Minoru Tsuzaki|AUTHOR Minoru Tsuzaki]], [[Masanori Morise|AUTHOR Masanori Morise]], [[Toshio Irino|AUTHOR Toshio Irino]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Thursday 2 September 2021, (Virtual)|<|
|^Chairs: |^Ilya Oparin|
|^ |^Dilek Hakkani-Tür|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210110.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-7-1|PAPER Thu-M-V-7-1 — Contextualized Attention-Based Knowledge Transfer for Spoken Conversational Question Answering]]</div>|<div class="cpsessionviewpapertitle">Contextualized Attention-Based Knowledge Transfer for Spoken Conversational Question Answering</div><div class="cpsessionviewpaperauthor">[[Chenyu You|AUTHOR Chenyu You]], [[Nuo Chen|AUTHOR Nuo Chen]], [[Yuexian Zou|AUTHOR Yuexian Zou]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210229.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-7-2|PAPER Thu-M-V-7-2 — Injecting Descriptive Meta-Information into Pre-Trained Language Models with Hypernetworks]]</div>|<div class="cpsessionviewpapertitle">Injecting Descriptive Meta-Information into Pre-Trained Language Models with Hypernetworks</div><div class="cpsessionviewpaperauthor">[[Wenying Duan|AUTHOR Wenying Duan]], [[Xiaoxi He|AUTHOR Xiaoxi He]], [[Zimu Zhou|AUTHOR Zimu Zhou]], [[Hong Rao|AUTHOR Hong Rao]], [[Lothar Thiele|AUTHOR Lothar Thiele]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210534.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-7-3|PAPER Thu-M-V-7-3 — Causal Confusion Reduction for Robust Multi-Domain Dialogue Policy]]</div>|<div class="cpsessionviewpapertitle">Causal Confusion Reduction for Robust Multi-Domain Dialogue Policy</div><div class="cpsessionviewpaperauthor">[[Mahdin Rohmatillah|AUTHOR Mahdin Rohmatillah]], [[Jen-Tzung Chien|AUTHOR Jen-Tzung Chien]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210874.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-7-4|PAPER Thu-M-V-7-4 — Timing Generating Networks: Neural Network Based Precise Turn-Taking Timing Prediction in Multiparty Conversation]]</div>|<div class="cpsessionviewpapertitle">Timing Generating Networks: Neural Network Based Precise Turn-Taking Timing Prediction in Multiparty Conversation</div><div class="cpsessionviewpaperauthor">[[Shinya Fujie|AUTHOR Shinya Fujie]], [[Hayato Katayama|AUTHOR Hayato Katayama]], [[Jin Sakuma|AUTHOR Jin Sakuma]], [[Tetsunori Kobayashi|AUTHOR Tetsunori Kobayashi]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210994.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-7-5|PAPER Thu-M-V-7-5 — Human-to-Human Conversation Dataset for Learning Fine-Grained Turn-Taking Action]]</div>|<div class="cpsessionviewpapertitle">Human-to-Human Conversation Dataset for Learning Fine-Grained Turn-Taking Action</div><div class="cpsessionviewpaperauthor">[[Kehan Chen|AUTHOR Kehan Chen]], [[Zezhong Li|AUTHOR Zezhong Li]], [[Suyang Dai|AUTHOR Suyang Dai]], [[Wei Zhou|AUTHOR Wei Zhou]], [[Haiqing Chen|AUTHOR Haiqing Chen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211582.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-7-6|PAPER Thu-M-V-7-6 — PhonemeBERT: Joint Language Modelling of Phoneme Sequence and ASR Transcript]]</div>|<div class="cpsessionviewpapertitle">PhonemeBERT: Joint Language Modelling of Phoneme Sequence and ASR Transcript</div><div class="cpsessionviewpaperauthor">[[Mukuntha Narayanan Sundararaman|AUTHOR Mukuntha Narayanan Sundararaman]], [[Ayush Kumar|AUTHOR Ayush Kumar]], [[Jithendra Vepa|AUTHOR Jithendra Vepa]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211689.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-7-7|PAPER Thu-M-V-7-7 — Joint Retrieval-Extraction Training for Evidence-Aware Dialog Response Selection]]</div>|<div class="cpsessionviewpapertitle">Joint Retrieval-Extraction Training for Evidence-Aware Dialog Response Selection</div><div class="cpsessionviewpaperauthor">[[Hongyin Luo|AUTHOR Hongyin Luo]], [[James Glass|AUTHOR James Glass]], [[Garima Lalwani|AUTHOR Garima Lalwani]], [[Yi Zhang|AUTHOR Yi Zhang]], [[Shang-Wen Li|AUTHOR Shang-Wen Li]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211849.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Thu-M-V-7-8|PAPER Thu-M-V-7-8 — Adapting Long Context NLM for ASR Rescoring in Conversational Agents]]</div>|<div class="cpsessionviewpapertitle">Adapting Long Context NLM for ASR Rescoring in Conversational Agents</div><div class="cpsessionviewpaperauthor">[[Ashish Shenoy|AUTHOR Ashish Shenoy]], [[Sravan Bodapati|AUTHOR Sravan Bodapati]], [[Monica Sunkara|AUTHOR Monica Sunkara]], [[Srikanth Ronanki|AUTHOR Srikanth Ronanki]], [[Katrin Kirchhoff|AUTHOR Katrin Kirchhoff]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:00–14:00, Thursday 2 September 2021, Room A+B|<|
|^Chair: |^TBD|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> |^<div class="cpsessionviewpapercode">[[Thu-Survey|PAPER Thu-Survey — Learning Speech Models from Multi-Modal Data]]</div>|<div class="cpsessionviewpapertitle">Learning Speech Models from Multi-Modal Data</div><div class="cpsessionviewpaperauthor">[[Karen Livescu|AUTHOR Karen Livescu]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:30–15:30, Tuesday 31 August 2021, Room A+B|<|
|^Chairs: |^Oldřich Plchot|
|^ |^Themos Stafylakis|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210622.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-O-1-1|PAPER Tue-A-O-1-1 — Leveraging Speaker Attribute Information Using Multi Task Learning for Speaker Verification and Diarization]]</div>|<div class="cpsessionviewpapertitle">Leveraging Speaker Attribute Information Using Multi Task Learning for Speaker Verification and Diarization</div><div class="cpsessionviewpaperauthor">[[Chau Luu|AUTHOR Chau Luu]], [[Peter Bell|AUTHOR Peter Bell]], [[Steve Renals|AUTHOR Steve Renals]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211163.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-O-1-2|PAPER Tue-A-O-1-2 — Spine2Net: SpineNet with Res2Net and Time-Squeeze-and-Excitation Blocks for Speaker Recognition]]</div>|<div class="cpsessionviewpapertitle">Spine2Net: SpineNet with Res2Net and Time-Squeeze-and-Excitation Blocks for Speaker Recognition</div><div class="cpsessionviewpaperauthor">[[Magdalena Rybicka|AUTHOR Magdalena Rybicka]], [[Jesús Villalba|AUTHOR Jesús Villalba]], [[Piotr Żelasko|AUTHOR Piotr Żelasko]], [[Najim Dehak|AUTHOR Najim Dehak]], [[Konrad Kowalczyk|AUTHOR Konrad Kowalczyk]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211442.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-O-1-3|PAPER Tue-A-O-1-3 — Speaker Embeddings by Modeling Channel-Wise Correlations]]</div>|<div class="cpsessionviewpapertitle">Speaker Embeddings by Modeling Channel-Wise Correlations</div><div class="cpsessionviewpaperauthor">[[Themos Stafylakis|AUTHOR Themos Stafylakis]], [[Johan Rohdin|AUTHOR Johan Rohdin]], [[Lukáš Burget|AUTHOR Lukáš Burget]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211769.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-O-1-4|PAPER Tue-A-O-1-4 — Multi-Task Neural Network for Robust Multiple Speaker Embedding Extraction]]</div>|<div class="cpsessionviewpapertitle">Multi-Task Neural Network for Robust Multiple Speaker Embedding Extraction</div><div class="cpsessionviewpaperauthor">[[Weipeng He|AUTHOR Weipeng He]], [[Petr Motlicek|AUTHOR Petr Motlicek]], [[Jean-Marc Odobez|AUTHOR Jean-Marc Odobez]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212016.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-O-1-5|PAPER Tue-A-O-1-5 — ICSpk: Interpretable Complex Speaker Embedding Extractor from Raw Waveform]]</div>|<div class="cpsessionviewpapertitle">ICSpk: Interpretable Complex Speaker Embedding Extractor from Raw Waveform</div><div class="cpsessionviewpaperauthor">[[Junyi Peng|AUTHOR Junyi Peng]], [[Xiaoyang Qu|AUTHOR Xiaoyang Qu]], [[Jianzong Wang|AUTHOR Jianzong Wang]], [[Rongzhi Gu|AUTHOR Rongzhi Gu]], [[Jing Xiao|AUTHOR Jing Xiao]], [[Lukáš Burget|AUTHOR Lukáš Burget]], [[Jan Černocký|AUTHOR Jan Černocký]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:30–15:30, Tuesday 31 August 2021, Room D|<|
|^Chairs: |^Georgia Zellou|
|^ |^Josiane Riverin-Coutlée|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210182.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-O-2-1|PAPER Tue-A-O-2-1 — Prosodic Disambiguation Using Chironomic Stylization of Intonation with Native and Non-Native Speakers]]</div>|<div class="cpsessionviewpapertitle">Prosodic Disambiguation Using Chironomic Stylization of Intonation with Native and Non-Native Speakers</div><div class="cpsessionviewpaperauthor">[[Xiao Xiao|AUTHOR Xiao Xiao]], [[Nicolas Audibert|AUTHOR Nicolas Audibert]], [[Grégoire Locqueville|AUTHOR Grégoire Locqueville]], [[Christophe d’Alessandro|AUTHOR Christophe d’Alessandro]], [[Barbara Kuhnert|AUTHOR Barbara Kuhnert]], [[Claire Pillot-Loiseau|AUTHOR Claire Pillot-Loiseau]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210228.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-O-2-2|PAPER Tue-A-O-2-2 — Variation in Perceptual Sensitivity and Compensation for Coarticulation Across Adult and Child Naturally-Produced and TTS Voices]]</div>|<div class="cpsessionviewpapertitle">Variation in Perceptual Sensitivity and Compensation for Coarticulation Across Adult and Child Naturally-Produced and TTS Voices</div><div class="cpsessionviewpaperauthor">[[Aleese Block|AUTHOR Aleese Block]], [[Michelle Cohn|AUTHOR Michelle Cohn]], [[Georgia Zellou|AUTHOR Georgia Zellou]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210336.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-O-2-3|PAPER Tue-A-O-2-3 — Extracting Different Levels of Speech Information from EEG Using an LSTM-Based Model]]</div>|<div class="cpsessionviewpapertitle">Extracting Different Levels of Speech Information from EEG Using an LSTM-Based Model</div><div class="cpsessionviewpaperauthor">[[Mohammad Jalilpour Monesi|AUTHOR Mohammad Jalilpour Monesi]], [[Bernd Accou|AUTHOR Bernd Accou]], [[Tom Francart|AUTHOR Tom Francart]], [[Hugo Van hamme|AUTHOR Hugo Van hamme]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211394.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-O-2-4|PAPER Tue-A-O-2-4 — Word Competition: An Entropy-Based Approach in the DIANA Model of Human Word Comprehension]]</div>|<div class="cpsessionviewpapertitle">Word Competition: An Entropy-Based Approach in the DIANA Model of Human Word Comprehension</div><div class="cpsessionviewpaperauthor">[[Louis ten Bosch|AUTHOR Louis ten Bosch]], [[Lou Boves|AUTHOR Lou Boves]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211408.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-O-2-5|PAPER Tue-A-O-2-5 — Time-to-Event Models for Analyzing Reaction Time Sequences]]</div>|<div class="cpsessionviewpapertitle">Time-to-Event Models for Analyzing Reaction Time Sequences</div><div class="cpsessionviewpaperauthor">[[Louis ten Bosch|AUTHOR Louis ten Bosch]], [[Lou Boves|AUTHOR Lou Boves]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211700.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-O-2-6|PAPER Tue-A-O-2-6 — Models of Reaction Times in Auditory Lexical Decision: RTonset versus RToffset]]</div>|<div class="cpsessionviewpapertitle">Models of Reaction Times in Auditory Lexical Decision: RTonset versus RToffset</div><div class="cpsessionviewpaperauthor">[[Sophie Brand|AUTHOR Sophie Brand]], [[Kimberley Mulder|AUTHOR Kimberley Mulder]], [[Louis ten Bosch|AUTHOR Louis ten Bosch]], [[Lou Boves|AUTHOR Lou Boves]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:30–15:30, Tuesday 31 August 2021, (Virtual)|<|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218001.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-S&T-1-1|PAPER Tue-A-S&T-1-1 — Application for Detecting Depression, Parkinson’s Disease and Dysphonic Speech]]</div>|<div class="cpsessionviewpapertitle">Application for Detecting Depression, Parkinson’s Disease and Dysphonic Speech</div><div class="cpsessionviewpaperauthor">[[Gábor Kiss|AUTHOR Gábor Kiss]], [[Dávid Sztahó|AUTHOR Dávid Sztahó]], [[Miklós Gábriel Tulics|AUTHOR Miklós Gábriel Tulics]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218002.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-S&T-1-2|PAPER Tue-A-S&T-1-2 — Beey: More Than a Speech-to-Text Editor]]</div>|<div class="cpsessionviewpapertitle">Beey: More Than a Speech-to-Text Editor</div><div class="cpsessionviewpaperauthor">[[Lenka Weingartová|AUTHOR Lenka Weingartová]], [[Veronika Volná|AUTHOR Veronika Volná]], [[Ewa Balejová|AUTHOR Ewa Balejová]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218003.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-S&T-1-3|PAPER Tue-A-S&T-1-3 — Downsizing of Vocal-Tract Models to Line up Variations and Reduce Manufacturing Costs]]</div>|<div class="cpsessionviewpapertitle">Downsizing of Vocal-Tract Models to Line up Variations and Reduce Manufacturing Costs</div><div class="cpsessionviewpaperauthor">[[Takayuki Arai|AUTHOR Takayuki Arai]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218004.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-S&T-1-4|PAPER Tue-A-S&T-1-4 — ROXANNE Research Platform: Automate Criminal Investigations]]</div>|<div class="cpsessionviewpapertitle">ROXANNE Research Platform: Automate Criminal Investigations</div><div class="cpsessionviewpaperauthor">[[Maël Fabien|AUTHOR Maël Fabien]], [[Shantipriya Parida|AUTHOR Shantipriya Parida]], [[Petr Motlicek|AUTHOR Petr Motlicek]], [[Dawei Zhu|AUTHOR Dawei Zhu]], [[Aravind Krishnan|AUTHOR Aravind Krishnan]], [[Hoang H. Nguyen|AUTHOR Hoang H. Nguyen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218005.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-S&T-1-5|PAPER Tue-A-S&T-1-5 — The LIUM Human Active Correction Platform for Speaker Diarization]]</div>|<div class="cpsessionviewpapertitle">The LIUM Human Active Correction Platform for Speaker Diarization</div><div class="cpsessionviewpaperauthor">[[Alexandre Flucha|AUTHOR Alexandre Flucha]], [[Anthony Larcher|AUTHOR Anthony Larcher]], [[Ambuj Mehrish|AUTHOR Ambuj Mehrish]], [[Sylvain Meignier|AUTHOR Sylvain Meignier]], [[Florian Plaut|AUTHOR Florian Plaut]], [[Nicolas Poupon|AUTHOR Nicolas Poupon]], [[Yevhenii Prokopalo|AUTHOR Yevhenii Prokopalo]], [[Adrien Puertolas|AUTHOR Adrien Puertolas]], [[Meysam Shamsi|AUTHOR Meysam Shamsi]], [[Marie Tahon|AUTHOR Marie Tahon]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218006.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-S&T-1-6|PAPER Tue-A-S&T-1-6 — On-Device Streaming Transformer-Based End-to-End Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">On-Device Streaming Transformer-Based End-to-End Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Yoo Rhee Oh|AUTHOR Yoo Rhee Oh]], [[Kiyoung Park|AUTHOR Kiyoung Park]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218007.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-S&T-1-7|PAPER Tue-A-S&T-1-7 — Advanced Semi-Blind Speaker Extraction and Tracking Implemented in Experimental Device with Revolving Dense Microphone Array]]</div>|<div class="cpsessionviewpapertitle">Advanced Semi-Blind Speaker Extraction and Tracking Implemented in Experimental Device with Revolving Dense Microphone Array</div><div class="cpsessionviewpaperauthor">[[J. Čmejla|AUTHOR J. Čmejla]], [[T. Kounovský|AUTHOR T. Kounovský]], [[J. Janský|AUTHOR J. Janský]], [[Jiri Malek|AUTHOR Jiri Malek]], [[M. Rozkovec|AUTHOR M. Rozkovec]], [[Z. Koldovský|AUTHOR Z. Koldovský]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:30–15:30, Tuesday 31 August 2021, (Virtual)|<|
|^Chairs: |^Emmanuel Vincent|
|^ |^Harishchandra Dubey|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210983.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-SS-1-1|PAPER Tue-A-SS-1-1 — Privacy-Preserving Voice Anti-Spoofing Using Secure Multi-Party Computation]]</div>|<div class="cpsessionviewpapertitle">Privacy-Preserving Voice Anti-Spoofing Using Secure Multi-Party Computation</div><div class="cpsessionviewpaperauthor">[[Oubaïda Chouchane|AUTHOR Oubaïda Chouchane]], [[Baptiste Brossier|AUTHOR Baptiste Brossier]], [[Jorge Esteban Gamboa Gamboa|AUTHOR Jorge Esteban Gamboa Gamboa]], [[Thomas Lardy|AUTHOR Thomas Lardy]], [[Hemlata Tak|AUTHOR Hemlata Tak]], [[Orhan Ermis|AUTHOR Orhan Ermis]], [[Madhu R. Kamble|AUTHOR Madhu R. Kamble]], [[Jose Patino|AUTHOR Jose Patino]], [[Nicholas Evans|AUTHOR Nicholas Evans]], [[Melek Önen|AUTHOR Melek Önen]], [[Massimiliano Todisco|AUTHOR Massimiliano Todisco]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211783.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-SS-1-2|PAPER Tue-A-SS-1-2 — Configurable Privacy-Preserving Automatic Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Configurable Privacy-Preserving Automatic Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Ranya Aloufi|AUTHOR Ranya Aloufi]], [[Hamed Haddadi|AUTHOR Hamed Haddadi]], [[David Boyle|AUTHOR David Boyle]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210027.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-SS-1-3|PAPER Tue-A-SS-1-3 — Adjunct-Emeritus Distillation for Semi-Supervised Language Model Adaptation]]</div>|<div class="cpsessionviewpapertitle">Adjunct-Emeritus Distillation for Semi-Supervised Language Model Adaptation</div><div class="cpsessionviewpaperauthor">[[Scott Novotney|AUTHOR Scott Novotney]], [[Yile Gu|AUTHOR Yile Gu]], [[Ivan Bulyko|AUTHOR Ivan Bulyko]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210153.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-SS-1-4|PAPER Tue-A-SS-1-4 — Communication-Efficient Agnostic Federated Averaging]]</div>|<div class="cpsessionviewpapertitle">Communication-Efficient Agnostic Federated Averaging</div><div class="cpsessionviewpaperauthor">[[Jae Ro|AUTHOR Jae Ro]], [[Mingqing Chen|AUTHOR Mingqing Chen]], [[Rajiv Mathews|AUTHOR Rajiv Mathews]], [[Mehryar Mohri|AUTHOR Mehryar Mohri]], [[Ananda Theertha Suresh|AUTHOR Ananda Theertha Suresh]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210262.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-SS-1-5|PAPER Tue-A-SS-1-5 — Privacy-Preserving Feature Extraction for Cloud-Based Wake Word Verification]]</div>|<div class="cpsessionviewpapertitle">Privacy-Preserving Feature Extraction for Cloud-Based Wake Word Verification</div><div class="cpsessionviewpaperauthor">[[Timm Koppelmann|AUTHOR Timm Koppelmann]], [[Alexandru Nelus|AUTHOR Alexandru Nelus]], [[Lea Schönherr|AUTHOR Lea Schönherr]], [[Dorothea Kolossa|AUTHOR Dorothea Kolossa]], [[Rainer Martin|AUTHOR Rainer Martin]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210640.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-SS-1-6|PAPER Tue-A-SS-1-6 — PATE-AAE: Incorporating Adversarial Autoencoder into Private Aggregation of Teacher Ensembles for Spoken Command Classification]]</div>|<div class="cpsessionviewpapertitle">PATE-AAE: Incorporating Adversarial Autoencoder into Private Aggregation of Teacher Ensembles for Spoken Command Classification</div><div class="cpsessionviewpaperauthor">[[Chao-Han Huck Yang|AUTHOR Chao-Han Huck Yang]], [[Sabato Marco Siniscalchi|AUTHOR Sabato Marco Siniscalchi]], [[Chin-Hui Lee|AUTHOR Chin-Hui Lee]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210794.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-SS-1-7|PAPER Tue-A-SS-1-7 — Continual Learning for Fake Audio Detection]]</div>|<div class="cpsessionviewpapertitle">Continual Learning for Fake Audio Detection</div><div class="cpsessionviewpaperauthor">[[Haoxin Ma|AUTHOR Haoxin Ma]], [[Jiangyan Yi|AUTHOR Jiangyan Yi]], [[Jianhua Tao|AUTHOR Jianhua Tao]], [[Ye Bai|AUTHOR Ye Bai]], [[Zhengkun Tian|AUTHOR Zhengkun Tian]], [[Chenglong Wang|AUTHOR Chenglong Wang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211188.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-SS-1-8|PAPER Tue-A-SS-1-8 — Evaluating the Vulnerability of End-to-End Automatic Speech Recognition Models to Membership Inference Attacks]]</div>|<div class="cpsessionviewpapertitle">Evaluating the Vulnerability of End-to-End Automatic Speech Recognition Models to Membership Inference Attacks</div><div class="cpsessionviewpaperauthor">[[Muhammad A. Shah|AUTHOR Muhammad A. Shah]], [[Joseph Szurley|AUTHOR Joseph Szurley]], [[Markus Mueller|AUTHOR Markus Mueller]], [[Athanasios Mouchtaris|AUTHOR Athanasios Mouchtaris]], [[Jasha Droppo|AUTHOR Jasha Droppo]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211882.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-SS-1-9|PAPER Tue-A-SS-1-9 — SynthASR: Unlocking Synthetic Data for Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">SynthASR: Unlocking Synthetic Data for Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Amin Fazel|AUTHOR Amin Fazel]], [[Wei Yang|AUTHOR Wei Yang]], [[Yulan Liu|AUTHOR Yulan Liu]], [[Roberto Barra-Chicote|AUTHOR Roberto Barra-Chicote]], [[Yixiong Meng|AUTHOR Yixiong Meng]], [[Roland Maas|AUTHOR Roland Maas]], [[Jasha Droppo|AUTHOR Jasha Droppo]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:30–15:30, Tuesday 31 August 2021, Room C|<|
|^Chairs: |^Mark Hasegawa-Johnson|
|^ |^Neeraj Sharma|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> ||<div class="cpsessionviewpapertitle">Introduction</div><div class="cpsessionviewpaperauthor"></div>|
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> ||<div class="cpsessionviewpapertitle">Short Presentations of Papers</div><div class="cpsessionviewpaperauthor"></div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210074.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-SS-2-1|PAPER Tue-A-SS-2-1 — DiCOVA Challenge: Dataset, Task, and Baseline System for COVID-19 Diagnosis Using Acoustics]]</div>|<div class="cpsessionviewpapertitle">DiCOVA Challenge: Dataset, Task, and Baseline System for COVID-19 Diagnosis Using Acoustics</div><div class="cpsessionviewpaperauthor">[[Ananya Muguli|AUTHOR Ananya Muguli]], [[Lancelot Pinto|AUTHOR Lancelot Pinto]], [[Nirmala R.|AUTHOR Nirmala R.]], [[Neeraj Sharma|AUTHOR Neeraj Sharma]], [[Prashant Krishnan|AUTHOR Prashant Krishnan]], [[Prasanta Kumar Ghosh|AUTHOR Prasanta Kumar Ghosh]], [[Rohit Kumar|AUTHOR Rohit Kumar]], [[Shrirama Bhat|AUTHOR Shrirama Bhat]], [[Srikanth Raj Chetupalli|AUTHOR Srikanth Raj Chetupalli]], [[Sriram Ganapathy|AUTHOR Sriram Ganapathy]], [[Shreyas Ramoji|AUTHOR Shreyas Ramoji]], [[Viral Nanda|AUTHOR Viral Nanda]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211062.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-SS-2-2|PAPER Tue-A-SS-2-2 — PANACEA Cough Sound-Based Diagnosis of COVID-19 for the DiCOVA 2021 Challenge]]</div>|<div class="cpsessionviewpapertitle">PANACEA Cough Sound-Based Diagnosis of COVID-19 for the DiCOVA 2021 Challenge</div><div class="cpsessionviewpaperauthor">[[Madhu R. Kamble|AUTHOR Madhu R. Kamble]], [[Jose A. Gonzalez-Lopez|AUTHOR Jose A. Gonzalez-Lopez]], [[Teresa Grau|AUTHOR Teresa Grau]], [[Juan M. Espin|AUTHOR Juan M. Espin]], [[Lorenzo Cascioli|AUTHOR Lorenzo Cascioli]], [[Yiqing Huang|AUTHOR Yiqing Huang]], [[Alejandro Gomez-Alanis|AUTHOR Alejandro Gomez-Alanis]], [[Jose Patino|AUTHOR Jose Patino]], [[Roberto Font|AUTHOR Roberto Font]], [[Antonio M. Peinado|AUTHOR Antonio M. Peinado]], [[Angel M. Gomez|AUTHOR Angel M. Gomez]], [[Nicholas Evans|AUTHOR Nicholas Evans]], [[Maria A. Zuluaga|AUTHOR Maria A. Zuluaga]], [[Massimiliano Todisco|AUTHOR Massimiliano Todisco]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211267.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-SS-2-3|PAPER Tue-A-SS-2-3 — Recognising Covid-19 from Coughing Using Ensembles of SVMs and LSTMs with Handcrafted and Deep Audio Features]]</div>|<div class="cpsessionviewpapertitle">Recognising Covid-19 from Coughing Using Ensembles of SVMs and LSTMs with Handcrafted and Deep Audio Features</div><div class="cpsessionviewpaperauthor">[[Vincent Karas|AUTHOR Vincent Karas]], [[Björn W. Schuller|AUTHOR Björn W. Schuller]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212191.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-SS-2-4|PAPER Tue-A-SS-2-4 — Detecting COVID-19 from Audio Recording of Coughs Using Random Forests and Support Vector Machines]]</div>|<div class="cpsessionviewpapertitle">Detecting COVID-19 from Audio Recording of Coughs Using Random Forests and Support Vector Machines</div><div class="cpsessionviewpaperauthor">[[Isabella Södergren|AUTHOR Isabella Södergren]], [[Maryam Pahlavan Nodeh|AUTHOR Maryam Pahlavan Nodeh]], [[Prakash Chandra Chhipa|AUTHOR Prakash Chandra Chhipa]], [[Konstantina Nikolaidou|AUTHOR Konstantina Nikolaidou]], [[György Kovács|AUTHOR György Kovács]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210497.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-SS-2-5|PAPER Tue-A-SS-2-5 — Diagnosis of COVID-19 Using Auditory Acoustic Cues]]</div>|<div class="cpsessionviewpapertitle">Diagnosis of COVID-19 Using Auditory Acoustic Cues</div><div class="cpsessionviewpaperauthor">[[Rohan Kumar Das|AUTHOR Rohan Kumar Das]], [[Maulik Madhavi|AUTHOR Maulik Madhavi]], [[Haizhou Li|AUTHOR Haizhou Li]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210799.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-SS-2-6|PAPER Tue-A-SS-2-6 — Classification of COVID-19 from Cough Using Autoregressive Predictive Coding Pretraining and Spectral Data Augmentation]]</div>|<div class="cpsessionviewpapertitle">Classification of COVID-19 from Cough Using Autoregressive Predictive Coding Pretraining and Spectral Data Augmentation</div><div class="cpsessionviewpaperauthor">[[John Harvill|AUTHOR John Harvill]], [[Yash R. Wani|AUTHOR Yash R. Wani]], [[Mark Hasegawa-Johnson|AUTHOR Mark Hasegawa-Johnson]], [[Narendra Ahuja|AUTHOR Narendra Ahuja]], [[David Beiser|AUTHOR David Beiser]], [[David Chestek|AUTHOR David Chestek]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210811.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-SS-2-7|PAPER Tue-A-SS-2-7 — The DiCOVA 2021 Challenge — An Encoder-Decoder Approach for COVID-19 Recognition from Coughing Audio]]</div>|<div class="cpsessionviewpapertitle">The DiCOVA 2021 Challenge — An Encoder-Decoder Approach for COVID-19 Recognition from Coughing Audio</div><div class="cpsessionviewpaperauthor">[[Gauri Deshpande|AUTHOR Gauri Deshpande]], [[Björn W. Schuller|AUTHOR Björn W. Schuller]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211031.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-SS-2-8|PAPER Tue-A-SS-2-8 — COVID-19 Detection from Spectral Features on the DiCOVA Dataset]]</div>|<div class="cpsessionviewpapertitle">COVID-19 Detection from Spectral Features on the DiCOVA Dataset</div><div class="cpsessionviewpaperauthor">[[Kotra Venkata Sai Ritwik|AUTHOR Kotra Venkata Sai Ritwik]], [[Shareef Babu Kalluri|AUTHOR Shareef Babu Kalluri]], [[Deepu Vijayasenan|AUTHOR Deepu Vijayasenan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211052.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-SS-2-9|PAPER Tue-A-SS-2-9 — Cough-Based COVID-19 Detection with Contextual Attention Convolutional Neural Networks and Gender Information]]</div>|<div class="cpsessionviewpapertitle">Cough-Based COVID-19 Detection with Contextual Attention Convolutional Neural Networks and Gender Information</div><div class="cpsessionviewpaperauthor">[[Adria Mallol-Ragolta|AUTHOR Adria Mallol-Ragolta]], [[Helena Cuesta|AUTHOR Helena Cuesta]], [[Emilia Gómez|AUTHOR Emilia Gómez]], [[Björn W. Schuller|AUTHOR Björn W. Schuller]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211249.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-SS-2-10|PAPER Tue-A-SS-2-10 — Contrastive Learning of Cough Descriptors for Automatic COVID-19 Preliminary Diagnosis]]</div>|<div class="cpsessionviewpapertitle">Contrastive Learning of Cough Descriptors for Automatic COVID-19 Preliminary Diagnosis</div><div class="cpsessionviewpaperauthor">[[Swapnil Bhosale|AUTHOR Swapnil Bhosale]], [[Upasana Tiwari|AUTHOR Upasana Tiwari]], [[Rupayan Chakraborty|AUTHOR Rupayan Chakraborty]], [[Sunil Kumar Kopparapu|AUTHOR Sunil Kumar Kopparapu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212197.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-SS-2-11|PAPER Tue-A-SS-2-11 — Investigating Feature Selection and Explainability for COVID-19 Diagnostics from Cough Sounds]]</div>|<div class="cpsessionviewpapertitle">Investigating Feature Selection and Explainability for COVID-19 Diagnostics from Cough Sounds</div><div class="cpsessionviewpaperauthor">[[Flavio Avila|AUTHOR Flavio Avila]], [[Amir H. Poorjam|AUTHOR Amir H. Poorjam]], [[Deepak Mittal|AUTHOR Deepak Mittal]], [[Charles Dognin|AUTHOR Charles Dognin]], [[Ananya Muguli|AUTHOR Ananya Muguli]], [[Rohit Kumar|AUTHOR Rohit Kumar]], [[Srikanth Raj Chetupalli|AUTHOR Srikanth Raj Chetupalli]], [[Sriram Ganapathy|AUTHOR Sriram Ganapathy]], [[Maneesh Singh|AUTHOR Maneesh Singh]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:30–15:30, Tuesday 31 August 2021, (Virtual)|<|
|^Chairs: |^Thomas Pellegrini|
|^ |^Yaniv Zigel|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210103.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-1-1|PAPER Tue-A-V-1-1 — SpecMix : A Mixed Sample Data Augmentation Method for Training with Time-Frequency Domain Features]]</div>|<div class="cpsessionviewpapertitle">SpecMix : A Mixed Sample Data Augmentation Method for Training with Time-Frequency Domain Features</div><div class="cpsessionviewpaperauthor">[[Gwantae Kim|AUTHOR Gwantae Kim]], [[David K. Han|AUTHOR David K. Han]], [[Hanseok Ko|AUTHOR Hanseok Ko]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210140.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-1-2|PAPER Tue-A-V-1-2 — SpecAugment++: A Hidden Space Data Augmentation Method for Acoustic Scene Classification]]</div>|<div class="cpsessionviewpapertitle">SpecAugment++: A Hidden Space Data Augmentation Method for Acoustic Scene Classification</div><div class="cpsessionviewpaperauthor">[[Helin Wang|AUTHOR Helin Wang]], [[Yuexian Zou|AUTHOR Yuexian Zou]], [[Wenwu Wang|AUTHOR Wenwu Wang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210281.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-1-3|PAPER Tue-A-V-1-3 — An Effective Mutual Mean Teaching Based Domain Adaptation Method for Sound Event Detection]]</div>|<div class="cpsessionviewpapertitle">An Effective Mutual Mean Teaching Based Domain Adaptation Method for Sound Event Detection</div><div class="cpsessionviewpaperauthor">[[Xu Zheng|AUTHOR Xu Zheng]], [[Yan Song|AUTHOR Yan Song]], [[Li-Rong Dai|AUTHOR Li-Rong Dai]], [[Ian McLoughlin|AUTHOR Ian McLoughlin]], [[Lin Liu|AUTHOR Lin Liu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210656.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-1-5|PAPER Tue-A-V-1-5 — Acoustic Scene Classification Using Kervolution-Based SubSpectralNet]]</div>|<div class="cpsessionviewpapertitle">Acoustic Scene Classification Using Kervolution-Based SubSpectralNet</div><div class="cpsessionviewpaperauthor">[[Ritika Nandi|AUTHOR Ritika Nandi]], [[Shashank Shekhar|AUTHOR Shashank Shekhar]], [[Manjunath Mulimani|AUTHOR Manjunath Mulimani]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210684.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-1-6|PAPER Tue-A-V-1-6 — Event Specific Attention for Polyphonic Sound Event Detection]]</div>|<div class="cpsessionviewpapertitle">Event Specific Attention for Polyphonic Sound Event Detection</div><div class="cpsessionviewpaperauthor">[[Harshavardhan Sundar|AUTHOR Harshavardhan Sundar]], [[Ming Sun|AUTHOR Ming Sun]], [[Chao Wang|AUTHOR Chao Wang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210698.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-1-7|PAPER Tue-A-V-1-7 — AST: Audio Spectrogram Transformer]]</div>|<div class="cpsessionviewpapertitle">AST: Audio Spectrogram Transformer</div><div class="cpsessionviewpaperauthor">[[Yuan Gong|AUTHOR Yuan Gong]], [[Yu-An Chung|AUTHOR Yu-An Chung]], [[James Glass|AUTHOR James Glass]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211308.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-1-8|PAPER Tue-A-V-1-8 — Shallow Convolution-Augmented Transformer with Differentiable Neural Computer for Low-Complexity Classification of Variable-Length Acoustic Scene]]</div>|<div class="cpsessionviewpapertitle">Shallow Convolution-Augmented Transformer with Differentiable Neural Computer for Low-Complexity Classification of Variable-Length Acoustic Scene</div><div class="cpsessionviewpaperauthor">[[Soonshin Seo|AUTHOR Soonshin Seo]], [[Donghyun Lee|AUTHOR Donghyun Lee]], [[Ji-Hwan Kim|AUTHOR Ji-Hwan Kim]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211837.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-1-9|PAPER Tue-A-V-1-9 — An Evaluation of Data Augmentation Methods for Sound Scene Geotagging]]</div>|<div class="cpsessionviewpapertitle">An Evaluation of Data Augmentation Methods for Sound Scene Geotagging</div><div class="cpsessionviewpaperauthor">[[Helen L. Bear|AUTHOR Helen L. Bear]], [[Veronica Morfi|AUTHOR Veronica Morfi]], [[Emmanouil Benetos|AUTHOR Emmanouil Benetos]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211975.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-1-10|PAPER Tue-A-V-1-10 — Optimizing Latency for Online Video Captioning Using Audio-Visual Transformers]]</div>|<div class="cpsessionviewpapertitle">Optimizing Latency for Online Video Captioning Using Audio-Visual Transformers</div><div class="cpsessionviewpaperauthor">[[Chiori Hori|AUTHOR Chiori Hori]], [[Takaaki Hori|AUTHOR Takaaki Hori]], [[Jonathan Le Roux|AUTHOR Jonathan Le Roux]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212028.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-1-11|PAPER Tue-A-V-1-11 — Variational Information Bottleneck for Effective Low-Resource Audio Classification]]</div>|<div class="cpsessionviewpapertitle">Variational Information Bottleneck for Effective Low-Resource Audio Classification</div><div class="cpsessionviewpaperauthor">[[Shijing Si|AUTHOR Shijing Si]], [[Jianzong Wang|AUTHOR Jianzong Wang]], [[Huiming Sun|AUTHOR Huiming Sun]], [[Jianhan Wu|AUTHOR Jianhan Wu]], [[Chuanyao Zhang|AUTHOR Chuanyao Zhang]], [[Xiaoyang Qu|AUTHOR Xiaoyang Qu]], [[Ning Cheng|AUTHOR Ning Cheng]], [[Lei Chen|AUTHOR Lei Chen]], [[Jing Xiao|AUTHOR Jing Xiao]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212079.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-1-12|PAPER Tue-A-V-1-12 — Improving Weakly Supervised Sound Event Detection with Self-Supervised Auxiliary Tasks]]</div>|<div class="cpsessionviewpapertitle">Improving Weakly Supervised Sound Event Detection with Self-Supervised Auxiliary Tasks</div><div class="cpsessionviewpaperauthor">[[Soham Deshmukh|AUTHOR Soham Deshmukh]], [[Bhiksha Raj|AUTHOR Bhiksha Raj]], [[Rita Singh|AUTHOR Rita Singh]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212218.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-1-13|PAPER Tue-A-V-1-13 — Acoustic Event Detection with Classifier Chains]]</div>|<div class="cpsessionviewpapertitle">Acoustic Event Detection with Classifier Chains</div><div class="cpsessionviewpaperauthor">[[Tatsuya Komatsu|AUTHOR Tatsuya Komatsu]], [[Shinji Watanabe|AUTHOR Shinji Watanabe]], [[Koichi Miyazaki|AUTHOR Koichi Miyazaki]], [[Tomoki Hayashi|AUTHOR Tomoki Hayashi]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:30–15:30, Tuesday 31 August 2021, (Virtual)|<|
|^Chairs: |^Milos Cernak|
|^ |^Vikramjit Mitra|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210757.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-2-1|PAPER Tue-A-V-2-1 — Segment and Tone Production in Continuous Speech of Hearing and Hearing-Impaired Children]]</div>|<div class="cpsessionviewpapertitle">Segment and Tone Production in Continuous Speech of Hearing and Hearing-Impaired Children</div><div class="cpsessionviewpaperauthor">[[Shu-Chuan Tseng|AUTHOR Shu-Chuan Tseng]], [[Yi-Fen Liu|AUTHOR Yi-Fen Liu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210024.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-2-2|PAPER Tue-A-V-2-2 — Effect of Carrier Bandwidth on Understanding Mandarin Sentences in Simulated Electric-Acoustic Hearing]]</div>|<div class="cpsessionviewpapertitle">Effect of Carrier Bandwidth on Understanding Mandarin Sentences in Simulated Electric-Acoustic Hearing</div><div class="cpsessionviewpaperauthor">[[Feng Wang|AUTHOR Feng Wang]], [[Jing Chen|AUTHOR Jing Chen]], [[Fei Chen|AUTHOR Fei Chen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-2-3|PAPER Tue-A-V-2-3 — A Comparative Study of Different EMG Features for Acoustics-to-EMG Mapping]]</div>|<div class="cpsessionviewpapertitle">A Comparative Study of Different EMG Features for Acoustics-to-EMG Mapping</div><div class="cpsessionviewpaperauthor">[[Manthan Sharma|AUTHOR Manthan Sharma]], [[Navaneetha Gaddam|AUTHOR Navaneetha Gaddam]], [[Tejas Umesh|AUTHOR Tejas Umesh]], [[Aditya Murthy|AUTHOR Aditya Murthy]], [[Prasanta Kumar Ghosh|AUTHOR Prasanta Kumar Ghosh]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211155.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-2-4|PAPER Tue-A-V-2-4 — Image-Based Assessment of Jaw Parameters and Jaw Kinematics for Articulatory Simulation: Preliminary Results]]</div>|<div class="cpsessionviewpapertitle">Image-Based Assessment of Jaw Parameters and Jaw Kinematics for Articulatory Simulation: Preliminary Results</div><div class="cpsessionviewpaperauthor">[[Ajish K. Abraham|AUTHOR Ajish K. Abraham]], [[V. Sivaramakrishnan|AUTHOR V. Sivaramakrishnan]], [[N. Swapna|AUTHOR N. Swapna]], [[N. Manohar|AUTHOR N. Manohar]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210440.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-2-5|PAPER Tue-A-V-2-5 — An Attention Self-Supervised Contrastive Learning Based Three-Stage Model for Hand Shape Feature Representation in Cued Speech]]</div>|<div class="cpsessionviewpapertitle">An Attention Self-Supervised Contrastive Learning Based Three-Stage Model for Hand Shape Feature Representation in Cued Speech</div><div class="cpsessionviewpaperauthor">[[Jianrong Wang|AUTHOR Jianrong Wang]], [[Nan Gu|AUTHOR Nan Gu]], [[Mei Yu|AUTHOR Mei Yu]], [[Xuewei Li|AUTHOR Xuewei Li]], [[Qiang Fang|AUTHOR Qiang Fang]], [[Li Liu|AUTHOR Li Liu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211240.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-2-6|PAPER Tue-A-V-2-6 — Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder]]</div>|<div class="cpsessionviewpapertitle">Remote Smartphone-Based Speech Collection: Acceptance and Barriers in Individuals with Major Depressive Disorder</div><div class="cpsessionviewpaperauthor">[[Judith Dineley|AUTHOR Judith Dineley]], [[Grace Lavelle|AUTHOR Grace Lavelle]], [[Daniel Leightley|AUTHOR Daniel Leightley]], [[Faith Matcham|AUTHOR Faith Matcham]], [[Sara Siddi|AUTHOR Sara Siddi]], [[Maria Teresa Peñarrubia-María|AUTHOR Maria Teresa Peñarrubia-María]], [[Katie M. White|AUTHOR Katie M. White]], [[Alina Ivan|AUTHOR Alina Ivan]], [[Carolin Oetzmann|AUTHOR Carolin Oetzmann]], [[Sara Simblett|AUTHOR Sara Simblett]], [[Erin Dawe-Lane|AUTHOR Erin Dawe-Lane]], [[Stuart Bruce|AUTHOR Stuart Bruce]], [[Daniel Stahl|AUTHOR Daniel Stahl]], [[Yatharth Ranjan|AUTHOR Yatharth Ranjan]], [[Zulqarnain Rashid|AUTHOR Zulqarnain Rashid]], [[Pauline Conde|AUTHOR Pauline Conde]], [[Amos A. Folarin|AUTHOR Amos A. Folarin]], [[Josep Maria Haro|AUTHOR Josep Maria Haro]], [[Til Wykes|AUTHOR Til Wykes]], [[Richard J.B. Dobson|AUTHOR Richard J.B. Dobson]], [[Vaibhav A. Narayan|AUTHOR Vaibhav A. Narayan]], [[Matthew Hotopf|AUTHOR Matthew Hotopf]], [[Björn W. Schuller|AUTHOR Björn W. Schuller]], [[Nicholas Cummins|AUTHOR Nicholas Cummins]], [[The RADAR-CNS Consortium|AUTHOR The RADAR-CNS Consortium]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211749.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-2-7|PAPER Tue-A-V-2-7 — An Automatic, Simple Ultrasound Biofeedback Parameter for Distinguishing Accurate and Misarticulated Rhotic Syllables]]</div>|<div class="cpsessionviewpapertitle">An Automatic, Simple Ultrasound Biofeedback Parameter for Distinguishing Accurate and Misarticulated Rhotic Syllables</div><div class="cpsessionviewpaperauthor">[[Sarah R. Li|AUTHOR Sarah R. Li]], [[Colin T. Annand|AUTHOR Colin T. Annand]], [[Sarah Dugan|AUTHOR Sarah Dugan]], [[Sarah M. Schwab|AUTHOR Sarah M. Schwab]], [[Kathryn J. Eary|AUTHOR Kathryn J. Eary]], [[Michael Swearengen|AUTHOR Michael Swearengen]], [[Sarah Stack|AUTHOR Sarah Stack]], [[Suzanne Boyce|AUTHOR Suzanne Boyce]], [[Michael A. Riley|AUTHOR Michael A. Riley]], [[T. Douglas Mast|AUTHOR T. Douglas Mast]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210023.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-2-8|PAPER Tue-A-V-2-8 — Silent versus Modal Multi-Speaker Speech Recognition from Ultrasound and Video]]</div>|<div class="cpsessionviewpapertitle">Silent versus Modal Multi-Speaker Speech Recognition from Ultrasound and Video</div><div class="cpsessionviewpaperauthor">[[Manuel Sam Ribeiro|AUTHOR Manuel Sam Ribeiro]], [[Aciel Eshky|AUTHOR Aciel Eshky]], [[Korin Richmond|AUTHOR Korin Richmond]], [[Steve Renals|AUTHOR Steve Renals]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211413.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-2-9|PAPER Tue-A-V-2-9 — RaSSpeR: Radar-Based Silent Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">RaSSpeR: Radar-Based Silent Speech Recognition</div><div class="cpsessionviewpaperauthor">[[David Ferreira|AUTHOR David Ferreira]], [[Samuel Silva|AUTHOR Samuel Silva]], [[Francisco Curado|AUTHOR Francisco Curado]], [[António Teixeira|AUTHOR António Teixeira]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211842.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-2-10|PAPER Tue-A-V-2-10 — Investigating Speech Reconstruction for Laryngectomees for Silent Speech Interfaces]]</div>|<div class="cpsessionviewpapertitle">Investigating Speech Reconstruction for Laryngectomees for Silent Speech Interfaces</div><div class="cpsessionviewpaperauthor">[[Beiming Cao|AUTHOR Beiming Cao]], [[Nordine Sebkhi|AUTHOR Nordine Sebkhi]], [[Arpan Bhavsar|AUTHOR Arpan Bhavsar]], [[Omer T. Inan|AUTHOR Omer T. Inan]], [[Robin Samlan|AUTHOR Robin Samlan]], [[Ted Mau|AUTHOR Ted Mau]], [[Jun Wang|AUTHOR Jun Wang]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:30–15:30, Tuesday 31 August 2021, (Virtual)|<|
|^Chairs: |^Masahito Togami|
|^ |^Ina Kodrasi|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210633.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-3-1|PAPER Tue-A-V-3-1 — LACOPE: Latency-Constrained Pitch Estimation for Speech Enhancement]]</div>|<div class="cpsessionviewpapertitle">LACOPE: Latency-Constrained Pitch Estimation for Speech Enhancement</div><div class="cpsessionviewpaperauthor">[[Hendrik Schröter|AUTHOR Hendrik Schröter]], [[Tobias Rosenkranz|AUTHOR Tobias Rosenkranz]], [[Alberto N. Escalante-B.|AUTHOR Alberto N. Escalante-B.]], [[Andreas Maier|AUTHOR Andreas Maier]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210742.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-3-2|PAPER Tue-A-V-3-2 — Alpha-Stable Autoregressive Fast Multichannel Nonnegative Matrix Factorization for Joint Speech Enhancement and Dereverberation]]</div>|<div class="cpsessionviewpapertitle">Alpha-Stable Autoregressive Fast Multichannel Nonnegative Matrix Factorization for Joint Speech Enhancement and Dereverberation</div><div class="cpsessionviewpaperauthor">[[Mathieu Fontaine|AUTHOR Mathieu Fontaine]], [[Kouhei Sekiguchi|AUTHOR Kouhei Sekiguchi]], [[Aditya Arie Nugraha|AUTHOR Aditya Arie Nugraha]], [[Yoshiaki Bando|AUTHOR Yoshiaki Bando]], [[Kazuyoshi Yoshii|AUTHOR Kazuyoshi Yoshii]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210944.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-3-3|PAPER Tue-A-V-3-3 — Microphone Array Generalization for Multichannel Narrowband Deep Speech Enhancement]]</div>|<div class="cpsessionviewpapertitle">Microphone Array Generalization for Multichannel Narrowband Deep Speech Enhancement</div><div class="cpsessionviewpaperauthor">[[Siyuan Zhang|AUTHOR Siyuan Zhang]], [[Xiaofei Li|AUTHOR Xiaofei Li]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211178.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-3-4|PAPER Tue-A-V-3-4 — Multiple Sound Source Localization Based on Interchannel Phase Differences in All Frequencies with Spectral Masks]]</div>|<div class="cpsessionviewpapertitle">Multiple Sound Source Localization Based on Interchannel Phase Differences in All Frequencies with Spectral Masks</div><div class="cpsessionviewpaperauthor">[[Hyungchan Song|AUTHOR Hyungchan Song]], [[Jong Won Shin|AUTHOR Jong Won Shin]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211329.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-3-5|PAPER Tue-A-V-3-5 — Cancellation of Local Competing Speaker with Near-Field Localization for Distributed ad-hoc Sensor Network]]</div>|<div class="cpsessionviewpapertitle">Cancellation of Local Competing Speaker with Near-Field Localization for Distributed ad-hoc Sensor Network</div><div class="cpsessionviewpaperauthor">[[Pablo Pérez Zarazaga|AUTHOR Pablo Pérez Zarazaga]], [[Mariem Bouafif Mansali|AUTHOR Mariem Bouafif Mansali]], [[Tom Bäckström|AUTHOR Tom Bäckström]], [[Zied Lachiri|AUTHOR Zied Lachiri]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211512.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-3-6|PAPER Tue-A-V-3-6 — A Deep Learning Method to Multi-Channel Active Noise Control]]</div>|<div class="cpsessionviewpapertitle">A Deep Learning Method to Multi-Channel Active Noise Control</div><div class="cpsessionviewpaperauthor">[[Hao Zhang|AUTHOR Hao Zhang]], [[DeLiang Wang|AUTHOR DeLiang Wang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211574.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-3-7|PAPER Tue-A-V-3-7 — Clarity-2021 Challenges: Machine Learning Challenges for Advancing Hearing Aid Processing]]</div>|<div class="cpsessionviewpapertitle">Clarity-2021 Challenges: Machine Learning Challenges for Advancing Hearing Aid Processing</div><div class="cpsessionviewpaperauthor">[[Simone Graetzer|AUTHOR Simone Graetzer]], [[Jon Barker|AUTHOR Jon Barker]], [[Trevor J. Cox|AUTHOR Trevor J. Cox]], [[Michael Akeroyd|AUTHOR Michael Akeroyd]], [[John F. Culling|AUTHOR John F. Culling]], [[Graham Naylor|AUTHOR Graham Naylor]], [[Eszter Porter|AUTHOR Eszter Porter]], [[Rhoddy Viveros Muñoz|AUTHOR Rhoddy Viveros Muñoz]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211613.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-3-8|PAPER Tue-A-V-3-8 — Optimising Hearing Aid Fittings for Speech in Noise with a Differentiable Hearing Loss Model]]</div>|<div class="cpsessionviewpapertitle">Optimising Hearing Aid Fittings for Speech in Noise with a Differentiable Hearing Loss Model</div><div class="cpsessionviewpaperauthor">[[Zehai Tu|AUTHOR Zehai Tu]], [[Ning Ma|AUTHOR Ning Ma]], [[Jon Barker|AUTHOR Jon Barker]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211764.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-3-9|PAPER Tue-A-V-3-9 — Explaining Deep Learning Models for Speech Enhancement]]</div>|<div class="cpsessionviewpapertitle">Explaining Deep Learning Models for Speech Enhancement</div><div class="cpsessionviewpaperauthor">[[Sunit Sivasankaran|AUTHOR Sunit Sivasankaran]], [[Emmanuel Vincent|AUTHOR Emmanuel Vincent]], [[Dominique Fohr|AUTHOR Dominique Fohr]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211989.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-3-10|PAPER Tue-A-V-3-10 — Minimum-Norm Differential Beamforming for Linear Array with Directional Microphones]]</div>|<div class="cpsessionviewpapertitle">Minimum-Norm Differential Beamforming for Linear Array with Directional Microphones</div><div class="cpsessionviewpaperauthor">[[Weilong Huang|AUTHOR Weilong Huang]], [[Jinwei Feng|AUTHOR Jinwei Feng]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:30–15:30, Tuesday 31 August 2021, (Virtual)|<|
|^Chairs: |^Tara Sainath|
|^ |^Herman Kamper|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211454.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-4-1|PAPER Tue-A-V-4-1 — Improving Streaming Transformer Based ASR Under a Framework of Self-Supervised Learning]]</div>|<div class="cpsessionviewpapertitle">Improving Streaming Transformer Based ASR Under a Framework of Self-Supervised Learning</div><div class="cpsessionviewpaperauthor">[[Songjun Cao|AUTHOR Songjun Cao]], [[Yueteng Kang|AUTHOR Yueteng Kang]], [[Yanzhe Fu|AUTHOR Yanzhe Fu]], [[Xiaoshuo Xu|AUTHOR Xiaoshuo Xu]], [[Sining Sun|AUTHOR Sining Sun]], [[Yike Zhang|AUTHOR Yike Zhang]], [[Long Ma|AUTHOR Long Ma]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210717.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-4-2|PAPER Tue-A-V-4-2 — wav2vec-C: A Self-Supervised Model for Speech Representation Learning]]</div>|<div class="cpsessionviewpapertitle">wav2vec-C: A Self-Supervised Model for Speech Representation Learning</div><div class="cpsessionviewpaperauthor">[[Samik Sadhu|AUTHOR Samik Sadhu]], [[Di He|AUTHOR Di He]], [[Che-Wei Huang|AUTHOR Che-Wei Huang]], [[Sri Harish Mallidi|AUTHOR Sri Harish Mallidi]], [[Minhua Wu|AUTHOR Minhua Wu]], [[Ariya Rastrow|AUTHOR Ariya Rastrow]], [[Andreas Stolcke|AUTHOR Andreas Stolcke]], [[Jasha Droppo|AUTHOR Jasha Droppo]], [[Roland Maas|AUTHOR Roland Maas]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211777.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-4-3|PAPER Tue-A-V-4-3 — On the Learning Dynamics of Semi-Supervised Training for ASR]]</div>|<div class="cpsessionviewpapertitle">On the Learning Dynamics of Semi-Supervised Training for ASR</div><div class="cpsessionviewpaperauthor">[[Electra Wallington|AUTHOR Electra Wallington]], [[Benji Kershenbaum|AUTHOR Benji Kershenbaum]], [[Ondřej Klejch|AUTHOR Ondřej Klejch]], [[Peter Bell|AUTHOR Peter Bell]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210236.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-4-4|PAPER Tue-A-V-4-4 — Robust wav2vec 2.0: Analyzing Domain Shift in Self-Supervised Pre-Training]]</div>|<div class="cpsessionviewpapertitle">Robust wav2vec 2.0: Analyzing Domain Shift in Self-Supervised Pre-Training</div><div class="cpsessionviewpaperauthor">[[Wei-Ning Hsu|AUTHOR Wei-Ning Hsu]], [[Anuroop Sriram|AUTHOR Anuroop Sriram]], [[Alexei Baevski|AUTHOR Alexei Baevski]], [[Tatiana Likhomanenko|AUTHOR Tatiana Likhomanenko]], [[Qiantong Xu|AUTHOR Qiantong Xu]], [[Vineel Pratap|AUTHOR Vineel Pratap]], [[Jacob Kahn|AUTHOR Jacob Kahn]], [[Ann Lee|AUTHOR Ann Lee]], [[Ronan Collobert|AUTHOR Ronan Collobert]], [[Gabriel Synnaeve|AUTHOR Gabriel Synnaeve]], [[Michael Auli|AUTHOR Michael Auli]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210571.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-4-5|PAPER Tue-A-V-4-5 — Momentum Pseudo-Labeling for Semi-Supervised Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Momentum Pseudo-Labeling for Semi-Supervised Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Yosuke Higuchi|AUTHOR Yosuke Higuchi]], [[Niko Moritz|AUTHOR Niko Moritz]], [[Jonathan Le Roux|AUTHOR Jonathan Le Roux]], [[Takaaki Hori|AUTHOR Takaaki Hori]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210654.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-4-6|PAPER Tue-A-V-4-6 — A Comparison of Supervised and Unsupervised Pre-Training of End-to-End Models]]</div>|<div class="cpsessionviewpapertitle">A Comparison of Supervised and Unsupervised Pre-Training of End-to-End Models</div><div class="cpsessionviewpaperauthor">[[Ananya Misra|AUTHOR Ananya Misra]], [[Dongseong Hwang|AUTHOR Dongseong Hwang]], [[Zhouyuan Huo|AUTHOR Zhouyuan Huo]], [[Shefali Garg|AUTHOR Shefali Garg]], [[Nikhil Siddhartha|AUTHOR Nikhil Siddhartha]], [[Arun Narayanan|AUTHOR Arun Narayanan]], [[Khe Chai Sim|AUTHOR Khe Chai Sim]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210677.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-4-7|PAPER Tue-A-V-4-7 — Semi-Supervision in ASR: Sequential MixMatch and Factorized TTS-Based Augmentation]]</div>|<div class="cpsessionviewpapertitle">Semi-Supervision in ASR: Sequential MixMatch and Factorized TTS-Based Augmentation</div><div class="cpsessionviewpaperauthor">[[Zhehuai Chen|AUTHOR Zhehuai Chen]], [[Andrew Rosenberg|AUTHOR Andrew Rosenberg]], [[Yu Zhang|AUTHOR Yu Zhang]], [[Heiga Zen|AUTHOR Heiga Zen]], [[Mohammadreza Ghodsi|AUTHOR Mohammadreza Ghodsi]], [[Yinghui Huang|AUTHOR Yinghui Huang]], [[Jesse Emond|AUTHOR Jesse Emond]], [[Gary Wang|AUTHOR Gary Wang]], [[Bhuvana Ramabhadran|AUTHOR Bhuvana Ramabhadran]], [[Pedro J. Moreno|AUTHOR Pedro J. Moreno]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210740.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-4-8|PAPER Tue-A-V-4-8 — slimIPL: Language-Model-Free Iterative Pseudo-Labeling]]</div>|<div class="cpsessionviewpapertitle">slimIPL: Language-Model-Free Iterative Pseudo-Labeling</div><div class="cpsessionviewpaperauthor">[[Tatiana Likhomanenko|AUTHOR Tatiana Likhomanenko]], [[Qiantong Xu|AUTHOR Qiantong Xu]], [[Jacob Kahn|AUTHOR Jacob Kahn]], [[Gabriel Synnaeve|AUTHOR Gabriel Synnaeve]], [[Ronan Collobert|AUTHOR Ronan Collobert]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210905.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-4-9|PAPER Tue-A-V-4-9 — Phonetically Motivated Self-Supervised Speech Representation Learning]]</div>|<div class="cpsessionviewpapertitle">Phonetically Motivated Self-Supervised Speech Representation Learning</div><div class="cpsessionviewpaperauthor">[[Xianghu Yue|AUTHOR Xianghu Yue]], [[Haizhou Li|AUTHOR Haizhou Li]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211017.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-4-10|PAPER Tue-A-V-4-10 — Improving RNN-T for Domain Scaling Using Semi-Supervised Training with Neural TTS]]</div>|<div class="cpsessionviewpapertitle">Improving RNN-T for Domain Scaling Using Semi-Supervised Training with Neural TTS</div><div class="cpsessionviewpaperauthor">[[Yan Deng|AUTHOR Yan Deng]], [[Rui Zhao|AUTHOR Rui Zhao]], [[Zhong Meng|AUTHOR Zhong Meng]], [[Xie Chen|AUTHOR Xie Chen]], [[Bing Liu|AUTHOR Bing Liu]], [[Jinyu Li|AUTHOR Jinyu Li]], [[Yifan Gong|AUTHOR Yifan Gong]], [[Lei He|AUTHOR Lei He]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:30–15:30, Tuesday 31 August 2021, (Virtual)|<|
|^Chairs: |^Jan Švec|
|^ |^Roger Moore|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211864.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-5-1|PAPER Tue-A-V-5-1 — Speaker-Conversation Factorial Designs for Diarization Error Analysis]]</div>|<div class="cpsessionviewpapertitle">Speaker-Conversation Factorial Designs for Diarization Error Analysis</div><div class="cpsessionviewpaperauthor">[[Scott Seyfarth|AUTHOR Scott Seyfarth]], [[Sundararajan Srinivasan|AUTHOR Sundararajan Srinivasan]], [[Katrin Kirchhoff|AUTHOR Katrin Kirchhoff]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210098.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-5-2|PAPER Tue-A-V-5-2 — SmallER: Scaling Neural Entity Resolution for Edge Devices]]</div>|<div class="cpsessionviewpapertitle">SmallER: Scaling Neural Entity Resolution for Edge Devices</div><div class="cpsessionviewpaperauthor">[[Ross McGowan|AUTHOR Ross McGowan]], [[Jinru Su|AUTHOR Jinru Su]], [[Vince DiCocco|AUTHOR Vince DiCocco]], [[Thejaswi Muniyappa|AUTHOR Thejaswi Muniyappa]], [[Grant P. Strimel|AUTHOR Grant P. Strimel]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210351.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-5-3|PAPER Tue-A-V-5-3 — Disfluency Detection with Unlabeled Data and Small BERT Models]]</div>|<div class="cpsessionviewpapertitle">Disfluency Detection with Unlabeled Data and Small BERT Models</div><div class="cpsessionviewpaperauthor">[[Johann C. Rocholl|AUTHOR Johann C. Rocholl]], [[Vicky Zayats|AUTHOR Vicky Zayats]], [[Daniel D. Walker|AUTHOR Daniel D. Walker]], [[Noah B. Murad|AUTHOR Noah B. Murad]], [[Aaron Schneider|AUTHOR Aaron Schneider]], [[Daniel J. Liebling|AUTHOR Daniel J. Liebling]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210246.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-5-4|PAPER Tue-A-V-5-4 — Discriminative Self-Training for Punctuation Prediction]]</div>|<div class="cpsessionviewpapertitle">Discriminative Self-Training for Punctuation Prediction</div><div class="cpsessionviewpaperauthor">[[Qian Chen|AUTHOR Qian Chen]], [[Wen Wang|AUTHOR Wen Wang]], [[Mengzhe Chen|AUTHOR Mengzhe Chen]], [[Qinglin Zhang|AUTHOR Qinglin Zhang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211607.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-5-5|PAPER Tue-A-V-5-5 — Zero-Shot Joint Modeling of Multiple Spoken-Text-Style Conversion Tasks Using Switching Tokens]]</div>|<div class="cpsessionviewpapertitle">Zero-Shot Joint Modeling of Multiple Spoken-Text-Style Conversion Tasks Using Switching Tokens</div><div class="cpsessionviewpaperauthor">[[Mana Ihori|AUTHOR Mana Ihori]], [[Naoki Makishima|AUTHOR Naoki Makishima]], [[Tomohiro Tanaka|AUTHOR Tomohiro Tanaka]], [[Akihiko Takashima|AUTHOR Akihiko Takashima]], [[Shota Orihashi|AUTHOR Shota Orihashi]], [[Ryo Masumura|AUTHOR Ryo Masumura]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211005.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-5-6|PAPER Tue-A-V-5-6 — A Noise Robust Method for Word-Level Pronunciation Assessment]]</div>|<div class="cpsessionviewpapertitle">A Noise Robust Method for Word-Level Pronunciation Assessment</div><div class="cpsessionviewpaperauthor">[[Binghuai Lin|AUTHOR Binghuai Lin]], [[Liyuan Wang|AUTHOR Liyuan Wang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211670.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-5-7|PAPER Tue-A-V-5-7 — Targeted Keyword Filtering for Accelerated Spoken Topic Identification]]</div>|<div class="cpsessionviewpapertitle">Targeted Keyword Filtering for Accelerated Spoken Topic Identification</div><div class="cpsessionviewpaperauthor">[[Jonathan Wintrode|AUTHOR Jonathan Wintrode]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211923.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-5-8|PAPER Tue-A-V-5-8 — Multimodal Speech Summarization Through Semantic Concept Learning]]</div>|<div class="cpsessionviewpapertitle">Multimodal Speech Summarization Through Semantic Concept Learning</div><div class="cpsessionviewpaperauthor">[[Shruti Palaskar|AUTHOR Shruti Palaskar]], [[Ruslan Salakhutdinov|AUTHOR Ruslan Salakhutdinov]], [[Alan W. Black|AUTHOR Alan W. Black]], [[Florian Metze|AUTHOR Florian Metze]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211270.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-5-9|PAPER Tue-A-V-5-9 — Enhancing Semantic Understanding with Self-Supervised Methods for Abstractive Dialogue Summarization]]</div>|<div class="cpsessionviewpapertitle">Enhancing Semantic Understanding with Self-Supervised Methods for Abstractive Dialogue Summarization</div><div class="cpsessionviewpaperauthor">[[Hyunjae Lee|AUTHOR Hyunjae Lee]], [[Jaewoong Yun|AUTHOR Jaewoong Yun]], [[Hyunjin Choi|AUTHOR Hyunjin Choi]], [[Seongho Joe|AUTHOR Seongho Joe]], [[Youngjune L. Gwon|AUTHOR Youngjune L. Gwon]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210199.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-5-10|PAPER Tue-A-V-5-10 — Speaker Transition Patterns in Three-Party Conversation: Evidence from English, Estonian and Swedish]]</div>|<div class="cpsessionviewpapertitle">Speaker Transition Patterns in Three-Party Conversation: Evidence from English, Estonian and Swedish</div><div class="cpsessionviewpaperauthor">[[Marcin Włodarczak|AUTHOR Marcin Włodarczak]], [[Emer Gilmartin|AUTHOR Emer Gilmartin]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:30–15:30, Tuesday 31 August 2021, (Virtual)|<|
|^Chairs: |^Tomoki Toda|
|^ |^Jithendra Vepa|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211730.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-6-1|PAPER Tue-A-V-6-1 — Investigating Deep Neural Structures and their Interpretability in the Domain of Voice Conversion]]</div>|<div class="cpsessionviewpapertitle">Investigating Deep Neural Structures and their Interpretability in the Domain of Voice Conversion</div><div class="cpsessionviewpaperauthor">[[Samuel J. Broughton|AUTHOR Samuel J. Broughton]], [[Md. Asif Jalal|AUTHOR Md. Asif Jalal]], [[Roger K. Moore|AUTHOR Roger K. Moore]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210781.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-6-2|PAPER Tue-A-V-6-2 — Limited Data Emotional Voice Conversion Leveraging Text-to-Speech: Two-Stage Sequence-to-Sequence Training]]</div>|<div class="cpsessionviewpapertitle">Limited Data Emotional Voice Conversion Leveraging Text-to-Speech: Two-Stage Sequence-to-Sequence Training</div><div class="cpsessionviewpaperauthor">[[Kun Zhou|AUTHOR Kun Zhou]], [[Berrak Sisman|AUTHOR Berrak Sisman]], [[Haizhou Li|AUTHOR Haizhou Li]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210948.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-6-3|PAPER Tue-A-V-6-3 — Adversarial Voice Conversion Against Neural Spoofing Detectors]]</div>|<div class="cpsessionviewpapertitle">Adversarial Voice Conversion Against Neural Spoofing Detectors</div><div class="cpsessionviewpaperauthor">[[Yi-Yang Ding|AUTHOR Yi-Yang Ding]], [[Li-Juan Liu|AUTHOR Li-Juan Liu]], [[Yu Hu|AUTHOR Yu Hu]], [[Zhen-Hua Ling|AUTHOR Zhen-Hua Ling]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211253.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-6-4|PAPER Tue-A-V-6-4 — An Improved StarGAN for Emotional Voice Conversion: Enhancing Voice Quality and Data Augmentation]]</div>|<div class="cpsessionviewpapertitle">An Improved StarGAN for Emotional Voice Conversion: Enhancing Voice Quality and Data Augmentation</div><div class="cpsessionviewpaperauthor">[[Xiangheng He|AUTHOR Xiangheng He]], [[Junjie Chen|AUTHOR Junjie Chen]], [[Georgios Rizos|AUTHOR Georgios Rizos]], [[Björn W. Schuller|AUTHOR Björn W. Schuller]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211301.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-6-5|PAPER Tue-A-V-6-5 — TVQVC: Transformer Based Vector Quantized Variational Autoencoder with CTC Loss for Voice Conversion]]</div>|<div class="cpsessionviewpapertitle">TVQVC: Transformer Based Vector Quantized Variational Autoencoder with CTC Loss for Voice Conversion</div><div class="cpsessionviewpaperauthor">[[Ziyi Chen|AUTHOR Ziyi Chen]], [[Pengyuan Zhang|AUTHOR Pengyuan Zhang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211351.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-6-6|PAPER Tue-A-V-6-6 — Enriching Source Style Transfer in Recognition-Synthesis Based Non-Parallel Voice Conversion]]</div>|<div class="cpsessionviewpapertitle">Enriching Source Style Transfer in Recognition-Synthesis Based Non-Parallel Voice Conversion</div><div class="cpsessionviewpaperauthor">[[Zhichao Wang|AUTHOR Zhichao Wang]], [[Xinyong Zhou|AUTHOR Xinyong Zhou]], [[Fengyu Yang|AUTHOR Fengyu Yang]], [[Tao Li|AUTHOR Tao Li]], [[Hongqiang Du|AUTHOR Hongqiang Du]], [[Lei Xie|AUTHOR Lei Xie]], [[Wendong Gan|AUTHOR Wendong Gan]], [[Haitao Chen|AUTHOR Haitao Chen]], [[Hai Li|AUTHOR Hai Li]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211356.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-6-7|PAPER Tue-A-V-6-7 — S2VC: A Framework for Any-to-Any Voice Conversion with Self-Supervised Pretrained Representations]]</div>|<div class="cpsessionviewpapertitle">S2VC: A Framework for Any-to-Any Voice Conversion with Self-Supervised Pretrained Representations</div><div class="cpsessionviewpaperauthor">[[Jheng-hao Lin|AUTHOR Jheng-hao Lin]], [[Yist Y. Lin|AUTHOR Yist Y. Lin]], [[Chung-Ming Chien|AUTHOR Chung-Ming Chien]], [[Hung-yi Lee|AUTHOR Hung-yi Lee]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211740.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-6-8|PAPER Tue-A-V-6-8 — An Exemplar Selection Algorithm for Native-Nonnative Voice Conversion]]</div>|<div class="cpsessionviewpapertitle">An Exemplar Selection Algorithm for Native-Nonnative Voice Conversion</div><div class="cpsessionviewpaperauthor">[[Christopher Liberatore|AUTHOR Christopher Liberatore]], [[Ricardo Gutierrez-Osuna|AUTHOR Ricardo Gutierrez-Osuna]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211990.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-6-9|PAPER Tue-A-V-6-9 — Adversarially Learning Disentangled Speech Representations for Robust Multi-Factor Voice Conversion]]</div>|<div class="cpsessionviewpapertitle">Adversarially Learning Disentangled Speech Representations for Robust Multi-Factor Voice Conversion</div><div class="cpsessionviewpaperauthor">[[Jie Wang|AUTHOR Jie Wang]], [[Jingbei Li|AUTHOR Jingbei Li]], [[Xintao Zhao|AUTHOR Xintao Zhao]], [[Zhiyong Wu|AUTHOR Zhiyong Wu]], [[Shiyin Kang|AUTHOR Shiyin Kang]], [[Helen Meng|AUTHOR Helen Meng]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212086.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-A-V-6-10|PAPER Tue-A-V-6-10 — Many-to-Many Voice Conversion Based Feature Disentanglement Using Variational Autoencoder]]</div>|<div class="cpsessionviewpapertitle">Many-to-Many Voice Conversion Based Feature Disentanglement Using Variational Autoencoder</div><div class="cpsessionviewpaperauthor">[[Manh Luong|AUTHOR Manh Luong]], [[Viet Anh Tran|AUTHOR Viet Anh Tran]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|19:00–21:00, Tuesday 31 August 2021, Room A+B|<|
|^Chairs: |^Louis ten Bosch|
|^ |^Petr Červa|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211465.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-O-1-1|PAPER Tue-E-O-1-1 — Information Retrieval for ZeroSpeech 2021: The Submission by University of Wroclaw]]</div>|<div class="cpsessionviewpapertitle">Information Retrieval for ZeroSpeech 2021: The Submission by University of Wroclaw</div><div class="cpsessionviewpaperauthor">[[Jan Chorowski|AUTHOR Jan Chorowski]], [[Grzegorz Ciesielski|AUTHOR Grzegorz Ciesielski]], [[Jarosław Dzikowski|AUTHOR Jarosław Dzikowski]], [[Adrian Łańcucki|AUTHOR Adrian Łańcucki]], [[Ricard Marxer|AUTHOR Ricard Marxer]], [[Mateusz Opala|AUTHOR Mateusz Opala]], [[Piotr Pusz|AUTHOR Piotr Pusz]], [[Paweł Rychlikowski|AUTHOR Paweł Rychlikowski]], [[Michał Stypułkowski|AUTHOR Michał Stypułkowski]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211544.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-O-1-2|PAPER Tue-E-O-1-2 — Aligned Contrastive Predictive Coding]]</div>|<div class="cpsessionviewpapertitle">Aligned Contrastive Predictive Coding</div><div class="cpsessionviewpaperauthor">[[Jan Chorowski|AUTHOR Jan Chorowski]], [[Grzegorz Ciesielski|AUTHOR Grzegorz Ciesielski]], [[Jarosław Dzikowski|AUTHOR Jarosław Dzikowski]], [[Adrian Łańcucki|AUTHOR Adrian Łańcucki]], [[Ricard Marxer|AUTHOR Ricard Marxer]], [[Mateusz Opala|AUTHOR Mateusz Opala]], [[Piotr Pusz|AUTHOR Piotr Pusz]], [[Paweł Rychlikowski|AUTHOR Paweł Rychlikowski]], [[Michał Stypułkowski|AUTHOR Michał Stypułkowski]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211814.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-O-1-3|PAPER Tue-E-O-1-3 — Neural Text Denormalization for Speech Transcripts]]</div>|<div class="cpsessionviewpapertitle">Neural Text Denormalization for Speech Transcripts</div><div class="cpsessionviewpaperauthor">[[Benjamin Suter|AUTHOR Benjamin Suter]], [[Josef Novak|AUTHOR Josef Novak]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212011.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-O-1-4|PAPER Tue-E-O-1-4 — Fearless Steps Challenge Phase-3 (FSC P3): Advancing SLT for Unseen Channel and Mission Data Across NASA Apollo Audio]]</div>|<div class="cpsessionviewpapertitle">Fearless Steps Challenge Phase-3 (FSC P3): Advancing SLT for Unseen Channel and Mission Data Across NASA Apollo Audio</div><div class="cpsessionviewpaperauthor">[[Aditya Joglekar|AUTHOR Aditya Joglekar]], [[Seyed Omid Sadjadi|AUTHOR Seyed Omid Sadjadi]], [[Meena Chandra-Shekar|AUTHOR Meena Chandra-Shekar]], [[Christopher Cieri|AUTHOR Christopher Cieri]], [[John H.L. Hansen|AUTHOR John H.L. Hansen]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|19:00–21:00, Tuesday 31 August 2021, Room C|<|
|^Chairs: |^Bogdan Ludusan|
|^ |^Lenka Weingartová|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210452.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-O-2-1|PAPER Tue-E-O-2-1 — Voice Quality in Verbal Irony: Electroglottographic Analyses of Ironic Utterances in Standard Austrian German]]</div>|<div class="cpsessionviewpapertitle">Voice Quality in Verbal Irony: Electroglottographic Analyses of Ironic Utterances in Standard Austrian German</div><div class="cpsessionviewpaperauthor">[[Hannah Leykum|AUTHOR Hannah Leykum]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210939.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-O-2-2|PAPER Tue-E-O-2-2 — Synchronic Fortition in Five Romance Languages? A Large Corpus-Based Study of Word-Initial Devoicing]]</div>|<div class="cpsessionviewpapertitle">Synchronic Fortition in Five Romance Languages? A Large Corpus-Based Study of Word-Initial Devoicing</div><div class="cpsessionviewpaperauthor">[[Mathilde Hutin|AUTHOR Mathilde Hutin]], [[Yaru Wu|AUTHOR Yaru Wu]], [[Adèle Jatteau|AUTHOR Adèle Jatteau]], [[Ioana Vasilescu|AUTHOR Ioana Vasilescu]], [[Lori Lamel|AUTHOR Lori Lamel]], [[Martine Adda-Decker|AUTHOR Martine Adda-Decker]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211101.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-O-2-3|PAPER Tue-E-O-2-3 — Glottal Stops in Upper Sorbian: A Data-Driven Approach]]</div>|<div class="cpsessionviewpapertitle">Glottal Stops in Upper Sorbian: A Data-Driven Approach</div><div class="cpsessionviewpaperauthor">[[Ivan Kraljevski|AUTHOR Ivan Kraljevski]], [[Maria Paola Bissiri|AUTHOR Maria Paola Bissiri]], [[Frank Duckhorn|AUTHOR Frank Duckhorn]], [[Constanze Tschoepe|AUTHOR Constanze Tschoepe]], [[Matthias Wolff|AUTHOR Matthias Wolff]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211357.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-O-2-4|PAPER Tue-E-O-2-4 — Cue Interaction in the Perception of Prosodic Prominence: The Role of Voice Quality]]</div>|<div class="cpsessionviewpapertitle">Cue Interaction in the Perception of Prosodic Prominence: The Role of Voice Quality</div><div class="cpsessionviewpaperauthor">[[Bogdan Ludusan|AUTHOR Bogdan Ludusan]], [[Petra Wagner|AUTHOR Petra Wagner]], [[Marcin Włodarczak|AUTHOR Marcin Włodarczak]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211417.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-O-2-5|PAPER Tue-E-O-2-5 — Glottal Sounds in Korebaju]]</div>|<div class="cpsessionviewpapertitle">Glottal Sounds in Korebaju</div><div class="cpsessionviewpaperauthor">[[Jenifer Vega Rodriguez|AUTHOR Jenifer Vega Rodriguez]], [[Nathalie Vallée|AUTHOR Nathalie Vallée]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211765.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-O-2-6|PAPER Tue-E-O-2-6 — Automatic Classification of Phonation Types in Spontaneous Speech: Towards a New Workflow for the Characterization of Speakers’ Voice Quality]]</div>|<div class="cpsessionviewpapertitle">Automatic Classification of Phonation Types in Spontaneous Speech: Towards a New Workflow for the Characterization of Speakers’ Voice Quality</div><div class="cpsessionviewpaperauthor">[[Anaïs Chanclu|AUTHOR Anaïs Chanclu]], [[Imen Ben Amor|AUTHOR Imen Ben Amor]], [[Cédric Gendrot|AUTHOR Cédric Gendrot]], [[Emmanuel Ferragne|AUTHOR Emmanuel Ferragne]], [[Jean-François Bonastre|AUTHOR Jean-François Bonastre]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|19:00–21:00, Tuesday 31 August 2021, Room D|<|
|^Chairs: |^László Tóth|
|^ |^Tanya Talkar|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210026.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-O-3-1|PAPER Tue-E-O-3-1 — Measuring Voice Quality Parameters After Speaker Pseudonymization]]</div>|<div class="cpsessionviewpapertitle">Measuring Voice Quality Parameters After Speaker Pseudonymization</div><div class="cpsessionviewpaperauthor">[[Rob J.J.H. van Son|AUTHOR Rob J.J.H. van Son]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210567.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-O-3-2|PAPER Tue-E-O-3-2 — Audio-Visual Recognition of Emotional Engagement of People with Dementia]]</div>|<div class="cpsessionviewpapertitle">Audio-Visual Recognition of Emotional Engagement of People with Dementia</div><div class="cpsessionviewpaperauthor">[[Lars Steinert|AUTHOR Lars Steinert]], [[Felix Putze|AUTHOR Felix Putze]], [[Dennis Küster|AUTHOR Dennis Küster]], [[Tanja Schultz|AUTHOR Tanja Schultz]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211771.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-O-3-3|PAPER Tue-E-O-3-3 — Speaking Corona? Human and Machine Recognition of COVID-19 from Voice]]</div>|<div class="cpsessionviewpapertitle">Speaking Corona? Human and Machine Recognition of COVID-19 from Voice</div><div class="cpsessionviewpaperauthor">[[Pascal Hecker|AUTHOR Pascal Hecker]], [[Florian B. Pokorny|AUTHOR Florian B. Pokorny]], [[Katrin D. Bartl-Pokorny|AUTHOR Katrin D. Bartl-Pokorny]], [[Uwe Reichel|AUTHOR Uwe Reichel]], [[Zhao Ren|AUTHOR Zhao Ren]], [[Simone Hantke|AUTHOR Simone Hantke]], [[Florian Eyben|AUTHOR Florian Eyben]], [[Dagmar M. Schuller|AUTHOR Dagmar M. Schuller]], [[Bert Arnrich|AUTHOR Bert Arnrich]], [[Björn W. Schuller|AUTHOR Björn W. Schuller]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211891.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-O-3-4|PAPER Tue-E-O-3-4 — Acoustic-Prosodic, Lexical and Demographic Cues to Persuasiveness in Competitive Debate Speeches]]</div>|<div class="cpsessionviewpapertitle">Acoustic-Prosodic, Lexical and Demographic Cues to Persuasiveness in Competitive Debate Speeches</div><div class="cpsessionviewpaperauthor">[[Huyen Nguyen|AUTHOR Huyen Nguyen]], [[Ralph Vente|AUTHOR Ralph Vente]], [[David Lupea|AUTHOR David Lupea]], [[Sarah Ita Levitan|AUTHOR Sarah Ita Levitan]], [[Julia Hirschberg|AUTHOR Julia Hirschberg]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|19:00–21:00, Tuesday 31 August 2021, Room Lacina|<|
|^Chairs: |^Philipp Aichinger|
|^ |^Carlo Drioli|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> ||<div class="cpsessionviewpapertitle">Introduction</div><div class="cpsessionviewpaperauthor"></div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210711.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-SS-1-1|PAPER Tue-E-SS-1-1 — Optimizing an Automatic Creaky Voice Detection Method for Australian English Speaking Females]]</div>|<div class="cpsessionviewpapertitle">Optimizing an Automatic Creaky Voice Detection Method for Australian English Speaking Females</div><div class="cpsessionviewpaperauthor">[[Hannah White|AUTHOR Hannah White]], [[Joshua Penney|AUTHOR Joshua Penney]], [[Andy Gibson|AUTHOR Andy Gibson]], [[Anita Szakay|AUTHOR Anita Szakay]], [[Felicity Cox|AUTHOR Felicity Cox]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210729.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-SS-1-2|PAPER Tue-E-SS-1-2 — A Comparison of Acoustic Correlates of Voice Quality Across Different Recording Devices: A Cautionary Tale]]</div>|<div class="cpsessionviewpapertitle">A Comparison of Acoustic Correlates of Voice Quality Across Different Recording Devices: A Cautionary Tale</div><div class="cpsessionviewpaperauthor">[[Joshua Penney|AUTHOR Joshua Penney]], [[Andy Gibson|AUTHOR Andy Gibson]], [[Felicity Cox|AUTHOR Felicity Cox]], [[Michael Proctor|AUTHOR Michael Proctor]], [[Anita Szakay|AUTHOR Anita Szakay]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210870.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-SS-1-3|PAPER Tue-E-SS-1-3 — Investigating Voice Function Characteristics of Greek Speakers with Hearing Loss Using Automatic Glottal Source Feature Extraction]]</div>|<div class="cpsessionviewpapertitle">Investigating Voice Function Characteristics of Greek Speakers with Hearing Loss Using Automatic Glottal Source Feature Extraction</div><div class="cpsessionviewpaperauthor">[[Anna Sfakianaki|AUTHOR Anna Sfakianaki]], [[George P. Kafentzis|AUTHOR George P. Kafentzis]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211507.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-SS-1-4|PAPER Tue-E-SS-1-4 — Automated Detection of Voice Disorder in the Saarbrücken Voice Database: Effects of Pathology Subset and Audio Materials]]</div>|<div class="cpsessionviewpapertitle">Automated Detection of Voice Disorder in the Saarbrücken Voice Database: Effects of Pathology Subset and Audio Materials</div><div class="cpsessionviewpaperauthor">[[Mark Huckvale|AUTHOR Mark Huckvale]], [[Catinca Buciuleac|AUTHOR Catinca Buciuleac]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211918.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-SS-1-5|PAPER Tue-E-SS-1-5 — Accelerometer-Based Measurements of Voice Quality in Children During Semi-Occluded Vocal Tract Exercise with a Narrow Straw in Air]]</div>|<div class="cpsessionviewpapertitle">Accelerometer-Based Measurements of Voice Quality in Children During Semi-Occluded Vocal Tract Exercise with a Narrow Straw in Air</div><div class="cpsessionviewpaperauthor">[[Steven M. Lulich|AUTHOR Steven M. Lulich]], [[Rita R. Patel|AUTHOR Rita R. Patel]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210688.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-SS-1-6|PAPER Tue-E-SS-1-6 — Articulatory Coordination for Speech Motor Tracking in Huntington Disease]]</div>|<div class="cpsessionviewpapertitle">Articulatory Coordination for Speech Motor Tracking in Huntington Disease</div><div class="cpsessionviewpaperauthor">[[Matthew Perez|AUTHOR Matthew Perez]], [[Amrit Romana|AUTHOR Amrit Romana]], [[Angela Roberts|AUTHOR Angela Roberts]], [[Noelle Carlozzi|AUTHOR Noelle Carlozzi]], [[Jennifer Ann Miner|AUTHOR Jennifer Ann Miner]], [[Praveen Dayalu|AUTHOR Praveen Dayalu]], [[Emily Mower Provost|AUTHOR Emily Mower Provost]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211540.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-SS-1-7|PAPER Tue-E-SS-1-7 — Modeling Dysphonia Severity as a Function of Roughness and Breathiness Ratings in the GRBAS Scale]]</div>|<div class="cpsessionviewpapertitle">Modeling Dysphonia Severity as a Function of Roughness and Breathiness Ratings in the GRBAS Scale</div><div class="cpsessionviewpaperauthor">[[Carlos A. Ferrer|AUTHOR Carlos A. Ferrer]], [[Efren Aragón|AUTHOR Efren Aragón]], [[María E. Hdez-Díaz|AUTHOR María E. Hdez-Díaz]], [[Marc S. de Bodt|AUTHOR Marc S. de Bodt]], [[Roman Cmejla|AUTHOR Roman Cmejla]], [[Marina Englert|AUTHOR Marina Englert]], [[Mara Behlau|AUTHOR Mara Behlau]], [[Elmar Nöth|AUTHOR Elmar Nöth]]</div>|
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> ||<div class="cpsessionviewpapertitle">Panel Discussion</div><div class="cpsessionviewpaperauthor"></div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|19:00–21:00, Tuesday 31 August 2021, (Virtual)|<|
|^Chairs: |^Jason Pelecanos|
|^ |^Sandro Cumani|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210033.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-1-1|PAPER Tue-E-V-1-1 — Unsupervised Bayesian Adaptation of PLDA for Speaker Verification]]</div>|<div class="cpsessionviewpapertitle">Unsupervised Bayesian Adaptation of PLDA for Speaker Verification</div><div class="cpsessionviewpaperauthor">[[Bengt J. Borgström|AUTHOR Bengt J. Borgström]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210235.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-1-2|PAPER Tue-E-V-1-2 — The DKU-Duke-Lenovo System Description for the Fearless Steps Challenge Phase III]]</div>|<div class="cpsessionviewpapertitle">The DKU-Duke-Lenovo System Description for the Fearless Steps Challenge Phase III</div><div class="cpsessionviewpaperauthor">[[Weiqing Wang|AUTHOR Weiqing Wang]], [[Danwei Cai|AUTHOR Danwei Cai]], [[Jin Wang|AUTHOR Jin Wang]], [[Qingjian Lin|AUTHOR Qingjian Lin]], [[Xuyang Wang|AUTHOR Xuyang Wang]], [[Mi Hong|AUTHOR Mi Hong]], [[Ming Li|AUTHOR Ming Li]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210405.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-1-3|PAPER Tue-E-V-1-3 — Improved Meta-Learning Training for Speaker Verification]]</div>|<div class="cpsessionviewpapertitle">Improved Meta-Learning Training for Speaker Verification</div><div class="cpsessionviewpaperauthor">[[Yafeng Chen|AUTHOR Yafeng Chen]], [[Wu Guo|AUTHOR Wu Guo]], [[Bin Gu|AUTHOR Bin Gu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210482.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-1-4|PAPER Tue-E-V-1-4 — Variational Information Bottleneck Based Regularization for Speaker Recognition]]</div>|<div class="cpsessionviewpapertitle">Variational Information Bottleneck Based Regularization for Speaker Recognition</div><div class="cpsessionviewpaperauthor">[[Dan Wang|AUTHOR Dan Wang]], [[Yuanjie Dong|AUTHOR Yuanjie Dong]], [[Yaxing Li|AUTHOR Yaxing Li]], [[Yunfei Zi|AUTHOR Yunfei Zi]], [[Zhihui Zhang|AUTHOR Zhihui Zhang]], [[Xiaoqi Li|AUTHOR Xiaoqi Li]], [[Shengwu Xiong|AUTHOR Shengwu Xiong]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210541.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-1-5|PAPER Tue-E-V-1-5 — Out of a Hundred Trials, How Many Errors Does Your Speaker Verifier Make?]]</div>|<div class="cpsessionviewpapertitle">Out of a Hundred Trials, How Many Errors Does Your Speaker Verifier Make?</div><div class="cpsessionviewpaperauthor">[[Niko Brümmer|AUTHOR Niko Brümmer]], [[Luciana Ferrer|AUTHOR Luciana Ferrer]], [[Albert Swart|AUTHOR Albert Swart]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210646.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-1-6|PAPER Tue-E-V-1-6 — SpeakerStew: Scaling to Many Languages with a Triaged Multilingual Text-Dependent and Text-Independent Speaker Verification System]]</div>|<div class="cpsessionviewpapertitle">SpeakerStew: Scaling to Many Languages with a Triaged Multilingual Text-Dependent and Text-Independent Speaker Verification System</div><div class="cpsessionviewpaperauthor">[[Roza Chojnacka|AUTHOR Roza Chojnacka]], [[Jason Pelecanos|AUTHOR Jason Pelecanos]], [[Quan Wang|AUTHOR Quan Wang]], [[Ignacio Lopez Moreno|AUTHOR Ignacio Lopez Moreno]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210966.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-1-7|PAPER Tue-E-V-1-7 — AntVoice Neural Speaker Embedding System for FFSVC 2020]]</div>|<div class="cpsessionviewpapertitle">AntVoice Neural Speaker Embedding System for FFSVC 2020</div><div class="cpsessionviewpaperauthor">[[Zhiming Wang|AUTHOR Zhiming Wang]], [[Furong Xu|AUTHOR Furong Xu]], [[Kaisheng Yao|AUTHOR Kaisheng Yao]], [[Yuan Cheng|AUTHOR Yuan Cheng]], [[Tao Xiong|AUTHOR Tao Xiong]], [[Huijia Zhu|AUTHOR Huijia Zhu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211216.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-1-8|PAPER Tue-E-V-1-8 — Gradient Regularization for Noise-Robust Speaker Verification]]</div>|<div class="cpsessionviewpapertitle">Gradient Regularization for Noise-Robust Speaker Verification</div><div class="cpsessionviewpaperauthor">[[Jianchen Li|AUTHOR Jianchen Li]], [[Jiqing Han|AUTHOR Jiqing Han]], [[Hongwei Song|AUTHOR Hongwei Song]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211502.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-1-9|PAPER Tue-E-V-1-9 — Deep Feature CycleGANs: Speaker Identity Preserving Non-Parallel Microphone-Telephone Domain Adaptation for Speaker Verification]]</div>|<div class="cpsessionviewpapertitle">Deep Feature CycleGANs: Speaker Identity Preserving Non-Parallel Microphone-Telephone Domain Adaptation for Speaker Verification</div><div class="cpsessionviewpaperauthor">[[Saurabh Kataria|AUTHOR Saurabh Kataria]], [[Jesús Villalba|AUTHOR Jesús Villalba]], [[Piotr Żelasko|AUTHOR Piotr Żelasko]], [[Laureano Moro-Velázquez|AUTHOR Laureano Moro-Velázquez]], [[Najim Dehak|AUTHOR Najim Dehak]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211935.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-1-10|PAPER Tue-E-V-1-10 — Scaling Effect of Self-Supervised Speech Models]]</div>|<div class="cpsessionviewpapertitle">Scaling Effect of Self-Supervised Speech Models</div><div class="cpsessionviewpaperauthor">[[Jie Pu|AUTHOR Jie Pu]], [[Yuguang Yang|AUTHOR Yuguang Yang]], [[Ruirui Li|AUTHOR Ruirui Li]], [[Oguz Elibol|AUTHOR Oguz Elibol]], [[Jasha Droppo|AUTHOR Jasha Droppo]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211978.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-1-11|PAPER Tue-E-V-1-11 — Joint Feature Enhancement and Speaker Recognition with Multi-Objective Task-Oriented Network]]</div>|<div class="cpsessionviewpapertitle">Joint Feature Enhancement and Speaker Recognition with Multi-Objective Task-Oriented Network</div><div class="cpsessionviewpaperauthor">[[Yibo Wu|AUTHOR Yibo Wu]], [[Longbiao Wang|AUTHOR Longbiao Wang]], [[Kong Aik Lee|AUTHOR Kong Aik Lee]], [[Meng Liu|AUTHOR Meng Liu]], [[Jianwu Dang|AUTHOR Jianwu Dang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211980.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-1-12|PAPER Tue-E-V-1-12 — Multi-Level Transfer Learning from Near-Field to Far-Field Speaker Verification]]</div>|<div class="cpsessionviewpapertitle">Multi-Level Transfer Learning from Near-Field to Far-Field Speaker Verification</div><div class="cpsessionviewpaperauthor">[[Li Zhang|AUTHOR Li Zhang]], [[Qing Wang|AUTHOR Qing Wang]], [[Kong Aik Lee|AUTHOR Kong Aik Lee]], [[Lei Xie|AUTHOR Lei Xie]], [[Haizhou Li|AUTHOR Haizhou Li]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211070.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-1-13|PAPER Tue-E-V-1-13 — Speaker Anonymisation Using the McAdams Coefficient]]</div>|<div class="cpsessionviewpapertitle">Speaker Anonymisation Using the McAdams Coefficient</div><div class="cpsessionviewpaperauthor">[[Jose Patino|AUTHOR Jose Patino]], [[Natalia Tomashenko|AUTHOR Natalia Tomashenko]], [[Massimiliano Todisco|AUTHOR Massimiliano Todisco]], [[Andreas Nautsch|AUTHOR Andreas Nautsch]], [[Nicholas Evans|AUTHOR Nicholas Evans]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|19:00–21:00, Tuesday 31 August 2021, (Virtual)|<|
|^Chairs: |^Franz Pernkopf|
|^ |^Jonathan Le Roux|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210366.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-2-1|PAPER Tue-E-V-2-1 — Multi-Stream Gated and Pyramidal Temporal Convolutional Neural Networks for Audio-Visual Speech Separation in Multi-Talker Environments]]</div>|<div class="cpsessionviewpapertitle">Multi-Stream Gated and Pyramidal Temporal Convolutional Neural Networks for Audio-Visual Speech Separation in Multi-Talker Environments</div><div class="cpsessionviewpaperauthor">[[Yiyu Luo|AUTHOR Yiyu Luo]], [[Jing Wang|AUTHOR Jing Wang]], [[Liang Xu|AUTHOR Liang Xu]], [[Lidong Yang|AUTHOR Lidong Yang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210481.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-2-2|PAPER Tue-E-V-2-2 — TeCANet: Temporal-Contextual Attention Network for Environment-Aware Speech Dereverberation]]</div>|<div class="cpsessionviewpapertitle">TeCANet: Temporal-Contextual Attention Network for Environment-Aware Speech Dereverberation</div><div class="cpsessionviewpaperauthor">[[Helin Wang|AUTHOR Helin Wang]], [[Bo Wu|AUTHOR Bo Wu]], [[Lianwu Chen|AUTHOR Lianwu Chen]], [[Meng Yu|AUTHOR Meng Yu]], [[Jianwei Yu|AUTHOR Jianwei Yu]], [[Yong Xu|AUTHOR Yong Xu]], [[Shi-Xiong Zhang|AUTHOR Shi-Xiong Zhang]], [[Chao Weng|AUTHOR Chao Weng]], [[Dan Su|AUTHOR Dan Su]], [[Dong Yu|AUTHOR Dong Yu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210538.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-2-3|PAPER Tue-E-V-2-3 — Residual Echo and Noise Cancellation with Feature Attention Module and Multi-Domain Loss Function]]</div>|<div class="cpsessionviewpapertitle">Residual Echo and Noise Cancellation with Feature Attention Module and Multi-Domain Loss Function</div><div class="cpsessionviewpaperauthor">[[Jianjun Gu|AUTHOR Jianjun Gu]], [[Longbiao Cheng|AUTHOR Longbiao Cheng]], [[Xingwei Sun|AUTHOR Xingwei Sun]], [[Junfeng Li|AUTHOR Junfeng Li]], [[Yonghong Yan|AUTHOR Yonghong Yan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210570.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-2-4|PAPER Tue-E-V-2-4 — MIMO Self-Attentive RNN Beamformer for Multi-Speaker Speech Separation]]</div>|<div class="cpsessionviewpapertitle">MIMO Self-Attentive RNN Beamformer for Multi-Speaker Speech Separation</div><div class="cpsessionviewpaperauthor">[[Xiyun Li|AUTHOR Xiyun Li]], [[Yong Xu|AUTHOR Yong Xu]], [[Meng Yu|AUTHOR Meng Yu]], [[Shi-Xiong Zhang|AUTHOR Shi-Xiong Zhang]], [[Jiaming Xu|AUTHOR Jiaming Xu]], [[Bo Xu|AUTHOR Bo Xu]], [[Dong Yu|AUTHOR Dong Yu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210694.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-2-5|PAPER Tue-E-V-2-5 — Personalized PercepNet: Real-Time, Low-Complexity Target Voice Separation and Enhancement]]</div>|<div class="cpsessionviewpapertitle">Personalized PercepNet: Real-Time, Low-Complexity Target Voice Separation and Enhancement</div><div class="cpsessionviewpaperauthor">[[Ritwik Giri|AUTHOR Ritwik Giri]], [[Shrikant Venkataramani|AUTHOR Shrikant Venkataramani]], [[Jean-Marc Valin|AUTHOR Jean-Marc Valin]], [[Umut Isik|AUTHOR Umut Isik]], [[Arvindh Krishnaswamy|AUTHOR Arvindh Krishnaswamy]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210889.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-2-6|PAPER Tue-E-V-2-6 — Scene-Agnostic Multi-Microphone Speech Dereverberation]]</div>|<div class="cpsessionviewpapertitle">Scene-Agnostic Multi-Microphone Speech Dereverberation</div><div class="cpsessionviewpaperauthor">[[Yochai Yemini|AUTHOR Yochai Yemini]], [[Ethan Fetaya|AUTHOR Ethan Fetaya]], [[Haggai Maron|AUTHOR Haggai Maron]], [[Sharon Gannot|AUTHOR Sharon Gannot]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211029.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-2-7|PAPER Tue-E-V-2-7 — Manifold-Aware Deep Clustering: Maximizing Angles Between Embedding Vectors Based on Regular Simplex]]</div>|<div class="cpsessionviewpapertitle">Manifold-Aware Deep Clustering: Maximizing Angles Between Embedding Vectors Based on Regular Simplex</div><div class="cpsessionviewpaperauthor">[[Keitaro Tanaka|AUTHOR Keitaro Tanaka]], [[Ryosuke Sawata|AUTHOR Ryosuke Sawata]], [[Shusuke Takahashi|AUTHOR Shusuke Takahashi]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211508.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-2-8|PAPER Tue-E-V-2-8 — A Deep Learning Approach to Multi-Channel and Multi-Microphone Acoustic Echo Cancellation]]</div>|<div class="cpsessionviewpapertitle">A Deep Learning Approach to Multi-Channel and Multi-Microphone Acoustic Echo Cancellation</div><div class="cpsessionviewpaperauthor">[[Hao Zhang|AUTHOR Hao Zhang]], [[DeLiang Wang|AUTHOR DeLiang Wang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211950.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-2-9|PAPER Tue-E-V-2-9 — Joint Online Multichannel Acoustic Echo Cancellation, Speech Dereverberation and Source Separation]]</div>|<div class="cpsessionviewpapertitle">Joint Online Multichannel Acoustic Echo Cancellation, Speech Dereverberation and Source Separation</div><div class="cpsessionviewpaperauthor">[[Yueyue Na|AUTHOR Yueyue Na]], [[Ziteng Wang|AUTHOR Ziteng Wang]], [[Zhang Liu|AUTHOR Zhang Liu]], [[Biao Tian|AUTHOR Biao Tian]], [[Qiang Fu|AUTHOR Qiang Fu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212253.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-2-10|PAPER Tue-E-V-2-10 — Should We Always Separate?: Switching Between Enhanced and Observed Signals for Overlapping Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Should We Always Separate?: Switching Between Enhanced and Observed Signals for Overlapping Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Hiroshi Sato|AUTHOR Hiroshi Sato]], [[Tsubasa Ochiai|AUTHOR Tsubasa Ochiai]], [[Marc Delcroix|AUTHOR Marc Delcroix]], [[Keisuke Kinoshita|AUTHOR Keisuke Kinoshita]], [[Takafumi Moriya|AUTHOR Takafumi Moriya]], [[Naoyuki Kamo|AUTHOR Naoyuki Kamo]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|19:00–21:00, Tuesday 31 August 2021, (Virtual)|<|
|^Chairs: |^Bhuvana Ramabhadran|
|^ |^Jinyu Li|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211375.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-3-1|PAPER Tue-E-V-3-1 — Estimating Articulatory Movements in Speech Production with Transformer Networks]]</div>|<div class="cpsessionviewpapertitle">Estimating Articulatory Movements in Speech Production with Transformer Networks</div><div class="cpsessionviewpaperauthor">[[Sathvik Udupa|AUTHOR Sathvik Udupa]], [[Anwesha Roy|AUTHOR Anwesha Roy]], [[Abhayjeet Singh|AUTHOR Abhayjeet Singh]], [[Aravind Illa|AUTHOR Aravind Illa]], [[Prasanta Kumar Ghosh|AUTHOR Prasanta Kumar Ghosh]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210300.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-3-2|PAPER Tue-E-V-3-2 — Unsupervised Multi-Target Domain Adaptation for Acoustic Scene Classification]]</div>|<div class="cpsessionviewpapertitle">Unsupervised Multi-Target Domain Adaptation for Acoustic Scene Classification</div><div class="cpsessionviewpaperauthor">[[Dongchao Yang|AUTHOR Dongchao Yang]], [[Helin Wang|AUTHOR Helin Wang]], [[Yuexian Zou|AUTHOR Yuexian Zou]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210047.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-3-3|PAPER Tue-E-V-3-3 — Speech Decomposition Based on a Hybrid Speech Model and Optimal Segmentation]]</div>|<div class="cpsessionviewpapertitle">Speech Decomposition Based on a Hybrid Speech Model and Optimal Segmentation</div><div class="cpsessionviewpaperauthor">[[Alfredo Esquivel Jaramillo|AUTHOR Alfredo Esquivel Jaramillo]], [[Jesper Kjær Nielsen|AUTHOR Jesper Kjær Nielsen]], [[Mads Græsbøll Christensen|AUTHOR Mads Græsbøll Christensen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211066.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-3-4|PAPER Tue-E-V-3-4 — Dropout Regularization for Self-Supervised Learning of Transformer Encoder Speech Representation]]</div>|<div class="cpsessionviewpapertitle">Dropout Regularization for Self-Supervised Learning of Transformer Encoder Speech Representation</div><div class="cpsessionviewpaperauthor">[[Jian Luo|AUTHOR Jian Luo]], [[Jianzong Wang|AUTHOR Jianzong Wang]], [[Ning Cheng|AUTHOR Ning Cheng]], [[Jing Xiao|AUTHOR Jing Xiao]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211307.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-3-5|PAPER Tue-E-V-3-5 — Noise Robust Pitch Stylization Using Minimum Mean Absolute Error Criterion]]</div>|<div class="cpsessionviewpapertitle">Noise Robust Pitch Stylization Using Minimum Mean Absolute Error Criterion</div><div class="cpsessionviewpaperauthor">[[Chiranjeevi Yarra|AUTHOR Chiranjeevi Yarra]], [[Prasanta Kumar Ghosh|AUTHOR Prasanta Kumar Ghosh]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211341.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-3-6|PAPER Tue-E-V-3-6 — An Attribute-Aligned Strategy for Learning Speech Representation]]</div>|<div class="cpsessionviewpapertitle">An Attribute-Aligned Strategy for Learning Speech Representation</div><div class="cpsessionviewpaperauthor">[[Yu-Lin Huang|AUTHOR Yu-Lin Huang]], [[Bo-Hao Su|AUTHOR Bo-Hao Su]], [[Y.-W. Peter Hong|AUTHOR Y.-W. Peter Hong]], [[Chi-Chun Lee|AUTHOR Chi-Chun Lee]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211429.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-3-7|PAPER Tue-E-V-3-7 — Raw Speech-to-Articulatory Inversion by Temporal Filtering and Decimation]]</div>|<div class="cpsessionviewpapertitle">Raw Speech-to-Articulatory Inversion by Temporal Filtering and Decimation</div><div class="cpsessionviewpaperauthor">[[Abdolreza Sabzi Shahrebabaki|AUTHOR Abdolreza Sabzi Shahrebabaki]], [[Sabato Marco Siniscalchi|AUTHOR Sabato Marco Siniscalchi]], [[Torbjørn Svendsen|AUTHOR Torbjørn Svendsen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211690.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-3-8|PAPER Tue-E-V-3-8 — Unsupervised Training of a DNN-Based Formant Tracker]]</div>|<div class="cpsessionviewpapertitle">Unsupervised Training of a DNN-Based Formant Tracker</div><div class="cpsessionviewpaperauthor">[[Jason Lilley|AUTHOR Jason Lilley]], [[H. Timothy Bunnell|AUTHOR H. Timothy Bunnell]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211775.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-3-9|PAPER Tue-E-V-3-9 — SUPERB: Speech Processing Universal PERformance Benchmark]]</div>|<div class="cpsessionviewpapertitle">SUPERB: Speech Processing Universal PERformance Benchmark</div><div class="cpsessionviewpaperauthor">[[Shu-wen Yang|AUTHOR Shu-wen Yang]], [[Po-Han Chi|AUTHOR Po-Han Chi]], [[Yung-Sung Chuang|AUTHOR Yung-Sung Chuang]], [[Cheng-I Jeff Lai|AUTHOR Cheng-I Jeff Lai]], [[Kushal Lakhotia|AUTHOR Kushal Lakhotia]], [[Yist Y. Lin|AUTHOR Yist Y. Lin]], [[Andy T. Liu|AUTHOR Andy T. Liu]], [[Jiatong Shi|AUTHOR Jiatong Shi]], [[Xuankai Chang|AUTHOR Xuankai Chang]], [[Guan-Ting Lin|AUTHOR Guan-Ting Lin]], [[Tzu-Hsien Huang|AUTHOR Tzu-Hsien Huang]], [[Wei-Cheng Tseng|AUTHOR Wei-Cheng Tseng]], [[Ko-tik Lee|AUTHOR Ko-tik Lee]], [[Da-Rong Liu|AUTHOR Da-Rong Liu]], [[Zili Huang|AUTHOR Zili Huang]], [[Shuyan Dong|AUTHOR Shuyan Dong]], [[Shang-Wen Li|AUTHOR Shang-Wen Li]], [[Shinji Watanabe|AUTHOR Shinji Watanabe]], [[Abdelrahman Mohamed|AUTHOR Abdelrahman Mohamed]], [[Hung-yi Lee|AUTHOR Hung-yi Lee]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211841.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-3-10|PAPER Tue-E-V-3-10 — Synchronising Speech Segments with Musical Beats in Mandarin and English Singing]]</div>|<div class="cpsessionviewpapertitle">Synchronising Speech Segments with Musical Beats in Mandarin and English Singing</div><div class="cpsessionviewpaperauthor">[[Cong Zhang|AUTHOR Cong Zhang]], [[Jian Zhu|AUTHOR Jian Zhu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212070.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-3-11|PAPER Tue-E-V-3-11 — FRILL: A Non-Semantic Speech Embedding for Mobile Devices]]</div>|<div class="cpsessionviewpapertitle">FRILL: A Non-Semantic Speech Embedding for Mobile Devices</div><div class="cpsessionviewpaperauthor">[[Jacob Peplinski|AUTHOR Jacob Peplinski]], [[Joel Shor|AUTHOR Joel Shor]], [[Sachin Joglekar|AUTHOR Sachin Joglekar]], [[Jake Garrison|AUTHOR Jake Garrison]], [[Shwetak Patel|AUTHOR Shwetak Patel]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212164.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-3-12|PAPER Tue-E-V-3-12 — Pitch Contour Separation from Overlapping Speech]]</div>|<div class="cpsessionviewpapertitle">Pitch Contour Separation from Overlapping Speech</div><div class="cpsessionviewpaperauthor">[[Hiroki Mori|AUTHOR Hiroki Mori]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210347.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-3-13|PAPER Tue-E-V-3-13 — Do Sound Event Representations Generalize to Other Audio Tasks? A Case Study in Audio Transfer Learning]]</div>|<div class="cpsessionviewpapertitle">Do Sound Event Representations Generalize to Other Audio Tasks? A Case Study in Audio Transfer Learning</div><div class="cpsessionviewpaperauthor">[[Anurag Kumar|AUTHOR Anurag Kumar]], [[Yun Wang|AUTHOR Yun Wang]], [[Vamsi Krishna Ithapu|AUTHOR Vamsi Krishna Ithapu]], [[Christian Fuegen|AUTHOR Christian Fuegen]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|19:00–21:00, Tuesday 31 August 2021, (Virtual)|<|
|^Chairs: |^Gina-Anne Levow|
|^ |^Ian Lane|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210117.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-4-1|PAPER Tue-E-V-4-1 — Data Augmentation for Spoken Language Understanding via Pretrained Language Models]]</div>|<div class="cpsessionviewpapertitle">Data Augmentation for Spoken Language Understanding via Pretrained Language Models</div><div class="cpsessionviewpaperauthor">[[Baolin Peng|AUTHOR Baolin Peng]], [[Chenguang Zhu|AUTHOR Chenguang Zhu]], [[Michael Zeng|AUTHOR Michael Zeng]], [[Jianfeng Gao|AUTHOR Jianfeng Gao]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210793.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-4-2|PAPER Tue-E-V-4-2 — FANS: Fusing ASR and NLU for On-Device SLU]]</div>|<div class="cpsessionviewpapertitle">FANS: Fusing ASR and NLU for On-Device SLU</div><div class="cpsessionviewpaperauthor">[[Martin Radfar|AUTHOR Martin Radfar]], [[Athanasios Mouchtaris|AUTHOR Athanasios Mouchtaris]], [[Siegfried Kunzmann|AUTHOR Siegfried Kunzmann]], [[Ariya Rastrow|AUTHOR Ariya Rastrow]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211569.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-4-3|PAPER Tue-E-V-4-3 — Sequential End-to-End Intent and Slot Label Classification and Localization]]</div>|<div class="cpsessionviewpapertitle">Sequential End-to-End Intent and Slot Label Classification and Localization</div><div class="cpsessionviewpaperauthor">[[Yiran Cao|AUTHOR Yiran Cao]], [[Nihal Potdar|AUTHOR Nihal Potdar]], [[Anderson R. Avila|AUTHOR Anderson R. Avila]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211877.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-4-4|PAPER Tue-E-V-4-4 — DEXTER: Deep Encoding of External Knowledge for Named Entity Recognition in Virtual Assistants]]</div>|<div class="cpsessionviewpapertitle">DEXTER: Deep Encoding of External Knowledge for Named Entity Recognition in Virtual Assistants</div><div class="cpsessionviewpaperauthor">[[Deepak Muralidharan|AUTHOR Deepak Muralidharan]], [[Joel Ruben Antony Moniz|AUTHOR Joel Ruben Antony Moniz]], [[Weicheng Zhang|AUTHOR Weicheng Zhang]], [[Stephen Pulman|AUTHOR Stephen Pulman]], [[Lin Li|AUTHOR Lin Li]], [[Megan Barnes|AUTHOR Megan Barnes]], [[Jingjing Pan|AUTHOR Jingjing Pan]], [[Jason Williams|AUTHOR Jason Williams]], [[Alex Acero|AUTHOR Alex Acero]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210095.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-4-5|PAPER Tue-E-V-4-5 — A Context-Aware Hierarchical BERT Fusion Network for Multi-Turn Dialog Act Detection]]</div>|<div class="cpsessionviewpapertitle">A Context-Aware Hierarchical BERT Fusion Network for Multi-Turn Dialog Act Detection</div><div class="cpsessionviewpaperauthor">[[Ting-Wei Wu|AUTHOR Ting-Wei Wu]], [[Ruolin Su|AUTHOR Ruolin Su]], [[Biing-Hwang Juang|AUTHOR Biing-Hwang Juang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210234.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-4-6|PAPER Tue-E-V-4-6 — Pre-Training for Spoken Language Understanding with Joint Textual and Phonetic Representation Learning]]</div>|<div class="cpsessionviewpapertitle">Pre-Training for Spoken Language Understanding with Joint Textual and Phonetic Representation Learning</div><div class="cpsessionviewpaperauthor">[[Qian Chen|AUTHOR Qian Chen]], [[Wen Wang|AUTHOR Wen Wang]], [[Qinglin Zhang|AUTHOR Qinglin Zhang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210580.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-4-7|PAPER Tue-E-V-4-7 — Predicting Temporal Performance Drop of Deployed Production Spoken Language Understanding Models]]</div>|<div class="cpsessionviewpapertitle">Predicting Temporal Performance Drop of Deployed Production Spoken Language Understanding Models</div><div class="cpsessionviewpaperauthor">[[Quynh Do|AUTHOR Quynh Do]], [[Judith Gaspers|AUTHOR Judith Gaspers]], [[Daniil Sorokin|AUTHOR Daniil Sorokin]], [[Patrick Lehnen|AUTHOR Patrick Lehnen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211460.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-4-8|PAPER Tue-E-V-4-8 — Integrating Dialog History into End-to-End Spoken Language Understanding Systems]]</div>|<div class="cpsessionviewpapertitle">Integrating Dialog History into End-to-End Spoken Language Understanding Systems</div><div class="cpsessionviewpaperauthor">[[Jatin Ganhotra|AUTHOR Jatin Ganhotra]], [[Samuel Thomas|AUTHOR Samuel Thomas]], [[Hong-Kwang J. Kuo|AUTHOR Hong-Kwang J. Kuo]], [[Sachindra Joshi|AUTHOR Sachindra Joshi]], [[George Saon|AUTHOR George Saon]], [[Zoltán Tüske|AUTHOR Zoltán Tüske]], [[Brian Kingsbury|AUTHOR Brian Kingsbury]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211463.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-4-9|PAPER Tue-E-V-4-9 — Coreference Augmentation for Multi-Domain Task-Oriented Dialogue State Tracking]]</div>|<div class="cpsessionviewpapertitle">Coreference Augmentation for Multi-Domain Task-Oriented Dialogue State Tracking</div><div class="cpsessionviewpaperauthor">[[Ting Han|AUTHOR Ting Han]], [[Chongxuan Huang|AUTHOR Chongxuan Huang]], [[Wei Peng|AUTHOR Wei Peng]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211537.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-4-10|PAPER Tue-E-V-4-10 — Rethinking End-to-End Evaluation of Decomposable Tasks: A Case Study on Spoken Language Understanding]]</div>|<div class="cpsessionviewpapertitle">Rethinking End-to-End Evaluation of Decomposable Tasks: A Case Study on Spoken Language Understanding</div><div class="cpsessionviewpaperauthor">[[Siddhant Arora|AUTHOR Siddhant Arora]], [[Alissa Ostapenko|AUTHOR Alissa Ostapenko]], [[Vijay Viswanathan|AUTHOR Vijay Viswanathan]], [[Siddharth Dalmia|AUTHOR Siddharth Dalmia]], [[Florian Metze|AUTHOR Florian Metze]], [[Shinji Watanabe|AUTHOR Shinji Watanabe]], [[Alan W. Black|AUTHOR Alan W. Black]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|19:00–21:00, Tuesday 31 August 2021, (Virtual)|<|
|^Chairs: |^Abeer Alwan|
|^ |^Ewan Dunbar|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211162.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-5-1|PAPER Tue-E-V-5-1 — Semantic Data Augmentation for End-to-End Mandarin Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Semantic Data Augmentation for End-to-End Mandarin Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Jianwei Sun|AUTHOR Jianwei Sun]], [[Zhiyuan Tang|AUTHOR Zhiyuan Tang]], [[Hengxin Yin|AUTHOR Hengxin Yin]], [[Wei Wang|AUTHOR Wei Wang]], [[Xi Zhao|AUTHOR Xi Zhao]], [[Shuaijiang Zhao|AUTHOR Shuaijiang Zhao]], [[Xiaoning Lei|AUTHOR Xiaoning Lei]], [[Wei Zou|AUTHOR Wei Zou]], [[Xiangang Li|AUTHOR Xiangang Li]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211075.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-5-2|PAPER Tue-E-V-5-2 — Layer-Wise Fast Adaptation for End-to-End Multi-Accent Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Layer-Wise Fast Adaptation for End-to-End Multi-Accent Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Xun Gong|AUTHOR Xun Gong]], [[Yizhou Lu|AUTHOR Yizhou Lu]], [[Zhikai Zhou|AUTHOR Zhikai Zhou]], [[Yanmin Qian|AUTHOR Yanmin Qian]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211974.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-5-3|PAPER Tue-E-V-5-3 — Low Resource German ASR with Untranscribed Data Spoken by Non-Native Children — INTERSPEECH 2021 Shared Task SPAPL System]]</div>|<div class="cpsessionviewpapertitle">Low Resource German ASR with Untranscribed Data Spoken by Non-Native Children — INTERSPEECH 2021 Shared Task SPAPL System</div><div class="cpsessionviewpaperauthor">[[Jinhan Wang|AUTHOR Jinhan Wang]], [[Yunzheng Zhu|AUTHOR Yunzheng Zhu]], [[Ruchao Fan|AUTHOR Ruchao Fan]], [[Wei Chu|AUTHOR Wei Chu]], [[Abeer Alwan|AUTHOR Abeer Alwan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210318.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-5-4|PAPER Tue-E-V-5-4 — Robust Continuous On-Device Personalization for Automatic Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Robust Continuous On-Device Personalization for Automatic Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Khe Chai Sim|AUTHOR Khe Chai Sim]], [[Angad Chandorkar|AUTHOR Angad Chandorkar]], [[Fan Gao|AUTHOR Fan Gao]], [[Mason Chua|AUTHOR Mason Chua]], [[Tsendsuren Munkhdalai|AUTHOR Tsendsuren Munkhdalai]], [[Françoise Beaufays|AUTHOR Françoise Beaufays]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210467.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-5-5|PAPER Tue-E-V-5-5 — Speaker Normalization Using Joint Variational Autoencoder]]</div>|<div class="cpsessionviewpapertitle">Speaker Normalization Using Joint Variational Autoencoder</div><div class="cpsessionviewpaperauthor">[[Shashi Kumar|AUTHOR Shashi Kumar]], [[Shakti P. Rath|AUTHOR Shakti P. Rath]], [[Abhishek Pandey|AUTHOR Abhishek Pandey]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211104.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-5-6|PAPER Tue-E-V-5-6 — The TAL System for the INTERSPEECH2021 Shared Task on Automatic Speech Recognition for Non-Native Childrens Speech]]</div>|<div class="cpsessionviewpapertitle">The TAL System for the INTERSPEECH2021 Shared Task on Automatic Speech Recognition for Non-Native Childrens Speech</div><div class="cpsessionviewpaperauthor">[[Gaopeng Xu|AUTHOR Gaopeng Xu]], [[Song Yang|AUTHOR Song Yang]], [[Lu Ma|AUTHOR Lu Ma]], [[Chengfei Li|AUTHOR Chengfei Li]], [[Zhongqin Wu|AUTHOR Zhongqin Wu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211679.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-5-7|PAPER Tue-E-V-5-7 — On-the-Fly Aligned Data Augmentation for Sequence-to-Sequence ASR]]</div>|<div class="cpsessionviewpapertitle">On-the-Fly Aligned Data Augmentation for Sequence-to-Sequence ASR</div><div class="cpsessionviewpaperauthor">[[Tsz Kin Lam|AUTHOR Tsz Kin Lam]], [[Mayumi Ohta|AUTHOR Mayumi Ohta]], [[Shigehiko Schamoni|AUTHOR Shigehiko Schamoni]], [[Stefan Riezler|AUTHOR Stefan Riezler]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211843.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-5-8|PAPER Tue-E-V-5-8 — Zero-Shot Cross-Lingual Phonetic Recognition with External Language Embedding]]</div>|<div class="cpsessionviewpapertitle">Zero-Shot Cross-Lingual Phonetic Recognition with External Language Embedding</div><div class="cpsessionviewpaperauthor">[[Heting Gao|AUTHOR Heting Gao]], [[Junrui Ni|AUTHOR Junrui Ni]], [[Yang Zhang|AUTHOR Yang Zhang]], [[Kaizhi Qian|AUTHOR Kaizhi Qian]], [[Shiyu Chang|AUTHOR Shiyu Chang]], [[Mark Hasegawa-Johnson|AUTHOR Mark Hasegawa-Johnson]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211884.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-5-9|PAPER Tue-E-V-5-9 — Rapid Speaker Adaptation for Conformer Transducer: Attention and Bias Are All You Need]]</div>|<div class="cpsessionviewpapertitle">Rapid Speaker Adaptation for Conformer Transducer: Attention and Bias Are All You Need</div><div class="cpsessionviewpaperauthor">[[Yan Huang|AUTHOR Yan Huang]], [[Guoli Ye|AUTHOR Guoli Ye]], [[Jinyu Li|AUTHOR Jinyu Li]], [[Yifan Gong|AUTHOR Yifan Gong]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211888.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-5-10|PAPER Tue-E-V-5-10 — Best of Both Worlds: Robust Accented Speech Recognition with Adversarial Transfer Learning]]</div>|<div class="cpsessionviewpapertitle">Best of Both Worlds: Robust Accented Speech Recognition with Adversarial Transfer Learning</div><div class="cpsessionviewpaperauthor">[[Nilaksh Das|AUTHOR Nilaksh Das]], [[Sravan Bodapati|AUTHOR Sravan Bodapati]], [[Monica Sunkara|AUTHOR Monica Sunkara]], [[Sundararajan Srinivasan|AUTHOR Sundararajan Srinivasan]], [[Duen Horng Chau|AUTHOR Duen Horng Chau]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212053.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-5-11|PAPER Tue-E-V-5-11 — Extending Pronunciation Dictionary with Automatically Detected Word Mispronunciations to Improve PAII’s System for Interspeech 2021 Non-Native Child English Close Track ASR Challenge]]</div>|<div class="cpsessionviewpapertitle">Extending Pronunciation Dictionary with Automatically Detected Word Mispronunciations to Improve PAII’s System for Interspeech 2021 Non-Native Child English Close Track ASR Challenge</div><div class="cpsessionviewpaperauthor">[[Wei Chu|AUTHOR Wei Chu]], [[Peng Chang|AUTHOR Peng Chang]], [[Jing Xiao|AUTHOR Jing Xiao]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|19:00–21:00, Tuesday 31 August 2021, (Virtual)|<|
|^Chairs: |^Tomi Kinnunen|
|^ |^Yannis Stylianou|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210137.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-6-1|PAPER Tue-E-V-6-1 — CVC: Contrastive Learning for Non-Parallel Voice Conversion]]</div>|<div class="cpsessionviewpapertitle">CVC: Contrastive Learning for Non-Parallel Voice Conversion</div><div class="cpsessionviewpaperauthor">[[Tingle Li|AUTHOR Tingle Li]], [[Yichen Liu|AUTHOR Yichen Liu]], [[Chenxu Hu|AUTHOR Chenxu Hu]], [[Hang Zhao|AUTHOR Hang Zhao]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210208.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-6-2|PAPER Tue-E-V-6-2 — A Preliminary Study of a Two-Stage Paradigm for Preserving Speaker Identity in Dysarthric Voice Conversion]]</div>|<div class="cpsessionviewpapertitle">A Preliminary Study of a Two-Stage Paradigm for Preserving Speaker Identity in Dysarthric Voice Conversion</div><div class="cpsessionviewpaperauthor">[[Wen-Chin Huang|AUTHOR Wen-Chin Huang]], [[Kazuhiro Kobayashi|AUTHOR Kazuhiro Kobayashi]], [[Yu-Huai Peng|AUTHOR Yu-Huai Peng]], [[Ching-Feng Liu|AUTHOR Ching-Feng Liu]], [[Yu Tsao|AUTHOR Yu Tsao]], [[Hsin-Min Wang|AUTHOR Hsin-Min Wang]], [[Tomoki Toda|AUTHOR Tomoki Toda]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210221.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-6-3|PAPER Tue-E-V-6-3 — One-Shot Voice Conversion with Speaker-Agnostic StarGAN]]</div>|<div class="cpsessionviewpapertitle">One-Shot Voice Conversion with Speaker-Agnostic StarGAN</div><div class="cpsessionviewpaperauthor">[[Sefik Emre Eskimez|AUTHOR Sefik Emre Eskimez]], [[Dimitrios Dimitriadis|AUTHOR Dimitrios Dimitriadis]], [[Kenichi Kumatani|AUTHOR Kenichi Kumatani]], [[Robert Gmyr|AUTHOR Robert Gmyr]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210244.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-6-4|PAPER Tue-E-V-6-4 — Fine-Tuning Pre-Trained Voice Conversion Model for Adding New Target Speakers with Limited Data]]</div>|<div class="cpsessionviewpapertitle">Fine-Tuning Pre-Trained Voice Conversion Model for Adding New Target Speakers with Limited Data</div><div class="cpsessionviewpaperauthor">[[Takeshi Koshizuka|AUTHOR Takeshi Koshizuka]], [[Hidefumi Ohmura|AUTHOR Hidefumi Ohmura]], [[Kouichi Katsurada|AUTHOR Kouichi Katsurada]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210283.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-6-5|PAPER Tue-E-V-6-5 — VQMIVC: Vector Quantization and Mutual Information-Based Unsupervised Speech Representation Disentanglement for One-Shot Voice Conversion]]</div>|<div class="cpsessionviewpapertitle">VQMIVC: Vector Quantization and Mutual Information-Based Unsupervised Speech Representation Disentanglement for One-Shot Voice Conversion</div><div class="cpsessionviewpaperauthor">[[Disong Wang|AUTHOR Disong Wang]], [[Liqun Deng|AUTHOR Liqun Deng]], [[Yu Ting Yeung|AUTHOR Yu Ting Yeung]], [[Xiao Chen|AUTHOR Xiao Chen]], [[Xunying Liu|AUTHOR Xunying Liu]], [[Helen Meng|AUTHOR Helen Meng]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210319.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-6-6|PAPER Tue-E-V-6-6 — StarGANv2-VC: A Diverse, Unsupervised, Non-Parallel Framework for Natural-Sounding Voice Conversion]]</div>|<div class="cpsessionviewpapertitle">StarGANv2-VC: A Diverse, Unsupervised, Non-Parallel Framework for Natural-Sounding Voice Conversion</div><div class="cpsessionviewpaperauthor">[[Yinghao Aaron Li|AUTHOR Yinghao Aaron Li]], [[Ali Zare|AUTHOR Ali Zare]], [[Nima Mesgarani|AUTHOR Nima Mesgarani]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210441.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-6-7|PAPER Tue-E-V-6-7 — Normalization Driven Zero-Shot Multi-Speaker Speech Synthesis]]</div>|<div class="cpsessionviewpapertitle">Normalization Driven Zero-Shot Multi-Speaker Speech Synthesis</div><div class="cpsessionviewpaperauthor">[[Neeraj Kumar|AUTHOR Neeraj Kumar]], [[Srishti Goel|AUTHOR Srishti Goel]], [[Ankur Narang|AUTHOR Ankur Narang]], [[Brejesh Lall|AUTHOR Brejesh Lall]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210492.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-6-8|PAPER Tue-E-V-6-8 — StarGAN-VC+ASR: StarGAN-Based Non-Parallel Voice Conversion Regularized by Automatic Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">StarGAN-VC+ASR: StarGAN-Based Non-Parallel Voice Conversion Regularized by Automatic Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Shoki Sakamoto|AUTHOR Shoki Sakamoto]], [[Akira Taniguchi|AUTHOR Akira Taniguchi]], [[Tadahiro Taniguchi|AUTHOR Tadahiro Taniguchi]], [[Hirokazu Kameoka|AUTHOR Hirokazu Kameoka]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210506.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-6-9|PAPER Tue-E-V-6-9 — Two-Pathway Style Embedding for Arbitrary Voice Conversion]]</div>|<div class="cpsessionviewpapertitle">Two-Pathway Style Embedding for Arbitrary Voice Conversion</div><div class="cpsessionviewpaperauthor">[[Xuexin Xu|AUTHOR Xuexin Xu]], [[Liang Shi|AUTHOR Liang Shi]], [[Jinhui Chen|AUTHOR Jinhui Chen]], [[Xunquan Chen|AUTHOR Xunquan Chen]], [[Jie Lian|AUTHOR Jie Lian]], [[Pingyuan Lin|AUTHOR Pingyuan Lin]], [[Zhihong Zhang|AUTHOR Zhihong Zhang]], [[Edwin R. Hancock|AUTHOR Edwin R. Hancock]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210557.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-6-10|PAPER Tue-E-V-6-10 — Non-Parallel Any-to-Many Voice Conversion by Replacing Speaker Statistics]]</div>|<div class="cpsessionviewpapertitle">Non-Parallel Any-to-Many Voice Conversion by Replacing Speaker Statistics</div><div class="cpsessionviewpaperauthor">[[Yufei Liu|AUTHOR Yufei Liu]], [[Chengzhu Yu|AUTHOR Chengzhu Yu]], [[Wang Shuai|AUTHOR Wang Shuai]], [[Zhenchuan Yang|AUTHOR Zhenchuan Yang]], [[Yang Chao|AUTHOR Yang Chao]], [[Weibin Zhang|AUTHOR Weibin Zhang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210687.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-6-11|PAPER Tue-E-V-6-11 — Cross-Lingual Voice Conversion with a Cycle Consistency Loss on Linguistic Representation]]</div>|<div class="cpsessionviewpapertitle">Cross-Lingual Voice Conversion with a Cycle Consistency Loss on Linguistic Representation</div><div class="cpsessionviewpaperauthor">[[Yi Zhou|AUTHOR Yi Zhou]], [[Xiaohai Tian|AUTHOR Xiaohai Tian]], [[Zhizheng Wu|AUTHOR Zhizheng Wu]], [[Haizhou Li|AUTHOR Haizhou Li]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212132.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-E-V-6-12|PAPER Tue-E-V-6-12 — Improving Robustness of One-Shot Voice Conversion with Deep Discriminative Speaker Encoder]]</div>|<div class="cpsessionviewpapertitle">Improving Robustness of One-Shot Voice Conversion with Deep Discriminative Speaker Encoder</div><div class="cpsessionviewpaperauthor">[[Hongqiang Du|AUTHOR Hongqiang Du]], [[Lei Xie|AUTHOR Lei Xie]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|17:00–18:00, Tuesday 31 August 2021, Room A+B|<|
|^Chair: |^TBD|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> |^<div class="cpsessionviewpapercode">[[Tue-Keynote|PAPER Tue-Keynote — Forty Years of Speech and Language Processing: From Bayes Decision Rule to Deep Learning]]</div>|<div class="cpsessionviewpapertitle">Forty Years of Speech and Language Processing: From Bayes Decision Rule to Deep Learning</div><div class="cpsessionviewpaperauthor">[[Hermann Ney|AUTHOR Hermann Ney]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|09:30–11:30, Tuesday 31 August 2021, Room A+B|<|
|^Chairs: |^Jindřich Matoušek|
|^ |^Michael Pucher|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210473.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-O-1-1|PAPER Tue-M-O-1-1 — Conversion of Airborne to Bone-Conducted Speech with Deep Neural Networks]]</div>|<div class="cpsessionviewpapertitle">Conversion of Airborne to Bone-Conducted Speech with Deep Neural Networks</div><div class="cpsessionviewpaperauthor">[[Michael Pucher|AUTHOR Michael Pucher]], [[Thomas Woltron|AUTHOR Thomas Woltron]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210546.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-O-1-2|PAPER Tue-M-O-1-2 — T5G2P: Using Text-to-Text Transfer Transformer for Grapheme-to-Phoneme Conversion]]</div>|<div class="cpsessionviewpapertitle">T5G2P: Using Text-to-Text Transfer Transformer for Grapheme-to-Phoneme Conversion</div><div class="cpsessionviewpaperauthor">[[Markéta Řezáčková|AUTHOR Markéta Řezáčková]], [[Jan Švec|AUTHOR Jan Švec]], [[Daniel Tihelka|AUTHOR Daniel Tihelka]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211547.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-O-1-3|PAPER Tue-M-O-1-3 — Evaluating the Extrapolation Capabilities of Neural Vocoders to Extreme Pitch Values]]</div>|<div class="cpsessionviewpapertitle">Evaluating the Extrapolation Capabilities of Neural Vocoders to Extreme Pitch Values</div><div class="cpsessionviewpaperauthor">[[Olivier Perrotin|AUTHOR Olivier Perrotin]], [[Hussein El Amouri|AUTHOR Hussein El Amouri]], [[Gérard Bailly|AUTHOR Gérard Bailly]], [[Thomas Hueber|AUTHOR Thomas Hueber]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211565.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-O-1-4|PAPER Tue-M-O-1-4 — A Systematic Review and Analysis of Multilingual Data Strategies in Text-to-Speech for Low-Resource Languages]]</div>|<div class="cpsessionviewpapertitle">A Systematic Review and Analysis of Multilingual Data Strategies in Text-to-Speech for Low-Resource Languages</div><div class="cpsessionviewpaperauthor">[[Phat Do|AUTHOR Phat Do]], [[Matt Coler|AUTHOR Matt Coler]], [[Jelske Dijkstra|AUTHOR Jelske Dijkstra]], [[Esther Klabbers|AUTHOR Esther Klabbers]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|09:30–11:30, Tuesday 31 August 2021, Room C|<|
|^Chairs: |^Rob van Son|
|^ |^Gábor Gosztolya|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211581.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-O-2-1|PAPER Tue-M-O-2-1 — Acoustic Indicators of Speech Motor Coordination in Adults With and Without Traumatic Brain Injury]]</div>|<div class="cpsessionviewpapertitle">Acoustic Indicators of Speech Motor Coordination in Adults With and Without Traumatic Brain Injury</div><div class="cpsessionviewpaperauthor">[[Tanya Talkar|AUTHOR Tanya Talkar]], [[Nancy Pearl Solomon|AUTHOR Nancy Pearl Solomon]], [[Douglas S. Brungart|AUTHOR Douglas S. Brungart]], [[Stefanie E. Kuchinsky|AUTHOR Stefanie E. Kuchinsky]], [[Megan M. Eitel|AUTHOR Megan M. Eitel]], [[Sara M. Lippa|AUTHOR Sara M. Lippa]], [[Tracey A. Brickell|AUTHOR Tracey A. Brickell]], [[Louis M. French|AUTHOR Louis M. French]], [[Rael T. Lange|AUTHOR Rael T. Lange]], [[Thomas F. Quatieri|AUTHOR Thomas F. Quatieri]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211084.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-O-2-2|PAPER Tue-M-O-2-2 — On Modeling Glottal Source Information for Phonation Assessment in Parkinson’s Disease]]</div>|<div class="cpsessionviewpapertitle">On Modeling Glottal Source Information for Phonation Assessment in Parkinson’s Disease</div><div class="cpsessionviewpaperauthor">[[J.C. Vásquez-Correa|AUTHOR J.C. Vásquez-Correa]], [[Julian Fritsch|AUTHOR Julian Fritsch]], [[J.R. Orozco-Arroyave|AUTHOR J.R. Orozco-Arroyave]], [[Elmar Nöth|AUTHOR Elmar Nöth]], [[Mathew Magimai-Doss|AUTHOR Mathew Magimai-Doss]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210223.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-O-2-3|PAPER Tue-M-O-2-3 — Distortion of Voiced Obstruents for Differential Diagnosis Between Parkinson’s Disease and Multiple System Atrophy]]</div>|<div class="cpsessionviewpapertitle">Distortion of Voiced Obstruents for Differential Diagnosis Between Parkinson’s Disease and Multiple System Atrophy</div><div class="cpsessionviewpaperauthor">[[Khalid Daoudi|AUTHOR Khalid Daoudi]], [[Biswajit Das|AUTHOR Biswajit Das]], [[Solange Milhé de Saint Victor|AUTHOR Solange Milhé de Saint Victor]], [[Alexandra Foubert-Samier|AUTHOR Alexandra Foubert-Samier]], [[Anne Pavy-Le Traon|AUTHOR Anne Pavy-Le Traon]], [[Olivier Rascol|AUTHOR Olivier Rascol]], [[Wassilios G. Meissner|AUTHOR Wassilios G. Meissner]], [[Virginie Woisard|AUTHOR Virginie Woisard]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211720.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-O-2-4|PAPER Tue-M-O-2-4 — A Study into Pre-Training Strategies for Spoken Language Understanding on Dysarthric Speech]]</div>|<div class="cpsessionviewpapertitle">A Study into Pre-Training Strategies for Spoken Language Understanding on Dysarthric Speech</div><div class="cpsessionviewpaperauthor">[[Pu Wang|AUTHOR Pu Wang]], [[Bagher BabaAli|AUTHOR Bagher BabaAli]], [[Hugo Van hamme|AUTHOR Hugo Van hamme]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210549.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-O-2-5|PAPER Tue-M-O-2-5 — EasyCall Corpus: A Dysarthric Speech Dataset]]</div>|<div class="cpsessionviewpapertitle">EasyCall Corpus: A Dysarthric Speech Dataset</div><div class="cpsessionviewpaperauthor">[[Rosanna Turrisi|AUTHOR Rosanna Turrisi]], [[Arianna Braccia|AUTHOR Arianna Braccia]], [[Marco Emanuele|AUTHOR Marco Emanuele]], [[Simone Giulietti|AUTHOR Simone Giulietti]], [[Maura Pugliatti|AUTHOR Maura Pugliatti]], [[Mariachiara Sensi|AUTHOR Mariachiara Sensi]], [[Luciano Fadiga|AUTHOR Luciano Fadiga]], [[Leonardo Badino|AUTHOR Leonardo Badino]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|09:30–11:30, Tuesday 31 August 2021, Room D|<|
|^Chairs: |^Torbjørn Svendsen|
|^ |^Benjamin Elie|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210256.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-O-3-1|PAPER Tue-M-O-3-1 — A Benchmark of Dynamical Variational Autoencoders Applied to Speech Spectrogram Modeling]]</div>|<div class="cpsessionviewpapertitle">A Benchmark of Dynamical Variational Autoencoders Applied to Speech Spectrogram Modeling</div><div class="cpsessionviewpaperauthor">[[Xiaoyu Bie|AUTHOR Xiaoyu Bie]], [[Laurent Girin|AUTHOR Laurent Girin]], [[Simon Leglaive|AUTHOR Simon Leglaive]], [[Thomas Hueber|AUTHOR Thomas Hueber]], [[Xavier Alameda-Pineda|AUTHOR Xavier Alameda-Pineda]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210645.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-O-3-2|PAPER Tue-M-O-3-2 — Fricative Phoneme Detection Using Deep Neural Networks and its Comparison to Traditional Methods]]</div>|<div class="cpsessionviewpapertitle">Fricative Phoneme Detection Using Deep Neural Networks and its Comparison to Traditional Methods</div><div class="cpsessionviewpaperauthor">[[Metehan Yurt|AUTHOR Metehan Yurt]], [[Pavan Kantharaju|AUTHOR Pavan Kantharaju]], [[Sascha Disch|AUTHOR Sascha Disch]], [[Andreas Niedermeier|AUTHOR Andreas Niedermeier]], [[Alberto N. Escalante-B.|AUTHOR Alberto N. Escalante-B.]], [[Veniamin I. Morgenshtern|AUTHOR Veniamin I. Morgenshtern]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211598.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-O-3-3|PAPER Tue-M-O-3-3 — Identification of F1 and F2 in Speech Using Modified Zero Frequency Filtering]]</div>|<div class="cpsessionviewpapertitle">Identification of F1 and F2 in Speech Using Modified Zero Frequency Filtering</div><div class="cpsessionviewpaperauthor">[[RaviShankar Prasad|AUTHOR RaviShankar Prasad]], [[Mathew Magimai-Doss|AUTHOR Mathew Magimai-Doss]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211676.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-O-3-4|PAPER Tue-M-O-3-4 — Phoneme-to-Audio Alignment with Recurrent Neural Networks for Speaking and Singing Voice]]</div>|<div class="cpsessionviewpapertitle">Phoneme-to-Audio Alignment with Recurrent Neural Networks for Speaking and Singing Voice</div><div class="cpsessionviewpaperauthor">[[Yann Teytaut|AUTHOR Yann Teytaut]], [[Axel Roebel|AUTHOR Axel Roebel]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|09:30–11:30, Tuesday 31 August 2021, Room Lacina|<|
|^Chairs: |^Björn Schuller|
|^ |^Anton Batliner|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210019.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-SS-1-1|PAPER Tue-M-SS-1-1 — The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates]]</div>|<div class="cpsessionviewpapertitle">The INTERSPEECH 2021 Computational Paralinguistics Challenge: COVID-19 Cough, COVID-19 Speech, Escalation & Primates</div><div class="cpsessionviewpaperauthor">[[Björn W. Schuller|AUTHOR Björn W. Schuller]], [[Anton Batliner|AUTHOR Anton Batliner]], [[Christian Bergler|AUTHOR Christian Bergler]], [[Cecilia Mascolo|AUTHOR Cecilia Mascolo]], [[Jing Han|AUTHOR Jing Han]], [[Iulia Lefter|AUTHOR Iulia Lefter]], [[Heysem Kaya|AUTHOR Heysem Kaya]], [[Shahin Amiriparian|AUTHOR Shahin Amiriparian]], [[Alice Baird|AUTHOR Alice Baird]], [[Lukas Stappen|AUTHOR Lukas Stappen]], [[Sandra Ottl|AUTHOR Sandra Ottl]], [[Maurice Gerczuk|AUTHOR Maurice Gerczuk]], [[Panagiotis Tzirakis|AUTHOR Panagiotis Tzirakis]], [[Chloë Brown|AUTHOR Chloë Brown]], [[Jagmohan Chauhan|AUTHOR Jagmohan Chauhan]], [[Andreas Grammenos|AUTHOR Andreas Grammenos]], [[Apinan Hasthanasombat|AUTHOR Apinan Hasthanasombat]], [[Dimitris Spathis|AUTHOR Dimitris Spathis]], [[Tong Xia|AUTHOR Tong Xia]], [[Pietro Cicuta|AUTHOR Pietro Cicuta]], [[Leon J.M. Rothkrantz|AUTHOR Leon J.M. Rothkrantz]], [[Joeri A. Zwerts|AUTHOR Joeri A. Zwerts]], [[Jelle Treep|AUTHOR Jelle Treep]], [[Casper S. Kaandorp|AUTHOR Casper S. Kaandorp]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211702.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-SS-1-2|PAPER Tue-M-SS-1-2 — Transfer Learning-Based Cough Representations for Automatic Detection of COVID-19]]</div>|<div class="cpsessionviewpapertitle">Transfer Learning-Based Cough Representations for Automatic Detection of COVID-19</div><div class="cpsessionviewpaperauthor">[[Rubén Solera-Ureña|AUTHOR Rubén Solera-Ureña]], [[Catarina Botelho|AUTHOR Catarina Botelho]], [[Francisco Teixeira|AUTHOR Francisco Teixeira]], [[Thomas Rolland|AUTHOR Thomas Rolland]], [[Alberto Abad|AUTHOR Alberto Abad]], [[Isabel Trancoso|AUTHOR Isabel Trancoso]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211488.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-SS-1-3|PAPER Tue-M-SS-1-3 — The Phonetic Footprint of Covid-19?]]</div>|<div class="cpsessionviewpapertitle">The Phonetic Footprint of Covid-19?</div><div class="cpsessionviewpaperauthor">[[P. Klumpp|AUTHOR P. Klumpp]], [[T. Bocklet|AUTHOR T. Bocklet]], [[T. Arias-Vergara|AUTHOR T. Arias-Vergara]], [[J.C. Vásquez-Correa|AUTHOR J.C. Vásquez-Correa]], [[P.A. Pérez-Toro|AUTHOR P.A. Pérez-Toro]], [[S.P. Bayerl|AUTHOR S.P. Bayerl]], [[J.R. Orozco-Arroyave|AUTHOR J.R. Orozco-Arroyave]], [[Elmar Nöth|AUTHOR Elmar Nöth]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211798.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-SS-1-4|PAPER Tue-M-SS-1-4 — Transfer Learning and Data Augmentation Techniques to the COVID-19 Identification Tasks in ComParE 2021]]</div>|<div class="cpsessionviewpapertitle">Transfer Learning and Data Augmentation Techniques to the COVID-19 Identification Tasks in ComParE 2021</div><div class="cpsessionviewpaperauthor">[[Edresson Casanova|AUTHOR Edresson Casanova]], [[Arnaldo Candido Jr.|AUTHOR Arnaldo Candido Jr.]], [[Ricardo Corso Fernandes Jr.|AUTHOR Ricardo Corso Fernandes Jr.]], [[Marcelo Finger|AUTHOR Marcelo Finger]], [[Lucas Rafael Stefanel Gris|AUTHOR Lucas Rafael Stefanel Gris]], [[Moacir Antonelli Ponti|AUTHOR Moacir Antonelli Ponti]], [[Daniel Peixoto Pinto da Silva|AUTHOR Daniel Peixoto Pinto da Silva]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210273.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-SS-1-5|PAPER Tue-M-SS-1-5 — Visual Transformers for Primates Classification and Covid Detection]]</div>|<div class="cpsessionviewpapertitle">Visual Transformers for Primates Classification and Covid Detection</div><div class="cpsessionviewpaperauthor">[[Steffen Illium|AUTHOR Steffen Illium]], [[Robert Müller|AUTHOR Robert Müller]], [[Andreas Sedlmeier|AUTHOR Andreas Sedlmeier]], [[Claudia-Linnhoff Popien|AUTHOR Claudia-Linnhoff Popien]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211911.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-SS-1-6|PAPER Tue-M-SS-1-6 — Deep-Learning-Based Central African Primate Species Classification with MixUp and SpecAugment]]</div>|<div class="cpsessionviewpapertitle">Deep-Learning-Based Central African Primate Species Classification with MixUp and SpecAugment</div><div class="cpsessionviewpaperauthor">[[Thomas Pellegrini|AUTHOR Thomas Pellegrini]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211274.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-SS-1-7|PAPER Tue-M-SS-1-7 — A Deep and Recurrent Architecture for Primate Vocalization Classification]]</div>|<div class="cpsessionviewpapertitle">A Deep and Recurrent Architecture for Primate Vocalization Classification</div><div class="cpsessionviewpaperauthor">[[Robert Müller|AUTHOR Robert Müller]], [[Steffen Illium|AUTHOR Steffen Illium]], [[Claudia Linnhoff-Popien|AUTHOR Claudia Linnhoff-Popien]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210154.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-SS-1-8|PAPER Tue-M-SS-1-8 — Introducing a Central African Primate Vocalisation Dataset for Automated Species Classification]]</div>|<div class="cpsessionviewpapertitle">Introducing a Central African Primate Vocalisation Dataset for Automated Species Classification</div><div class="cpsessionviewpaperauthor">[[Joeri A. Zwerts|AUTHOR Joeri A. Zwerts]], [[Jelle Treep|AUTHOR Jelle Treep]], [[Casper S. Kaandorp|AUTHOR Casper S. Kaandorp]], [[Floor Meewis|AUTHOR Floor Meewis]], [[Amparo C. Koot|AUTHOR Amparo C. Koot]], [[Heysem Kaya|AUTHOR Heysem Kaya]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211969.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-SS-1-9|PAPER Tue-M-SS-1-9 — Multi-Attentive Detection of the Spider Monkey Whinny in the (Actual) Wild]]</div>|<div class="cpsessionviewpapertitle">Multi-Attentive Detection of the Spider Monkey Whinny in the (Actual) Wild</div><div class="cpsessionviewpaperauthor">[[Georgios Rizos|AUTHOR Georgios Rizos]], [[Jenna Lawson|AUTHOR Jenna Lawson]], [[Zhuoda Han|AUTHOR Zhuoda Han]], [[Duncan Butler|AUTHOR Duncan Butler]], [[James Rosindell|AUTHOR James Rosindell]], [[Krystian Mikolajczyk|AUTHOR Krystian Mikolajczyk]], [[Cristina Banks-Leite|AUTHOR Cristina Banks-Leite]], [[Björn W. Schuller|AUTHOR Björn W. Schuller]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211173.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-SS-1-10|PAPER Tue-M-SS-1-10 — Identifying Conflict Escalation and Primates by Using Ensemble X-Vectors and Fisher Vector Features]]</div>|<div class="cpsessionviewpapertitle">Identifying Conflict Escalation and Primates by Using Ensemble X-Vectors and Fisher Vector Features</div><div class="cpsessionviewpaperauthor">[[José Vicente Egas-López|AUTHOR José Vicente Egas-López]], [[Mercedes Vetráb|AUTHOR Mercedes Vetráb]], [[László Tóth|AUTHOR László Tóth]], [[Gábor Gosztolya|AUTHOR Gábor Gosztolya]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211821.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-SS-1-11|PAPER Tue-M-SS-1-11 — Ensemble-Within-Ensemble Classification for Escalation Prediction from Speech]]</div>|<div class="cpsessionviewpapertitle">Ensemble-Within-Ensemble Classification for Escalation Prediction from Speech</div><div class="cpsessionviewpaperauthor">[[Oxana Verkholyak|AUTHOR Oxana Verkholyak]], [[Denis Dresvyanskiy|AUTHOR Denis Dresvyanskiy]], [[Anastasia Dvoynikova|AUTHOR Anastasia Dvoynikova]], [[Denis Kotov|AUTHOR Denis Kotov]], [[Elena Ryumina|AUTHOR Elena Ryumina]], [[Alena Velichko|AUTHOR Alena Velichko]], [[Danila Mamontov|AUTHOR Danila Mamontov]], [[Wolfgang Minker|AUTHOR Wolfgang Minker]], [[Alexey Karpov|AUTHOR Alexey Karpov]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211587.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-SS-1-12|PAPER Tue-M-SS-1-12 — Analysis by Synthesis: Using an Expressive TTS Model as Feature Extractor for Paralinguistic Speech Classification]]</div>|<div class="cpsessionviewpapertitle">Analysis by Synthesis: Using an Expressive TTS Model as Feature Extractor for Paralinguistic Speech Classification</div><div class="cpsessionviewpaperauthor">[[Dominik Schiller|AUTHOR Dominik Schiller]], [[Silvan Mertes|AUTHOR Silvan Mertes]], [[Pol van Rijn|AUTHOR Pol van Rijn]], [[Elisabeth André|AUTHOR Elisabeth André]]</div>|
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> ||<div class="cpsessionviewpapertitle">Discussion</div><div class="cpsessionviewpaperauthor"></div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|09:30–11:30, Tuesday 31 August 2021, (Virtual)|<|
|^Chairs: |^Hervé Bredin|
|^ |^Niko Brummer|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210065.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-1-1|PAPER Tue-M-V-1-1 — Adaptive Convolutional Neural Network for Text-Independent Speaker Recognition]]</div>|<div class="cpsessionviewpapertitle">Adaptive Convolutional Neural Network for Text-Independent Speaker Recognition</div><div class="cpsessionviewpaperauthor">[[Seong-Hu Kim|AUTHOR Seong-Hu Kim]], [[Yong-Hwa Park|AUTHOR Yong-Hwa Park]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210111.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-1-2|PAPER Tue-M-V-1-2 — Bidirectional Multiscale Feature Aggregation for Speaker Verification]]</div>|<div class="cpsessionviewpapertitle">Bidirectional Multiscale Feature Aggregation for Speaker Verification</div><div class="cpsessionviewpaperauthor">[[Jiajun Qi|AUTHOR Jiajun Qi]], [[Wu Guo|AUTHOR Wu Guo]], [[Bin Gu|AUTHOR Bin Gu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210356.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-1-3|PAPER Tue-M-V-1-3 — Improving Time Delay Neural Network Based Speaker Recognition with Convolutional Block and Feature Aggregation Methods]]</div>|<div class="cpsessionviewpapertitle">Improving Time Delay Neural Network Based Speaker Recognition with Convolutional Block and Feature Aggregation Methods</div><div class="cpsessionviewpaperauthor">[[Yu-Jia Zhang|AUTHOR Yu-Jia Zhang]], [[Yih-Wen Wang|AUTHOR Yih-Wen Wang]], [[Chia-Ping Chen|AUTHOR Chia-Ping Chen]], [[Chung-Li Lu|AUTHOR Chung-Li Lu]], [[Bo-Cheng Chan|AUTHOR Bo-Cheng Chan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210559.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-1-4|PAPER Tue-M-V-1-4 — Improving Deep CNN Architectures with Variable-Length Training Samples for Text-Independent Speaker Verification]]</div>|<div class="cpsessionviewpapertitle">Improving Deep CNN Architectures with Variable-Length Training Samples for Text-Independent Speaker Verification</div><div class="cpsessionviewpaperauthor">[[Yanfeng Wu|AUTHOR Yanfeng Wu]], [[Junan Zhao|AUTHOR Junan Zhao]], [[Chenkai Guo|AUTHOR Chenkai Guo]], [[Jing Xu|AUTHOR Jing Xu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210600.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-1-5|PAPER Tue-M-V-1-5 — Binary Neural Network for Speaker Verification]]</div>|<div class="cpsessionviewpapertitle">Binary Neural Network for Speaker Verification</div><div class="cpsessionviewpaperauthor">[[Tinglong Zhu|AUTHOR Tinglong Zhu]], [[Xiaoyi Qin|AUTHOR Xiaoyi Qin]], [[Ming Li|AUTHOR Ming Li]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211436.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-1-6|PAPER Tue-M-V-1-6 — Mutual Information Enhanced Training for Speaker Embedding]]</div>|<div class="cpsessionviewpapertitle">Mutual Information Enhanced Training for Speaker Embedding</div><div class="cpsessionviewpaperauthor">[[Youzhi Tu|AUTHOR Youzhi Tu]], [[Man-Wai Mak|AUTHOR Man-Wai Mak]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211707.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-1-7|PAPER Tue-M-V-1-7 — Y-Vector: Multiscale Waveform Encoder for Speaker Embedding]]</div>|<div class="cpsessionviewpapertitle">Y-Vector: Multiscale Waveform Encoder for Speaker Embedding</div><div class="cpsessionviewpaperauthor">[[Ge Zhu|AUTHOR Ge Zhu]], [[Fei Jiang|AUTHOR Fei Jiang]], [[Zhiyao Duan|AUTHOR Zhiyao Duan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212137.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-1-8|PAPER Tue-M-V-1-8 — Phoneme-Aware and Channel-Wise Attentive Learning for Text Dependent Speaker Verification]]</div>|<div class="cpsessionviewpapertitle">Phoneme-Aware and Channel-Wise Attentive Learning for Text Dependent Speaker Verification</div><div class="cpsessionviewpaperauthor">[[Yan Liu|AUTHOR Yan Liu]], [[Zheng Li|AUTHOR Zheng Li]], [[Lin Li|AUTHOR Lin Li]], [[Qingyang Hong|AUTHOR Qingyang Hong]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212210.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-1-9|PAPER Tue-M-V-1-9 — Serialized Multi-Layer Multi-Head Attention for Neural Speaker Embedding]]</div>|<div class="cpsessionviewpapertitle">Serialized Multi-Layer Multi-Head Attention for Neural Speaker Embedding</div><div class="cpsessionviewpaperauthor">[[Hongning Zhu|AUTHOR Hongning Zhu]], [[Kong Aik Lee|AUTHOR Kong Aik Lee]], [[Haizhou Li|AUTHOR Haizhou Li]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|09:30–11:30, Tuesday 31 August 2021, (Virtual)|<|
|^Chairs: |^Heiga Zen|
|^ |^Cassia Valentini|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210852.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-2-1|PAPER Tue-M-V-2-1 — TacoLPCNet: Fast and Stable TTS by Conditioning LPCNet on Mel Spectrogram Predictions]]</div>|<div class="cpsessionviewpapertitle">TacoLPCNet: Fast and Stable TTS by Conditioning LPCNet on Mel Spectrogram Predictions</div><div class="cpsessionviewpaperauthor">[[Cheng Gong|AUTHOR Cheng Gong]], [[Longbiao Wang|AUTHOR Longbiao Wang]], [[Ju Zhang|AUTHOR Ju Zhang]], [[Shaotong Guo|AUTHOR Shaotong Guo]], [[Yuguang Wang|AUTHOR Yuguang Wang]], [[Jianwu Dang|AUTHOR Jianwu Dang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210866.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-2-2|PAPER Tue-M-V-2-2 — FastPitchFormant: Source-Filter Based Decomposed Modeling for Speech Synthesis]]</div>|<div class="cpsessionviewpapertitle">FastPitchFormant: Source-Filter Based Decomposed Modeling for Speech Synthesis</div><div class="cpsessionviewpaperauthor">[[Taejun Bak|AUTHOR Taejun Bak]], [[Jae-Sung Bae|AUTHOR Jae-Sung Bae]], [[Hanbin Bae|AUTHOR Hanbin Bae]], [[Young-Ik Kim|AUTHOR Young-Ik Kim]], [[Hoon-Young Cho|AUTHOR Hoon-Young Cho]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210896.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-2-3|PAPER Tue-M-V-2-3 — Sequence-to-Sequence Learning for Deep Gaussian Process Based Speech Synthesis Using Self-Attention GP Layer]]</div>|<div class="cpsessionviewpapertitle">Sequence-to-Sequence Learning for Deep Gaussian Process Based Speech Synthesis Using Self-Attention GP Layer</div><div class="cpsessionviewpaperauthor">[[Taiki Nakamura|AUTHOR Taiki Nakamura]], [[Tomoki Koriyama|AUTHOR Tomoki Koriyama]], [[Hiroshi Saruwatari|AUTHOR Hiroshi Saruwatari]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210914.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-2-4|PAPER Tue-M-V-2-4 — Phonetic and Prosodic Information Estimation from Texts for Genuine Japanese End-to-End Text-to-Speech]]</div>|<div class="cpsessionviewpapertitle">Phonetic and Prosodic Information Estimation from Texts for Genuine Japanese End-to-End Text-to-Speech</div><div class="cpsessionviewpaperauthor">[[Naoto Kakegawa|AUTHOR Naoto Kakegawa]], [[Sunao Hara|AUTHOR Sunao Hara]], [[Masanobu Abe|AUTHOR Masanobu Abe]], [[Yusuke Ijima|AUTHOR Yusuke Ijima]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211011.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-2-5|PAPER Tue-M-V-2-5 — Information Sieve: Content Leakage Reduction in End-to-End Prosody Transfer for Expressive Speech Synthesis]]</div>|<div class="cpsessionviewpapertitle">Information Sieve: Content Leakage Reduction in End-to-End Prosody Transfer for Expressive Speech Synthesis</div><div class="cpsessionviewpaperauthor">[[Xudong Dai|AUTHOR Xudong Dai]], [[Cheng Gong|AUTHOR Cheng Gong]], [[Longbiao Wang|AUTHOR Longbiao Wang]], [[Kaili Zhang|AUTHOR Kaili Zhang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211405.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-2-6|PAPER Tue-M-V-2-6 — Deliberation-Based Multi-Pass Speech Synthesis]]</div>|<div class="cpsessionviewpapertitle">Deliberation-Based Multi-Pass Speech Synthesis</div><div class="cpsessionviewpaperauthor">[[Qingyun Dou|AUTHOR Qingyun Dou]], [[Xixin Wu|AUTHOR Xixin Wu]], [[Moquan Wan|AUTHOR Moquan Wan]], [[Yiting Lu|AUTHOR Yiting Lu]], [[Mark J.F. Gales|AUTHOR Mark J.F. Gales]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211461.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-2-7|PAPER Tue-M-V-2-7 — Parallel Tacotron 2: A Non-Autoregressive Neural TTS Model with Differentiable Duration Modeling]]</div>|<div class="cpsessionviewpapertitle">Parallel Tacotron 2: A Non-Autoregressive Neural TTS Model with Differentiable Duration Modeling</div><div class="cpsessionviewpaperauthor">[[Isaac Elias|AUTHOR Isaac Elias]], [[Heiga Zen|AUTHOR Heiga Zen]], [[Jonathan Shen|AUTHOR Jonathan Shen]], [[Yu Zhang|AUTHOR Yu Zhang]], [[Ye Jia|AUTHOR Ye Jia]], [[R.J. Skerry-Ryan|AUTHOR R.J. Skerry-Ryan]], [[Yonghui Wu|AUTHOR Yonghui Wu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211655.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-2-8|PAPER Tue-M-V-2-8 — Transformer-Based Acoustic Modeling for Streaming Speech Synthesis]]</div>|<div class="cpsessionviewpapertitle">Transformer-Based Acoustic Modeling for Streaming Speech Synthesis</div><div class="cpsessionviewpaperauthor">[[Chunyang Wu|AUTHOR Chunyang Wu]], [[Zhiping Xiu|AUTHOR Zhiping Xiu]], [[Yangyang Shi|AUTHOR Yangyang Shi]], [[Ozlem Kalinli|AUTHOR Ozlem Kalinli]], [[Christian Fuegen|AUTHOR Christian Fuegen]], [[Thilo Koehler|AUTHOR Thilo Koehler]], [[Qing He|AUTHOR Qing He]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211757.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-2-9|PAPER Tue-M-V-2-9 — PnG BERT: Augmented BERT on Phonemes and Graphemes for Neural TTS]]</div>|<div class="cpsessionviewpapertitle">PnG BERT: Augmented BERT on Phonemes and Graphemes for Neural TTS</div><div class="cpsessionviewpaperauthor">[[Ye Jia|AUTHOR Ye Jia]], [[Heiga Zen|AUTHOR Heiga Zen]], [[Jonathan Shen|AUTHOR Jonathan Shen]], [[Yu Zhang|AUTHOR Yu Zhang]], [[Yonghui Wu|AUTHOR Yonghui Wu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212100.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-2-10|PAPER Tue-M-V-2-10 — Speed up Training with Variable Length Inputs by Efficient Batching Strategies]]</div>|<div class="cpsessionviewpapertitle">Speed up Training with Variable Length Inputs by Efficient Batching Strategies</div><div class="cpsessionviewpaperauthor">[[Zhenhao Ge|AUTHOR Zhenhao Ge]], [[Lakshmish Kaushik|AUTHOR Lakshmish Kaushik]], [[Masanori Omote|AUTHOR Masanori Omote]], [[Saket Kumar|AUTHOR Saket Kumar]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|09:30–11:30, Tuesday 31 August 2021, (Virtual)|<|
|^Chairs: |^Sharon Gannot|
|^ |^Tao Zhang|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210010.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-3-1|PAPER Tue-M-V-3-1 — Funnel Deep Complex U-Net for Phase-Aware Speech Enhancement]]</div>|<div class="cpsessionviewpapertitle">Funnel Deep Complex U-Net for Phase-Aware Speech Enhancement</div><div class="cpsessionviewpaperauthor">[[Yuhang Sun|AUTHOR Yuhang Sun]], [[Linju Yang|AUTHOR Linju Yang]], [[Huifeng Zhu|AUTHOR Huifeng Zhu]], [[Jie Hao|AUTHOR Jie Hao]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210046.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-3-2|PAPER Tue-M-V-3-2 — Temporal Convolutional Network with Frequency Dimension Adaptive Attention for Speech Enhancement]]</div>|<div class="cpsessionviewpapertitle">Temporal Convolutional Network with Frequency Dimension Adaptive Attention for Speech Enhancement</div><div class="cpsessionviewpaperauthor">[[Qiquan Zhang|AUTHOR Qiquan Zhang]], [[Qi Song|AUTHOR Qi Song]], [[Aaron Nicolson|AUTHOR Aaron Nicolson]], [[Tian Lan|AUTHOR Tian Lan]], [[Haizhou Li|AUTHOR Haizhou Li]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210058.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-3-3|PAPER Tue-M-V-3-3 — Perceptual Contributions of Vowels and Consonant-Vowel Transitions in Understanding Time-Compressed Mandarin Sentences]]</div>|<div class="cpsessionviewpapertitle">Perceptual Contributions of Vowels and Consonant-Vowel Transitions in Understanding Time-Compressed Mandarin Sentences</div><div class="cpsessionviewpaperauthor">[[Changjie Pan|AUTHOR Changjie Pan]], [[Feng Yang|AUTHOR Feng Yang]], [[Fei Chen|AUTHOR Fei Chen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210150.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-3-4|PAPER Tue-M-V-3-4 — Transfer Learning for Speech Intelligibility Improvement in Noisy Environments]]</div>|<div class="cpsessionviewpapertitle">Transfer Learning for Speech Intelligibility Improvement in Noisy Environments</div><div class="cpsessionviewpaperauthor">[[Ritujoy Biswas|AUTHOR Ritujoy Biswas]], [[Karan Nathwani|AUTHOR Karan Nathwani]], [[Vinayak Abrol|AUTHOR Vinayak Abrol]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210174.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-3-5|PAPER Tue-M-V-3-5 — Comparison of Remote Experiments Using Crowdsourcing and Laboratory Experiments on Speech Intelligibility]]</div>|<div class="cpsessionviewpapertitle">Comparison of Remote Experiments Using Crowdsourcing and Laboratory Experiments on Speech Intelligibility</div><div class="cpsessionviewpaperauthor">[[Ayako Yamamoto|AUTHOR Ayako Yamamoto]], [[Toshio Irino|AUTHOR Toshio Irino]], [[Kenichi Arai|AUTHOR Kenichi Arai]], [[Shoko Araki|AUTHOR Shoko Araki]], [[Atsunori Ogawa|AUTHOR Atsunori Ogawa]], [[Keisuke Kinoshita|AUTHOR Keisuke Kinoshita]], [[Tomohiro Nakatani|AUTHOR Tomohiro Nakatani]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210238.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-3-6|PAPER Tue-M-V-3-6 — Know Your Enemy, Know Yourself: A Unified Two-Stage Framework for Speech Enhancement]]</div>|<div class="cpsessionviewpapertitle">Know Your Enemy, Know Yourself: A Unified Two-Stage Framework for Speech Enhancement</div><div class="cpsessionviewpaperauthor">[[Wenzhe Liu|AUTHOR Wenzhe Liu]], [[Andong Li|AUTHOR Andong Li]], [[Yuxuan Ke|AUTHOR Yuxuan Ke]], [[Chengshi Zheng|AUTHOR Chengshi Zheng]], [[Xiaodong Li|AUTHOR Xiaodong Li]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210259.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-3-7|PAPER Tue-M-V-3-7 — Speech Enhancement with Weakly Labelled Data from AudioSet]]</div>|<div class="cpsessionviewpapertitle">Speech Enhancement with Weakly Labelled Data from AudioSet</div><div class="cpsessionviewpaperauthor">[[Qiuqiang Kong|AUTHOR Qiuqiang Kong]], [[Haohe Liu|AUTHOR Haohe Liu]], [[Xingjian Du|AUTHOR Xingjian Du]], [[Li Chen|AUTHOR Li Chen]], [[Rui Xia|AUTHOR Rui Xia]], [[Yuxuan Wang|AUTHOR Yuxuan Wang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210582.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-3-8|PAPER Tue-M-V-3-8 — Improving Perceptual Quality by Phone-Fortified Perceptual Loss Using Wasserstein Distance for Speech Enhancement]]</div>|<div class="cpsessionviewpapertitle">Improving Perceptual Quality by Phone-Fortified Perceptual Loss Using Wasserstein Distance for Speech Enhancement</div><div class="cpsessionviewpaperauthor">[[Tsun-An Hsieh|AUTHOR Tsun-An Hsieh]], [[Cheng Yu|AUTHOR Cheng Yu]], [[Szu-Wei Fu|AUTHOR Szu-Wei Fu]], [[Xugang Lu|AUTHOR Xugang Lu]], [[Yu Tsao|AUTHOR Yu Tsao]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210599.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-3-9|PAPER Tue-M-V-3-9 — MetricGAN+: An Improved Version of MetricGAN for Speech Enhancement]]</div>|<div class="cpsessionviewpapertitle">MetricGAN+: An Improved Version of MetricGAN for Speech Enhancement</div><div class="cpsessionviewpaperauthor">[[Szu-Wei Fu|AUTHOR Szu-Wei Fu]], [[Cheng Yu|AUTHOR Cheng Yu]], [[Tsun-An Hsieh|AUTHOR Tsun-An Hsieh]], [[Peter Plantinga|AUTHOR Peter Plantinga]], [[Mirco Ravanelli|AUTHOR Mirco Ravanelli]], [[Xugang Lu|AUTHOR Xugang Lu]], [[Yu Tsao|AUTHOR Yu Tsao]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210605.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-3-10|PAPER Tue-M-V-3-10 — A Spectro-Temporal Glimpsing Index (STGI) for Speech Intelligibility Prediction]]</div>|<div class="cpsessionviewpapertitle">A Spectro-Temporal Glimpsing Index (STGI) for Speech Intelligibility Prediction</div><div class="cpsessionviewpaperauthor">[[Amin Edraki|AUTHOR Amin Edraki]], [[Wai-Yip Chan|AUTHOR Wai-Yip Chan]], [[Jesper Jensen|AUTHOR Jesper Jensen]], [[Daniel Fogerty|AUTHOR Daniel Fogerty]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210734.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-3-11|PAPER Tue-M-V-3-11 — Self-Supervised Learning Based Phone-Fortified Speech Enhancement]]</div>|<div class="cpsessionviewpapertitle">Self-Supervised Learning Based Phone-Fortified Speech Enhancement</div><div class="cpsessionviewpaperauthor">[[Yuanhang Qiu|AUTHOR Yuanhang Qiu]], [[Ruili Wang|AUTHOR Ruili Wang]], [[Satwinder Singh|AUTHOR Satwinder Singh]], [[Zhizhong Ma|AUTHOR Zhizhong Ma]], [[Feng Hou|AUTHOR Feng Hou]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211844.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-3-12|PAPER Tue-M-V-3-12 — Incorporating Embedding Vectors from a Human Mean-Opinion Score Prediction Model for Monaural Speech Enhancement]]</div>|<div class="cpsessionviewpapertitle">Incorporating Embedding Vectors from a Human Mean-Opinion Score Prediction Model for Monaural Speech Enhancement</div><div class="cpsessionviewpaperauthor">[[Khandokar Md. Nayem|AUTHOR Khandokar Md. Nayem]], [[Donald S. Williamson|AUTHOR Donald S. Williamson]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211889.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-3-13|PAPER Tue-M-V-3-13 — Restoring Degraded Speech via a Modified Diffusion Model]]</div>|<div class="cpsessionviewpapertitle">Restoring Degraded Speech via a Modified Diffusion Model</div><div class="cpsessionviewpaperauthor">[[Jianwei Zhang|AUTHOR Jianwei Zhang]], [[Suren Jayasuriya|AUTHOR Suren Jayasuriya]], [[Visar Berisha|AUTHOR Visar Berisha]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|09:30–11:30, Tuesday 31 August 2021, (Virtual)|<|
|^Chairs: |^Sakriani Sakti|
|^ |^Evelyne Tzoukerman|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211536.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-4-1|PAPER Tue-M-V-4-1 — User-Initiated Repetition-Based Recovery in Multi-Utterance Dialogue Systems]]</div>|<div class="cpsessionviewpapertitle">User-Initiated Repetition-Based Recovery in Multi-Utterance Dialogue Systems</div><div class="cpsessionviewpaperauthor">[[Hoang Long Nguyen|AUTHOR Hoang Long Nguyen]], [[Vincent Renkens|AUTHOR Vincent Renkens]], [[Joris Pelemans|AUTHOR Joris Pelemans]], [[Srividya Pranavi Potharaju|AUTHOR Srividya Pranavi Potharaju]], [[Anil Kumar Nalamalapu|AUTHOR Anil Kumar Nalamalapu]], [[Murat Akbacak|AUTHOR Murat Akbacak]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210120.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-4-2|PAPER Tue-M-V-4-2 — Self-Supervised Dialogue Learning for Spoken Conversational Question Answering]]</div>|<div class="cpsessionviewpapertitle">Self-Supervised Dialogue Learning for Spoken Conversational Question Answering</div><div class="cpsessionviewpaperauthor">[[Nuo Chen|AUTHOR Nuo Chen]], [[Chenyu You|AUTHOR Chenyu You]], [[Yuexian Zou|AUTHOR Yuexian Zou]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210138.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-4-3|PAPER Tue-M-V-4-3 — Act-Aware Slot-Value Predicting in Multi-Domain Dialogue State Tracking]]</div>|<div class="cpsessionviewpapertitle">Act-Aware Slot-Value Predicting in Multi-Domain Dialogue State Tracking</div><div class="cpsessionviewpaperauthor">[[Ruolin Su|AUTHOR Ruolin Su]], [[Ting-Wei Wu|AUTHOR Ting-Wei Wu]], [[Biing-Hwang Juang|AUTHOR Biing-Hwang Juang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210171.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-4-4|PAPER Tue-M-V-4-4 — Dialogue Situation Recognition for Everyday Conversation Using Multimodal Information]]</div>|<div class="cpsessionviewpapertitle">Dialogue Situation Recognition for Everyday Conversation Using Multimodal Information</div><div class="cpsessionviewpaperauthor">[[Yuya Chiba|AUTHOR Yuya Chiba]], [[Ryuichiro Higashinaka|AUTHOR Ryuichiro Higashinaka]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210381.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-4-5|PAPER Tue-M-V-4-5 — Neural Spoken-Response Generation Using Prosodic and Linguistic Context for Conversational Systems]]</div>|<div class="cpsessionviewpapertitle">Neural Spoken-Response Generation Using Prosodic and Linguistic Context for Conversational Systems</div><div class="cpsessionviewpaperauthor">[[Yoshihiro Yamazaki|AUTHOR Yoshihiro Yamazaki]], [[Yuya Chiba|AUTHOR Yuya Chiba]], [[Takashi Nose|AUTHOR Takashi Nose]], [[Akinori Ito|AUTHOR Akinori Ito]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210548.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-4-6|PAPER Tue-M-V-4-6 — Semantic Transportation Prototypical Network for Few-Shot Intent Detection]]</div>|<div class="cpsessionviewpapertitle">Semantic Transportation Prototypical Network for Few-Shot Intent Detection</div><div class="cpsessionviewpaperauthor">[[Weiyuan Xu|AUTHOR Weiyuan Xu]], [[Peilin Zhou|AUTHOR Peilin Zhou]], [[Chenyu You|AUTHOR Chenyu You]], [[Yuexian Zou|AUTHOR Yuexian Zou]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210887.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-4-7|PAPER Tue-M-V-4-7 — Domain-Specific Multi-Agent Dialog Policy Learning in Multi-Domain Task-Oriented Scenarios]]</div>|<div class="cpsessionviewpapertitle">Domain-Specific Multi-Agent Dialog Policy Learning in Multi-Domain Task-Oriented Scenarios</div><div class="cpsessionviewpaperauthor">[[Li Tang|AUTHOR Li Tang]], [[Yuke Si|AUTHOR Yuke Si]], [[Longbiao Wang|AUTHOR Longbiao Wang]], [[Jianwu Dang|AUTHOR Jianwu Dang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211370.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-4-8|PAPER Tue-M-V-4-8 — Leveraging ASR N-Best in Deep Entity Retrieval]]</div>|<div class="cpsessionviewpapertitle">Leveraging ASR N-Best in Deep Entity Retrieval</div><div class="cpsessionviewpaperauthor">[[Haoyu Wang|AUTHOR Haoyu Wang]], [[John Chen|AUTHOR John Chen]], [[Majid Laali|AUTHOR Majid Laali]], [[Kevin Durda|AUTHOR Kevin Durda]], [[Jeff King|AUTHOR Jeff King]], [[William Campbell|AUTHOR William Campbell]], [[Yang Liu|AUTHOR Yang Liu]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|09:30–11:30, Tuesday 31 August 2021, (Virtual)|<|
|^Chairs: |^Sriram Ganapathy|
|^ |^Seyyed Saeed Sarfjoo|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211242.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-5-1|PAPER Tue-M-V-5-1 — End-to-End Spelling Correction Conditioned on Acoustic Feature for Code-Switching Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">End-to-End Spelling Correction Conditioned on Acoustic Feature for Code-Switching Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Shuai Zhang|AUTHOR Shuai Zhang]], [[Jiangyan Yi|AUTHOR Jiangyan Yi]], [[Zhengkun Tian|AUTHOR Zhengkun Tian]], [[Ye Bai|AUTHOR Ye Bai]], [[Jianhua Tao|AUTHOR Jianhua Tao]], [[Xuefei Liu|AUTHOR Xuefei Liu]], [[Zhengqi Wen|AUTHOR Zhengqi Wen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211434.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-5-2|PAPER Tue-M-V-5-2 — Phoneme Recognition Through Fine Tuning of Phonetic Representations: A Case Study on Luhya Language Varieties]]</div>|<div class="cpsessionviewpapertitle">Phoneme Recognition Through Fine Tuning of Phonetic Representations: A Case Study on Luhya Language Varieties</div><div class="cpsessionviewpaperauthor">[[Kathleen Siminyu|AUTHOR Kathleen Siminyu]], [[Xinjian Li|AUTHOR Xinjian Li]], [[Antonios Anastasopoulos|AUTHOR Antonios Anastasopoulos]], [[David R. Mortensen|AUTHOR David R. Mortensen]], [[Michael R. Marlo|AUTHOR Michael R. Marlo]], [[Graham Neubig|AUTHOR Graham Neubig]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210053.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-5-3|PAPER Tue-M-V-5-3 — Speech Acoustic Modelling Using Raw Source and Filter Components]]</div>|<div class="cpsessionviewpapertitle">Speech Acoustic Modelling Using Raw Source and Filter Components</div><div class="cpsessionviewpaperauthor">[[Erfan Loweimi|AUTHOR Erfan Loweimi]], [[Zoran Cvetkovic|AUTHOR Zoran Cvetkovic]], [[Peter Bell|AUTHOR Peter Bell]], [[Steve Renals|AUTHOR Steve Renals]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210225.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-5-4|PAPER Tue-M-V-5-4 — Noise Robust Acoustic Modeling for Single-Channel Speech Recognition Based on a Stream-Wise Transformer Architecture]]</div>|<div class="cpsessionviewpapertitle">Noise Robust Acoustic Modeling for Single-Channel Speech Recognition Based on a Stream-Wise Transformer Architecture</div><div class="cpsessionviewpaperauthor">[[Masakiyo Fujimoto|AUTHOR Masakiyo Fujimoto]], [[Hisashi Kawai|AUTHOR Hisashi Kawai]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210230.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-5-5|PAPER Tue-M-V-5-5 — IR-GAN: Room Impulse Response Generator for Far-Field Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">IR-GAN: Room Impulse Response Generator for Far-Field Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Anton Ratnarajah|AUTHOR Anton Ratnarajah]], [[Zhenyu Tang|AUTHOR Zhenyu Tang]], [[Dinesh Manocha|AUTHOR Dinesh Manocha]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210419.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-5-6|PAPER Tue-M-V-5-6 — Scaling Sparsemax Based Channel Selection for Speech Recognition with ad-hoc Microphone Arrays]]</div>|<div class="cpsessionviewpapertitle">Scaling Sparsemax Based Channel Selection for Speech Recognition with ad-hoc Microphone Arrays</div><div class="cpsessionviewpaperauthor">[[Junqi Chen|AUTHOR Junqi Chen]], [[Xiao-Lei Zhang|AUTHOR Xiao-Lei Zhang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210655.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-5-7|PAPER Tue-M-V-5-7 — Multi-Channel Transformer Transducer for Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Multi-Channel Transformer Transducer for Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Feng-Ju Chang|AUTHOR Feng-Ju Chang]], [[Martin Radfar|AUTHOR Martin Radfar]], [[Athanasios Mouchtaris|AUTHOR Athanasios Mouchtaris]], [[Maurizio Omologo|AUTHOR Maurizio Omologo]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210958.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-5-8|PAPER Tue-M-V-5-8 — Data Augmentation Methods for End-to-End Speech Recognition on Distant-Talk Scenarios]]</div>|<div class="cpsessionviewpapertitle">Data Augmentation Methods for End-to-End Speech Recognition on Distant-Talk Scenarios</div><div class="cpsessionviewpaperauthor">[[Emiru Tsunoo|AUTHOR Emiru Tsunoo]], [[Kentaro Shibata|AUTHOR Kentaro Shibata]], [[Chaitanya Narisetty|AUTHOR Chaitanya Narisetty]], [[Yosuke Kashiwagi|AUTHOR Yosuke Kashiwagi]], [[Shinji Watanabe|AUTHOR Shinji Watanabe]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210964.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-5-9|PAPER Tue-M-V-5-9 — Leveraging Phone Mask Training for Phonetic-Reduction-Robust E2E Uyghur Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Leveraging Phone Mask Training for Phonetic-Reduction-Robust E2E Uyghur Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Guodong Ma|AUTHOR Guodong Ma]], [[Pengfei Hu|AUTHOR Pengfei Hu]], [[Jian Kang|AUTHOR Jian Kang]], [[Shen Huang|AUTHOR Shen Huang]], [[Hao Huang|AUTHOR Hao Huang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211758.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-5-10|PAPER Tue-M-V-5-10 — Rethinking Evaluation in ASR: Are Our Models Robust Enough?]]</div>|<div class="cpsessionviewpapertitle">Rethinking Evaluation in ASR: Are Our Models Robust Enough?</div><div class="cpsessionviewpaperauthor">[[Tatiana Likhomanenko|AUTHOR Tatiana Likhomanenko]], [[Qiantong Xu|AUTHOR Qiantong Xu]], [[Vineel Pratap|AUTHOR Vineel Pratap]], [[Paden Tomasello|AUTHOR Paden Tomasello]], [[Jacob Kahn|AUTHOR Jacob Kahn]], [[Gilad Avidov|AUTHOR Gilad Avidov]], [[Ronan Collobert|AUTHOR Ronan Collobert]], [[Gabriel Synnaeve|AUTHOR Gabriel Synnaeve]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212084.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-5-11|PAPER Tue-M-V-5-11 — Raw Waveform Encoder with Multi-Scale Globally Attentive Locally Recurrent Networks for End-to-End Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Raw Waveform Encoder with Multi-Scale Globally Attentive Locally Recurrent Networks for End-to-End Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Max W.Y. Lam|AUTHOR Max W.Y. Lam]], [[Jun Wang|AUTHOR Jun Wang]], [[Chao Weng|AUTHOR Chao Weng]], [[Dan Su|AUTHOR Dan Su]], [[Dong Yu|AUTHOR Dong Yu]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|09:30–11:30, Tuesday 31 August 2021, (Virtual)|<|
|^Chairs: |^Haizhou Li|
|^ |^Tanel Alumäe|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210037.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-6-1|PAPER Tue-M-V-6-1 — Attention-Based Cross-Modal Fusion for Audio-Visual Voice Activity Detection in Musical Video Streams]]</div>|<div class="cpsessionviewpapertitle">Attention-Based Cross-Modal Fusion for Audio-Visual Voice Activity Detection in Musical Video Streams</div><div class="cpsessionviewpaperauthor">[[Yuanbo Hou|AUTHOR Yuanbo Hou]], [[Zhesong Yu|AUTHOR Zhesong Yu]], [[Xia Liang|AUTHOR Xia Liang]], [[Xingjian Du|AUTHOR Xingjian Du]], [[Bilei Zhu|AUTHOR Bilei Zhu]], [[Zejun Ma|AUTHOR Zejun Ma]], [[Dick Botteldooren|AUTHOR Dick Botteldooren]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-6-2|PAPER Tue-M-V-6-2 — Noise-Tolerant Self-Supervised Learning for Audio-Visual Voice Activity Detection]]</div>|<div class="cpsessionviewpapertitle">Noise-Tolerant Self-Supervised Learning for Audio-Visual Voice Activity Detection</div><div class="cpsessionviewpaperauthor">[[Ui-Hyun Kim|AUTHOR Ui-Hyun Kim]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210072.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-6-3|PAPER Tue-M-V-6-3 — Noisy Student-Teacher Training for Robust Keyword Spotting]]</div>|<div class="cpsessionviewpapertitle">Noisy Student-Teacher Training for Robust Keyword Spotting</div><div class="cpsessionviewpaperauthor">[[Hyun-Jin Park|AUTHOR Hyun-Jin Park]], [[Pai Zhu|AUTHOR Pai Zhu]], [[Ignacio Lopez Moreno|AUTHOR Ignacio Lopez Moreno]], [[Niranjan Subrahmanya|AUTHOR Niranjan Subrahmanya]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210200.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-6-4|PAPER Tue-M-V-6-4 — Multi-Channel VAD for Transcription of Group Discussion]]</div>|<div class="cpsessionviewpapertitle">Multi-Channel VAD for Transcription of Group Discussion</div><div class="cpsessionviewpaperauthor">[[Osamu Ichikawa|AUTHOR Osamu Ichikawa]], [[Kaito Nakano|AUTHOR Kaito Nakano]], [[Takahiro Nakayama|AUTHOR Takahiro Nakayama]], [[Hajime Shirouzu|AUTHOR Hajime Shirouzu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210592.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-6-5|PAPER Tue-M-V-6-5 — Audio-Visual Information Fusion Using Cross-Modal Teacher-Student Learning for Voice Activity Detection in Realistic Environments]]</div>|<div class="cpsessionviewpapertitle">Audio-Visual Information Fusion Using Cross-Modal Teacher-Student Learning for Voice Activity Detection in Realistic Environments</div><div class="cpsessionviewpaperauthor">[[Hengshun Zhou|AUTHOR Hengshun Zhou]], [[Jun Du|AUTHOR Jun Du]], [[Hang Chen|AUTHOR Hang Chen]], [[Zijun Jing|AUTHOR Zijun Jing]], [[Shifu Xiong|AUTHOR Shifu Xiong]], [[Chin-Hui Lee|AUTHOR Chin-Hui Lee]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210731.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-6-6|PAPER Tue-M-V-6-6 — Enrollment-Less Training for Personalized Voice Activity Detection]]</div>|<div class="cpsessionviewpapertitle">Enrollment-Less Training for Personalized Voice Activity Detection</div><div class="cpsessionviewpaperauthor">[[Naoki Makishima|AUTHOR Naoki Makishima]], [[Mana Ihori|AUTHOR Mana Ihori]], [[Tomohiro Tanaka|AUTHOR Tomohiro Tanaka]], [[Akihiko Takashima|AUTHOR Akihiko Takashima]], [[Shota Orihashi|AUTHOR Shota Orihashi]], [[Ryo Masumura|AUTHOR Ryo Masumura]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210792.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-6-7|PAPER Tue-M-V-6-7 — Voice Activity Detection for Live Speech of Baseball Game Based on Tandem Connection with Speech/Noise Separation Model]]</div>|<div class="cpsessionviewpapertitle">Voice Activity Detection for Live Speech of Baseball Game Based on Tandem Connection with Speech/Noise Separation Model</div><div class="cpsessionviewpaperauthor">[[Yuto Nonaka|AUTHOR Yuto Nonaka]], [[Chee Siang Leow|AUTHOR Chee Siang Leow]], [[Akio Kobayashi|AUTHOR Akio Kobayashi]], [[Takehito Utsuro|AUTHOR Takehito Utsuro]], [[Hiromitsu Nishizaki|AUTHOR Hiromitsu Nishizaki]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211091.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-6-8|PAPER Tue-M-V-6-8 — FastICARL: Fast Incremental Classifier and Representation Learning with Efficient Budget Allocation in Audio Sensing Applications]]</div>|<div class="cpsessionviewpapertitle">FastICARL: Fast Incremental Classifier and Representation Learning with Efficient Budget Allocation in Audio Sensing Applications</div><div class="cpsessionviewpaperauthor">[[Young D. Kwon|AUTHOR Young D. Kwon]], [[Jagmohan Chauhan|AUTHOR Jagmohan Chauhan]], [[Cecilia Mascolo|AUTHOR Cecilia Mascolo]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211335.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-6-9|PAPER Tue-M-V-6-9 — End-to-End Transformer-Based Open-Vocabulary Keyword Spotting with Location-Guided Local Attention]]</div>|<div class="cpsessionviewpapertitle">End-to-End Transformer-Based Open-Vocabulary Keyword Spotting with Location-Guided Local Attention</div><div class="cpsessionviewpaperauthor">[[Bo Wei|AUTHOR Bo Wei]], [[Meirong Yang|AUTHOR Meirong Yang]], [[Tao Zhang|AUTHOR Tao Zhang]], [[Xiao Tang|AUTHOR Xiao Tang]], [[Xing Huang|AUTHOR Xing Huang]], [[Kyuhong Kim|AUTHOR Kyuhong Kim]], [[Jaeyun Lee|AUTHOR Jaeyun Lee]], [[Kiho Cho|AUTHOR Kiho Cho]], [[Sung-Un Park|AUTHOR Sung-Un Park]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211874.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-6-10|PAPER Tue-M-V-6-10 — Segmental Contrastive Predictive Coding for Unsupervised Word Segmentation]]</div>|<div class="cpsessionviewpapertitle">Segmental Contrastive Predictive Coding for Unsupervised Word Segmentation</div><div class="cpsessionviewpaperauthor">[[Saurabhchand Bhati|AUTHOR Saurabhchand Bhati]], [[Jesús Villalba|AUTHOR Jesús Villalba]], [[Piotr Żelasko|AUTHOR Piotr Żelasko]], [[Laureano Moro-Velázquez|AUTHOR Laureano Moro-Velázquez]], [[Najim Dehak|AUTHOR Najim Dehak]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211977.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-6-11|PAPER Tue-M-V-6-11 — A Lightweight Framework for Online Voice Activity Detection in the Wild]]</div>|<div class="cpsessionviewpapertitle">A Lightweight Framework for Online Voice Activity Detection in the Wild</div><div class="cpsessionviewpaperauthor">[[Xuenan Xu|AUTHOR Xuenan Xu]], [[Heinrich Dinkel|AUTHOR Heinrich Dinkel]], [[Mengyue Wu|AUTHOR Mengyue Wu]], [[Kai Yu|AUTHOR Kai Yu]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|09:30–11:30, Tuesday 31 August 2021, (Virtual)|<|
|^Chairs: |^Hongwei Ding|
|^ |^Tan Lee|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210129.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-7-1|PAPER Tue-M-V-7-1 — “See what I mean, huh?” Evaluating Visual Inspection of F₀ Tracking in Nasal Grunts]]</div>|<div class="cpsessionviewpapertitle">“See what I mean, huh?” Evaluating Visual Inspection of F₀ Tracking in Nasal Grunts</div><div class="cpsessionviewpaperauthor">[[Aurélie Chlébowski|AUTHOR Aurélie Chlébowski]], [[Nicolas Ballier|AUTHOR Nicolas Ballier]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210267.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-7-2|PAPER Tue-M-V-7-2 — System Performance as a Function of Calibration Methods, Sample Size and Sampling Variability in Likelihood Ratio-Based Forensic Voice Comparison]]</div>|<div class="cpsessionviewpapertitle">System Performance as a Function of Calibration Methods, Sample Size and Sampling Variability in Likelihood Ratio-Based Forensic Voice Comparison</div><div class="cpsessionviewpaperauthor">[[Bruce Xiao Wang|AUTHOR Bruce Xiao Wang]], [[Vincent Hughes|AUTHOR Vincent Hughes]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210601.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-7-3|PAPER Tue-M-V-7-3 — Voicing Assimilations by French Speakers of German in Stop-Fricative Sequences]]</div>|<div class="cpsessionviewpapertitle">Voicing Assimilations by French Speakers of German in Stop-Fricative Sequences</div><div class="cpsessionviewpaperauthor">[[Anne Bonneau|AUTHOR Anne Bonneau]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210635.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-7-4|PAPER Tue-M-V-7-4 — The Four-Way Classification of Stops with Voicing and Aspiration for Non-Native Speech Evaluation]]</div>|<div class="cpsessionviewpapertitle">The Four-Way Classification of Stops with Voicing and Aspiration for Non-Native Speech Evaluation</div><div class="cpsessionviewpaperauthor">[[Titas Chakraborty|AUTHOR Titas Chakraborty]], [[Vaishali Patil|AUTHOR Vaishali Patil]], [[Preeti Rao|AUTHOR Preeti Rao]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210910.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-7-5|PAPER Tue-M-V-7-5 — Acoustic and Prosodic Correlates of Emotions in Urdu Speech]]</div>|<div class="cpsessionviewpapertitle">Acoustic and Prosodic Correlates of Emotions in Urdu Speech</div><div class="cpsessionviewpaperauthor">[[Saba Urooj|AUTHOR Saba Urooj]], [[Benazir Mumtaz|AUTHOR Benazir Mumtaz]], [[Sarmad Hussain|AUTHOR Sarmad Hussain]], [[Ehsan ul Haq|AUTHOR Ehsan ul Haq]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211079.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-7-6|PAPER Tue-M-V-7-6 — Voicing Contrasts in the Singleton Stops of Palestinian Arabic: Production and Perception]]</div>|<div class="cpsessionviewpapertitle">Voicing Contrasts in the Singleton Stops of Palestinian Arabic: Production and Perception</div><div class="cpsessionviewpaperauthor">[[Nour Tamim|AUTHOR Nour Tamim]], [[Silke Hamann|AUTHOR Silke Hamann]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211487.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-7-7|PAPER Tue-M-V-7-7 — A Comparison of the Accuracy of Dissen and Keshet’s (2016) DeepFormants and Traditional LPC Methods for Semi-Automatic Speaker Recognition]]</div>|<div class="cpsessionviewpapertitle">A Comparison of the Accuracy of Dissen and Keshet’s (2016) DeepFormants and Traditional LPC Methods for Semi-Automatic Speaker Recognition</div><div class="cpsessionviewpaperauthor">[[Thomas Coy|AUTHOR Thomas Coy]], [[Vincent Hughes|AUTHOR Vincent Hughes]], [[Philip Harrison|AUTHOR Philip Harrison]], [[Amelia J. Gully|AUTHOR Amelia J. Gully]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211697.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-7-8|PAPER Tue-M-V-7-8 — MAP Adaptation Characteristics in Forensic Long-Term Formant Analysis]]</div>|<div class="cpsessionviewpapertitle">MAP Adaptation Characteristics in Forensic Long-Term Formant Analysis</div><div class="cpsessionviewpaperauthor">[[Michael Jessen|AUTHOR Michael Jessen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211699.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-7-9|PAPER Tue-M-V-7-9 — Cross-Linguistic Speaker Individuality of Long-Term Formant Distributions: Phonetic and Forensic Perspectives]]</div>|<div class="cpsessionviewpapertitle">Cross-Linguistic Speaker Individuality of Long-Term Formant Distributions: Phonetic and Forensic Perspectives</div><div class="cpsessionviewpaperauthor">[[Justin J.H. Lo|AUTHOR Justin J.H. Lo]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211754.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-7-10|PAPER Tue-M-V-7-10 — Sound Change in Spontaneous Bilingual Speech: A Corpus Study on the Cantonese n-l Merger in Cantonese-English Bilinguals]]</div>|<div class="cpsessionviewpapertitle">Sound Change in Spontaneous Bilingual Speech: A Corpus Study on the Cantonese n-l Merger in Cantonese-English Bilinguals</div><div class="cpsessionviewpaperauthor">[[Rachel Soo|AUTHOR Rachel Soo]], [[Khia A. Johnson|AUTHOR Khia A. Johnson]], [[Molly Babel|AUTHOR Molly Babel]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212104.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Tue-M-V-7-11|PAPER Tue-M-V-7-11 — Characterizing Voiced and Voiceless Nasals in Mizo]]</div>|<div class="cpsessionviewpapertitle">Characterizing Voiced and Voiceless Nasals in Mizo</div><div class="cpsessionviewpaperauthor">[[Wendy Lalhminghlui|AUTHOR Wendy Lalhminghlui]], [[Priyankoo Sarmah|AUTHOR Priyankoo Sarmah]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:30–12:30, Tuesday 31 August 2021, Room A+B|<|
|^Chair: |^TBD|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> |^<div class="cpsessionviewpapercode">[[Tue-Survey|PAPER Tue-Survey — Towards Automatic Speech Recognition for People with Atypical Speech]]</div>|<div class="cpsessionviewpapertitle">Towards Automatic Speech Recognition for People with Atypical Speech</div><div class="cpsessionviewpaperauthor">[[Heidi Christensen|AUTHOR Heidi Christensen]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Wednesday 1 September 2021, Room A+B|<|
|^Chairs: |^Hermann Ney|
|^ |^Irina Illina|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210313.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-O-1-1|PAPER Wed-A-O-1-1 — BERT-Based Semantic Model for Rescoring N-Best Speech Recognition List]]</div>|<div class="cpsessionviewpapertitle">BERT-Based Semantic Model for Rescoring N-Best Speech Recognition List</div><div class="cpsessionviewpaperauthor">[[Dominique Fohr|AUTHOR Dominique Fohr]], [[Irina Illina|AUTHOR Irina Illina]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210627.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-O-1-3|PAPER Wed-A-O-1-3 — Text Augmentation for Language Models in High Error Recognition Scenario]]</div>|<div class="cpsessionviewpapertitle">Text Augmentation for Language Models in High Error Recognition Scenario</div><div class="cpsessionviewpaperauthor">[[Karel Beneš|AUTHOR Karel Beneš]], [[Lukáš Burget|AUTHOR Lukáš Burget]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211067.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-O-1-4|PAPER Wed-A-O-1-4 — On Sampling-Based Training Criteria for Neural Language Modeling]]</div>|<div class="cpsessionviewpapertitle">On Sampling-Based Training Criteria for Neural Language Modeling</div><div class="cpsessionviewpaperauthor">[[Yingbo Gao|AUTHOR Yingbo Gao]], [[David Thulke|AUTHOR David Thulke]], [[Alexander Gerstenberger|AUTHOR Alexander Gerstenberger]], [[Khoa Viet Tran|AUTHOR Khoa Viet Tran]], [[Ralf Schlüter|AUTHOR Ralf Schlüter]], [[Hermann Ney|AUTHOR Hermann Ney]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211191.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-O-1-5|PAPER Wed-A-O-1-5 — Fast Text-Only Domain Adaptation of RNN-Transducer Prediction Network]]</div>|<div class="cpsessionviewpapertitle">Fast Text-Only Domain Adaptation of RNN-Transducer Prediction Network</div><div class="cpsessionviewpaperauthor">[[Janne Pylkkönen|AUTHOR Janne Pylkkönen]], [[Antti Ukkonen|AUTHOR Antti Ukkonen]], [[Juho Kilpikoski|AUTHOR Juho Kilpikoski]], [[Samu Tamminen|AUTHOR Samu Tamminen]], [[Hannes Heikinheimo|AUTHOR Hannes Heikinheimo]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Wednesday 1 September 2021, Room C|<|
|^Chairs: |^Omid Ghahabi|
|^ |^Ondřej Glembek|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211611.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-O-2-2|PAPER Wed-A-O-2-2 — Using Games to Augment Corpora for Language Recognition and Confusability]]</div>|<div class="cpsessionviewpapertitle">Using Games to Augment Corpora for Language Recognition and Confusability</div><div class="cpsessionviewpaperauthor">[[Christopher Cieri|AUTHOR Christopher Cieri]], [[James Fiumara|AUTHOR James Fiumara]], [[Jonathan Wright|AUTHOR Jonathan Wright]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211857.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-O-2-3|PAPER Wed-A-O-2-3 — Fair Voice Biometrics: Impact of Demographic Imbalance on Group Fairness in Speaker Recognition]]</div>|<div class="cpsessionviewpapertitle">Fair Voice Biometrics: Impact of Demographic Imbalance on Group Fairness in Speaker Recognition</div><div class="cpsessionviewpaperauthor">[[Gianni Fenu|AUTHOR Gianni Fenu]], [[Mirko Marras|AUTHOR Mirko Marras]], [[Giacomo Medda|AUTHOR Giacomo Medda]], [[Giacomo Meloni|AUTHOR Giacomo Meloni]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212119.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-O-2-4|PAPER Wed-A-O-2-4 — Knowledge Distillation from Multi-Modality to Single-Modality for Person Verification]]</div>|<div class="cpsessionviewpapertitle">Knowledge Distillation from Multi-Modality to Single-Modality for Person Verification</div><div class="cpsessionviewpaperauthor">[[Leying Zhang|AUTHOR Leying Zhang]], [[Zhengyang Chen|AUTHOR Zhengyang Chen]], [[Yanmin Qian|AUTHOR Yanmin Qian]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211712.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-O-2-6|PAPER Wed-A-O-2-6 — Adversarial Disentanglement of Speaker Representation for Attribute-Driven Privacy Preservation]]</div>|<div class="cpsessionviewpapertitle">Adversarial Disentanglement of Speaker Representation for Attribute-Driven Privacy Preservation</div><div class="cpsessionviewpaperauthor">[[Paul-Gauthier Noé|AUTHOR Paul-Gauthier Noé]], [[Mohammad Mohammadamini|AUTHOR Mohammad Mohammadamini]], [[Driss Matrouf|AUTHOR Driss Matrouf]], [[Titouan Parcollet|AUTHOR Titouan Parcollet]], [[Andreas Nautsch|AUTHOR Andreas Nautsch]], [[Jean-François Bonastre|AUTHOR Jean-François Bonastre]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Wednesday 1 September 2021, Room Lacina|<|
|^Chairs: |^Khalid Daoudi|
|^ |^RaviShankar Prasad|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211694.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-O-3-1|PAPER Wed-A-O-3-1 — Automatically Detecting Errors and Disfluencies in Read Speech to Predict Cognitive Impairment in People with Parkinson’s Disease]]</div>|<div class="cpsessionviewpapertitle">Automatically Detecting Errors and Disfluencies in Read Speech to Predict Cognitive Impairment in People with Parkinson’s Disease</div><div class="cpsessionviewpaperauthor">[[Amrit Romana|AUTHOR Amrit Romana]], [[John Bandon|AUTHOR John Bandon]], [[Matthew Perez|AUTHOR Matthew Perez]], [[Stephanie Gutierrez|AUTHOR Stephanie Gutierrez]], [[Richard Richter|AUTHOR Richard Richter]], [[Angela Roberts|AUTHOR Angela Roberts]], [[Emily Mower Provost|AUTHOR Emily Mower Provost]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211736.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-O-3-2|PAPER Wed-A-O-3-2 — Automatic Extraction of Speech Rhythm Descriptors for Speech Intelligibility Assessment in the Context of Head and Neck Cancers]]</div>|<div class="cpsessionviewpapertitle">Automatic Extraction of Speech Rhythm Descriptors for Speech Intelligibility Assessment in the Context of Head and Neck Cancers</div><div class="cpsessionviewpaperauthor">[[Robin Vaysse|AUTHOR Robin Vaysse]], [[Jérôme Farinas|AUTHOR Jérôme Farinas]], [[Corine Astésano|AUTHOR Corine Astésano]], [[Régine André-Obrecht|AUTHOR Régine André-Obrecht]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212180.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-O-3-3|PAPER Wed-A-O-3-3 — Speech Disorder Classification Using Extended Factorized Hierarchical Variational Auto-Encoders]]</div>|<div class="cpsessionviewpapertitle">Speech Disorder Classification Using Extended Factorized Hierarchical Variational Auto-Encoders</div><div class="cpsessionviewpaperauthor">[[Jinzi Qi|AUTHOR Jinzi Qi]], [[Hugo Van hamme|AUTHOR Hugo Van hamme]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211403.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-O-3-4|PAPER Wed-A-O-3-4 — The Impact of Forced-Alignment Errors on Automatic Pronunciation Evaluation]]</div>|<div class="cpsessionviewpapertitle">The Impact of Forced-Alignment Errors on Automatic Pronunciation Evaluation</div><div class="cpsessionviewpaperauthor">[[Vikram C. Mathad|AUTHOR Vikram C. Mathad]], [[Tristan J. Mahr|AUTHOR Tristan J. Mahr]], [[Nancy Scherer|AUTHOR Nancy Scherer]], [[Kathy Chapman|AUTHOR Kathy Chapman]], [[Katherine C. Hustad|AUTHOR Katherine C. Hustad]], [[Julie Liss|AUTHOR Julie Liss]], [[Visar Berisha|AUTHOR Visar Berisha]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211288.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-O-3-5|PAPER Wed-A-O-3-5 — Late Fusion of the Available Lexicon and Raw Waveform-Based Acoustic Modeling for Depression and Dementia Recognition]]</div>|<div class="cpsessionviewpapertitle">Late Fusion of the Available Lexicon and Raw Waveform-Based Acoustic Modeling for Depression and Dementia Recognition</div><div class="cpsessionviewpaperauthor">[[Esaú Villatoro-Tello|AUTHOR Esaú Villatoro-Tello]], [[S. Pavankumar Dubagunta|AUTHOR S. Pavankumar Dubagunta]], [[Julian Fritsch|AUTHOR Julian Fritsch]], [[Gabriela Ramírez-de-la-Rosa|AUTHOR Gabriela Ramírez-de-la-Rosa]], [[Petr Motlicek|AUTHOR Petr Motlicek]], [[Mathew Magimai-Doss|AUTHOR Mathew Magimai-Doss]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211466.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-O-3-6|PAPER Wed-A-O-3-6 — Neural Speaker Embeddings for Ultrasound-Based Silent Speech Interfaces]]</div>|<div class="cpsessionviewpapertitle">Neural Speaker Embeddings for Ultrasound-Based Silent Speech Interfaces</div><div class="cpsessionviewpaperauthor">[[Amin Honarmandi Shandiz|AUTHOR Amin Honarmandi Shandiz]], [[László Tóth|AUTHOR László Tóth]], [[Gábor Gosztolya|AUTHOR Gábor Gosztolya]], [[Alexandra Markó|AUTHOR Alexandra Markó]], [[Tamás Gábor Csapó|AUTHOR Tamás Gábor Csapó]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Wednesday 1 September 2021, (Virtual)|<|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218008.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-S&T-1-1|PAPER Wed-A-S&T-1-1 — Multi-Speaker Emotional Text-to-Speech Synthesizer]]</div>|<div class="cpsessionviewpapertitle">Multi-Speaker Emotional Text-to-Speech Synthesizer</div><div class="cpsessionviewpaperauthor">[[Sungjae Cho|AUTHOR Sungjae Cho]], [[Soo-Young Lee|AUTHOR Soo-Young Lee]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218009.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-S&T-1-2|PAPER Wed-A-S&T-1-2 — Live TV Subtitling Through Respeaking]]</div>|<div class="cpsessionviewpapertitle">Live TV Subtitling Through Respeaking</div><div class="cpsessionviewpaperauthor">[[Aleš Pražák|AUTHOR Aleš Pražák]], [[Zdeněk Loose|AUTHOR Zdeněk Loose]], [[Josef V. Psutka|AUTHOR Josef V. Psutka]], [[Vlasta Radová|AUTHOR Vlasta Radová]], [[Josef Psutka|AUTHOR Josef Psutka]], [[Jan Švec|AUTHOR Jan Švec]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218010.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-S&T-1-3|PAPER Wed-A-S&T-1-3 — Autonomous Robot for Measuring Room Impulse Responses]]</div>|<div class="cpsessionviewpapertitle">Autonomous Robot for Measuring Room Impulse Responses</div><div class="cpsessionviewpaperauthor">[[Stefan Fragner|AUTHOR Stefan Fragner]], [[Tobias Topar|AUTHOR Tobias Topar]], [[Maximilian Giller|AUTHOR Maximilian Giller]], [[Lukas Pfeifenberger|AUTHOR Lukas Pfeifenberger]], [[Franz Pernkopf|AUTHOR Franz Pernkopf]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218011.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-S&T-1-4|PAPER Wed-A-S&T-1-4 — Expressive Robot Performance Based on Facial Motion Capture]]</div>|<div class="cpsessionviewpapertitle">Expressive Robot Performance Based on Facial Motion Capture</div><div class="cpsessionviewpaperauthor">[[Jonas Beskow|AUTHOR Jonas Beskow]], [[Charlie Caper|AUTHOR Charlie Caper]], [[Johan Ehrenfors|AUTHOR Johan Ehrenfors]], [[Nils Hagberg|AUTHOR Nils Hagberg]], [[Anne Jansen|AUTHOR Anne Jansen]], [[Chris Wood|AUTHOR Chris Wood]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218012.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-S&T-1-5|PAPER Wed-A-S&T-1-5 — ThemePro 2.0: Showcasing the Role of Thematic Progression in Engaging Human-Computer Interaction]]</div>|<div class="cpsessionviewpapertitle">ThemePro 2.0: Showcasing the Role of Thematic Progression in Engaging Human-Computer Interaction</div><div class="cpsessionviewpaperauthor">[[Mónica Domínguez|AUTHOR Mónica Domínguez]], [[Juan Soler-Company|AUTHOR Juan Soler-Company]], [[Leo Wanner|AUTHOR Leo Wanner]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218013.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-S&T-1-6|PAPER Wed-A-S&T-1-6 — Addressing Compliance in Call Centers with Entity Extraction]]</div>|<div class="cpsessionviewpapertitle">Addressing Compliance in Call Centers with Entity Extraction</div><div class="cpsessionviewpaperauthor">[[Sai Guruju|AUTHOR Sai Guruju]], [[Jithendra Vepa|AUTHOR Jithendra Vepa]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS218014.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-S&T-1-7|PAPER Wed-A-S&T-1-7 — Audio Segmentation Based Conversational Silence Detection for Contact Center Calls]]</div>|<div class="cpsessionviewpapertitle">Audio Segmentation Based Conversational Silence Detection for Contact Center Calls</div><div class="cpsessionviewpaperauthor">[[Krishnachaitanya Gogineni|AUTHOR Krishnachaitanya Gogineni]], [[Tarun Reddy Yadama|AUTHOR Tarun Reddy Yadama]], [[Jithendra Vepa|AUTHOR Jithendra Vepa]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Wednesday 1 September 2021, (Virtual)|<|
|^Chairs: |^Jahangir Alam|
|^ |^Kong Aik Lee|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211553.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-SS-1-1|PAPER Wed-A-SS-1-1 — The ID R&D System Description for Short-Duration Speaker Verification Challenge 2021]]</div>|<div class="cpsessionviewpapertitle">The ID R&D System Description for Short-Duration Speaker Verification Challenge 2021</div><div class="cpsessionviewpaperauthor">[[Alexander Alenin|AUTHOR Alexander Alenin]], [[Anton Okhotnikov|AUTHOR Anton Okhotnikov]], [[Rostislav Makarov|AUTHOR Rostislav Makarov]], [[Nikita Torgashov|AUTHOR Nikita Torgashov]], [[Ilya Shigabeev|AUTHOR Ilya Shigabeev]], [[Konstantin Simonchik|AUTHOR Konstantin Simonchik]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211570.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-SS-1-2|PAPER Wed-A-SS-1-2 — Integrating Frequency Translational Invariance in TDNNs and Frequency Positional Information in 2D ResNets to Enhance Speaker Verification]]</div>|<div class="cpsessionviewpapertitle">Integrating Frequency Translational Invariance in TDNNs and Frequency Positional Information in 2D ResNets to Enhance Speaker Verification</div><div class="cpsessionviewpaperauthor">[[Jenthe Thienpondt|AUTHOR Jenthe Thienpondt]], [[Brecht Desplanques|AUTHOR Brecht Desplanques]], [[Kris Demuynck|AUTHOR Kris Demuynck]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211737.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-SS-1-3|PAPER Wed-A-SS-1-3 — SdSVC Challenge 2021: Tips and Tricks to Boost the Short-Duration Speaker Verification System Performance]]</div>|<div class="cpsessionviewpapertitle">SdSVC Challenge 2021: Tips and Tricks to Boost the Short-Duration Speaker Verification System Performance</div><div class="cpsessionviewpaperauthor">[[Aleksei Gusev|AUTHOR Aleksei Gusev]], [[Alisa Vinogradova|AUTHOR Alisa Vinogradova]], [[Sergey Novoselov|AUTHOR Sergey Novoselov]], [[Sergei Astapov|AUTHOR Sergei Astapov]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210249.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-SS-1-4|PAPER Wed-A-SS-1-4 — Team02 Text-Independent Speaker Verification System for SdSV Challenge 2021]]</div>|<div class="cpsessionviewpapertitle">Team02 Text-Independent Speaker Verification System for SdSV Challenge 2021</div><div class="cpsessionviewpaperauthor">[[Woo Hyun Kang|AUTHOR Woo Hyun Kang]], [[Nam Soo Kim|AUTHOR Nam Soo Kim]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210398.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-SS-1-5|PAPER Wed-A-SS-1-5 — Our Learned Lessons from Cross-Lingual Speaker Verification: The CRMI-DKU System Description for the Short-Duration Speaker Verification Challenge 2021]]</div>|<div class="cpsessionviewpapertitle">Our Learned Lessons from Cross-Lingual Speaker Verification: The CRMI-DKU System Description for the Short-Duration Speaker Verification Challenge 2021</div><div class="cpsessionviewpaperauthor">[[Xiaoyi Qin|AUTHOR Xiaoyi Qin]], [[Chao Wang|AUTHOR Chao Wang]], [[Yong Ma|AUTHOR Yong Ma]], [[Min Liu|AUTHOR Min Liu]], [[Shilei Zhang|AUTHOR Shilei Zhang]], [[Ming Li|AUTHOR Ming Li]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210743.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-SS-1-6|PAPER Wed-A-SS-1-6 — Investigation of IMU&Elevoc Submission for the Short-Duration Speaker Verification Challenge 2021]]</div>|<div class="cpsessionviewpapertitle">Investigation of IMU&Elevoc Submission for the Short-Duration Speaker Verification Challenge 2021</div><div class="cpsessionviewpaperauthor">[[Peng Zhang|AUTHOR Peng Zhang]], [[Peng Hu|AUTHOR Peng Hu]], [[Xueliang Zhang|AUTHOR Xueliang Zhang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210965.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-SS-1-7|PAPER Wed-A-SS-1-7 — The Sogou System for Short-Duration Speaker Verification Challenge 2021]]</div>|<div class="cpsessionviewpapertitle">The Sogou System for Short-Duration Speaker Verification Challenge 2021</div><div class="cpsessionviewpaperauthor">[[Jie Yan|AUTHOR Jie Yan]], [[Shengyu Yao|AUTHOR Shengyu Yao]], [[Yiqian Pan|AUTHOR Yiqian Pan]], [[Wei Chen|AUTHOR Wei Chen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212136.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-SS-1-8|PAPER Wed-A-SS-1-8 — The SJTU System for Short-Duration Speaker Verification Challenge 2021]]</div>|<div class="cpsessionviewpapertitle">The SJTU System for Short-Duration Speaker Verification Challenge 2021</div><div class="cpsessionviewpaperauthor">[[Bing Han|AUTHOR Bing Han]], [[Zhengyang Chen|AUTHOR Zhengyang Chen]], [[Zhikai Zhou|AUTHOR Zhikai Zhou]], [[Yanmin Qian|AUTHOR Yanmin Qian]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Wednesday 1 September 2021, (Virtual)|<|
|^Chairs: |^Catharine Oertel|
|^ |^Saturnino Luz|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212135.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-1-1|PAPER Wed-A-V-1-1 — Cross-Modal Learning for Audio-Visual Video Parsing]]</div>|<div class="cpsessionviewpapertitle">Cross-Modal Learning for Audio-Visual Video Parsing</div><div class="cpsessionviewpaperauthor">[[Jatin Lamba|AUTHOR Jatin Lamba]], [[Abhishek|AUTHOR Abhishek]], [[Jayaprakash Akula|AUTHOR Jayaprakash Akula]], [[Rishabh Dabral|AUTHOR Rishabh Dabral]], [[Preethi Jyothi|AUTHOR Preethi Jyothi]], [[Ganesh Ramakrishnan|AUTHOR Ganesh Ramakrishnan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212249.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-1-2|PAPER Wed-A-V-1-2 — A Psychology-Driven Computational Analysis of Political Interviews]]</div>|<div class="cpsessionviewpapertitle">A Psychology-Driven Computational Analysis of Political Interviews</div><div class="cpsessionviewpaperauthor">[[Darren Cook|AUTHOR Darren Cook]], [[Miri Zilka|AUTHOR Miri Zilka]], [[Simon Maskell|AUTHOR Simon Maskell]], [[Laurence Alison|AUTHOR Laurence Alison]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210411.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-1-3|PAPER Wed-A-V-1-3 — Speech Emotion Recognition Based on Attention Weight Correction Using Word-Level Confidence Measure]]</div>|<div class="cpsessionviewpapertitle">Speech Emotion Recognition Based on Attention Weight Correction Using Word-Level Confidence Measure</div><div class="cpsessionviewpaperauthor">[[Jennifer Santoso|AUTHOR Jennifer Santoso]], [[Takeshi Yamada|AUTHOR Takeshi Yamada]], [[Shoji Makino|AUTHOR Shoji Makino]], [[Kenkichi Ishizuka|AUTHOR Kenkichi Ishizuka]], [[Takekatsu Hiramura|AUTHOR Takekatsu Hiramura]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210701.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-1-4|PAPER Wed-A-V-1-4 — Effects of Voice Type and Task on L2 Learners’ Awareness of Pronunciation Errors]]</div>|<div class="cpsessionviewpapertitle">Effects of Voice Type and Task on L2 Learners’ Awareness of Pronunciation Errors</div><div class="cpsessionviewpaperauthor">[[Alif Silpachai|AUTHOR Alif Silpachai]], [[Ivana Rehman|AUTHOR Ivana Rehman]], [[Taylor Anne Barriuso|AUTHOR Taylor Anne Barriuso]], [[John Levis|AUTHOR John Levis]], [[Evgeny Chukharev-Hudilainen|AUTHOR Evgeny Chukharev-Hudilainen]], [[Guanlong Zhao|AUTHOR Guanlong Zhao]], [[Ricardo Gutierrez-Osuna|AUTHOR Ricardo Gutierrez-Osuna]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211441.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-1-5|PAPER Wed-A-V-1-5 — Lexical Entrainment and Intra-Speaker Variability in Cooperative Dialogues]]</div>|<div class="cpsessionviewpapertitle">Lexical Entrainment and Intra-Speaker Variability in Cooperative Dialogues</div><div class="cpsessionviewpaperauthor">[[Alla Menshikova|AUTHOR Alla Menshikova]], [[Daniil Kocharov|AUTHOR Daniil Kocharov]], [[Tatiana Kachkovskaia|AUTHOR Tatiana Kachkovskaia]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211526.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-1-6|PAPER Wed-A-V-1-6 — Detecting Alzheimer’s Disease Using Interactional and Acoustic Features from Spontaneous Speech]]</div>|<div class="cpsessionviewpapertitle">Detecting Alzheimer’s Disease Using Interactional and Acoustic Features from Spontaneous Speech</div><div class="cpsessionviewpaperauthor">[[Shamila Nasreen|AUTHOR Shamila Nasreen]], [[Julian Hough|AUTHOR Julian Hough]], [[Matthew Purver|AUTHOR Matthew Purver]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211796.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-1-7|PAPER Wed-A-V-1-7 — Investigating the Interplay Between Affective, Phonatory and Motoric Subsystems in Autism Spectrum Disorder Using a Multimodal Dialogue Agent]]</div>|<div class="cpsessionviewpapertitle">Investigating the Interplay Between Affective, Phonatory and Motoric Subsystems in Autism Spectrum Disorder Using a Multimodal Dialogue Agent</div><div class="cpsessionviewpaperauthor">[[Hardik Kothare|AUTHOR Hardik Kothare]], [[Vikram Ramanarayanan|AUTHOR Vikram Ramanarayanan]], [[Oliver Roesler|AUTHOR Oliver Roesler]], [[Michael Neumann|AUTHOR Michael Neumann]], [[Jackson Liscombe|AUTHOR Jackson Liscombe]], [[William Burke|AUTHOR William Burke]], [[Andrew Cornish|AUTHOR Andrew Cornish]], [[Doug Habberstad|AUTHOR Doug Habberstad]], [[Alaa Sakallah|AUTHOR Alaa Sakallah]], [[Sara Markuson|AUTHOR Sara Markuson]], [[Seemran Kansara|AUTHOR Seemran Kansara]], [[Afik Faerman|AUTHOR Afik Faerman]], [[Yasmine Bensidi-Slimane|AUTHOR Yasmine Bensidi-Slimane]], [[Laura Fry|AUTHOR Laura Fry]], [[Saige Portera|AUTHOR Saige Portera]], [[David Suendermann-Oeft|AUTHOR David Suendermann-Oeft]], [[David Pautler|AUTHOR David Pautler]], [[Carly Demopoulos|AUTHOR Carly Demopoulos]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212134.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-1-8|PAPER Wed-A-V-1-8 — Analysis of Eye Gaze Reasons and Gaze Aversions During Three-Party Conversations]]</div>|<div class="cpsessionviewpapertitle">Analysis of Eye Gaze Reasons and Gaze Aversions During Three-Party Conversations</div><div class="cpsessionviewpaperauthor">[[Carlos Toshinori Ishi|AUTHOR Carlos Toshinori Ishi]], [[Taiken Shintani|AUTHOR Taiken Shintani]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Wednesday 1 September 2021, (Virtual)|<|
|^Chairs: |^Preethi Jyothi|
|^ |^Richard Rose|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211929.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-2-1|PAPER Wed-A-V-2-1 — Semantic Distance: A New Metric for ASR Performance Analysis Towards Spoken Language Understanding]]</div>|<div class="cpsessionviewpapertitle">Semantic Distance: A New Metric for ASR Performance Analysis Towards Spoken Language Understanding</div><div class="cpsessionviewpaperauthor">[[Suyoun Kim|AUTHOR Suyoun Kim]], [[Abhinav Arora|AUTHOR Abhinav Arora]], [[Duc Le|AUTHOR Duc Le]], [[Ching-Feng Yeh|AUTHOR Ching-Feng Yeh]], [[Christian Fuegen|AUTHOR Christian Fuegen]], [[Ozlem Kalinli|AUTHOR Ozlem Kalinli]], [[Michael L. Seltzer|AUTHOR Michael L. Seltzer]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210379.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-2-2|PAPER Wed-A-V-2-2 — A Light-Weight Contextual Spelling Correction Model for Customizing Transducer-Based Speech Recognition Systems]]</div>|<div class="cpsessionviewpapertitle">A Light-Weight Contextual Spelling Correction Model for Customizing Transducer-Based Speech Recognition Systems</div><div class="cpsessionviewpaperauthor">[[Xiaoqiang Wang|AUTHOR Xiaoqiang Wang]], [[Yanqing Liu|AUTHOR Yanqing Liu]], [[Sheng Zhao|AUTHOR Sheng Zhao]], [[Jinyu Li|AUTHOR Jinyu Li]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211708.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-2-3|PAPER Wed-A-V-2-3 — Incorporating External POS Tagger for Punctuation Restoration]]</div>|<div class="cpsessionviewpapertitle">Incorporating External POS Tagger for Punctuation Restoration</div><div class="cpsessionviewpaperauthor">[[Ning Shi|AUTHOR Ning Shi]], [[Wei Wang|AUTHOR Wei Wang]], [[Boxin Wang|AUTHOR Boxin Wang]], [[Jinfeng Li|AUTHOR Jinfeng Li]], [[Xiangyu Liu|AUTHOR Xiangyu Liu]], [[Zhouhan Lin|AUTHOR Zhouhan Lin]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211787.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-2-4|PAPER Wed-A-V-2-4 — Phonetically Induced Subwords for End-to-End Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Phonetically Induced Subwords for End-to-End Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Vasileios Papadourakis|AUTHOR Vasileios Papadourakis]], [[Markus Müller|AUTHOR Markus Müller]], [[Jing Liu|AUTHOR Jing Liu]], [[Athanasios Mouchtaris|AUTHOR Athanasios Mouchtaris]], [[Maurizio Omologo|AUTHOR Maurizio Omologo]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211908.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-2-5|PAPER Wed-A-V-2-5 — Revisiting Parity of Human vs. Machine Conversational Speech Transcription]]</div>|<div class="cpsessionviewpapertitle">Revisiting Parity of Human vs. Machine Conversational Speech Transcription</div><div class="cpsessionviewpaperauthor">[[Courtney Mansfield|AUTHOR Courtney Mansfield]], [[Sara Ng|AUTHOR Sara Ng]], [[Gina-Anne Levow|AUTHOR Gina-Anne Levow]], [[Richard A. Wright|AUTHOR Richard A. Wright]], [[Mari Ostendorf|AUTHOR Mari Ostendorf]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210340.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-2-6|PAPER Wed-A-V-2-6 — Lookup-Table Recurrent Language Models for Long Tail Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Lookup-Table Recurrent Language Models for Long Tail Speech Recognition</div><div class="cpsessionviewpaperauthor">[[W. Ronny Huang|AUTHOR W. Ronny Huang]], [[Tara N. Sainath|AUTHOR Tara N. Sainath]], [[Cal Peyser|AUTHOR Cal Peyser]], [[Shankar Kumar|AUTHOR Shankar Kumar]], [[David Rybach|AUTHOR David Rybach]], [[Trevor Strohman|AUTHOR Trevor Strohman]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210443.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-2-7|PAPER Wed-A-V-2-7 — Contextual Density Ratio for Language Model Biasing of Sequence to Sequence ASR Systems]]</div>|<div class="cpsessionviewpapertitle">Contextual Density Ratio for Language Model Biasing of Sequence to Sequence ASR Systems</div><div class="cpsessionviewpaperauthor">[[Jesús Andrés-Ferrer|AUTHOR Jesús Andrés-Ferrer]], [[Dario Albesano|AUTHOR Dario Albesano]], [[Puming Zhan|AUTHOR Puming Zhan]], [[Paul Vozila|AUTHOR Paul Vozila]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210661.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-2-8|PAPER Wed-A-V-2-8 — Token-Level Supervised Contrastive Learning for Punctuation Restoration]]</div>|<div class="cpsessionviewpapertitle">Token-Level Supervised Contrastive Learning for Punctuation Restoration</div><div class="cpsessionviewpaperauthor">[[Qiushi Huang|AUTHOR Qiushi Huang]], [[Tom Ko|AUTHOR Tom Ko]], [[H. Lilian Tang|AUTHOR H. Lilian Tang]], [[Xubo Liu|AUTHOR Xubo Liu]], [[Bo Wu|AUTHOR Bo Wu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210739.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-2-9|PAPER Wed-A-V-2-9 — BART Based Semantic Correction for Mandarin Automatic Speech Recognition System]]</div>|<div class="cpsessionviewpapertitle">BART Based Semantic Correction for Mandarin Automatic Speech Recognition System</div><div class="cpsessionviewpaperauthor">[[Yun Zhao|AUTHOR Yun Zhao]], [[Xuerui Yang|AUTHOR Xuerui Yang]], [[Jinchao Wang|AUTHOR Jinchao Wang]], [[Yongyu Gao|AUTHOR Yongyu Gao]], [[Chao Yan|AUTHOR Chao Yan]], [[Yuanfu Zhou|AUTHOR Yuanfu Zhou]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211080.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-2-10|PAPER Wed-A-V-2-10 — Class-Based Neural Network Language Model for Second-Pass Rescoring in ASR]]</div>|<div class="cpsessionviewpapertitle">Class-Based Neural Network Language Model for Second-Pass Rescoring in ASR</div><div class="cpsessionviewpaperauthor">[[Lingfeng Dai|AUTHOR Lingfeng Dai]], [[Qi Liu|AUTHOR Qi Liu]], [[Kai Yu|AUTHOR Kai Yu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211656.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-2-11|PAPER Wed-A-V-2-11 — Improving Customization of Neural Transducers by Mitigating Acoustic Mismatch of Synthesized Audio]]</div>|<div class="cpsessionviewpapertitle">Improving Customization of Neural Transducers by Mitigating Acoustic Mismatch of Synthesized Audio</div><div class="cpsessionviewpaperauthor">[[Gakuto Kurata|AUTHOR Gakuto Kurata]], [[George Saon|AUTHOR George Saon]], [[Brian Kingsbury|AUTHOR Brian Kingsbury]], [[David Haws|AUTHOR David Haws]], [[Zoltán Tüske|AUTHOR Zoltán Tüske]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211767.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-2-12|PAPER Wed-A-V-2-12 — A Discriminative Entity-Aware Language Model for Virtual Assistants]]</div>|<div class="cpsessionviewpapertitle">A Discriminative Entity-Aware Language Model for Virtual Assistants</div><div class="cpsessionviewpaperauthor">[[Mandana Saebi|AUTHOR Mandana Saebi]], [[Ernest Pusateri|AUTHOR Ernest Pusateri]], [[Aaksha Meghawat|AUTHOR Aaksha Meghawat]], [[Christophe Van Gysel|AUTHOR Christophe Van Gysel]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210591.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-2-13|PAPER Wed-A-V-2-13 — Correcting Automated and Manual Speech Transcription Errors Using Warped Language Models]]</div>|<div class="cpsessionviewpapertitle">Correcting Automated and Manual Speech Transcription Errors Using Warped Language Models</div><div class="cpsessionviewpaperauthor">[[Mahdi Namazifar|AUTHOR Mahdi Namazifar]], [[John Malik|AUTHOR John Malik]], [[Li Erran Li|AUTHOR Li Erran Li]], [[Gokhan Tur|AUTHOR Gokhan Tur]], [[Dilek Hakkani Tür|AUTHOR Dilek Hakkani Tür]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Wednesday 1 September 2021, (Virtual)|<|
|^Chairs: |^Penny Karanasou|
|^ |^Michael Auli|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211272.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-3-1|PAPER Wed-A-V-3-1 — Dynamic Encoder Transducer: A Flexible Solution for Trading Off Accuracy for Latency]]</div>|<div class="cpsessionviewpapertitle">Dynamic Encoder Transducer: A Flexible Solution for Trading Off Accuracy for Latency</div><div class="cpsessionviewpaperauthor">[[Yangyang Shi|AUTHOR Yangyang Shi]], [[Varun Nagaraja|AUTHOR Varun Nagaraja]], [[Chunyang Wu|AUTHOR Chunyang Wu]], [[Jay Mahadeokar|AUTHOR Jay Mahadeokar]], [[Duc Le|AUTHOR Duc Le]], [[Rohit Prabhavalkar|AUTHOR Rohit Prabhavalkar]], [[Alex Xiao|AUTHOR Alex Xiao]], [[Ching-Feng Yeh|AUTHOR Ching-Feng Yeh]], [[Julian Chan|AUTHOR Julian Chan]], [[Christian Fuegen|AUTHOR Christian Fuegen]], [[Ozlem Kalinli|AUTHOR Ozlem Kalinli]], [[Michael L. Seltzer|AUTHOR Michael L. Seltzer]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211477.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-3-2|PAPER Wed-A-V-3-2 — Domain-Aware Self-Attention for Multi-Domain Neural Machine Translation]]</div>|<div class="cpsessionviewpapertitle">Domain-Aware Self-Attention for Multi-Domain Neural Machine Translation</div><div class="cpsessionviewpaperauthor">[[Shiqi Zhang|AUTHOR Shiqi Zhang]], [[Yan Liu|AUTHOR Yan Liu]], [[Deyi Xiong|AUTHOR Deyi Xiong]], [[Pei Zhang|AUTHOR Pei Zhang]], [[Boxing Chen|AUTHOR Boxing Chen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211510.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-3-3|PAPER Wed-A-V-3-3 — Librispeech Transducer Model with Internal Language Model Prior Correction]]</div>|<div class="cpsessionviewpapertitle">Librispeech Transducer Model with Internal Language Model Prior Correction</div><div class="cpsessionviewpaperauthor">[[Albert Zeyer|AUTHOR Albert Zeyer]], [[André Merboldt|AUTHOR André Merboldt]], [[Wilfried Michel|AUTHOR Wilfried Michel]], [[Ralf Schlüter|AUTHOR Ralf Schlüter]], [[Hermann Ney|AUTHOR Hermann Ney]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210165.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-3-4|PAPER Wed-A-V-3-4 — A Deliberation-Based Joint Acoustic and Text Decoder]]</div>|<div class="cpsessionviewpapertitle">A Deliberation-Based Joint Acoustic and Text Decoder</div><div class="cpsessionviewpaperauthor">[[Sepand Mavandadi|AUTHOR Sepand Mavandadi]], [[Tara N. Sainath|AUTHOR Tara N. Sainath]], [[Ke Hu|AUTHOR Ke Hu]], [[Zelin Wu|AUTHOR Zelin Wu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210211.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-3-5|PAPER Wed-A-V-3-5 — On the Limit of English Conversational Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">On the Limit of English Conversational Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Zoltán Tüske|AUTHOR Zoltán Tüske]], [[George Saon|AUTHOR George Saon]], [[Brian Kingsbury|AUTHOR Brian Kingsbury]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210387.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-3-6|PAPER Wed-A-V-3-6 — Deformable TDNN with Adaptive Receptive Fields for Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Deformable TDNN with Adaptive Receptive Fields for Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Keyu An|AUTHOR Keyu An]], [[Yi Zhang|AUTHOR Yi Zhang]], [[Zhijian Ou|AUTHOR Zhijian Ou]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210427.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-3-7|PAPER Wed-A-V-3-7 — Transformer-Based End-to-End Speech Recognition with Residual Gaussian-Based Self-Attention]]</div>|<div class="cpsessionviewpapertitle">Transformer-Based End-to-End Speech Recognition with Residual Gaussian-Based Self-Attention</div><div class="cpsessionviewpaperauthor">[[Chengdong Liang|AUTHOR Chengdong Liang]], [[Menglong Xu|AUTHOR Menglong Xu]], [[Xiao-Lei Zhang|AUTHOR Xiao-Lei Zhang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210478.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-3-8|PAPER Wed-A-V-3-8 — SpeechMoE: Scaling to Large Acoustic Models with Dynamic Routing Mixture of Experts]]</div>|<div class="cpsessionviewpapertitle">SpeechMoE: Scaling to Large Acoustic Models with Dynamic Routing Mixture of Experts</div><div class="cpsessionviewpaperauthor">[[Zhao You|AUTHOR Zhao You]], [[Shulin Feng|AUTHOR Shulin Feng]], [[Dan Su|AUTHOR Dan Su]], [[Dong Yu|AUTHOR Dong Yu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210545.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-3-9|PAPER Wed-A-V-3-9 — Online Compressive Transformer for End-to-End Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Online Compressive Transformer for End-to-End Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Chi-Hang Leong|AUTHOR Chi-Hang Leong]], [[Yu-Han Huang|AUTHOR Yu-Han Huang]], [[Jen-Tzung Chien|AUTHOR Jen-Tzung Chien]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210774.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-3-10|PAPER Wed-A-V-3-10 — End to End Transformer-Based Contextual Speech Recognition Based on Pointer Network]]</div>|<div class="cpsessionviewpapertitle">End to End Transformer-Based Contextual Speech Recognition Based on Pointer Network</div><div class="cpsessionviewpaperauthor">[[Binghuai Lin|AUTHOR Binghuai Lin]], [[Liyuan Wang|AUTHOR Liyuan Wang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210775.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-3-11|PAPER Wed-A-V-3-11 — A Comparative Study on Neural Architectures and Training Methods for Japanese Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">A Comparative Study on Neural Architectures and Training Methods for Japanese Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Shigeki Karita|AUTHOR Shigeki Karita]], [[Yotaro Kubo|AUTHOR Yotaro Kubo]], [[Michiel Adriaan Unico Bacchiani|AUTHOR Michiel Adriaan Unico Bacchiani]], [[Llion Jones|AUTHOR Llion Jones]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211643.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-3-12|PAPER Wed-A-V-3-12 — Advanced Long-Context End-to-End Speech Recognition Using Context-Expanded Transformers]]</div>|<div class="cpsessionviewpapertitle">Advanced Long-Context End-to-End Speech Recognition Using Context-Expanded Transformers</div><div class="cpsessionviewpaperauthor">[[Takaaki Hori|AUTHOR Takaaki Hori]], [[Niko Moritz|AUTHOR Niko Moritz]], [[Chiori Hori|AUTHOR Chiori Hori]], [[Jonathan Le Roux|AUTHOR Jonathan Le Roux]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211743.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-3-13|PAPER Wed-A-V-3-13 — Transformer-Based ASR Incorporating Time-Reduction Layer and Fine-Tuning with Self-Knowledge Distillation]]</div>|<div class="cpsessionviewpapertitle">Transformer-Based ASR Incorporating Time-Reduction Layer and Fine-Tuning with Self-Knowledge Distillation</div><div class="cpsessionviewpaperauthor">[[Md. Akmal Haidar|AUTHOR Md. Akmal Haidar]], [[Chao Xing|AUTHOR Chao Xing]], [[Mehdi Rezagholizadeh|AUTHOR Mehdi Rezagholizadeh]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211921.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-3-14|PAPER Wed-A-V-3-14 — Flexi-Transducer: Optimizing Latency, Accuracy and Compute for Multi-Domain On-Device Scenarios]]</div>|<div class="cpsessionviewpapertitle">Flexi-Transducer: Optimizing Latency, Accuracy and Compute for Multi-Domain On-Device Scenarios</div><div class="cpsessionviewpaperauthor">[[Jay Mahadeokar|AUTHOR Jay Mahadeokar]], [[Yangyang Shi|AUTHOR Yangyang Shi]], [[Yuan Shangguan|AUTHOR Yuan Shangguan]], [[Chunyang Wu|AUTHOR Chunyang Wu]], [[Alex Xiao|AUTHOR Alex Xiao]], [[Hang Su|AUTHOR Hang Su]], [[Duc Le|AUTHOR Duc Le]], [[Ozlem Kalinli|AUTHOR Ozlem Kalinli]], [[Christian Fuegen|AUTHOR Christian Fuegen]], [[Michael L. Seltzer|AUTHOR Michael L. Seltzer]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Wednesday 1 September 2021, (Virtual)|<|
|^Chairs: |^Marc Delcroix|
|^ |^Yulan Liu|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210016.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-4-1|PAPER Wed-A-V-4-1 — Difference in Perceived Speech Signal Quality Assessment Among Monolingual and Bilingual Teenage Students]]</div>|<div class="cpsessionviewpapertitle">Difference in Perceived Speech Signal Quality Assessment Among Monolingual and Bilingual Teenage Students</div><div class="cpsessionviewpaperauthor">[[Przemyslaw Falkowski-Gilski|AUTHOR Przemyslaw Falkowski-Gilski]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210124.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-4-2|PAPER Wed-A-V-4-2 — PILOT: Introducing Transformers for Probabilistic Sound Event Localization]]</div>|<div class="cpsessionviewpapertitle">PILOT: Introducing Transformers for Probabilistic Sound Event Localization</div><div class="cpsessionviewpaperauthor">[[Christopher Schymura|AUTHOR Christopher Schymura]], [[Benedikt Bönninghoff|AUTHOR Benedikt Bönninghoff]], [[Tsubasa Ochiai|AUTHOR Tsubasa Ochiai]], [[Marc Delcroix|AUTHOR Marc Delcroix]], [[Keisuke Kinoshita|AUTHOR Keisuke Kinoshita]], [[Tomohiro Nakatani|AUTHOR Tomohiro Nakatani]], [[Shoko Araki|AUTHOR Shoko Araki]], [[Dorothea Kolossa|AUTHOR Dorothea Kolossa]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210126.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-4-3|PAPER Wed-A-V-4-3 — Sound Source Localization with Majorization Minimization]]</div>|<div class="cpsessionviewpapertitle">Sound Source Localization with Majorization Minimization</div><div class="cpsessionviewpaperauthor">[[Masahito Togami|AUTHOR Masahito Togami]], [[Robin Scheibler|AUTHOR Robin Scheibler]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210299.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-4-4|PAPER Wed-A-V-4-4 — NISQA: A Deep CNN-Self-Attention Model for Multidimensional Speech Quality Prediction with Crowdsourced Datasets]]</div>|<div class="cpsessionviewpapertitle">NISQA: A Deep CNN-Self-Attention Model for Multidimensional Speech Quality Prediction with Crowdsourced Datasets</div><div class="cpsessionviewpaperauthor">[[Gabriel Mittag|AUTHOR Gabriel Mittag]], [[Babak Naderi|AUTHOR Babak Naderi]], [[Assmaa Chehadi|AUTHOR Assmaa Chehadi]], [[Sebastian Möller|AUTHOR Sebastian Möller]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210343.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-4-5|PAPER Wed-A-V-4-5 — Subjective Evaluation of Noise Suppression Algorithms in Crowdsourcing]]</div>|<div class="cpsessionviewpapertitle">Subjective Evaluation of Noise Suppression Algorithms in Crowdsourcing</div><div class="cpsessionviewpaperauthor">[[Babak Naderi|AUTHOR Babak Naderi]], [[Ross Cutler|AUTHOR Ross Cutler]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210375.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-4-6|PAPER Wed-A-V-4-6 — Reliable Intensity Vector Selection for Multi-Source Direction-of-Arrival Estimation Using a Single Acoustic Vector Sensor]]</div>|<div class="cpsessionviewpapertitle">Reliable Intensity Vector Selection for Multi-Source Direction-of-Arrival Estimation Using a Single Acoustic Vector Sensor</div><div class="cpsessionviewpaperauthor">[[Jianhua Geng|AUTHOR Jianhua Geng]], [[Sifan Wang|AUTHOR Sifan Wang]], [[Juan Li|AUTHOR Juan Li]], [[JingWei Li|AUTHOR JingWei Li]], [[Xin Lou|AUTHOR Xin Lou]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210659.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-4-7|PAPER Wed-A-V-4-7 — MetricNet: Towards Improved Modeling For Non-Intrusive Speech Quality Assessment]]</div>|<div class="cpsessionviewpapertitle">MetricNet: Towards Improved Modeling For Non-Intrusive Speech Quality Assessment</div><div class="cpsessionviewpaperauthor">[[Meng Yu|AUTHOR Meng Yu]], [[Chunlei Zhang|AUTHOR Chunlei Zhang]], [[Yong Xu|AUTHOR Yong Xu]], [[Shi-Xiong Zhang|AUTHOR Shi-Xiong Zhang]], [[Dong Yu|AUTHOR Dong Yu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210886.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-4-8|PAPER Wed-A-V-4-8 — CNN-Based Processing of Acoustic and Radio Frequency Signals for Speaker Localization from MAVs]]</div>|<div class="cpsessionviewpapertitle">CNN-Based Processing of Acoustic and Radio Frequency Signals for Speaker Localization from MAVs</div><div class="cpsessionviewpaperauthor">[[Andrea Toma|AUTHOR Andrea Toma]], [[Daniele Salvati|AUTHOR Daniele Salvati]], [[Carlo Drioli|AUTHOR Carlo Drioli]], [[Gian Luca Foresti|AUTHOR Gian Luca Foresti]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211050.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-4-9|PAPER Wed-A-V-4-9 — Assessment of von Mises-Bernoulli Deep Neural Network in Sound Source Localization]]</div>|<div class="cpsessionviewpapertitle">Assessment of von Mises-Bernoulli Deep Neural Network in Sound Source Localization</div><div class="cpsessionviewpaperauthor">[[Katsutoshi Itoyama|AUTHOR Katsutoshi Itoyama]], [[Yoshiya Morimoto|AUTHOR Yoshiya Morimoto]], [[Shungo Masaki|AUTHOR Shungo Masaki]], [[Ryosuke Kojima|AUTHOR Ryosuke Kojima]], [[Kenji Nishida|AUTHOR Kenji Nishida]], [[Kazuhiro Nakadai|AUTHOR Kazuhiro Nakadai]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211051.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-4-10|PAPER Wed-A-V-4-10 — Feature Fusion by Attention Networks for Robust DOA Estimation]]</div>|<div class="cpsessionviewpapertitle">Feature Fusion by Attention Networks for Robust DOA Estimation</div><div class="cpsessionviewpaperauthor">[[Rongliang Liu|AUTHOR Rongliang Liu]], [[Nengheng Zheng|AUTHOR Nengheng Zheng]], [[Xi Chen|AUTHOR Xi Chen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211160.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-4-11|PAPER Wed-A-V-4-11 — Far-Field Speaker Localization and Adaptive GLMB Tracking]]</div>|<div class="cpsessionviewpapertitle">Far-Field Speaker Localization and Adaptive GLMB Tracking</div><div class="cpsessionviewpaperauthor">[[Shoufeng Lin|AUTHOR Shoufeng Lin]], [[Zhaojie Luo|AUTHOR Zhaojie Luo]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211890.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-4-12|PAPER Wed-A-V-4-12 — On the Design of Deep Priors for Unsupervised Audio Restoration]]</div>|<div class="cpsessionviewpapertitle">On the Design of Deep Priors for Unsupervised Audio Restoration</div><div class="cpsessionviewpaperauthor">[[Vivek Sivaraman Narayanaswamy|AUTHOR Vivek Sivaraman Narayanaswamy]], [[Jayaraman J. Thiagarajan|AUTHOR Jayaraman J. Thiagarajan]], [[Andreas Spanias|AUTHOR Andreas Spanias]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212267.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-4-13|PAPER Wed-A-V-4-13 — Cramér-Rao Lower Bound for DOA Estimation with an Array of Directional Microphones in Reverberant Environments]]</div>|<div class="cpsessionviewpapertitle">Cramér-Rao Lower Bound for DOA Estimation with an Array of Directional Microphones in Reverberant Environments</div><div class="cpsessionviewpaperauthor">[[Weiguang Chen|AUTHOR Weiguang Chen]], [[Cheng Xue|AUTHOR Cheng Xue]], [[Xionghu Zhong|AUTHOR Xionghu Zhong]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Wednesday 1 September 2021, (Virtual)|<|
|^Chairs: |^Shinnosuke Takamichi|
|^ |^Xin Wang|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210041.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-5-1|PAPER Wed-A-V-5-1 — GAN Vocoder: Multi-Resolution Discriminator Is All You Need]]</div>|<div class="cpsessionviewpapertitle">GAN Vocoder: Multi-Resolution Discriminator Is All You Need</div><div class="cpsessionviewpaperauthor">[[Jaeseong You|AUTHOR Jaeseong You]], [[Dalhyun Kim|AUTHOR Dalhyun Kim]], [[Gyuhyeon Nam|AUTHOR Gyuhyeon Nam]], [[Geumbyeol Hwang|AUTHOR Geumbyeol Hwang]], [[Gyeongsu Chae|AUTHOR Gyeongsu Chae]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210414.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-5-2|PAPER Wed-A-V-5-2 — Glow-WaveGAN: Learning Speech Representations from GAN-Based Variational Auto-Encoder for High Fidelity Flow-Based Speech Synthesis]]</div>|<div class="cpsessionviewpapertitle">Glow-WaveGAN: Learning Speech Representations from GAN-Based Variational Auto-Encoder for High Fidelity Flow-Based Speech Synthesis</div><div class="cpsessionviewpaperauthor">[[Jian Cong|AUTHOR Jian Cong]], [[Shan Yang|AUTHOR Shan Yang]], [[Lei Xie|AUTHOR Lei Xie]], [[Dan Su|AUTHOR Dan Su]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210517.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-5-3|PAPER Wed-A-V-5-3 — Unified Source-Filter GAN: Unified Source-Filter Network Based On Factorization of Quasi-Periodic Parallel WaveGAN]]</div>|<div class="cpsessionviewpapertitle">Unified Source-Filter GAN: Unified Source-Filter Network Based On Factorization of Quasi-Periodic Parallel WaveGAN</div><div class="cpsessionviewpaperauthor">[[Reo Yoneyama|AUTHOR Reo Yoneyama]], [[Yi-Chiao Wu|AUTHOR Yi-Chiao Wu]], [[Tomoki Toda|AUTHOR Tomoki Toda]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210583.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-5-4|PAPER Wed-A-V-5-4 — Harmonic WaveGAN: GAN-Based Speech Waveform Generation Model with Harmonic Structure Discriminator]]</div>|<div class="cpsessionviewpapertitle">Harmonic WaveGAN: GAN-Based Speech Waveform Generation Model with Harmonic Structure Discriminator</div><div class="cpsessionviewpaperauthor">[[Kazuki Mizuta|AUTHOR Kazuki Mizuta]], [[Tomoki Koriyama|AUTHOR Tomoki Koriyama]], [[Hiroshi Saruwatari|AUTHOR Hiroshi Saruwatari]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210845.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-5-5|PAPER Wed-A-V-5-5 — Fre-GAN: Adversarial Frequency-Consistent Audio Synthesis]]</div>|<div class="cpsessionviewpapertitle">Fre-GAN: Adversarial Frequency-Consistent Audio Synthesis</div><div class="cpsessionviewpaperauthor">[[Ji-Hoon Kim|AUTHOR Ji-Hoon Kim]], [[Sang-Hoon Lee|AUTHOR Sang-Hoon Lee]], [[Ji-Hyun Lee|AUTHOR Ji-Hyun Lee]], [[Seong-Whan Lee|AUTHOR Seong-Whan Lee]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210971.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-5-6|PAPER Wed-A-V-5-6 — GANSpeech: Adversarial Training for High-Fidelity Multi-Speaker Speech Synthesis]]</div>|<div class="cpsessionviewpapertitle">GANSpeech: Adversarial Training for High-Fidelity Multi-Speaker Speech Synthesis</div><div class="cpsessionviewpaperauthor">[[Jinhyeok Yang|AUTHOR Jinhyeok Yang]], [[Jae-Sung Bae|AUTHOR Jae-Sung Bae]], [[Taejun Bak|AUTHOR Taejun Bak]], [[Young-Ik Kim|AUTHOR Young-Ik Kim]], [[Hoon-Young Cho|AUTHOR Hoon-Young Cho]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211016.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-5-7|PAPER Wed-A-V-5-7 — UnivNet: A Neural Vocoder with Multi-Resolution Spectrogram Discriminators for High-Fidelity Waveform Generation]]</div>|<div class="cpsessionviewpapertitle">UnivNet: A Neural Vocoder with Multi-Resolution Spectrogram Discriminators for High-Fidelity Waveform Generation</div><div class="cpsessionviewpaperauthor">[[Won Jang|AUTHOR Won Jang]], [[Dan Lim|AUTHOR Dan Lim]], [[Jaesam Yoon|AUTHOR Jaesam Yoon]], [[Bongwan Kim|AUTHOR Bongwan Kim]], [[Juntae Kim|AUTHOR Juntae Kim]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211600.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-5-8|PAPER Wed-A-V-5-8 — Continuous Wavelet Vocoder-Based Decomposition of Parametric Speech Waveform Synthesis]]</div>|<div class="cpsessionviewpapertitle">Continuous Wavelet Vocoder-Based Decomposition of Parametric Speech Waveform Synthesis</div><div class="cpsessionviewpaperauthor">[[Mohammed Salah Al-Radhi|AUTHOR Mohammed Salah Al-Radhi]], [[Tamás Gábor Csapó|AUTHOR Tamás Gábor Csapó]], [[Csaba Zainkó|AUTHOR Csaba Zainkó]], [[Géza Németh|AUTHOR Géza Németh]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211984.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-5-9|PAPER Wed-A-V-5-9 — High-Fidelity and Low-Latency Universal Neural Vocoder Based on Multiband WaveRNN with Data-Driven Linear Prediction for Discrete Waveform Modeling]]</div>|<div class="cpsessionviewpapertitle">High-Fidelity and Low-Latency Universal Neural Vocoder Based on Multiband WaveRNN with Data-Driven Linear Prediction for Discrete Waveform Modeling</div><div class="cpsessionviewpaperauthor">[[Patrick Lumban Tobing|AUTHOR Patrick Lumban Tobing]], [[Tomoki Toda|AUTHOR Tomoki Toda]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212173.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-5-10|PAPER Wed-A-V-5-10 — Basis-MelGAN: Efficient Neural Vocoder Based on Audio Decomposition]]</div>|<div class="cpsessionviewpapertitle">Basis-MelGAN: Efficient Neural Vocoder Based on Audio Decomposition</div><div class="cpsessionviewpaperauthor">[[Zhengxi Liu|AUTHOR Zhengxi Liu]], [[Yanmin Qian|AUTHOR Yanmin Qian]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210976.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-5-11|PAPER Wed-A-V-5-11 — High-Fidelity Parallel WaveGAN with Multi-Band Harmonic-Plus-Noise Model]]</div>|<div class="cpsessionviewpapertitle">High-Fidelity Parallel WaveGAN with Multi-Band Harmonic-Plus-Noise Model</div><div class="cpsessionviewpaperauthor">[[Min-Jae Hwang|AUTHOR Min-Jae Hwang]], [[Ryuichi Yamamoto|AUTHOR Ryuichi Yamamoto]], [[Eunwoo Song|AUTHOR Eunwoo Song]], [[Jae-Min Kim|AUTHOR Jae-Min Kim]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|16:00–18:00, Wednesday 1 September 2021, (Virtual)|<|
|^Chairs: |^Chiori Hori|
|^ |^Helena Moniz|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210733.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-6-1|PAPER Wed-A-V-6-1 — SpecRec: An Alternative Solution for Improving End-to-End Speech-to-Text Translation via Spectrogram Reconstruction]]</div>|<div class="cpsessionviewpapertitle">SpecRec: An Alternative Solution for Improving End-to-End Speech-to-Text Translation via Spectrogram Reconstruction</div><div class="cpsessionviewpaperauthor">[[Junkun Chen|AUTHOR Junkun Chen]], [[Mingbo Ma|AUTHOR Mingbo Ma]], [[Renjie Zheng|AUTHOR Renjie Zheng]], [[Liang Huang|AUTHOR Liang Huang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210744.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-6-2|PAPER Wed-A-V-6-2 — Subtitle Translation as Markup Translation]]</div>|<div class="cpsessionviewpapertitle">Subtitle Translation as Markup Translation</div><div class="cpsessionviewpaperauthor">[[Colin Cherry|AUTHOR Colin Cherry]], [[Naveen Arivazhagan|AUTHOR Naveen Arivazhagan]], [[Dirk Padfield|AUTHOR Dirk Padfield]], [[Maxim Krikun|AUTHOR Maxim Krikun]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211912.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-6-3|PAPER Wed-A-V-6-3 — Large-Scale Self- and Semi-Supervised Learning for Speech Translation]]</div>|<div class="cpsessionviewpapertitle">Large-Scale Self- and Semi-Supervised Learning for Speech Translation</div><div class="cpsessionviewpaperauthor">[[Changhan Wang|AUTHOR Changhan Wang]], [[Anne Wu|AUTHOR Anne Wu]], [[Juan Pino|AUTHOR Juan Pino]], [[Alexei Baevski|AUTHOR Alexei Baevski]], [[Michael Auli|AUTHOR Michael Auli]], [[Alexis Conneau|AUTHOR Alexis Conneau]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212027.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-6-4|PAPER Wed-A-V-6-4 — CoVoST 2 and Massively Multilingual Speech Translation]]</div>|<div class="cpsessionviewpapertitle">CoVoST 2 and Massively Multilingual Speech Translation</div><div class="cpsessionviewpaperauthor">[[Changhan Wang|AUTHOR Changhan Wang]], [[Anne Wu|AUTHOR Anne Wu]], [[Jiatao Gu|AUTHOR Jiatao Gu]], [[Juan Pino|AUTHOR Juan Pino]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210526.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-6-5|PAPER Wed-A-V-6-5 — AlloST: Low-Resource Speech Translation Without Source Transcription]]</div>|<div class="cpsessionviewpapertitle">AlloST: Low-Resource Speech Translation Without Source Transcription</div><div class="cpsessionviewpaperauthor">[[Yao-Fei Cheng|AUTHOR Yao-Fei Cheng]], [[Hung-Shin Lee|AUTHOR Hung-Shin Lee]], [[Hsin-Min Wang|AUTHOR Hsin-Min Wang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210970.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-6-6|PAPER Wed-A-V-6-6 — Weakly-Supervised Speech-to-Text Mapping with Visually Connected Non-Parallel Speech-Text Data Using Cyclic Partially-Aligned Transformer]]</div>|<div class="cpsessionviewpapertitle">Weakly-Supervised Speech-to-Text Mapping with Visually Connected Non-Parallel Speech-Text Data Using Cyclic Partially-Aligned Transformer</div><div class="cpsessionviewpaperauthor">[[Johanes Effendi|AUTHOR Johanes Effendi]], [[Sakriani Sakti|AUTHOR Sakriani Sakti]], [[Satoshi Nakamura|AUTHOR Satoshi Nakamura]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211020.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-6-7|PAPER Wed-A-V-6-7 — Transcribing Paralinguistic Acoustic Cues to Target Language Text in Transformer-Based Speech-to-Text Translation]]</div>|<div class="cpsessionviewpapertitle">Transcribing Paralinguistic Acoustic Cues to Target Language Text in Transformer-Based Speech-to-Text Translation</div><div class="cpsessionviewpaperauthor">[[Hirotaka Tokuyama|AUTHOR Hirotaka Tokuyama]], [[Sakriani Sakti|AUTHOR Sakriani Sakti]], [[Katsuhito Sudoh|AUTHOR Katsuhito Sudoh]], [[Satoshi Nakamura|AUTHOR Satoshi Nakamura]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211065.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-6-8|PAPER Wed-A-V-6-8 — End-to-End Speech Translation via Cross-Modal Progressive Training]]</div>|<div class="cpsessionviewpapertitle">End-to-End Speech Translation via Cross-Modal Progressive Training</div><div class="cpsessionviewpaperauthor">[[Rong Ye|AUTHOR Rong Ye]], [[Mingxuan Wang|AUTHOR Mingxuan Wang]], [[Lei Li|AUTHOR Lei Li]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211105.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-6-9|PAPER Wed-A-V-6-9 — ASR Posterior-Based Loss for Multi-Task End-to-End Speech Translation]]</div>|<div class="cpsessionviewpapertitle">ASR Posterior-Based Loss for Multi-Task End-to-End Speech Translation</div><div class="cpsessionviewpaperauthor">[[Yuka Ko|AUTHOR Yuka Ko]], [[Katsuhito Sudoh|AUTHOR Katsuhito Sudoh]], [[Sakriani Sakti|AUTHOR Sakriani Sakti]], [[Satoshi Nakamura|AUTHOR Satoshi Nakamura]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210201.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-6-10|PAPER Wed-A-V-6-10 — Towards Simultaneous Machine Interpretation]]</div>|<div class="cpsessionviewpapertitle">Towards Simultaneous Machine Interpretation</div><div class="cpsessionviewpaperauthor">[[Alejandro Pérez-González-de-Martos|AUTHOR Alejandro Pérez-González-de-Martos]], [[Javier Iranzo-Sánchez|AUTHOR Javier Iranzo-Sánchez]], [[Adrià Giménez Pastor|AUTHOR Adrià Giménez Pastor]], [[Javier Jorge|AUTHOR Javier Jorge]], [[Joan-Albert Silvestre-Cerdà|AUTHOR Joan-Albert Silvestre-Cerdà]], [[Jorge Civera|AUTHOR Jorge Civera]], [[Albert Sanchis|AUTHOR Albert Sanchis]], [[Alfons Juan|AUTHOR Alfons Juan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210265.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-6-11|PAPER Wed-A-V-6-11 — Lexical Modeling of ASR Errors for Robust Speech Translation]]</div>|<div class="cpsessionviewpapertitle">Lexical Modeling of ASR Errors for Robust Speech Translation</div><div class="cpsessionviewpaperauthor">[[Giuseppe Martucci|AUTHOR Giuseppe Martucci]], [[Mauro Cettolo|AUTHOR Mauro Cettolo]], [[Matteo Negri|AUTHOR Matteo Negri]], [[Marco Turchi|AUTHOR Marco Turchi]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212007.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-6-12|PAPER Wed-A-V-6-12 — Optimally Encoding Inductive Biases into the Transformer Improves End-to-End Speech Translation]]</div>|<div class="cpsessionviewpapertitle">Optimally Encoding Inductive Biases into the Transformer Improves End-to-End Speech Translation</div><div class="cpsessionviewpaperauthor">[[Piyush Vyas|AUTHOR Piyush Vyas]], [[Anastasia Kuznetsova|AUTHOR Anastasia Kuznetsova]], [[Donald S. Williamson|AUTHOR Donald S. Williamson]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211863.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-A-V-6-13|PAPER Wed-A-V-6-13 — Effects of Feature Scaling and Fusion on Sign Language Translation]]</div>|<div class="cpsessionviewpapertitle">Effects of Feature Scaling and Fusion on Sign Language Translation</div><div class="cpsessionviewpaperauthor">[[Tejaswini Ananthanarayana|AUTHOR Tejaswini Ananthanarayana]], [[Lipisha Chaudhary|AUTHOR Lipisha Chaudhary]], [[Ifeoma Nwogu|AUTHOR Ifeoma Nwogu]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|19:00–21:00, Wednesday 1 September 2021, Room A+B|<|
|^Chairs: |^Johan Rohdin|
|^ |^Madhu Kamble|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210323.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-O-1-1|PAPER Wed-E-O-1-1 — Reformulating DOVER-Lap Label Mapping as a Graph Partitioning Problem]]</div>|<div class="cpsessionviewpapertitle">Reformulating DOVER-Lap Label Mapping as a Graph Partitioning Problem</div><div class="cpsessionviewpaperauthor">[[Desh Raj|AUTHOR Desh Raj]], [[Sanjeev Khudanpur|AUTHOR Sanjeev Khudanpur]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210993.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-O-1-2|PAPER Wed-E-O-1-2 — Graph Attention Networks for Anti-Spoofing]]</div>|<div class="cpsessionviewpapertitle">Graph Attention Networks for Anti-Spoofing</div><div class="cpsessionviewpaperauthor">[[Hemlata Tak|AUTHOR Hemlata Tak]], [[Jee-weon Jung|AUTHOR Jee-weon Jung]], [[Jose Patino|AUTHOR Jose Patino]], [[Massimiliano Todisco|AUTHOR Massimiliano Todisco]], [[Nicholas Evans|AUTHOR Nicholas Evans]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211085.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-O-1-3|PAPER Wed-E-O-1-3 — Log-Likelihood-Ratio Cost Function as Objective Loss for Speaker Verification Systems]]</div>|<div class="cpsessionviewpapertitle">Log-Likelihood-Ratio Cost Function as Objective Loss for Speaker Verification Systems</div><div class="cpsessionviewpaperauthor">[[Victoria Mingote|AUTHOR Victoria Mingote]], [[Antonio Miguel|AUTHOR Antonio Miguel]], [[Alfonso Ortega|AUTHOR Alfonso Ortega]], [[Eduardo Lleida|AUTHOR Eduardo Lleida]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212025.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-O-1-4|PAPER Wed-E-O-1-4 — Effective Phase Encoding for End-To-End Speaker Verification]]</div>|<div class="cpsessionviewpapertitle">Effective Phase Encoding for End-To-End Speaker Verification</div><div class="cpsessionviewpaperauthor">[[Junyi Peng|AUTHOR Junyi Peng]], [[Xiaoyang Qu|AUTHOR Xiaoyang Qu]], [[Rongzhi Gu|AUTHOR Rongzhi Gu]], [[Jianzong Wang|AUTHOR Jianzong Wang]], [[Jing Xiao|AUTHOR Jing Xiao]], [[Lukáš Burget|AUTHOR Lukáš Burget]], [[Jan Černocký|AUTHOR Jan Černocký]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|19:00–21:00, Wednesday 1 September 2021, Room C|<|
|^Chairs: |^Shantipriya Parida|
|^ |^Sebastian Möller|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210608.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-O-2-1|PAPER Wed-E-O-2-1 — Impact of Encoding and Segmentation Strategies on End-to-End Simultaneous Speech Translation]]</div>|<div class="cpsessionviewpapertitle">Impact of Encoding and Segmentation Strategies on End-to-End Simultaneous Speech Translation</div><div class="cpsessionviewpaperauthor">[[Ha Nguyen|AUTHOR Ha Nguyen]], [[Yannick Estève|AUTHOR Yannick Estève]], [[Laurent Besacier|AUTHOR Laurent Besacier]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212232.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-O-2-2|PAPER Wed-E-O-2-2 — Lost in Interpreting: Speech Translation from Source or Interpreter?]]</div>|<div class="cpsessionviewpapertitle">Lost in Interpreting: Speech Translation from Source or Interpreter?</div><div class="cpsessionviewpaperauthor">[[Dominik Macháček|AUTHOR Dominik Macháček]], [[Matúš Žilinec|AUTHOR Matúš Žilinec]], [[Ondřej Bojar|AUTHOR Ondřej Bojar]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210080.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-O-2-3|PAPER Wed-E-O-2-3 — Active Speaker Detection as a Multi-Objective Optimization with Uncertainty-Based Multimodal Fusion]]</div>|<div class="cpsessionviewpapertitle">Active Speaker Detection as a Multi-Objective Optimization with Uncertainty-Based Multimodal Fusion</div><div class="cpsessionviewpaperauthor">[[Baptiste Pouthier|AUTHOR Baptiste Pouthier]], [[Laurent Pilati|AUTHOR Laurent Pilati]], [[Leela K. Gudupudi|AUTHOR Leela K. Gudupudi]], [[Charles Bouveyron|AUTHOR Charles Bouveyron]], [[Frederic Precioso|AUTHOR Frederic Precioso]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211658.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-O-2-4|PAPER Wed-E-O-2-4 — It’s Not What You Said, it’s How You Said it: Discriminative Perception of Speech as a Multichannel Communication System]]</div>|<div class="cpsessionviewpapertitle">It’s Not What You Said, it’s How You Said it: Discriminative Perception of Speech as a Multichannel Communication System</div><div class="cpsessionviewpaperauthor">[[Sarenne Wallbridge|AUTHOR Sarenne Wallbridge]], [[Peter Bell|AUTHOR Peter Bell]], [[Catherine Lai|AUTHOR Catherine Lai]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|19:00–21:00, Wednesday 1 September 2021, Room D|<|
|^Chairs: |^Douglas O’Shaughnessy|
|^ |^Hugo Van hamme|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210314.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-O-3-1|PAPER Wed-E-O-3-1 — Extending the Fullband E-Model Towards Background Noise, Bursty Packet Loss, and Conversational Degradations]]</div>|<div class="cpsessionviewpapertitle">Extending the Fullband E-Model Towards Background Noise, Bursty Packet Loss, and Conversational Degradations</div><div class="cpsessionviewpaperauthor">[[Thilo Michael|AUTHOR Thilo Michael]], [[Gabriel Mittag|AUTHOR Gabriel Mittag]], [[Andreas Bütow|AUTHOR Andreas Bütow]], [[Sebastian Möller|AUTHOR Sebastian Möller]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210616.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-O-3-2|PAPER Wed-E-O-3-2 — ORCA-SLANG: An Automatic Multi-Stage Semi-Supervised Deep Learning Framework for Large-Scale Killer Whale Call Type Identification]]</div>|<div class="cpsessionviewpapertitle">ORCA-SLANG: An Automatic Multi-Stage Semi-Supervised Deep Learning Framework for Large-Scale Killer Whale Call Type Identification</div><div class="cpsessionviewpaperauthor">[[Christian Bergler|AUTHOR Christian Bergler]], [[Manuel Schmitt|AUTHOR Manuel Schmitt]], [[Andreas Maier|AUTHOR Andreas Maier]], [[Helena Symonds|AUTHOR Helena Symonds]], [[Paul Spong|AUTHOR Paul Spong]], [[Steven R. Ness|AUTHOR Steven R. Ness]], [[George Tzanetakis|AUTHOR George Tzanetakis]], [[Elmar Nöth|AUTHOR Elmar Nöth]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210695.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-O-3-3|PAPER Wed-E-O-3-3 — Audiovisual Transfer Learning for Audio Tagging and Sound Event Detection]]</div>|<div class="cpsessionviewpapertitle">Audiovisual Transfer Learning for Audio Tagging and Sound Event Detection</div><div class="cpsessionviewpaperauthor">[[Wim Boes|AUTHOR Wim Boes]], [[Hugo Van hamme|AUTHOR Hugo Van hamme]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211685.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-O-3-4|PAPER Wed-E-O-3-4 — Non-Intrusive Speech Quality Assessment with Transfer Learning and Subject-Specific Scaling]]</div>|<div class="cpsessionviewpapertitle">Non-Intrusive Speech Quality Assessment with Transfer Learning and Subject-Specific Scaling</div><div class="cpsessionviewpaperauthor">[[Natalia Nessler|AUTHOR Natalia Nessler]], [[Milos Cernak|AUTHOR Milos Cernak]], [[Paolo Prandoni|AUTHOR Paolo Prandoni]], [[Pablo Mainar|AUTHOR Pablo Mainar]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212227.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-O-3-5|PAPER Wed-E-O-3-5 — Audio Retrieval with Natural Language Queries]]</div>|<div class="cpsessionviewpapertitle">Audio Retrieval with Natural Language Queries</div><div class="cpsessionviewpaperauthor">[[Andreea-Maria Oncescu|AUTHOR Andreea-Maria Oncescu]], [[A. Sophia Koepke|AUTHOR A. Sophia Koepke]], [[João F. Henriques|AUTHOR João F. Henriques]], [[Zeynep Akata|AUTHOR Zeynep Akata]], [[Samuel Albanie|AUTHOR Samuel Albanie]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|19:00–21:00, Wednesday 1 September 2021, Room Lacina|<|
|^Chairs: |^Harishchandra Dubey|
|^ |^Ross Cutler|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211609.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-SS-1-1|PAPER Wed-E-SS-1-1 — INTERSPEECH 2021 Deep Noise Suppression Challenge]]</div>|<div class="cpsessionviewpapertitle">INTERSPEECH 2021 Deep Noise Suppression Challenge</div><div class="cpsessionviewpaperauthor">[[Chandan K.A. Reddy|AUTHOR Chandan K.A. Reddy]], [[Harishchandra Dubey|AUTHOR Harishchandra Dubey]], [[Kazuhito Koishida|AUTHOR Kazuhito Koishida]], [[Arun Nair|AUTHOR Arun Nair]], [[Vishak Gopal|AUTHOR Vishak Gopal]], [[Ross Cutler|AUTHOR Ross Cutler]], [[Sebastian Braun|AUTHOR Sebastian Braun]], [[Hannes Gamper|AUTHOR Hannes Gamper]], [[Robert Aichner|AUTHOR Robert Aichner]], [[Sriram Srinivasan|AUTHOR Sriram Srinivasan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211137.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-SS-1-2|PAPER Wed-E-SS-1-2 — A Simultaneous Denoising and Dereverberation Framework with Target Decoupling]]</div>|<div class="cpsessionviewpapertitle">A Simultaneous Denoising and Dereverberation Framework with Target Decoupling</div><div class="cpsessionviewpaperauthor">[[Andong Li|AUTHOR Andong Li]], [[Wenzhe Liu|AUTHOR Wenzhe Liu]], [[Xiaoxue Luo|AUTHOR Xiaoxue Luo]], [[Guochen Yu|AUTHOR Guochen Yu]], [[Chengshi Zheng|AUTHOR Chengshi Zheng]], [[Xiaodong Li|AUTHOR Xiaodong Li]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210936.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-SS-1-3|PAPER Wed-E-SS-1-3 — Deep Noise Suppression with Non-Intrusive PESQNet Supervision Enabling the Use of Real Training Data]]</div>|<div class="cpsessionviewpapertitle">Deep Noise Suppression with Non-Intrusive PESQNet Supervision Enabling the Use of Real Training Data</div><div class="cpsessionviewpaperauthor">[[Ziyi Xu|AUTHOR Ziyi Xu]], [[Maximilian Strake|AUTHOR Maximilian Strake]], [[Tim Fingscheidt|AUTHOR Tim Fingscheidt]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210296.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-SS-1-4|PAPER Wed-E-SS-1-4 — DPCRN: Dual-Path Convolution Recurrent Network for Single Channel Speech Enhancement]]</div>|<div class="cpsessionviewpapertitle">DPCRN: Dual-Path Convolution Recurrent Network for Single Channel Speech Enhancement</div><div class="cpsessionviewpaperauthor">[[Xiaohuai Le|AUTHOR Xiaohuai Le]], [[Hongsheng Chen|AUTHOR Hongsheng Chen]], [[Kai Chen|AUTHOR Kai Chen]], [[Jing Lu|AUTHOR Jing Lu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211482.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-SS-1-5|PAPER Wed-E-SS-1-5 — DCCRN+: Channel-Wise Subband DCCRN with SNR Estimation for Speech Enhancement]]</div>|<div class="cpsessionviewpapertitle">DCCRN+: Channel-Wise Subband DCCRN with SNR Estimation for Speech Enhancement</div><div class="cpsessionviewpaperauthor">[[Shubo Lv|AUTHOR Shubo Lv]], [[Yanxin Hu|AUTHOR Yanxin Hu]], [[Shimin Zhang|AUTHOR Shimin Zhang]], [[Lei Xie|AUTHOR Lei Xie]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211042.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-SS-1-6|PAPER Wed-E-SS-1-6 — DBNet: A Dual-Branch Network Architecture Processing on Spectrum and Waveform for Single-Channel Speech Enhancement]]</div>|<div class="cpsessionviewpapertitle">DBNet: A Dual-Branch Network Architecture Processing on Spectrum and Waveform for Single-Channel Speech Enhancement</div><div class="cpsessionviewpaperauthor">[[Kanghao Zhang|AUTHOR Kanghao Zhang]], [[Shulin He|AUTHOR Shulin He]], [[Hao Li|AUTHOR Hao Li]], [[Xueliang Zhang|AUTHOR Xueliang Zhang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211410.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-SS-1-7|PAPER Wed-E-SS-1-7 — Low-Delay Speech Enhancement Using Perceptually Motivated Target and Loss]]</div>|<div class="cpsessionviewpapertitle">Low-Delay Speech Enhancement Using Perceptually Motivated Target and Loss</div><div class="cpsessionviewpaperauthor">[[Xu Zhang|AUTHOR Xu Zhang]], [[Xinlei Ren|AUTHOR Xinlei Ren]], [[Xiguang Zheng|AUTHOR Xiguang Zheng]], [[Lianwu Chen|AUTHOR Lianwu Chen]], [[Chen Zhang|AUTHOR Chen Zhang]], [[Liang Guo|AUTHOR Liang Guo]], [[Bing Yu|AUTHOR Bing Yu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210668.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-SS-1-8|PAPER Wed-E-SS-1-8 — Lightweight Causal Transformer with Local Self-Attention for Real-Time Speech Enhancement]]</div>|<div class="cpsessionviewpapertitle">Lightweight Causal Transformer with Local Self-Attention for Real-Time Speech Enhancement</div><div class="cpsessionviewpaperauthor">[[Koen Oostermeijer|AUTHOR Koen Oostermeijer]], [[Qing Wang|AUTHOR Qing Wang]], [[Jun Du|AUTHOR Jun Du]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|19:00–21:00, Wednesday 1 September 2021, (Virtual)|<|
|^Chairs: |^Brian Kingsbury|
|^ |^Mikko Kurimo|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210198.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-1-1|PAPER Wed-E-V-1-1 — Bootstrap an End-to-End ASR System by Multilingual Training, Transfer Learning, Text-to-Text Mapping and Synthetic Audio]]</div>|<div class="cpsessionviewpapertitle">Bootstrap an End-to-End ASR System by Multilingual Training, Transfer Learning, Text-to-Text Mapping and Synthetic Audio</div><div class="cpsessionviewpaperauthor">[[Manuel Giollo|AUTHOR Manuel Giollo]], [[Deniz Gunceler|AUTHOR Deniz Gunceler]], [[Yulan Liu|AUTHOR Yulan Liu]], [[Daniel Willett|AUTHOR Daniel Willett]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210216.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-1-2|PAPER Wed-E-V-1-2 — Efficient Weight Factorization for Multilingual Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Efficient Weight Factorization for Multilingual Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Ngoc-Quan Pham|AUTHOR Ngoc-Quan Pham]], [[Tuan-Nam Nguyen|AUTHOR Tuan-Nam Nguyen]], [[Sebastian Stüker|AUTHOR Sebastian Stüker]], [[Alex Waibel|AUTHOR Alex Waibel]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210329.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-1-3|PAPER Wed-E-V-1-3 — Unsupervised Cross-Lingual Representation Learning for Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Unsupervised Cross-Lingual Representation Learning for Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Alexis Conneau|AUTHOR Alexis Conneau]], [[Alexei Baevski|AUTHOR Alexei Baevski]], [[Ronan Collobert|AUTHOR Ronan Collobert]], [[Abdelrahman Mohamed|AUTHOR Abdelrahman Mohamed]], [[Michael Auli|AUTHOR Michael Auli]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210390.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-1-4|PAPER Wed-E-V-1-4 — Language and Speaker-Independent Feature Transformation for End-to-End Multilingual Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Language and Speaker-Independent Feature Transformation for End-to-End Multilingual Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Tomoaki Hayakawa|AUTHOR Tomoaki Hayakawa]], [[Chee Siang Leow|AUTHOR Chee Siang Leow]], [[Akio Kobayashi|AUTHOR Akio Kobayashi]], [[Takehito Utsuro|AUTHOR Takehito Utsuro]], [[Hiromitsu Nishizaki|AUTHOR Hiromitsu Nishizaki]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210631.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-1-5|PAPER Wed-E-V-1-5 — Using Large Self-Supervised Models for Low-Resource Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Using Large Self-Supervised Models for Low-Resource Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Krishna D. N.|AUTHOR Krishna D. N.]], [[Pinyi Wang|AUTHOR Pinyi Wang]], [[Bruno Bozza|AUTHOR Bruno Bozza]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210978.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-1-6|PAPER Wed-E-V-1-6 — Dual Script E2E Framework for Multilingual and Code-Switching ASR]]</div>|<div class="cpsessionviewpapertitle">Dual Script E2E Framework for Multilingual and Code-Switching ASR</div><div class="cpsessionviewpaperauthor">[[Mari Ganesh Kumar|AUTHOR Mari Ganesh Kumar]], [[Jom Kuriakose|AUTHOR Jom Kuriakose]], [[Anand Thyagachandran|AUTHOR Anand Thyagachandran]], [[Arun Kumar A.|AUTHOR Arun Kumar A.]], [[Ashish Seth|AUTHOR Ashish Seth]], [[Lodagala V.S.V. Durga Prasad|AUTHOR Lodagala V.S.V. Durga Prasad]], [[Saish Jaiswal|AUTHOR Saish Jaiswal]], [[Anusha Prakash|AUTHOR Anusha Prakash]], [[Hema A. Murthy|AUTHOR Hema A. Murthy]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211339.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-1-7|PAPER Wed-E-V-1-7 — MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages]]</div>|<div class="cpsessionviewpapertitle">MUCS 2021: Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages</div><div class="cpsessionviewpaperauthor">[[Anuj Diwan|AUTHOR Anuj Diwan]], [[Rakesh Vaideeswaran|AUTHOR Rakesh Vaideeswaran]], [[Sanket Shah|AUTHOR Sanket Shah]], [[Ankita Singh|AUTHOR Ankita Singh]], [[Srinivasa Raghavan|AUTHOR Srinivasa Raghavan]], [[Shreya Khare|AUTHOR Shreya Khare]], [[Vinit Unni|AUTHOR Vinit Unni]], [[Saurabh Vyas|AUTHOR Saurabh Vyas]], [[Akash Rajpuria|AUTHOR Akash Rajpuria]], [[Chiranjeevi Yarra|AUTHOR Chiranjeevi Yarra]], [[Ashish Mittal|AUTHOR Ashish Mittal]], [[Prasanta Kumar Ghosh|AUTHOR Prasanta Kumar Ghosh]], [[Preethi Jyothi|AUTHOR Preethi Jyothi]], [[Kalika Bali|AUTHOR Kalika Bali]], [[Vivek Seshadri|AUTHOR Vivek Seshadri]], [[Sunayana Sitaram|AUTHOR Sunayana Sitaram]], [[Samarth Bharadwaj|AUTHOR Samarth Bharadwaj]], [[Jai Nanavati|AUTHOR Jai Nanavati]], [[Raoul Nanavati|AUTHOR Raoul Nanavati]], [[Karthik Sankaranarayanan|AUTHOR Karthik Sankaranarayanan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211390.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-1-8|PAPER Wed-E-V-1-8 — Adapt-and-Adjust: Overcoming the Long-Tail Problem of Multilingual Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Adapt-and-Adjust: Overcoming the Long-Tail Problem of Multilingual Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Genta Indra Winata|AUTHOR Genta Indra Winata]], [[Guangsen Wang|AUTHOR Guangsen Wang]], [[Caiming Xiong|AUTHOR Caiming Xiong]], [[Steven Hoi|AUTHOR Steven Hoi]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211578.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-1-9|PAPER Wed-E-V-1-9 — SRI-B End-to-End System for Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages]]</div>|<div class="cpsessionviewpapertitle">SRI-B End-to-End System for Multilingual and Code-Switching ASR Challenges for Low Resource Indian Languages</div><div class="cpsessionviewpaperauthor">[[Hardik Sailor|AUTHOR Hardik Sailor]], [[Kiran Praveen T.|AUTHOR Kiran Praveen T.]], [[Vikas Agrawal|AUTHOR Vikas Agrawal]], [[Abhinav Jain|AUTHOR Abhinav Jain]], [[Abhishek Pandey|AUTHOR Abhishek Pandey]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211803.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-1-10|PAPER Wed-E-V-1-10 — Hierarchical Phone Recognition with Compositional Phonetics]]</div>|<div class="cpsessionviewpapertitle">Hierarchical Phone Recognition with Compositional Phonetics</div><div class="cpsessionviewpaperauthor">[[Xinjian Li|AUTHOR Xinjian Li]], [[Juncheng Li|AUTHOR Juncheng Li]], [[Florian Metze|AUTHOR Florian Metze]], [[Alan W. Black|AUTHOR Alan W. Black]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211809.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-1-11|PAPER Wed-E-V-1-11 — Towards One Model to Rule All: Multilingual Strategy for Dialectal Code-Switching Arabic ASR]]</div>|<div class="cpsessionviewpapertitle">Towards One Model to Rule All: Multilingual Strategy for Dialectal Code-Switching Arabic ASR</div><div class="cpsessionviewpaperauthor">[[Shammur Absar Chowdhury|AUTHOR Shammur Absar Chowdhury]], [[Amir Hussein|AUTHOR Amir Hussein]], [[Ahmed Abdelali|AUTHOR Ahmed Abdelali]], [[Ahmed Ali|AUTHOR Ahmed Ali]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211944.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-1-12|PAPER Wed-E-V-1-12 — Differentiable Allophone Graphs for Language-Universal Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Differentiable Allophone Graphs for Language-Universal Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Brian Yan|AUTHOR Brian Yan]], [[Siddharth Dalmia|AUTHOR Siddharth Dalmia]], [[David R. Mortensen|AUTHOR David R. Mortensen]], [[Florian Metze|AUTHOR Florian Metze]], [[Shinji Watanabe|AUTHOR Shinji Watanabe]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|19:00–21:00, Wednesday 1 September 2021, (Virtual)|<|
|^Chairs: |^Khiet Truong|
|^ |^Nicholas Cummins|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210291.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-2-1|PAPER Wed-E-V-2-1 — Automatic Speech Recognition Systems Errors for Objective Sleepiness Detection Through Voice]]</div>|<div class="cpsessionviewpapertitle">Automatic Speech Recognition Systems Errors for Objective Sleepiness Detection Through Voice</div><div class="cpsessionviewpaperauthor">[[Vincent P. Martin|AUTHOR Vincent P. Martin]], [[Jean-Luc Rouas|AUTHOR Jean-Luc Rouas]], [[Florian Boyer|AUTHOR Florian Boyer]], [[Pierre Philip|AUTHOR Pierre Philip]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210353.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-2-2|PAPER Wed-E-V-2-2 — Robust Laughter Detection in Noisy Environments]]</div>|<div class="cpsessionviewpapertitle">Robust Laughter Detection in Noisy Environments</div><div class="cpsessionviewpaperauthor">[[Jon Gillick|AUTHOR Jon Gillick]], [[Wesley Deng|AUTHOR Wesley Deng]], [[Kimiko Ryokai|AUTHOR Kimiko Ryokai]], [[David Bamman|AUTHOR David Bamman]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210827.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-2-3|PAPER Wed-E-V-2-3 — Impact of Emotional State on Estimation of Willingness to Buy from Advertising Speech]]</div>|<div class="cpsessionviewpapertitle">Impact of Emotional State on Estimation of Willingness to Buy from Advertising Speech</div><div class="cpsessionviewpaperauthor">[[Mizuki Nagano|AUTHOR Mizuki Nagano]], [[Yusuke Ijima|AUTHOR Yusuke Ijima]], [[Sadao Hiroya|AUTHOR Sadao Hiroya]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210904.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-2-4|PAPER Wed-E-V-2-4 — Stacked Recurrent Neural Networks for Speech-Based Inference of Attachment Condition in School Age Children]]</div>|<div class="cpsessionviewpapertitle">Stacked Recurrent Neural Networks for Speech-Based Inference of Attachment Condition in School Age Children</div><div class="cpsessionviewpaperauthor">[[Huda Alsofyani|AUTHOR Huda Alsofyani]], [[Alessandro Vinciarelli|AUTHOR Alessandro Vinciarelli]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210928.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-2-5|PAPER Wed-E-V-2-5 — Language or Paralanguage, This is the Problem: Comparing Depressed and Non-Depressed Speakers Through the Analysis of Gated Multimodal Units]]</div>|<div class="cpsessionviewpapertitle">Language or Paralanguage, This is the Problem: Comparing Depressed and Non-Depressed Speakers Through the Analysis of Gated Multimodal Units</div><div class="cpsessionviewpaperauthor">[[Nujud Aloshban|AUTHOR Nujud Aloshban]], [[Anna Esposito|AUTHOR Anna Esposito]], [[Alessandro Vinciarelli|AUTHOR Alessandro Vinciarelli]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211100.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-2-6|PAPER Wed-E-V-2-6 — Emotion Carrier Recognition from Personal Narratives]]</div>|<div class="cpsessionviewpapertitle">Emotion Carrier Recognition from Personal Narratives</div><div class="cpsessionviewpaperauthor">[[Aniruddha Tammewar|AUTHOR Aniruddha Tammewar]], [[Alessandra Cervone|AUTHOR Alessandra Cervone]], [[Giuseppe Riccardi|AUTHOR Giuseppe Riccardi]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211159.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-2-7|PAPER Wed-E-V-2-7 — Non-Verbal Vocalisation and Laughter Detection Using Sequence-to-Sequence Models and Multi-Label Training]]</div>|<div class="cpsessionviewpapertitle">Non-Verbal Vocalisation and Laughter Detection Using Sequence-to-Sequence Models and Multi-Label Training</div><div class="cpsessionviewpaperauthor">[[Scott Condron|AUTHOR Scott Condron]], [[Georgia Clarke|AUTHOR Georgia Clarke]], [[Anita Klementiev|AUTHOR Anita Klementiev]], [[Daniela Morse-Kopp|AUTHOR Daniela Morse-Kopp]], [[Jack Parry|AUTHOR Jack Parry]], [[Dimitri Palaz|AUTHOR Dimitri Palaz]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211176.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-2-8|PAPER Wed-E-V-2-8 — TDCA-Net: Time-Domain Channel Attention Network for Depression Detection]]</div>|<div class="cpsessionviewpapertitle">TDCA-Net: Time-Domain Channel Attention Network for Depression Detection</div><div class="cpsessionviewpaperauthor">[[Cong Cai|AUTHOR Cong Cai]], [[Mingyue Niu|AUTHOR Mingyue Niu]], [[Bin Liu|AUTHOR Bin Liu]], [[Jianhua Tao|AUTHOR Jianhua Tao]], [[Xuefei Liu|AUTHOR Xuefei Liu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211717.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-2-9|PAPER Wed-E-V-2-9 — Visual Speech for Obstructive Sleep Apnea Detection]]</div>|<div class="cpsessionviewpapertitle">Visual Speech for Obstructive Sleep Apnea Detection</div><div class="cpsessionviewpaperauthor">[[Catarina Botelho|AUTHOR Catarina Botelho]], [[Alberto Abad|AUTHOR Alberto Abad]], [[Tanja Schultz|AUTHOR Tanja Schultz]], [[Isabel Trancoso|AUTHOR Isabel Trancoso]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211932.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-2-10|PAPER Wed-E-V-2-10 — Analysis of Contextual Voice Changes in Remote Meetings]]</div>|<div class="cpsessionviewpapertitle">Analysis of Contextual Voice Changes in Remote Meetings</div><div class="cpsessionviewpaperauthor">[[Hector A. Cordourier Maruri|AUTHOR Hector A. Cordourier Maruri]], [[Sinem Aslan|AUTHOR Sinem Aslan]], [[Georg Stemmer|AUTHOR Georg Stemmer]], [[Nese Alyuz|AUTHOR Nese Alyuz]], [[Lama Nachman|AUTHOR Lama Nachman]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211967.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-2-11|PAPER Wed-E-V-2-11 — Speech Based Depression Severity Level Classification Using a Multi-Stage Dilated CNN-LSTM Model]]</div>|<div class="cpsessionviewpapertitle">Speech Based Depression Severity Level Classification Using a Multi-Stage Dilated CNN-LSTM Model</div><div class="cpsessionviewpaperauthor">[[Nadee Seneviratne|AUTHOR Nadee Seneviratne]], [[Carol Espy-Wilson|AUTHOR Carol Espy-Wilson]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|19:00–21:00, Wednesday 1 September 2021, (Virtual)|<|
|^Chairs: |^Peter Bell|
|^ |^Suyoun Kim|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211169.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-3-1|PAPER Wed-E-V-3-1 — Multi-Domain Knowledge Distillation via Uncertainty-Matching for End-to-End ASR Models]]</div>|<div class="cpsessionviewpapertitle">Multi-Domain Knowledge Distillation via Uncertainty-Matching for End-to-End ASR Models</div><div class="cpsessionviewpaperauthor">[[Ho-Gyeong Kim|AUTHOR Ho-Gyeong Kim]], [[Min-Joong Lee|AUTHOR Min-Joong Lee]], [[Hoshik Lee|AUTHOR Hoshik Lee]], [[Tae Gyoon Kang|AUTHOR Tae Gyoon Kang]], [[Jihyun Lee|AUTHOR Jihyun Lee]], [[Eunho Yang|AUTHOR Eunho Yang]], [[Sung Ju Hwang|AUTHOR Sung Ju Hwang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211575.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-3-2|PAPER Wed-E-V-3-2 — Learning a Neural Diff for Speech Models]]</div>|<div class="cpsessionviewpapertitle">Learning a Neural Diff for Speech Models</div><div class="cpsessionviewpaperauthor">[[Jonathan Macoskey|AUTHOR Jonathan Macoskey]], [[Grant P. Strimel|AUTHOR Grant P. Strimel]], [[Ariya Rastrow|AUTHOR Ariya Rastrow]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210280.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-3-3|PAPER Wed-E-V-3-3 — Stochastic Attention Head Removal: A Simple and Effective Method for Improving Transformer Based ASR Models]]</div>|<div class="cpsessionviewpapertitle">Stochastic Attention Head Removal: A Simple and Effective Method for Improving Transformer Based ASR Models</div><div class="cpsessionviewpaperauthor">[[Shucong Zhang|AUTHOR Shucong Zhang]], [[Erfan Loweimi|AUTHOR Erfan Loweimi]], [[Peter Bell|AUTHOR Peter Bell]], [[Steve Renals|AUTHOR Steve Renals]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210355.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-3-4|PAPER Wed-E-V-3-4 — Model-Agnostic Fast Adaptive Multi-Objective Balancing Algorithm for Multilingual Automatic Speech Recognition Model Training]]</div>|<div class="cpsessionviewpapertitle">Model-Agnostic Fast Adaptive Multi-Objective Balancing Algorithm for Multilingual Automatic Speech Recognition Model Training</div><div class="cpsessionviewpaperauthor">[[Jiabin Xue|AUTHOR Jiabin Xue]], [[Tieran Zheng|AUTHOR Tieran Zheng]], [[Jiqing Han|AUTHOR Jiqing Han]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210563.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-3-5|PAPER Wed-E-V-3-5 — Towards Lifelong Learning of End-to-End ASR]]</div>|<div class="cpsessionviewpapertitle">Towards Lifelong Learning of End-to-End ASR</div><div class="cpsessionviewpaperauthor">[[Heng-Jui Chang|AUTHOR Heng-Jui Chang]], [[Hung-yi Lee|AUTHOR Hung-yi Lee]], [[Lin-shan Lee|AUTHOR Lin-shan Lee]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210614.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-3-6|PAPER Wed-E-V-3-6 — Self-Adaptive Distillation for Multilingual Speech Recognition: Leveraging Student Independence]]</div>|<div class="cpsessionviewpapertitle">Self-Adaptive Distillation for Multilingual Speech Recognition: Leveraging Student Independence</div><div class="cpsessionviewpaperauthor">[[Isabel Leal|AUTHOR Isabel Leal]], [[Neeraj Gaur|AUTHOR Neeraj Gaur]], [[Parisa Haghani|AUTHOR Parisa Haghani]], [[Brian Farris|AUTHOR Brian Farris]], [[Pedro J. Moreno|AUTHOR Pedro J. Moreno]], [[Manasa Prasad|AUTHOR Manasa Prasad]], [[Bhuvana Ramabhadran|AUTHOR Bhuvana Ramabhadran]], [[Yun Zhu|AUTHOR Yun Zhu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210648.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-3-7|PAPER Wed-E-V-3-7 — Regularizing Word Segmentation by Creating Misspellings]]</div>|<div class="cpsessionviewpapertitle">Regularizing Word Segmentation by Creating Misspellings</div><div class="cpsessionviewpaperauthor">[[Hainan Xu|AUTHOR Hainan Xu]], [[Kartik Audhkhasi|AUTHOR Kartik Audhkhasi]], [[Yinghui Huang|AUTHOR Yinghui Huang]], [[Jesse Emond|AUTHOR Jesse Emond]], [[Bhuvana Ramabhadran|AUTHOR Bhuvana Ramabhadran]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210683.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-3-8|PAPER Wed-E-V-3-8 — Multitask Training with Text Data for End-to-End Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Multitask Training with Text Data for End-to-End Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Peidong Wang|AUTHOR Peidong Wang]], [[Tara N. Sainath|AUTHOR Tara N. Sainath]], [[Ron J. Weiss|AUTHOR Ron J. Weiss]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210894.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-3-9|PAPER Wed-E-V-3-9 — Emitting Word Timings with HMM-Free End-to-End System in Automatic Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Emitting Word Timings with HMM-Free End-to-End System in Automatic Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Xianzhao Chen|AUTHOR Xianzhao Chen]], [[Hao Ni|AUTHOR Hao Ni]], [[Yi He|AUTHOR Yi He]], [[Kang Wang|AUTHOR Kang Wang]], [[Zejun Ma|AUTHOR Zejun Ma]], [[Zongxia Xie|AUTHOR Zongxia Xie]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211644.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-3-10|PAPER Wed-E-V-3-10 — Scaling Laws for Acoustic Models]]</div>|<div class="cpsessionviewpapertitle">Scaling Laws for Acoustic Models</div><div class="cpsessionviewpaperauthor">[[Jasha Droppo|AUTHOR Jasha Droppo]], [[Oguz Elibol|AUTHOR Oguz Elibol]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211657.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-3-11|PAPER Wed-E-V-3-11 — Leveraging Non-Target Language Resources to Improve ASR Performance in a Target Language]]</div>|<div class="cpsessionviewpapertitle">Leveraging Non-Target Language Resources to Improve ASR Performance in a Target Language</div><div class="cpsessionviewpaperauthor">[[Jayadev Billa|AUTHOR Jayadev Billa]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211962.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-3-12|PAPER Wed-E-V-3-12 — 4-Bit Quantization of LSTM-Based Speech Recognition Models]]</div>|<div class="cpsessionviewpapertitle">4-Bit Quantization of LSTM-Based Speech Recognition Models</div><div class="cpsessionviewpaperauthor">[[Andrea Fasoli|AUTHOR Andrea Fasoli]], [[Chia-Yu Chen|AUTHOR Chia-Yu Chen]], [[Mauricio Serrano|AUTHOR Mauricio Serrano]], [[Xiao Sun|AUTHOR Xiao Sun]], [[Naigang Wang|AUTHOR Naigang Wang]], [[Swagath Venkataramani|AUTHOR Swagath Venkataramani]], [[George Saon|AUTHOR George Saon]], [[Xiaodong Cui|AUTHOR Xiaodong Cui]], [[Brian Kingsbury|AUTHOR Brian Kingsbury]], [[Wei Zhang|AUTHOR Wei Zhang]], [[Zoltán Tüske|AUTHOR Zoltán Tüske]], [[Kailash Gopalakrishnan|AUTHOR Kailash Gopalakrishnan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212043.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-3-13|PAPER Wed-E-V-3-13 — Unified Autoregressive Modeling for Joint End-to-End Multi-Talker Overlapped Speech Recognition and Speaker Attribute Estimation]]</div>|<div class="cpsessionviewpapertitle">Unified Autoregressive Modeling for Joint End-to-End Multi-Talker Overlapped Speech Recognition and Speaker Attribute Estimation</div><div class="cpsessionviewpaperauthor">[[Ryo Masumura|AUTHOR Ryo Masumura]], [[Daiki Okamura|AUTHOR Daiki Okamura]], [[Naoki Makishima|AUTHOR Naoki Makishima]], [[Mana Ihori|AUTHOR Mana Ihori]], [[Akihiko Takashima|AUTHOR Akihiko Takashima]], [[Tomohiro Tanaka|AUTHOR Tomohiro Tanaka]], [[Shota Orihashi|AUTHOR Shota Orihashi]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212075.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-3-14|PAPER Wed-E-V-3-14 — Minimum Word Error Rate Training with Language Model Fusion for End-to-End Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Minimum Word Error Rate Training with Language Model Fusion for End-to-End Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Zhong Meng|AUTHOR Zhong Meng]], [[Yu Wu|AUTHOR Yu Wu]], [[Naoyuki Kanda|AUTHOR Naoyuki Kanda]], [[Liang Lu|AUTHOR Liang Lu]], [[Xie Chen|AUTHOR Xie Chen]], [[Guoli Ye|AUTHOR Guoli Ye]], [[Eric Sun|AUTHOR Eric Sun]], [[Jinyu Li|AUTHOR Jinyu Li]], [[Yifan Gong|AUTHOR Yifan Gong]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212198.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-3-15|PAPER Wed-E-V-3-15 — Variable Frame Rate Acoustic Models Using Minimum Error Reinforcement Learning]]</div>|<div class="cpsessionviewpapertitle">Variable Frame Rate Acoustic Models Using Minimum Error Reinforcement Learning</div><div class="cpsessionviewpaperauthor">[[Dongcheng Jiang|AUTHOR Dongcheng Jiang]], [[Chao Zhang|AUTHOR Chao Zhang]], [[Philip C. Woodland|AUTHOR Philip C. Woodland]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|19:00–21:00, Wednesday 1 September 2021, (Virtual)|<|
|^Chairs: |^Mariapaola D’Imperio|
|^ |^Maria Paola Bissiri|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210006.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-4-1|PAPER Wed-E-V-4-1 — How f0 and Phrase Position Affect Papuan Malay Word Identification]]</div>|<div class="cpsessionviewpapertitle">How f0 and Phrase Position Affect Papuan Malay Word Identification</div><div class="cpsessionviewpaperauthor">[[Constantijn Kaland|AUTHOR Constantijn Kaland]], [[Matthew Gordon|AUTHOR Matthew Gordon]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210190.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-4-2|PAPER Wed-E-V-4-2 — On the Feasibility of the Danish Model of Intonational Transcription: Phonetic Evidence from Jutlandic Danish]]</div>|<div class="cpsessionviewpapertitle">On the Feasibility of the Danish Model of Intonational Transcription: Phonetic Evidence from Jutlandic Danish</div><div class="cpsessionviewpaperauthor">[[Anna Bothe Jespersen|AUTHOR Anna Bothe Jespersen]], [[Pavel Šturm|AUTHOR Pavel Šturm]], [[Míša Hejná|AUTHOR Míša Hejná]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210294.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-4-3|PAPER Wed-E-V-4-3 — An Experiment in Paratone Detection in a Prosodically Annotated EAP Spoken Corpus]]</div>|<div class="cpsessionviewpapertitle">An Experiment in Paratone Detection in a Prosodically Annotated EAP Spoken Corpus</div><div class="cpsessionviewpaperauthor">[[Adrien Méli|AUTHOR Adrien Méli]], [[Nicolas Ballier|AUTHOR Nicolas Ballier]], [[Achille Falaise|AUTHOR Achille Falaise]], [[Alice Henderson|AUTHOR Alice Henderson]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210304.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-4-4|PAPER Wed-E-V-4-4 — ProsoBeast Prosody Annotation Tool]]</div>|<div class="cpsessionviewpapertitle">ProsoBeast Prosody Annotation Tool</div><div class="cpsessionviewpaperauthor">[[Branislav Gerazov|AUTHOR Branislav Gerazov]], [[Michael Wagner|AUTHOR Michael Wagner]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210373.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-4-5|PAPER Wed-E-V-4-5 — Assessing the Use of Prosody in Constituency Parsing of Imperfect Transcripts]]</div>|<div class="cpsessionviewpapertitle">Assessing the Use of Prosody in Constituency Parsing of Imperfect Transcripts</div><div class="cpsessionviewpaperauthor">[[Trang Tran|AUTHOR Trang Tran]], [[Mari Ostendorf|AUTHOR Mari Ostendorf]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210434.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-4-6|PAPER Wed-E-V-4-6 — Targeted and Targetless Neutral Tones in Taiwanese Southern Min]]</div>|<div class="cpsessionviewpapertitle">Targeted and Targetless Neutral Tones in Taiwanese Southern Min</div><div class="cpsessionviewpaperauthor">[[Roger Cheng-yen Liu|AUTHOR Roger Cheng-yen Liu]], [[Feng-fan Hsieh|AUTHOR Feng-fan Hsieh]], [[Yueh-chin Chang|AUTHOR Yueh-chin Chang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210594.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-4-7|PAPER Wed-E-V-4-7 — The Interaction of Word Complexity and Word Duration in an Agglutinative Language]]</div>|<div class="cpsessionviewpapertitle">The Interaction of Word Complexity and Word Duration in an Agglutinative Language</div><div class="cpsessionviewpaperauthor">[[Mária Gósy|AUTHOR Mária Gósy]], [[Kálmán Abari|AUTHOR Kálmán Abari]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210672.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-4-8|PAPER Wed-E-V-4-8 — Taiwan Min Nan (Taiwanese) Checked Tones Sound Change]]</div>|<div class="cpsessionviewpapertitle">Taiwan Min Nan (Taiwanese) Checked Tones Sound Change</div><div class="cpsessionviewpaperauthor">[[Ho-hsien Pan|AUTHOR Ho-hsien Pan]], [[Shao-ren Lyu|AUTHOR Shao-ren Lyu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211172.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-4-9|PAPER Wed-E-V-4-9 — In-Group Advantage in the Perception of Emotions: Evidence from Three Varieties of German]]</div>|<div class="cpsessionviewpapertitle">In-Group Advantage in the Perception of Emotions: Evidence from Three Varieties of German</div><div class="cpsessionviewpaperauthor">[[Moritz Jakob|AUTHOR Moritz Jakob]], [[Bettina Braun|AUTHOR Bettina Braun]], [[Katharina Zahner-Ritter|AUTHOR Katharina Zahner-Ritter]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211625.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-4-10|PAPER Wed-E-V-4-10 — The LF Model in the Frequency Domain for Glottal Airflow Modelling Without Aliasing Distortion]]</div>|<div class="cpsessionviewpapertitle">The LF Model in the Frequency Domain for Glottal Airflow Modelling Without Aliasing Distortion</div><div class="cpsessionviewpaperauthor">[[Christer Gobl|AUTHOR Christer Gobl]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211684.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-4-11|PAPER Wed-E-V-4-11 — Parsing Speech for Grouping and Prominence, and the Typology of Rhythm]]</div>|<div class="cpsessionviewpapertitle">Parsing Speech for Grouping and Prominence, and the Typology of Rhythm</div><div class="cpsessionviewpaperauthor">[[Michael Wagner|AUTHOR Michael Wagner]], [[Alvaro Iturralde Zurita|AUTHOR Alvaro Iturralde Zurita]], [[Sijia Zhang|AUTHOR Sijia Zhang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211776.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-4-12|PAPER Wed-E-V-4-12 — Prosody of Case Markers in Urdu]]</div>|<div class="cpsessionviewpapertitle">Prosody of Case Markers in Urdu</div><div class="cpsessionviewpaperauthor">[[Benazir Mumtaz|AUTHOR Benazir Mumtaz]], [[Massimiliano Canzi|AUTHOR Massimiliano Canzi]], [[Miriam Butt|AUTHOR Miriam Butt]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211903.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-4-13|PAPER Wed-E-V-4-13 — Articulatory Characteristics of Icelandic Voiced Fricative Lenition: Gradience, Categoricity, and Speaker/Gesture-Specific Effects]]</div>|<div class="cpsessionviewpapertitle">Articulatory Characteristics of Icelandic Voiced Fricative Lenition: Gradience, Categoricity, and Speaker/Gesture-Specific Effects</div><div class="cpsessionviewpaperauthor">[[Brynhildur Stefansdottir|AUTHOR Brynhildur Stefansdottir]], [[Francesco Burroni|AUTHOR Francesco Burroni]], [[Sam Tilsen|AUTHOR Sam Tilsen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211780.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-4-14|PAPER Wed-E-V-4-14 — Leveraging the Uniformity Framework to Examine Crosslinguistic Similarity for Long-Lag Stops in Spontaneous Cantonese-English Bilingual Speech]]</div>|<div class="cpsessionviewpapertitle">Leveraging the Uniformity Framework to Examine Crosslinguistic Similarity for Long-Lag Stops in Spontaneous Cantonese-English Bilingual Speech</div><div class="cpsessionviewpaperauthor">[[Khia A. Johnson|AUTHOR Khia A. Johnson]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|19:00–21:00, Wednesday 1 September 2021, (Virtual)|<|
|^Chairs: |^Dorothea Kolossa|
|^ |^Pejman Mowlaee|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211868.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-5-1|PAPER Wed-E-V-5-1 — Personalized Speech Enhancement Through Self-Supervised Data Augmentation and Purification]]</div>|<div class="cpsessionviewpapertitle">Personalized Speech Enhancement Through Self-Supervised Data Augmentation and Purification</div><div class="cpsessionviewpaperauthor">[[Aswin Sivaraman|AUTHOR Aswin Sivaraman]], [[Sunwoo Kim|AUTHOR Sunwoo Kim]], [[Minje Kim|AUTHOR Minje Kim]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211973.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-5-2|PAPER Wed-E-V-5-2 — Speech Denoising with Auditory Models]]</div>|<div class="cpsessionviewpapertitle">Speech Denoising with Auditory Models</div><div class="cpsessionviewpaperauthor">[[Mark R. Saddler|AUTHOR Mark R. Saddler]], [[Andrew Francl|AUTHOR Andrew Francl]], [[Jenelle Feather|AUTHOR Jenelle Feather]], [[Kaizhi Qian|AUTHOR Kaizhi Qian]], [[Yang Zhang|AUTHOR Yang Zhang]], [[Josh H. McDermott|AUTHOR Josh H. McDermott]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210220.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-5-3|PAPER Wed-E-V-5-3 — Human Listening and Live Captioning: Multi-Task Training for Speech Enhancement]]</div>|<div class="cpsessionviewpapertitle">Human Listening and Live Captioning: Multi-Task Training for Speech Enhancement</div><div class="cpsessionviewpaperauthor">[[Sefik Emre Eskimez|AUTHOR Sefik Emre Eskimez]], [[Xiaofei Wang|AUTHOR Xiaofei Wang]], [[Min Tang|AUTHOR Min Tang]], [[Hemin Yang|AUTHOR Hemin Yang]], [[Zirun Zhu|AUTHOR Zirun Zhu]], [[Zhuo Chen|AUTHOR Zhuo Chen]], [[Huaming Wang|AUTHOR Huaming Wang]], [[Takuya Yoshioka|AUTHOR Takuya Yoshioka]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210520.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-5-4|PAPER Wed-E-V-5-4 — Multi-Stage Progressive Speech Enhancement Network]]</div>|<div class="cpsessionviewpapertitle">Multi-Stage Progressive Speech Enhancement Network</div><div class="cpsessionviewpaperauthor">[[Xinmeng Xu|AUTHOR Xinmeng Xu]], [[Yang Wang|AUTHOR Yang Wang]], [[Dongxiang Xu|AUTHOR Dongxiang Xu]], [[Yiyuan Peng|AUTHOR Yiyuan Peng]], [[Cong Zhang|AUTHOR Cong Zhang]], [[Jie Jia|AUTHOR Jie Jia]], [[Binbin Chen|AUTHOR Binbin Chen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210859.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-5-5|PAPER Wed-E-V-5-5 — Single-Channel Speech Enhancement Using Learnable Loss Mixup]]</div>|<div class="cpsessionviewpapertitle">Single-Channel Speech Enhancement Using Learnable Loss Mixup</div><div class="cpsessionviewpaperauthor">[[Oscar Chang|AUTHOR Oscar Chang]], [[Dung N. Tran|AUTHOR Dung N. Tran]], [[Kazuhito Koishida|AUTHOR Kazuhito Koishida]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210922.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-5-6|PAPER Wed-E-V-5-6 — A Maximum Likelihood Approach to SNR-Progressive Learning Using Generalized Gaussian Distribution for LSTM-Based Speech Enhancement]]</div>|<div class="cpsessionviewpapertitle">A Maximum Likelihood Approach to SNR-Progressive Learning Using Generalized Gaussian Distribution for LSTM-Based Speech Enhancement</div><div class="cpsessionviewpaperauthor">[[Xiao-Qi Zhang|AUTHOR Xiao-Qi Zhang]], [[Jun Du|AUTHOR Jun Du]], [[Li Chai|AUTHOR Li Chai]], [[Chin-Hui Lee|AUTHOR Chin-Hui Lee]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210953.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-5-7|PAPER Wed-E-V-5-7 — Whisper Speech Enhancement Using Joint Variational Autoencoder for Improved Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Whisper Speech Enhancement Using Joint Variational Autoencoder for Improved Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Vikas Agrawal|AUTHOR Vikas Agrawal]], [[Shashi Kumar|AUTHOR Shashi Kumar]], [[Shakti P. Rath|AUTHOR Shakti P. Rath]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211025.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-5-8|PAPER Wed-E-V-5-8 — DEMUCS-Mobile : On-Device Lightweight Speech Enhancement]]</div>|<div class="cpsessionviewpapertitle">DEMUCS-Mobile : On-Device Lightweight Speech Enhancement</div><div class="cpsessionviewpaperauthor">[[Lukas Lee|AUTHOR Lukas Lee]], [[Youna Ji|AUTHOR Youna Ji]], [[Minjae Lee|AUTHOR Minjae Lee]], [[Min-Seok Choi|AUTHOR Min-Seok Choi]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211130.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-5-9|PAPER Wed-E-V-5-9 — Speech Denoising Without Clean Training Data: A Noise2Noise Approach]]</div>|<div class="cpsessionviewpapertitle">Speech Denoising Without Clean Training Data: A Noise2Noise Approach</div><div class="cpsessionviewpaperauthor">[[Madhav Mahesh Kashyap|AUTHOR Madhav Mahesh Kashyap]], [[Anuj Tambwekar|AUTHOR Anuj Tambwekar]], [[Krishnamoorthy Manohara|AUTHOR Krishnamoorthy Manohara]], [[S. Natarajan|AUTHOR S. Natarajan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211134.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-5-10|PAPER Wed-E-V-5-10 — Improved Speech Enhancement Using a Complex-Domain GAN with Fused Time-Domain and Time-Frequency Domain Constraints]]</div>|<div class="cpsessionviewpapertitle">Improved Speech Enhancement Using a Complex-Domain GAN with Fused Time-Domain and Time-Frequency Domain Constraints</div><div class="cpsessionviewpaperauthor">[[Feng Dang|AUTHOR Feng Dang]], [[Pengyuan Zhang|AUTHOR Pengyuan Zhang]], [[Hangting Chen|AUTHOR Hangting Chen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211411.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-5-11|PAPER Wed-E-V-5-11 — Speech Enhancement with Topology-Enhanced Generative Adversarial Networks (GANs)]]</div>|<div class="cpsessionviewpapertitle">Speech Enhancement with Topology-Enhanced Generative Adversarial Networks (GANs)</div><div class="cpsessionviewpaperauthor">[[Xudong Zhang|AUTHOR Xudong Zhang]], [[Liang Zhao|AUTHOR Liang Zhao]], [[Feng Gu|AUTHOR Feng Gu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211859.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-5-12|PAPER Wed-E-V-5-12 — Learning Speech Structure to Improve Time-Frequency Masks]]</div>|<div class="cpsessionviewpapertitle">Learning Speech Structure to Improve Time-Frequency Masks</div><div class="cpsessionviewpaperauthor">[[Suliang Bu|AUTHOR Suliang Bu]], [[Yunxin Zhao|AUTHOR Yunxin Zhao]], [[Shaojun Wang|AUTHOR Shaojun Wang]], [[Mei Han|AUTHOR Mei Han]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212207.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-5-13|PAPER Wed-E-V-5-13 — SE-Conformer: Time-Domain Speech Enhancement Using Conformer]]</div>|<div class="cpsessionviewpapertitle">SE-Conformer: Time-Domain Speech Enhancement Using Conformer</div><div class="cpsessionviewpaperauthor">[[Eesung Kim|AUTHOR Eesung Kim]], [[Hyeji Seo|AUTHOR Hyeji Seo]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|19:00–21:00, Wednesday 1 September 2021, (Virtual)|<|
|^Chairs: |^Sébastien Le Maguer|
|^ |^Tim Bunnell|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212258.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-6-1|PAPER Wed-E-V-6-1 — Spectral and Latent Speech Representation Distortion for TTS Evaluation]]</div>|<div class="cpsessionviewpapertitle">Spectral and Latent Speech Representation Distortion for TTS Evaluation</div><div class="cpsessionviewpaperauthor">[[Thananchai Kongthaworn|AUTHOR Thananchai Kongthaworn]], [[Burin Naowarat|AUTHOR Burin Naowarat]], [[Ekapol Chuangsuwanich|AUTHOR Ekapol Chuangsuwanich]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210286.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-6-2|PAPER Wed-E-V-6-2 — Detection and Analysis of Attention Errors in Sequence-to-Sequence Text-to-Speech]]</div>|<div class="cpsessionviewpapertitle">Detection and Analysis of Attention Errors in Sequence-to-Sequence Text-to-Speech</div><div class="cpsessionviewpaperauthor">[[Cassia Valentini-Botinhao|AUTHOR Cassia Valentini-Botinhao]], [[Simon King|AUTHOR Simon King]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210341.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-6-3|PAPER Wed-E-V-6-3 — RyanSpeech: A Corpus for Conversational Text-to-Speech Synthesis]]</div>|<div class="cpsessionviewpapertitle">RyanSpeech: A Corpus for Conversational Text-to-Speech Synthesis</div><div class="cpsessionviewpaperauthor">[[Rohola Zandie|AUTHOR Rohola Zandie]], [[Mohammad H. Mahoor|AUTHOR Mohammad H. Mahoor]], [[Julia Madsen|AUTHOR Julia Madsen]], [[Eshrat S. Emamian|AUTHOR Eshrat S. Emamian]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210755.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-6-4|PAPER Wed-E-V-6-4 — AISHELL-3: A Multi-Speaker Mandarin TTS Corpus]]</div>|<div class="cpsessionviewpapertitle">AISHELL-3: A Multi-Speaker Mandarin TTS Corpus</div><div class="cpsessionviewpaperauthor">[[Yao Shi|AUTHOR Yao Shi]], [[Hui Bu|AUTHOR Hui Bu]], [[Xin Xu|AUTHOR Xin Xu]], [[Shaoji Zhang|AUTHOR Shaoji Zhang]], [[Ming Li|AUTHOR Ming Li]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210800.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-6-5|PAPER Wed-E-V-6-5 — Comparing Speech Enhancement Techniques for Voice Adaptation-Based Speech Synthesis]]</div>|<div class="cpsessionviewpapertitle">Comparing Speech Enhancement Techniques for Voice Adaptation-Based Speech Synthesis</div><div class="cpsessionviewpaperauthor">[[Nicholas Eng|AUTHOR Nicholas Eng]], [[C.T. Justine Hui|AUTHOR C.T. Justine Hui]], [[Yusuke Hioka|AUTHOR Yusuke Hioka]], [[Catherine I. Watson|AUTHOR Catherine I. Watson]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211148.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-6-6|PAPER Wed-E-V-6-6 — EMOVIE: A Mandarin Emotion Speech Dataset with a Simple Emotional Text-to-Speech Model]]</div>|<div class="cpsessionviewpapertitle">EMOVIE: A Mandarin Emotion Speech Dataset with a Simple Emotional Text-to-Speech Model</div><div class="cpsessionviewpaperauthor">[[Chenye Cui|AUTHOR Chenye Cui]], [[Yi Ren|AUTHOR Yi Ren]], [[Jinglin Liu|AUTHOR Jinglin Liu]], [[Feiyang Chen|AUTHOR Feiyang Chen]], [[Rongjie Huang|AUTHOR Rongjie Huang]], [[Ming Lei|AUTHOR Ming Lei]], [[Zhou Zhao|AUTHOR Zhou Zhao]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211229.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-6-7|PAPER Wed-E-V-6-7 — Perception of Social Speaker Characteristics in Synthetic Speech]]</div>|<div class="cpsessionviewpapertitle">Perception of Social Speaker Characteristics in Synthetic Speech</div><div class="cpsessionviewpaperauthor">[[Sai Sirisha Rallabandi|AUTHOR Sai Sirisha Rallabandi]], [[Abhinav Bharadwaj|AUTHOR Abhinav Bharadwaj]], [[Babak Naderi|AUTHOR Babak Naderi]], [[Sebastian Möller|AUTHOR Sebastian Möller]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211599.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-6-8|PAPER Wed-E-V-6-8 — Hi-Fi Multi-Speaker English TTS Dataset]]</div>|<div class="cpsessionviewpapertitle">Hi-Fi Multi-Speaker English TTS Dataset</div><div class="cpsessionviewpaperauthor">[[Evelina Bakhturina|AUTHOR Evelina Bakhturina]], [[Vitaly Lavrukhin|AUTHOR Vitaly Lavrukhin]], [[Boris Ginsburg|AUTHOR Boris Ginsburg]], [[Yang Zhang|AUTHOR Yang Zhang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212013.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-6-9|PAPER Wed-E-V-6-9 — Utilizing Self-Supervised Representations for MOS Prediction]]</div>|<div class="cpsessionviewpapertitle">Utilizing Self-Supervised Representations for MOS Prediction</div><div class="cpsessionviewpaperauthor">[[Wei-Cheng Tseng|AUTHOR Wei-Cheng Tseng]], [[Chien-yu Huang|AUTHOR Chien-yu Huang]], [[Wei-Tsung Kao|AUTHOR Wei-Tsung Kao]], [[Yist Y. Lin|AUTHOR Yist Y. Lin]], [[Hung-yi Lee|AUTHOR Hung-yi Lee]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212124.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-6-10|PAPER Wed-E-V-6-10 — KazakhTTS: An Open-Source Kazakh Text-to-Speech Synthesis Dataset]]</div>|<div class="cpsessionviewpapertitle">KazakhTTS: An Open-Source Kazakh Text-to-Speech Synthesis Dataset</div><div class="cpsessionviewpaperauthor">[[Saida Mussakhojayeva|AUTHOR Saida Mussakhojayeva]], [[Aigerim Janaliyeva|AUTHOR Aigerim Janaliyeva]], [[Almas Mirzakhmetov|AUTHOR Almas Mirzakhmetov]], [[Yerbolat Khassanov|AUTHOR Yerbolat Khassanov]], [[Huseyin Atakan Varol|AUTHOR Huseyin Atakan Varol]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212203.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-E-V-6-11|PAPER Wed-E-V-6-11 — Confidence Intervals for ASR-Based TTS Evaluation]]</div>|<div class="cpsessionviewpapertitle">Confidence Intervals for ASR-Based TTS Evaluation</div><div class="cpsessionviewpaperauthor">[[Jason Taylor|AUTHOR Jason Taylor]], [[Korin Richmond|AUTHOR Korin Richmond]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|15:00–16:00, Wednesday 1 September 2021, Room A+B|<|
|^Chair: |^TBD|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> |^<div class="cpsessionviewpapercode">[[Wed-Keynote|PAPER Wed-Keynote — Ethical and Technological Challenges of Conversational AI]]</div>|<div class="cpsessionviewpapertitle">Ethical and Technological Challenges of Conversational AI</div><div class="cpsessionviewpaperauthor">[[Pascale Fung|AUTHOR Pascale Fung]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Wednesday 1 September 2021, Room A+B|<|
|^Chairs: |^Martin Karafiat|
|^ |^Mathew Magimai-Doss|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210462.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-O-1-1|PAPER Wed-M-O-1-1 — Golos: Russian Dataset for Speech Research]]</div>|<div class="cpsessionviewpapertitle">Golos: Russian Dataset for Speech Research</div><div class="cpsessionviewpaperauthor">[[Nikolay Karpov|AUTHOR Nikolay Karpov]], [[Alexander Denisenko|AUTHOR Alexander Denisenko]], [[Fedor Minkin|AUTHOR Fedor Minkin]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210643.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-O-1-2|PAPER Wed-M-O-1-2 — Radically Old Way of Computing Spectra: Applications in End-to-End ASR]]</div>|<div class="cpsessionviewpapertitle">Radically Old Way of Computing Spectra: Applications in End-to-End ASR</div><div class="cpsessionviewpaperauthor">[[Samik Sadhu|AUTHOR Samik Sadhu]], [[Hynek Hermansky|AUTHOR Hynek Hermansky]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211710.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-O-1-3|PAPER Wed-M-O-1-3 — Self-Supervised End-to-End ASR for Low Resource L2 Swedish]]</div>|<div class="cpsessionviewpapertitle">Self-Supervised End-to-End ASR for Low Resource L2 Swedish</div><div class="cpsessionviewpaperauthor">[[Ragheb Al-Ghezi|AUTHOR Ragheb Al-Ghezi]], [[Yaroslav Getman|AUTHOR Yaroslav Getman]], [[Aku Rouhe|AUTHOR Aku Rouhe]], [[Raili Hildén|AUTHOR Raili Hildén]], [[Mikko Kurimo|AUTHOR Mikko Kurimo]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211860.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-O-1-4|PAPER Wed-M-O-1-4 — SPGISpeech: 5,000 Hours of Transcribed Financial Audio for Fully Formatted End-to-End Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">SPGISpeech: 5,000 Hours of Transcribed Financial Audio for Fully Formatted End-to-End Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Patrick K. O’Neill|AUTHOR Patrick K. O’Neill]], [[Vitaly Lavrukhin|AUTHOR Vitaly Lavrukhin]], [[Somshubra Majumdar|AUTHOR Somshubra Majumdar]], [[Vahid Noroozi|AUTHOR Vahid Noroozi]], [[Yuekai Zhang|AUTHOR Yuekai Zhang]], [[Oleksii Kuchaiev|AUTHOR Oleksii Kuchaiev]], [[Jagadeesh Balam|AUTHOR Jagadeesh Balam]], [[Yuliya Dovzhenko|AUTHOR Yuliya Dovzhenko]], [[Keenan Freyberg|AUTHOR Keenan Freyberg]], [[Michael D. Shulman|AUTHOR Michael D. Shulman]], [[Boris Ginsburg|AUTHOR Boris Ginsburg]], [[Shinji Watanabe|AUTHOR Shinji Watanabe]], [[Georg Kucsko|AUTHOR Georg Kucsko]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210556.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-O-1-5|PAPER Wed-M-O-1-5 — //LeBenchmark//: A Reproducible Framework for Assessing Self-Supervised Representation Learning from Speech]]</div>|<div class="cpsessionviewpapertitle">//LeBenchmark//: A Reproducible Framework for Assessing Self-Supervised Representation Learning from Speech</div><div class="cpsessionviewpaperauthor">[[Solène Evain|AUTHOR Solène Evain]], [[Ha Nguyen|AUTHOR Ha Nguyen]], [[Hang Le|AUTHOR Hang Le]], [[Marcely Zanon Boito|AUTHOR Marcely Zanon Boito]], [[Salima Mdhaffar|AUTHOR Salima Mdhaffar]], [[Sina Alisamir|AUTHOR Sina Alisamir]], [[Ziyi Tong|AUTHOR Ziyi Tong]], [[Natalia Tomashenko|AUTHOR Natalia Tomashenko]], [[Marco Dinarelli|AUTHOR Marco Dinarelli]], [[Titouan Parcollet|AUTHOR Titouan Parcollet]], [[Alexandre Allauzen|AUTHOR Alexandre Allauzen]], [[Yannick Estève|AUTHOR Yannick Estève]], [[Benjamin Lecouteux|AUTHOR Benjamin Lecouteux]], [[François Portet|AUTHOR François Portet]], [[Solange Rossato|AUTHOR Solange Rossato]], [[Fabien Ringeval|AUTHOR Fabien Ringeval]], [[Didier Schwab|AUTHOR Didier Schwab]], [[Laurent Besacier|AUTHOR Laurent Besacier]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Wednesday 1 September 2021, Room C|<|
|^Chairs: |^Radek Skarnitzl|
|^ |^Barbara Schuppler|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210130.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-O-2-1|PAPER Wed-M-O-2-1 — Prosodic Accommodation in Face-to-Face and Telephone Dialogues]]</div>|<div class="cpsessionviewpapertitle">Prosodic Accommodation in Face-to-Face and Telephone Dialogues</div><div class="cpsessionviewpaperauthor">[[Pavel Šturm|AUTHOR Pavel Šturm]], [[Radek Skarnitzl|AUTHOR Radek Skarnitzl]], [[Tomáš Nechanský|AUTHOR Tomáš Nechanský]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211090.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-O-2-2|PAPER Wed-M-O-2-2 — Dialect Features in Heterogeneous and Homogeneous Gheg Speaking Communities]]</div>|<div class="cpsessionviewpapertitle">Dialect Features in Heterogeneous and Homogeneous Gheg Speaking Communities</div><div class="cpsessionviewpaperauthor">[[Josiane Riverin-Coutlée|AUTHOR Josiane Riverin-Coutlée]], [[Conceição Cunha|AUTHOR Conceição Cunha]], [[Enkeleida Kapia|AUTHOR Enkeleida Kapia]], [[Jonathan Harrington|AUTHOR Jonathan Harrington]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211328.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-O-2-3|PAPER Wed-M-O-2-3 — An Exploration of the Acoustic Space of Rhotics and Laterals in Ruruuli]]</div>|<div class="cpsessionviewpapertitle">An Exploration of the Acoustic Space of Rhotics and Laterals in Ruruuli</div><div class="cpsessionviewpaperauthor">[[Margaret Zellers|AUTHOR Margaret Zellers]], [[Alena Witzlack-Makarevich|AUTHOR Alena Witzlack-Makarevich]], [[Lilja Saeboe|AUTHOR Lilja Saeboe]], [[Saudah Namyalo|AUTHOR Saudah Namyalo]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212230.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-O-2-4|PAPER Wed-M-O-2-4 — Domain-Initial Strengthening in Turkish: Acoustic Cues to Prosodic Hierarchy in Stop Consonants]]</div>|<div class="cpsessionviewpapertitle">Domain-Initial Strengthening in Turkish: Acoustic Cues to Prosodic Hierarchy in Stop Consonants</div><div class="cpsessionviewpaperauthor">[[Kubra Bodur|AUTHOR Kubra Bodur]], [[Sweeney Branje|AUTHOR Sweeney Branje]], [[Morgane Peirolo|AUTHOR Morgane Peirolo]], [[Ingrid Tiscareno|AUTHOR Ingrid Tiscareno]], [[James S. German|AUTHOR James S. German]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Wednesday 1 September 2021, Room D|<|
|^Chairs: |^Reinhold Haeb-Umbach|
|^ |^Sebastian Braun|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210986.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-O-3-1|PAPER Wed-M-O-3-1 — Auxiliary Loss Function for Target Speech Extraction and Recognition with Weak Supervision Based on Speaker Characteristics]]</div>|<div class="cpsessionviewpapertitle">Auxiliary Loss Function for Target Speech Extraction and Recognition with Weak Supervision Based on Speaker Characteristics</div><div class="cpsessionviewpaperauthor">[[Katerina Zmolikova|AUTHOR Katerina Zmolikova]], [[Marc Delcroix|AUTHOR Marc Delcroix]], [[Desh Raj|AUTHOR Desh Raj]], [[Shinji Watanabe|AUTHOR Shinji Watanabe]], [[Jan Černocký|AUTHOR Jan Černocký]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211939.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-O-3-2|PAPER Wed-M-O-3-2 — Universal Speaker Extraction in the Presence and Absence of Target Speakers for Speech of One and Two Talkers]]</div>|<div class="cpsessionviewpapertitle">Universal Speaker Extraction in the Presence and Absence of Target Speakers for Speech of One and Two Talkers</div><div class="cpsessionviewpaperauthor">[[Marvin Borsdorf|AUTHOR Marvin Borsdorf]], [[Chenglin Xu|AUTHOR Chenglin Xu]], [[Haizhou Li|AUTHOR Haizhou Li]], [[Tanja Schultz|AUTHOR Tanja Schultz]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210192.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-O-3-3|PAPER Wed-M-O-3-3 — Using X-Vectors for Speech Activity Detection in Broadcast Streams]]</div>|<div class="cpsessionviewpapertitle">Using X-Vectors for Speech Activity Detection in Broadcast Streams</div><div class="cpsessionviewpaperauthor">[[Lukas Mateju|AUTHOR Lukas Mateju]], [[Frantisek Kynych|AUTHOR Frantisek Kynych]], [[Petr Cerva|AUTHOR Petr Cerva]], [[Jindrich Zdansky|AUTHOR Jindrich Zdansky]], [[Jiri Malek|AUTHOR Jiri Malek]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210988.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-O-3-4|PAPER Wed-M-O-3-4 — Time Delay Estimation for Speaker Localization Using CNN-Based Parametrized GCC-PHAT Features]]</div>|<div class="cpsessionviewpapertitle">Time Delay Estimation for Speaker Localization Using CNN-Based Parametrized GCC-PHAT Features</div><div class="cpsessionviewpaperauthor">[[Daniele Salvati|AUTHOR Daniele Salvati]], [[Carlo Drioli|AUTHOR Carlo Drioli]], [[Gian Luca Foresti|AUTHOR Gian Luca Foresti]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210331.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-O-3-5|PAPER Wed-M-O-3-5 — Real-Time Speaker Counting in a Cocktail Party Scenario Using Attention-Guided Convolutional Neural Network]]</div>|<div class="cpsessionviewpapertitle">Real-Time Speaker Counting in a Cocktail Party Scenario Using Attention-Guided Convolutional Neural Network</div><div class="cpsessionviewpaperauthor">[[Midia Yousefi|AUTHOR Midia Yousefi]], [[John H.L. Hansen|AUTHOR John H.L. Hansen]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Wednesday 1 September 2021, Room Lacina|<|
|^Chairs: |^Shinji Watanabe|
|^ |^Wei Rao|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> ||<div class="cpsessionviewpapertitle">Introduction of Challenge</div><div class="cpsessionviewpaperauthor"></div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211457.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-SS-1-1|PAPER Wed-M-SS-1-1 — A Causal U-Net Based Neural Beamforming Network for Real-Time Multi-Channel Speech Enhancement]]</div>|<div class="cpsessionviewpapertitle">A Causal U-Net Based Neural Beamforming Network for Real-Time Multi-Channel Speech Enhancement</div><div class="cpsessionviewpaperauthor">[[Xinlei Ren|AUTHOR Xinlei Ren]], [[Xu Zhang|AUTHOR Xu Zhang]], [[Lianwu Chen|AUTHOR Lianwu Chen]], [[Xiguang Zheng|AUTHOR Xiguang Zheng]], [[Chen Zhang|AUTHOR Chen Zhang]], [[Liang Guo|AUTHOR Liang Guo]], [[Bing Yu|AUTHOR Bing Yu]]</div>|
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> ||<div class="cpsessionviewpapertitle">Short Presentations 1</div><div class="cpsessionviewpaperauthor"></div>|
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> ||<div class="cpsessionviewpapertitle">Short Presentations 2</div><div class="cpsessionviewpaperauthor"></div>|
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> ||<div class="cpsessionviewpapertitle">Panel Discussion</div><div class="cpsessionviewpaperauthor"></div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210135.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-SS-1-2|PAPER Wed-M-SS-1-2 — A Partitioned-Block Frequency-Domain Adaptive Kalman Filter for Stereophonic Acoustic Echo Cancellation]]</div>|<div class="cpsessionviewpapertitle">A Partitioned-Block Frequency-Domain Adaptive Kalman Filter for Stereophonic Acoustic Echo Cancellation</div><div class="cpsessionviewpaperauthor">[[Rui Zhu|AUTHOR Rui Zhu]], [[Feiran Yang|AUTHOR Feiran Yang]], [[Yuepeng Li|AUTHOR Yuepeng Li]], [[Shidong Shang|AUTHOR Shidong Shang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210146.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-SS-1-3|PAPER Wed-M-SS-1-3 — Real-Time Independent Vector Analysis Using Semi-Supervised Nonnegative Matrix Factorization as a Source Model]]</div>|<div class="cpsessionviewpapertitle">Real-Time Independent Vector Analysis Using Semi-Supervised Nonnegative Matrix Factorization as a Source Model</div><div class="cpsessionviewpaperauthor">[[Taihui Wang|AUTHOR Taihui Wang]], [[Feiran Yang|AUTHOR Feiran Yang]], [[Rui Zhu|AUTHOR Rui Zhu]], [[Jun Yang|AUTHOR Jun Yang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210298.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-SS-1-4|PAPER Wed-M-SS-1-4 — Improving Channel Decorrelation for Multi-Channel Target Speech Extraction]]</div>|<div class="cpsessionviewpapertitle">Improving Channel Decorrelation for Multi-Channel Target Speech Extraction</div><div class="cpsessionviewpaperauthor">[[Jiangyu Han|AUTHOR Jiangyu Han]], [[Wei Rao|AUTHOR Wei Rao]], [[Yannan Wang|AUTHOR Yannan Wang]], [[Yanhua Long|AUTHOR Yanhua Long]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210899.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-SS-1-5|PAPER Wed-M-SS-1-5 — Inplace Gated Convolutional Recurrent Neural Network for Dual-Channel Speech Enhancement]]</div>|<div class="cpsessionviewpapertitle">Inplace Gated Convolutional Recurrent Neural Network for Dual-Channel Speech Enhancement</div><div class="cpsessionviewpaperauthor">[[Jinjiang Liu|AUTHOR Jinjiang Liu]], [[Xueliang Zhang|AUTHOR Xueliang Zhang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211111.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-SS-1-6|PAPER Wed-M-SS-1-6 — SRIB-LEAP Submission to Far-Field Multi-Channel Speech Enhancement Challenge for Video Conferencing]]</div>|<div class="cpsessionviewpapertitle">SRIB-LEAP Submission to Far-Field Multi-Channel Speech Enhancement Challenge for Video Conferencing</div><div class="cpsessionviewpaperauthor">[[R.G. Prithvi Raj|AUTHOR R.G. Prithvi Raj]], [[Rohit Kumar|AUTHOR Rohit Kumar]], [[M.K. Jayesh|AUTHOR M.K. Jayesh]], [[Anurenjan Purushothaman|AUTHOR Anurenjan Purushothaman]], [[Sriram Ganapathy|AUTHOR Sriram Ganapathy]], [[M.A. Basha Shaik|AUTHOR M.A. Basha Shaik]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212266.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-SS-1-7|PAPER Wed-M-SS-1-7 — Real-Time Multi-Channel Speech Enhancement Based on Neural Network Masking with Attention Model]]</div>|<div class="cpsessionviewpapertitle">Real-Time Multi-Channel Speech Enhancement Based on Neural Network Masking with Attention Model</div><div class="cpsessionviewpaperauthor">[[Cheng Xue|AUTHOR Cheng Xue]], [[Weilong Huang|AUTHOR Weilong Huang]], [[Weiguang Chen|AUTHOR Weiguang Chen]], [[Jinwei Feng|AUTHOR Jinwei Feng]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Wednesday 1 September 2021, (Virtual)|<|
|^Chairs: |^Alberto Abad|
|^ |^Quan Wang|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210082.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-1-1|PAPER Wed-M-V-1-1 — End-to-End Language Diarization for Bilingual Code-Switching Speech]]</div>|<div class="cpsessionviewpapertitle">End-to-End Language Diarization for Bilingual Code-Switching Speech</div><div class="cpsessionviewpaperauthor">[[Hexin Liu|AUTHOR Hexin Liu]], [[Leibny Paola García Perera|AUTHOR Leibny Paola García Perera]], [[Xinyi Zhang|AUTHOR Xinyi Zhang]], [[Justin Dauwels|AUTHOR Justin Dauwels]], [[Andy W.H. Khong|AUTHOR Andy W.H. Khong]], [[Sanjeev Khudanpur|AUTHOR Sanjeev Khudanpur]], [[Suzy J. Styles|AUTHOR Suzy J. Styles]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210277.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-1-2|PAPER Wed-M-V-1-2 — Modeling and Training Strategies for Language Recognition Systems]]</div>|<div class="cpsessionviewpapertitle">Modeling and Training Strategies for Language Recognition Systems</div><div class="cpsessionviewpaperauthor">[[Raphaël Duroselle|AUTHOR Raphaël Duroselle]], [[Md. Sahidullah|AUTHOR Md. Sahidullah]], [[Denis Jouvet|AUTHOR Denis Jouvet]], [[Irina Illina|AUTHOR Irina Illina]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210776.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-1-3|PAPER Wed-M-V-1-3 — A Weight Moving Average Based Alternate Decoupled Learning Algorithm for Long-Tailed Language Identification]]</div>|<div class="cpsessionviewpapertitle">A Weight Moving Average Based Alternate Decoupled Learning Algorithm for Long-Tailed Language Identification</div><div class="cpsessionviewpaperauthor">[[Hui Wang|AUTHOR Hui Wang]], [[Lin Liu|AUTHOR Lin Liu]], [[Yan Song|AUTHOR Yan Song]], [[Lei Fang|AUTHOR Lei Fang]], [[Ian McLoughlin|AUTHOR Ian McLoughlin]], [[Li-Rong Dai|AUTHOR Li-Rong Dai]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211186.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-1-4|PAPER Wed-M-V-1-4 — Improving Accent Identification and Accented Speech Recognition Under a Framework of Self-Supervised Learning]]</div>|<div class="cpsessionviewpapertitle">Improving Accent Identification and Accented Speech Recognition Under a Framework of Self-Supervised Learning</div><div class="cpsessionviewpaperauthor">[[Keqi Deng|AUTHOR Keqi Deng]], [[Songjun Cao|AUTHOR Songjun Cao]], [[Long Ma|AUTHOR Long Ma]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211280.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-1-5|PAPER Wed-M-V-1-5 — Exploring wav2vec 2.0 on Speaker Verification and Language Identification]]</div>|<div class="cpsessionviewpapertitle">Exploring wav2vec 2.0 on Speaker Verification and Language Identification</div><div class="cpsessionviewpaperauthor">[[Zhiyun Fan|AUTHOR Zhiyun Fan]], [[Meng Li|AUTHOR Meng Li]], [[Shiyu Zhou|AUTHOR Shiyu Zhou]], [[Bo Xu|AUTHOR Bo Xu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211310.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-1-6|PAPER Wed-M-V-1-6 — Self-Supervised Phonotactic Representations for Language Identification]]</div>|<div class="cpsessionviewpapertitle">Self-Supervised Phonotactic Representations for Language Identification</div><div class="cpsessionviewpaperauthor">[[G. Ramesh|AUTHOR G. Ramesh]], [[C. Shiva Kumar|AUTHOR C. Shiva Kumar]], [[K. Sri Rama Murty|AUTHOR K. Sri Rama Murty]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211495.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-1-7|PAPER Wed-M-V-1-7 — E2E-Based Multi-Task Learning Approach to Joint Speech and Accent Recognition]]</div>|<div class="cpsessionviewpapertitle">E2E-Based Multi-Task Learning Approach to Joint Speech and Accent Recognition</div><div class="cpsessionviewpaperauthor">[[Jicheng Zhang|AUTHOR Jicheng Zhang]], [[Yizhou Peng|AUTHOR Yizhou Peng]], [[Van Tung Pham|AUTHOR Van Tung Pham]], [[Haihua Xu|AUTHOR Haihua Xu]], [[Hao Huang|AUTHOR Hao Huang]], [[Eng Siong Chng|AUTHOR Eng Siong Chng]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211672.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-1-8|PAPER Wed-M-V-1-8 — Excitation Source Feature Based Dialect Identification in Ao — A Low Resource Language]]</div>|<div class="cpsessionviewpapertitle">Excitation Source Feature Based Dialect Identification in Ao — A Low Resource Language</div><div class="cpsessionviewpaperauthor">[[Moakala Tzudir|AUTHOR Moakala Tzudir]], [[Shikha Baghel|AUTHOR Shikha Baghel]], [[Priyankoo Sarmah|AUTHOR Priyankoo Sarmah]], [[S.R. Mahadeva Prasanna|AUTHOR S.R. Mahadeva Prasanna]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Wednesday 1 September 2021, (Virtual)|<|
|^Chairs: |^Sabato Marco Siniscalchi|
|^ |^Xavier Anguera|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212062.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-2-1|PAPER Wed-M-V-2-1 — Low Resource ASR: The Surprising Effectiveness of High Resource Transliteration]]</div>|<div class="cpsessionviewpapertitle">Low Resource ASR: The Surprising Effectiveness of High Resource Transliteration</div><div class="cpsessionviewpaperauthor">[[Shreya Khare|AUTHOR Shreya Khare]], [[Ashish Mittal|AUTHOR Ashish Mittal]], [[Anuj Diwan|AUTHOR Anuj Diwan]], [[Sunita Sarawagi|AUTHOR Sunita Sarawagi]], [[Preethi Jyothi|AUTHOR Preethi Jyothi]], [[Samarth Bharadwaj|AUTHOR Samarth Bharadwaj]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211664.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-2-2|PAPER Wed-M-V-2-2 — Unsupervised Acoustic Unit Discovery by Leveraging a Language-Independent Subword Discriminative Feature Representation]]</div>|<div class="cpsessionviewpapertitle">Unsupervised Acoustic Unit Discovery by Leveraging a Language-Independent Subword Discriminative Feature Representation</div><div class="cpsessionviewpaperauthor">[[Siyuan Feng|AUTHOR Siyuan Feng]], [[Piotr Żelasko|AUTHOR Piotr Żelasko]], [[Laureano Moro-Velázquez|AUTHOR Laureano Moro-Velázquez]], [[Odette Scharenborg|AUTHOR Odette Scharenborg]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210050.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-2-3|PAPER Wed-M-V-2-3 — Towards Unsupervised Phone and Word Segmentation Using Self-Supervised Vector-Quantized Neural Networks]]</div>|<div class="cpsessionviewpapertitle">Towards Unsupervised Phone and Word Segmentation Using Self-Supervised Vector-Quantized Neural Networks</div><div class="cpsessionviewpaperauthor">[[Herman Kamper|AUTHOR Herman Kamper]], [[Benjamin van Niekerk|AUTHOR Benjamin van Niekerk]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210391.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-2-4|PAPER Wed-M-V-2-4 — Speech SimCLR: Combining Contrastive and Reconstruction Objective for Self-Supervised Speech Representation Learning]]</div>|<div class="cpsessionviewpapertitle">Speech SimCLR: Combining Contrastive and Reconstruction Objective for Self-Supervised Speech Representation Learning</div><div class="cpsessionviewpaperauthor">[[Dongwei Jiang|AUTHOR Dongwei Jiang]], [[Wubo Li|AUTHOR Wubo Li]], [[Miao Cao|AUTHOR Miao Cao]], [[Wei Zou|AUTHOR Wei Zou]], [[Xiangang Li|AUTHOR Xiangang Li]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210461.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-2-5|PAPER Wed-M-V-2-5 — Multilingual Transfer of Acoustic Word Embeddings Improves When Training on Languages Related to the Target Zero-Resource Language]]</div>|<div class="cpsessionviewpapertitle">Multilingual Transfer of Acoustic Word Embeddings Improves When Training on Languages Related to the Target Zero-Resource Language</div><div class="cpsessionviewpaperauthor">[[Christiaan Jacobs|AUTHOR Christiaan Jacobs]], [[Herman Kamper|AUTHOR Herman Kamper]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211182.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-2-6|PAPER Wed-M-V-2-6 — Analyzing Speaker Information in Self-Supervised Models to Improve Zero-Resource Speech Processing]]</div>|<div class="cpsessionviewpapertitle">Analyzing Speaker Information in Self-Supervised Models to Improve Zero-Resource Speech Processing</div><div class="cpsessionviewpaperauthor">[[Benjamin van Niekerk|AUTHOR Benjamin van Niekerk]], [[Leanne Nortje|AUTHOR Leanne Nortje]], [[Matthew Baas|AUTHOR Matthew Baas]], [[Herman Kamper|AUTHOR Herman Kamper]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211340.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-2-7|PAPER Wed-M-V-2-7 — Unsupervised Neural-Based Graph Clustering for Variable-Length Speech Representation Discovery of Zero-Resource Languages]]</div>|<div class="cpsessionviewpapertitle">Unsupervised Neural-Based Graph Clustering for Variable-Length Speech Representation Discovery of Zero-Resource Languages</div><div class="cpsessionviewpaperauthor">[[Shun Takahashi|AUTHOR Shun Takahashi]], [[Sakriani Sakti|AUTHOR Sakriani Sakti]], [[Satoshi Nakamura|AUTHOR Satoshi Nakamura]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211503.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-2-8|PAPER Wed-M-V-2-8 — Speech Representation Learning Combining Conformer CPC with Deep Cluster for the ZeroSpeech Challenge 2021]]</div>|<div class="cpsessionviewpapertitle">Speech Representation Learning Combining Conformer CPC with Deep Cluster for the ZeroSpeech Challenge 2021</div><div class="cpsessionviewpaperauthor">[[Takashi Maekaku|AUTHOR Takashi Maekaku]], [[Xuankai Chang|AUTHOR Xuankai Chang]], [[Yuya Fujita|AUTHOR Yuya Fujita]], [[Li-Wei Chen|AUTHOR Li-Wei Chen]], [[Shinji Watanabe|AUTHOR Shinji Watanabe]], [[Alexander Rudnicky|AUTHOR Alexander Rudnicky]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211525.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-2-9|PAPER Wed-M-V-2-9 — Identifying Indicators of Vulnerability from Short Speech Segments Using Acoustic and Textual Features]]</div>|<div class="cpsessionviewpapertitle">Identifying Indicators of Vulnerability from Short Speech Segments Using Acoustic and Textual Features</div><div class="cpsessionviewpaperauthor">[[Xia Cui|AUTHOR Xia Cui]], [[Amila Gamage|AUTHOR Amila Gamage]], [[Terry Hanley|AUTHOR Terry Hanley]], [[Tingting Mu|AUTHOR Tingting Mu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211755.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-2-10|PAPER Wed-M-V-2-10 — The Zero Resource Speech Challenge 2021: Spoken Language Modelling]]</div>|<div class="cpsessionviewpapertitle">The Zero Resource Speech Challenge 2021: Spoken Language Modelling</div><div class="cpsessionviewpaperauthor">[[Ewan Dunbar|AUTHOR Ewan Dunbar]], [[Mathieu Bernard|AUTHOR Mathieu Bernard]], [[Nicolas Hamilakis|AUTHOR Nicolas Hamilakis]], [[Tu Anh Nguyen|AUTHOR Tu Anh Nguyen]], [[Maureen de Seyssel|AUTHOR Maureen de Seyssel]], [[Patricia Rozé|AUTHOR Patricia Rozé]], [[Morgane Rivière|AUTHOR Morgane Rivière]], [[Eugene Kharitonov|AUTHOR Eugene Kharitonov]], [[Emmanuel Dupoux|AUTHOR Emmanuel Dupoux]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212264.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-2-11|PAPER Wed-M-V-2-11 — Zero-Shot Federated Learning with New Classes for Audio Classification]]</div>|<div class="cpsessionviewpapertitle">Zero-Shot Federated Learning with New Classes for Audio Classification</div><div class="cpsessionviewpaperauthor">[[Gautham Krishna Gudur|AUTHOR Gautham Krishna Gudur]], [[Satheesh Kumar Perepu|AUTHOR Satheesh Kumar Perepu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211312.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-2-12|PAPER Wed-M-V-2-12 — AVLnet: Learning Audio-Visual Language Representations from Instructional Videos]]</div>|<div class="cpsessionviewpapertitle">AVLnet: Learning Audio-Visual Language Representations from Instructional Videos</div><div class="cpsessionviewpaperauthor">[[Andrew Rouditchenko|AUTHOR Andrew Rouditchenko]], [[Angie Boggust|AUTHOR Angie Boggust]], [[David Harwath|AUTHOR David Harwath]], [[Brian Chen|AUTHOR Brian Chen]], [[Dhiraj Joshi|AUTHOR Dhiraj Joshi]], [[Samuel Thomas|AUTHOR Samuel Thomas]], [[Kartik Audhkhasi|AUTHOR Kartik Audhkhasi]], [[Hilde Kuehne|AUTHOR Hilde Kuehne]], [[Rameswar Panda|AUTHOR Rameswar Panda]], [[Rogerio Feris|AUTHOR Rogerio Feris]], [[Brian Kingsbury|AUTHOR Brian Kingsbury]], [[Michael Picheny|AUTHOR Michael Picheny]], [[Antonio Torralba|AUTHOR Antonio Torralba]], [[James Glass|AUTHOR James Glass]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Wednesday 1 September 2021, (Virtual)|<|
|^Chairs: |^Christophe d’Alessandro|
|^ |^Thomas Hueber|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210239.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-3-1|PAPER Wed-M-V-3-1 — N-Singer: A Non-Autoregressive Korean Singing Voice Synthesis System for Pronunciation Enhancement]]</div>|<div class="cpsessionviewpapertitle">N-Singer: A Non-Autoregressive Korean Singing Voice Synthesis System for Pronunciation Enhancement</div><div class="cpsessionviewpaperauthor">[[Gyeong-Hoon Lee|AUTHOR Gyeong-Hoon Lee]], [[Tae-Woo Kim|AUTHOR Tae-Woo Kim]], [[Hanbin Bae|AUTHOR Hanbin Bae]], [[Min-Ji Lee|AUTHOR Min-Ji Lee]], [[Young-Ik Kim|AUTHOR Young-Ik Kim]], [[Hoon-Young Cho|AUTHOR Hoon-Young Cho]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210327.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-3-2|PAPER Wed-M-V-3-2 — Cross-Lingual Low Resource Speaker Adaptation Using Phonological Features]]</div>|<div class="cpsessionviewpapertitle">Cross-Lingual Low Resource Speaker Adaptation Using Phonological Features</div><div class="cpsessionviewpaperauthor">[[Georgia Maniati|AUTHOR Georgia Maniati]], [[Nikolaos Ellinas|AUTHOR Nikolaos Ellinas]], [[Konstantinos Markopoulos|AUTHOR Konstantinos Markopoulos]], [[Georgios Vamvoukakis|AUTHOR Georgios Vamvoukakis]], [[June Sig Sung|AUTHOR June Sig Sung]], [[Hyoungmin Park|AUTHOR Hyoungmin Park]], [[Aimilios Chalamandaris|AUTHOR Aimilios Chalamandaris]], [[Pirros Tsiakoulis|AUTHOR Pirros Tsiakoulis]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210474.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-3-3|PAPER Wed-M-V-3-3 — Improve Cross-Lingual Text-To-Speech Synthesis on Monolingual Corpora with Pitch Contour Information]]</div>|<div class="cpsessionviewpapertitle">Improve Cross-Lingual Text-To-Speech Synthesis on Monolingual Corpora with Pitch Contour Information</div><div class="cpsessionviewpaperauthor">[[Haoyue Zhan|AUTHOR Haoyue Zhan]], [[Haitong Zhang|AUTHOR Haitong Zhang]], [[Wenjie Ou|AUTHOR Wenjie Ou]], [[Yue Lin|AUTHOR Yue Lin]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210552.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-3-4|PAPER Wed-M-V-3-4 — Cross-Lingual Voice Conversion with Disentangled Universal Linguistic Representations]]</div>|<div class="cpsessionviewpapertitle">Cross-Lingual Voice Conversion with Disentangled Universal Linguistic Representations</div><div class="cpsessionviewpaperauthor">[[Zhenchuan Yang|AUTHOR Zhenchuan Yang]], [[Weibin Zhang|AUTHOR Weibin Zhang]], [[Yufei Liu|AUTHOR Yufei Liu]], [[Xiaofen Xing|AUTHOR Xiaofen Xing]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210771.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-3-5|PAPER Wed-M-V-3-5 — EfficientSing: A Chinese Singing Voice Synthesis System Using Duration-Free Acoustic Model and HiFi-GAN Vocoder]]</div>|<div class="cpsessionviewpapertitle">EfficientSing: A Chinese Singing Voice Synthesis System Using Duration-Free Acoustic Model and HiFi-GAN Vocoder</div><div class="cpsessionviewpaperauthor">[[Zhengchen Liu|AUTHOR Zhengchen Liu]], [[Chenfeng Miao|AUTHOR Chenfeng Miao]], [[Qingying Zhu|AUTHOR Qingying Zhu]], [[Minchuan Chen|AUTHOR Minchuan Chen]], [[Jun Ma|AUTHOR Jun Ma]], [[Shaojun Wang|AUTHOR Shaojun Wang]], [[Jing Xiao|AUTHOR Jing Xiao]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210897.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-3-6|PAPER Wed-M-V-3-6 — Cross-Lingual Speaker Adaptation Using Domain Adaptation and Speaker Consistency Loss for Text-To-Speech Synthesis]]</div>|<div class="cpsessionviewpapertitle">Cross-Lingual Speaker Adaptation Using Domain Adaptation and Speaker Consistency Loss for Text-To-Speech Synthesis</div><div class="cpsessionviewpaperauthor">[[Detai Xin|AUTHOR Detai Xin]], [[Yuki Saito|AUTHOR Yuki Saito]], [[Shinnosuke Takamichi|AUTHOR Shinnosuke Takamichi]], [[Tomoki Koriyama|AUTHOR Tomoki Koriyama]], [[Hiroshi Saruwatari|AUTHOR Hiroshi Saruwatari]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211265.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-3-7|PAPER Wed-M-V-3-7 — Incorporating Cross-Speaker Style Transfer for Multi-Language Text-to-Speech]]</div>|<div class="cpsessionviewpapertitle">Incorporating Cross-Speaker Style Transfer for Multi-Language Text-to-Speech</div><div class="cpsessionviewpaperauthor">[[Zengqiang Shang|AUTHOR Zengqiang Shang]], [[Zhihua Huang|AUTHOR Zhihua Huang]], [[Haozhe Zhang|AUTHOR Haozhe Zhang]], [[Pengyuan Zhang|AUTHOR Pengyuan Zhang]], [[Yonghong Yan|AUTHOR Yonghong Yan]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211585.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-3-8|PAPER Wed-M-V-3-8 — Investigating Contributions of Speech and Facial Landmarks for Talking Head Generation]]</div>|<div class="cpsessionviewpapertitle">Investigating Contributions of Speech and Facial Landmarks for Talking Head Generation</div><div class="cpsessionviewpaperauthor">[[Ege Kesim|AUTHOR Ege Kesim]], [[Engin Erzin|AUTHOR Engin Erzin]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211996.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-3-9|PAPER Wed-M-V-3-9 — Speech2Video: Cross-Modal Distillation for Speech to Video Generation]]</div>|<div class="cpsessionviewpapertitle">Speech2Video: Cross-Modal Distillation for Speech to Video Generation</div><div class="cpsessionviewpaperauthor">[[Shijing Si|AUTHOR Shijing Si]], [[Jianzong Wang|AUTHOR Jianzong Wang]], [[Xiaoyang Qu|AUTHOR Xiaoyang Qu]], [[Ning Cheng|AUTHOR Ning Cheng]], [[Wenqi Wei|AUTHOR Wenqi Wei]], [[Xinghua Zhu|AUTHOR Xinghua Zhu]], [[Jing Xiao|AUTHOR Jing Xiao]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Wednesday 1 September 2021, (Virtual)|<|
|^Chairs: |^Md. Sahidullah|
|^ |^Tom Bäckström|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210036.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-4-1|PAPER Wed-M-V-4-1 — NU-Wave: A Diffusion Probabilistic Model for Neural Audio Upsampling]]</div>|<div class="cpsessionviewpapertitle">NU-Wave: A Diffusion Probabilistic Model for Neural Audio Upsampling</div><div class="cpsessionviewpaperauthor">[[Junhyeok Lee|AUTHOR Junhyeok Lee]], [[Seungu Han|AUTHOR Seungu Han]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210670.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-4-2|PAPER Wed-M-V-4-2 — QISTA-Net-Audio: Audio Super-Resolution via Non-Convex ℓ,,q,,-Norm Minimization]]</div>|<div class="cpsessionviewpapertitle">QISTA-Net-Audio: Audio Super-Resolution via Non-Convex ℓ,,q,,-Norm Minimization</div><div class="cpsessionviewpaperauthor">[[Gang-Xuan Lin|AUTHOR Gang-Xuan Lin]], [[Shih-Wei Hu|AUTHOR Shih-Wei Hu]], [[Yen-Ju Lu|AUTHOR Yen-Ju Lu]], [[Yu Tsao|AUTHOR Yu Tsao]], [[Chun-Shien Lu|AUTHOR Chun-Shien Lu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210812.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-4-3|PAPER Wed-M-V-4-3 — X-net: A Joint Scale Down and Scale Up Method for Voice Call]]</div>|<div class="cpsessionviewpapertitle">X-net: A Joint Scale Down and Scale Up Method for Voice Call</div><div class="cpsessionviewpaperauthor">[[Liang Wen|AUTHOR Liang Wen]], [[Lizhong Wang|AUTHOR Lizhong Wang]], [[Xue Wen|AUTHOR Xue Wen]], [[Yuxing Zheng|AUTHOR Yuxing Zheng]], [[Youngo Park|AUTHOR Youngo Park]], [[Kwang Pyo Choi|AUTHOR Kwang Pyo Choi]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210892.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-4-4|PAPER Wed-M-V-4-4 — WSRGlow: A Glow-Based Waveform Generative Model for Audio Super-Resolution]]</div>|<div class="cpsessionviewpapertitle">WSRGlow: A Glow-Based Waveform Generative Model for Audio Super-Resolution</div><div class="cpsessionviewpaperauthor">[[Kexun Zhang|AUTHOR Kexun Zhang]], [[Yi Ren|AUTHOR Yi Ren]], [[Changliang Xu|AUTHOR Changliang Xu]], [[Zhou Zhao|AUTHOR Zhou Zhao]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210930.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-4-5|PAPER Wed-M-V-4-5 — Half-Truth: A Partially Fake Audio Detection Dataset]]</div>|<div class="cpsessionviewpapertitle">Half-Truth: A Partially Fake Audio Detection Dataset</div><div class="cpsessionviewpaperauthor">[[Jiangyan Yi|AUTHOR Jiangyan Yi]], [[Ye Bai|AUTHOR Ye Bai]], [[Jianhua Tao|AUTHOR Jianhua Tao]], [[Haoxin Ma|AUTHOR Haoxin Ma]], [[Zhengkun Tian|AUTHOR Zhengkun Tian]], [[Chenglong Wang|AUTHOR Chenglong Wang]], [[Tao Wang|AUTHOR Tao Wang]], [[Ruibo Fu|AUTHOR Ruibo Fu]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211180.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-4-6|PAPER Wed-M-V-4-6 — Data Quality as Predictor of Voice Anti-Spoofing Generalization]]</div>|<div class="cpsessionviewpapertitle">Data Quality as Predictor of Voice Anti-Spoofing Generalization</div><div class="cpsessionviewpaperauthor">[[Bhusan Chettri|AUTHOR Bhusan Chettri]], [[Rosa González Hautamäki|AUTHOR Rosa González Hautamäki]], [[Md. Sahidullah|AUTHOR Md. Sahidullah]], [[Tomi Kinnunen|AUTHOR Tomi Kinnunen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211204.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-4-7|PAPER Wed-M-V-4-7 — Coded Speech Enhancement Using Neural Network-Based Vector-Quantized Residual Features]]</div>|<div class="cpsessionviewpapertitle">Coded Speech Enhancement Using Neural Network-Based Vector-Quantized Residual Features</div><div class="cpsessionviewpaperauthor">[[Youngju Cheon|AUTHOR Youngju Cheon]], [[Soojoong Hwang|AUTHOR Soojoong Hwang]], [[Sangwook Han|AUTHOR Sangwook Han]], [[Inseon Jang|AUTHOR Inseon Jang]], [[Jong Won Shin|AUTHOR Jong Won Shin]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211214.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-4-8|PAPER Wed-M-V-4-8 — Multi-Channel Opus Compression for Far-Field Automatic Speech Recognition with a Fixed Bitrate Budget]]</div>|<div class="cpsessionviewpapertitle">Multi-Channel Opus Compression for Far-Field Automatic Speech Recognition with a Fixed Bitrate Budget</div><div class="cpsessionviewpaperauthor">[[Lukas Drude|AUTHOR Lukas Drude]], [[Jahn Heymann|AUTHOR Jahn Heymann]], [[Andreas Schwarz|AUTHOR Andreas Schwarz]], [[Jean-Marc Valin|AUTHOR Jean-Marc Valin]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211354.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-4-9|PAPER Wed-M-V-4-9 — Effects of Prosodic Variations on Accidental Triggers of a Commercial Voice Assistant]]</div>|<div class="cpsessionviewpapertitle">Effects of Prosodic Variations on Accidental Triggers of a Commercial Voice Assistant</div><div class="cpsessionviewpaperauthor">[[Ingo Siegert|AUTHOR Ingo Siegert]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211555.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-4-10|PAPER Wed-M-V-4-10 — Improving the Expressiveness of Neural Vocoding with Non-Affine Normalizing Flows]]</div>|<div class="cpsessionviewpapertitle">Improving the Expressiveness of Neural Vocoding with Non-Affine Normalizing Flows</div><div class="cpsessionviewpaperauthor">[[Adam Gabryś|AUTHOR Adam Gabryś]], [[Yunlong Jiao|AUTHOR Yunlong Jiao]], [[Viacheslav Klimkov|AUTHOR Viacheslav Klimkov]], [[Daniel Korzekwa|AUTHOR Daniel Korzekwa]], [[Roberto Barra-Chicote|AUTHOR Roberto Barra-Chicote]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211573.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-4-11|PAPER Wed-M-V-4-11 — Voice Privacy Through x-Vector and CycleGAN-Based Anonymization]]</div>|<div class="cpsessionviewpapertitle">Voice Privacy Through x-Vector and CycleGAN-Based Anonymization</div><div class="cpsessionviewpaperauthor">[[Gauri P. Prajapati|AUTHOR Gauri P. Prajapati]], [[Dipesh K. Singh|AUTHOR Dipesh K. Singh]], [[Preet P. Amin|AUTHOR Preet P. Amin]], [[Hemant A. Patil|AUTHOR Hemant A. Patil]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211941.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-4-12|PAPER Wed-M-V-4-12 — A Two-Stage Approach to Speech Bandwidth Extension]]</div>|<div class="cpsessionviewpapertitle">A Two-Stage Approach to Speech Bandwidth Extension</div><div class="cpsessionviewpaperauthor">[[Ju Lin|AUTHOR Ju Lin]], [[Yun Wang|AUTHOR Yun Wang]], [[Kaustubh Kalgaonkar|AUTHOR Kaustubh Kalgaonkar]], [[Gil Keren|AUTHOR Gil Keren]], [[Didi Zhang|AUTHOR Didi Zhang]], [[Christian Fuegen|AUTHOR Christian Fuegen]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212151.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-4-13|PAPER Wed-M-V-4-13 — Development of a Psychoacoustic Loss Function for the Deep Neural Network (DNN)-Based Speech Coder]]</div>|<div class="cpsessionviewpapertitle">Development of a Psychoacoustic Loss Function for the Deep Neural Network (DNN)-Based Speech Coder</div><div class="cpsessionviewpaperauthor">[[Joon Byun|AUTHOR Joon Byun]], [[Seungmin Shin|AUTHOR Seungmin Shin]], [[Youngcheol Park|AUTHOR Youngcheol Park]], [[Jongmo Sung|AUTHOR Jongmo Sung]], [[Seungkwon Beack|AUTHOR Seungkwon Beack]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212163.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-4-14|PAPER Wed-M-V-4-14 — Protecting Gender and Identity with Disentangled Speech Representations]]</div>|<div class="cpsessionviewpapertitle">Protecting Gender and Identity with Disentangled Speech Representations</div><div class="cpsessionviewpaperauthor">[[Dimitrios Stoidis|AUTHOR Dimitrios Stoidis]], [[Andrea Cavallaro|AUTHOR Andrea Cavallaro]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Wednesday 1 September 2021, (Virtual)|<|
|^Chairs: |^Bernd Möbius|
|^ |^Anna Sfakianaki|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210039.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-5-1|PAPER Wed-M-V-5-1 — Perception of Standard Arabic Synthetic Speech Rate]]</div>|<div class="cpsessionviewpapertitle">Perception of Standard Arabic Synthetic Speech Rate</div><div class="cpsessionviewpaperauthor">[[Yahya Aldholmi|AUTHOR Yahya Aldholmi]], [[Rawan Aldhafyan|AUTHOR Rawan Aldhafyan]], [[Asma Alqahtani|AUTHOR Asma Alqahtani]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210089.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-5-2|PAPER Wed-M-V-5-2 — The Influence of Parallel Processing on Illusory Vowels]]</div>|<div class="cpsessionviewpapertitle">The Influence of Parallel Processing on Illusory Vowels</div><div class="cpsessionviewpaperauthor">[[Takeshi Kishiyama|AUTHOR Takeshi Kishiyama]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210306.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-5-3|PAPER Wed-M-V-5-3 — Exploring the Potential of Lexical Paraphrases for Mitigating Noise-Induced Comprehension Errors]]</div>|<div class="cpsessionviewpapertitle">Exploring the Potential of Lexical Paraphrases for Mitigating Noise-Induced Comprehension Errors</div><div class="cpsessionviewpaperauthor">[[Anupama Chingacham|AUTHOR Anupama Chingacham]], [[Vera Demberg|AUTHOR Vera Demberg]], [[Dietrich Klakow|AUTHOR Dietrich Klakow]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210324.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-5-4|PAPER Wed-M-V-5-4 — SPEECHADJUSTER: A Tool for Investigating Listener Preferences and Speech Intelligibility]]</div>|<div class="cpsessionviewpapertitle">SPEECHADJUSTER: A Tool for Investigating Listener Preferences and Speech Intelligibility</div><div class="cpsessionviewpaperauthor">[[Olympia Simantiraki|AUTHOR Olympia Simantiraki]], [[Martin Cooke|AUTHOR Martin Cooke]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210464.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-5-5|PAPER Wed-M-V-5-5 — VocalTurk: Exploring Feasibility of Crowdsourced Speaker Identification]]</div>|<div class="cpsessionviewpapertitle">VocalTurk: Exploring Feasibility of Crowdsourced Speaker Identification</div><div class="cpsessionviewpaperauthor">[[Susumu Saito|AUTHOR Susumu Saito]], [[Yuta Ide|AUTHOR Yuta Ide]], [[Teppei Nakano|AUTHOR Teppei Nakano]], [[Tetsuji Ogawa|AUTHOR Tetsuji Ogawa]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210682.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-5-6|PAPER Wed-M-V-5-6 — Effects of Aging and Age-Related Hearing Loss on Talker Discrimination]]</div>|<div class="cpsessionviewpapertitle">Effects of Aging and Age-Related Hearing Loss on Talker Discrimination</div><div class="cpsessionviewpaperauthor">[[Min Xu|AUTHOR Min Xu]], [[Jing Shao|AUTHOR Jing Shao]], [[Lan Wang|AUTHOR Lan Wang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210721.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-5-7|PAPER Wed-M-V-5-7 — Relationships Between Perceptual Distinctiveness, Articulatory Complexity and Functional Load in Speech Communication]]</div>|<div class="cpsessionviewpapertitle">Relationships Between Perceptual Distinctiveness, Articulatory Complexity and Functional Load in Speech Communication</div><div class="cpsessionviewpaperauthor">[[Yuqing Zhang|AUTHOR Yuqing Zhang]], [[Zhu Li|AUTHOR Zhu Li]], [[Bin Wu|AUTHOR Bin Wu]], [[Yanlu Xie|AUTHOR Yanlu Xie]], [[Binghuai Lin|AUTHOR Binghuai Lin]], [[Jinsong Zhang|AUTHOR Jinsong Zhang]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211225.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-5-8|PAPER Wed-M-V-5-8 — Human Spoofing Detection Performance on Degraded Speech]]</div>|<div class="cpsessionviewpapertitle">Human Spoofing Detection Performance on Degraded Speech</div><div class="cpsessionviewpaperauthor">[[Camryn Terblanche|AUTHOR Camryn Terblanche]], [[Philip Harrison|AUTHOR Philip Harrison]], [[Amelia J. Gully|AUTHOR Amelia J. Gully]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211524.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-5-9|PAPER Wed-M-V-5-9 — Reliable Estimates of Interpretable Cue Effects with Active Learning in Psycholinguistic Research]]</div>|<div class="cpsessionviewpapertitle">Reliable Estimates of Interpretable Cue Effects with Active Learning in Psycholinguistic Research</div><div class="cpsessionviewpaperauthor">[[Marieke Einfeldt|AUTHOR Marieke Einfeldt]], [[Rita Sevastjanova|AUTHOR Rita Sevastjanova]], [[Katharina Zahner-Ritter|AUTHOR Katharina Zahner-Ritter]], [[Ekaterina Kazak|AUTHOR Ekaterina Kazak]], [[Bettina Braun|AUTHOR Bettina Braun]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211718.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-5-10|PAPER Wed-M-V-5-10 — Towards the Explainability of Multimodal Speech Emotion Recognition]]</div>|<div class="cpsessionviewpapertitle">Towards the Explainability of Multimodal Speech Emotion Recognition</div><div class="cpsessionviewpaperauthor">[[Puneet Kumar|AUTHOR Puneet Kumar]], [[Vishesh Kaushik|AUTHOR Vishesh Kaushik]], [[Balasubramanian Raman|AUTHOR Balasubramanian Raman]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211741.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-5-11|PAPER Wed-M-V-5-11 — Primacy of Mouth over Eyes: Eye Movement Evidence from Audiovisual Mandarin Lexical Tones and Vowels]]</div>|<div class="cpsessionviewpapertitle">Primacy of Mouth over Eyes: Eye Movement Evidence from Audiovisual Mandarin Lexical Tones and Vowels</div><div class="cpsessionviewpaperauthor">[[Biao Zeng|AUTHOR Biao Zeng]], [[Rui Wang|AUTHOR Rui Wang]], [[Guoxing Yu|AUTHOR Guoxing Yu]], [[Christian Dobel|AUTHOR Christian Dobel]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS212091.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-5-12|PAPER Wed-M-V-5-12 — Investigating the Impact of Spectral and Temporal Degradation on End-to-End Automatic Speech Recognition Performance]]</div>|<div class="cpsessionviewpapertitle">Investigating the Impact of Spectral and Temporal Degradation on End-to-End Automatic Speech Recognition Performance</div><div class="cpsessionviewpaperauthor">[[Takanori Ashihara|AUTHOR Takanori Ashihara]], [[Takafumi Moriya|AUTHOR Takafumi Moriya]], [[Makio Kashino|AUTHOR Makio Kashino]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|11:00–13:00, Wednesday 1 September 2021 (Virtual)|<|
|^Chairs: |^Mark Gales|
|^ |^Ralf Schlüter|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211114.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-6-1|PAPER Wed-M-V-6-1 — Super-Human Performance in Online Low-Latency Recognition of Conversational Speech]]</div>|<div class="cpsessionviewpapertitle">Super-Human Performance in Online Low-Latency Recognition of Conversational Speech</div><div class="cpsessionviewpaperauthor">[[Thai-Son Nguyen|AUTHOR Thai-Son Nguyen]], [[Sebastian Stüker|AUTHOR Sebastian Stüker]], [[Alex Waibel|AUTHOR Alex Waibel]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211298.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-6-2|PAPER Wed-M-V-6-2 — Multiple Softmax Architecture for Streaming Multilingual End-to-End ASR Systems]]</div>|<div class="cpsessionviewpapertitle">Multiple Softmax Architecture for Streaming Multilingual End-to-End ASR Systems</div><div class="cpsessionviewpaperauthor">[[Vikas Joshi|AUTHOR Vikas Joshi]], [[Amit Das|AUTHOR Amit Das]], [[Eric Sun|AUTHOR Eric Sun]], [[Rupesh R. Mehta|AUTHOR Rupesh R. Mehta]], [[Jinyu Li|AUTHOR Jinyu Li]], [[Yifan Gong|AUTHOR Yifan Gong]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211566.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-6-3|PAPER Wed-M-V-6-3 — Contextualized Streaming End-to-End Speech Recognition with Trie-Based Deep Biasing and Shallow Fusion]]</div>|<div class="cpsessionviewpapertitle">Contextualized Streaming End-to-End Speech Recognition with Trie-Based Deep Biasing and Shallow Fusion</div><div class="cpsessionviewpaperauthor">[[Duc Le|AUTHOR Duc Le]], [[Mahaveer Jain|AUTHOR Mahaveer Jain]], [[Gil Keren|AUTHOR Gil Keren]], [[Suyoun Kim|AUTHOR Suyoun Kim]], [[Yangyang Shi|AUTHOR Yangyang Shi]], [[Jay Mahadeokar|AUTHOR Jay Mahadeokar]], [[Julian Chan|AUTHOR Julian Chan]], [[Yuan Shangguan|AUTHOR Yuan Shangguan]], [[Christian Fuegen|AUTHOR Christian Fuegen]], [[Ozlem Kalinli|AUTHOR Ozlem Kalinli]], [[Yatharth Saraf|AUTHOR Yatharth Saraf]], [[Michael L. Seltzer|AUTHOR Michael L. Seltzer]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210206.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-6-4|PAPER Wed-M-V-6-4 — An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling]]</div>|<div class="cpsessionviewpapertitle">An Efficient Streaming Non-Recurrent On-Device End-to-End Model with Improvements to Rare-Word Modeling</div><div class="cpsessionviewpaperauthor">[[Tara N. Sainath|AUTHOR Tara N. Sainath]], [[Yanzhang He|AUTHOR Yanzhang He]], [[Arun Narayanan|AUTHOR Arun Narayanan]], [[Rami Botros|AUTHOR Rami Botros]], [[Ruoming Pang|AUTHOR Ruoming Pang]], [[David Rybach|AUTHOR David Rybach]], [[Cyril Allauzen|AUTHOR Cyril Allauzen]], [[Ehsan Variani|AUTHOR Ehsan Variani]], [[James Qin|AUTHOR James Qin]], [[Quoc-Nam Le-The|AUTHOR Quoc-Nam Le-The]], [[Shuo-Yiin Chang|AUTHOR Shuo-Yiin Chang]], [[Bo Li|AUTHOR Bo Li]], [[Anmol Gulati|AUTHOR Anmol Gulati]], [[Jiahui Yu|AUTHOR Jiahui Yu]], [[Chung-Cheng Chiu|AUTHOR Chung-Cheng Chiu]], [[Diamantino Caseiro|AUTHOR Diamantino Caseiro]], [[Wei Li|AUTHOR Wei Li]], [[Qiao Liang|AUTHOR Qiao Liang]], [[Pat Rondon|AUTHOR Pat Rondon]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210207.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-6-5|PAPER Wed-M-V-6-5 — Streaming Multi-Talker Speech Recognition with Joint Speaker Identification]]</div>|<div class="cpsessionviewpapertitle">Streaming Multi-Talker Speech Recognition with Joint Speaker Identification</div><div class="cpsessionviewpaperauthor">[[Liang Lu|AUTHOR Liang Lu]], [[Naoyuki Kanda|AUTHOR Naoyuki Kanda]], [[Jinyu Li|AUTHOR Jinyu Li]], [[Yifan Gong|AUTHOR Yifan Gong]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210437.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-6-6|PAPER Wed-M-V-6-6 — Streaming End-to-End Speech Recognition for Hybrid RNN-T/Attention Architecture]]</div>|<div class="cpsessionviewpapertitle">Streaming End-to-End Speech Recognition for Hybrid RNN-T/Attention Architecture</div><div class="cpsessionviewpaperauthor">[[Takafumi Moriya|AUTHOR Takafumi Moriya]], [[Tomohiro Tanaka|AUTHOR Tomohiro Tanaka]], [[Takanori Ashihara|AUTHOR Takanori Ashihara]], [[Tsubasa Ochiai|AUTHOR Tsubasa Ochiai]], [[Hiroshi Sato|AUTHOR Hiroshi Sato]], [[Atsushi Ando|AUTHOR Atsushi Ando]], [[Ryo Masumura|AUTHOR Ryo Masumura]], [[Marc Delcroix|AUTHOR Marc Delcroix]], [[Taichi Asami|AUTHOR Taichi Asami]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210542.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-6-7|PAPER Wed-M-V-6-7 — Improving RNN-T ASR Accuracy Using Context Audio]]</div>|<div class="cpsessionviewpapertitle">Improving RNN-T ASR Accuracy Using Context Audio</div><div class="cpsessionviewpaperauthor">[[Andreas Schwarz|AUTHOR Andreas Schwarz]], [[Ilya Sklyar|AUTHOR Ilya Sklyar]], [[Simon Wiesler|AUTHOR Simon Wiesler]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210586.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-6-8|PAPER Wed-M-V-6-8 — HMM-Free Encoder Pre-Training for Streaming RNN Transducer]]</div>|<div class="cpsessionviewpapertitle">HMM-Free Encoder Pre-Training for Streaming RNN Transducer</div><div class="cpsessionviewpaperauthor">[[Lu Huang|AUTHOR Lu Huang]], [[Jingyu Sun|AUTHOR Jingyu Sun]], [[Yufeng Tang|AUTHOR Yufeng Tang]], [[Junfeng Hou|AUTHOR Junfeng Hou]], [[Jinkun Chen|AUTHOR Jinkun Chen]], [[Jun Zhang|AUTHOR Jun Zhang]], [[Zejun Ma|AUTHOR Zejun Ma]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210587.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-6-9|PAPER Wed-M-V-6-9 — Reducing Exposure Bias in Training Recurrent Neural Network Transducers]]</div>|<div class="cpsessionviewpapertitle">Reducing Exposure Bias in Training Recurrent Neural Network Transducers</div><div class="cpsessionviewpaperauthor">[[Xiaodong Cui|AUTHOR Xiaodong Cui]], [[Brian Kingsbury|AUTHOR Brian Kingsbury]], [[George Saon|AUTHOR George Saon]], [[David Haws|AUTHOR David Haws]], [[Zoltán Tüske|AUTHOR Zoltán Tüske]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210637.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-6-10|PAPER Wed-M-V-6-10 — Bridging the Gap Between Streaming and Non-Streaming ASR Systems by Distilling Ensembles of CTC and RNN-T Models]]</div>|<div class="cpsessionviewpapertitle">Bridging the Gap Between Streaming and Non-Streaming ASR Systems by Distilling Ensembles of CTC and RNN-T Models</div><div class="cpsessionviewpaperauthor">[[Thibault Doutre|AUTHOR Thibault Doutre]], [[Wei Han|AUTHOR Wei Han]], [[Chung-Cheng Chiu|AUTHOR Chung-Cheng Chiu]], [[Ruoming Pang|AUTHOR Ruoming Pang]], [[Olivier Siohan|AUTHOR Olivier Siohan]], [[Liangliang Cao|AUTHOR Liangliang Cao]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS210720.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-6-11|PAPER Wed-M-V-6-11 — Mixture Model Attention: Flexible Streaming and Non-Streaming Automatic Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Mixture Model Attention: Flexible Streaming and Non-Streaming Automatic Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Kartik Audhkhasi|AUTHOR Kartik Audhkhasi]], [[Tongzhou Chen|AUTHOR Tongzhou Chen]], [[Bhuvana Ramabhadran|AUTHOR Bhuvana Ramabhadran]], [[Pedro J. Moreno|AUTHOR Pedro J. Moreno]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211110.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-6-12|PAPER Wed-M-V-6-12 — StableEmit: Selection Probability Discount for Reducing Emission Latency of Streaming Monotonic Attention ASR]]</div>|<div class="cpsessionviewpapertitle">StableEmit: Selection Probability Discount for Reducing Emission Latency of Streaming Monotonic Attention ASR</div><div class="cpsessionviewpaperauthor">[[Hirofumi Inaguma|AUTHOR Hirofumi Inaguma]], [[Tatsuya Kawahara|AUTHOR Tatsuya Kawahara]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211693.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-6-13|PAPER Wed-M-V-6-13 — Dual Causal/Non-Causal Self-Attention for Streaming End-to-End Speech Recognition]]</div>|<div class="cpsessionviewpapertitle">Dual Causal/Non-Causal Self-Attention for Streaming End-to-End Speech Recognition</div><div class="cpsessionviewpaperauthor">[[Niko Moritz|AUTHOR Niko Moritz]], [[Takaaki Hori|AUTHOR Takaaki Hori]], [[Jonathan Le Roux|AUTHOR Jonathan Le Roux]]</div>|
|^ @@.pdficonintable @@<a href="./IS2021/HTML/AUTHOR/IS211953.PDF" class="externallinkbutton" style="outline:0;" target="_blank">{{$:/causal/pdf icon in session view}}</a> |^<div class="cpsessionviewpapercode">[[Wed-M-V-6-14|PAPER Wed-M-V-6-14 — Multi-Mode Transformer Transducer with Stochastic Future Context]]</div>|<div class="cpsessionviewpapertitle">Multi-Mode Transformer Transducer with Stochastic Future Context</div><div class="cpsessionviewpaperauthor">[[Kwangyoun Kim|AUTHOR Kwangyoun Kim]], [[Felix Wu|AUTHOR Felix Wu]], [[Prashant Sridhar|AUTHOR Prashant Sridhar]], [[Kyu J. Han|AUTHOR Kyu J. Han]], [[Shinji Watanabe|AUTHOR Shinji Watanabe]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}} {{$:/causal/Session List Button}} {{$:/causal/Author Index Button}}
</p>
<p>
{{||$:/causal/Preceding Session Button}}
<span class="cpprevnextanchortext">SESSION</span>
{{||$:/causal/Next Session Button}}
</p></div>
<div class="cpsessionviewmetadata">
|cpborderless|k
|13:00–14:00, Wednesday 1 September 2021, Room A+B|<|
|^Chair: |^TBD|
</div>
|cptablecelltopbottomspace2|k
|cpsessionviewtable|k
|^<div class="cpauthorindexpersoncardpapercode">{{$:/causal/NO-PDF Marker}}</div> |^<div class="cpsessionviewpapercode">[[Wed-Survey|PAPER Wed-Survey — Uncovering the Acoustic Cues of COVID-19 Infection]]</div>|<div class="cpsessionviewpapertitle">Uncovering the Acoustic Cues of COVID-19 Infection</div><div class="cpsessionviewpaperauthor">[[Sriram Ganapathy|AUTHOR Sriram Ganapathy]]</div>|
\rules except wikilink
<div class="cpbuttonrow"><p>
{{$:/causal/Welcome Page Button}}
</p></div>
<div class="cpsupportpage">
This HTML index file is based on the [ext[TiddlyWiki|http://www.tiddlywiki.com]] web application.
You can browse the table of contents, author index, and individual paper details, and open each paper's PDF file in a separate window.
</div>
|cpsupportpagetable|k
|cptightlineheight|k
|cptablecelltopbottomspace2|k
|PDF Reader |Use Adobe Reader 8 or later to view the PDF files in this publication.|
|^Support |If you have problems with this publication, please contact Causal Productions at:<div class="cpmailingaddress">Causal Productions Pty Ltd<br>PO Box<$link to="$:/causal/Causal Productions Configurator Control Panel"> </$link>100<br>Rundle Mall<br>SA 5000<br>Australia</div>|
|Phone |+61 8 8295 8200|
|E-mail |[ext[info@causalproductions.com|mailto:info@causalproductions.com]]|
|Web |[ext[http://www.causalproductions.com|http://www.causalproductions.com]]|
\rules except wikilink
<div class="cppublicationname">INTERSPEECH 2021</div><div class="cppublicationdatevenue">30 August – 3 September, 2021, Brno, Czechia<span><a href="http://www.interspeech2021.org" target="_blank"><$button><small>Conference Website</small></$button></a></span></div>
|cpborderless|k
|cpwelcomepageconferencetable|k
|cph3|k
|<hr>|<|<|
| <div class="cpwelcomepagespaceaboveiconwithoutconferencename icon_size_on_welcome_page">{{$:/causal/image/INTERSPEECH 2021 WELCOME.SVG}}</div> |<div class="cpwelcomepageconferencelinks">[[Conference Information]]<br>[[Session List]]<br>[[Author Index]] </div> |
|<hr>|<|<|
|[[Copyright Statement]] |[[Support]] |
<div class="cpwelcomepagecopyright">
{{$:/causal/publication/Copyright Statement}}
</div>